├── .github └── workflows │ └── ci.yml ├── .gitignore ├── .readthedocs.yaml ├── AUTHORS.rst ├── CODE_OF_CONDUCT.rst ├── CONTRIBUTING.rst ├── HISTORY.rst ├── LICENSE ├── MANIFEST.in ├── README.rst ├── SECURITY.md ├── dev-requirements.txt ├── docs ├── Makefile ├── adapters.rst ├── authentication.rst ├── conf.py ├── contributing.rst ├── deprecated.rst ├── downloadutils.rst ├── dumputils.rst ├── exceptions.rst ├── formdata.rst ├── index.rst ├── make.bat ├── requirements.txt ├── security.rst ├── sessions.rst ├── threading.rst ├── uploading-data.rst ├── user-agent.rst └── user.rst ├── examples ├── monitor │ └── progress_bar.py └── threading │ ├── threaded.py │ └── threaded_simplified.py ├── requests_toolbelt ├── __init__.py ├── _compat.py ├── adapters │ ├── __init__.py │ ├── fingerprint.py │ ├── host_header_ssl.py │ ├── socket_options.py │ ├── source.py │ ├── ssl.py │ └── x509.py ├── auth │ ├── __init__.py │ ├── _digest_auth_compat.py │ ├── guess.py │ ├── handler.py │ ├── http_bearer.py │ └── http_proxy_digest.py ├── cookies │ ├── __init__.py │ └── forgetful.py ├── downloadutils │ ├── __init__.py │ ├── stream.py │ └── tee.py ├── exceptions.py ├── multipart │ ├── __init__.py │ ├── decoder.py │ └── encoder.py ├── sessions.py ├── streaming_iterator.py ├── threaded │ ├── __init__.py │ ├── pool.py │ └── thread.py └── utils │ ├── __init__.py │ ├── deprecated.py │ ├── dump.py │ ├── formdata.py │ └── user_agent.py ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── cassettes │ ├── file_for_download.json │ ├── http2bin_cookies.json │ ├── http2bin_fingerprint.json │ ├── httpbin_bearer_auth.json │ ├── httpbin_guess_auth_basic.json │ ├── httpbin_guess_auth_digest.json │ ├── httpbin_guess_auth_none.json │ ├── klevas_vu_lt_ssl3.json │ ├── redirect_request_for_dump_all.json │ ├── simple_get_request.json │ ├── stream_response_to_file.json │ ├── stream_response_without_content_length_to_file.json │ ├── test_x509_adapter_der.json │ └── test_x509_adapter_pem.json ├── conftest.py ├── test_auth.py ├── test_auth_bearer.py ├── test_auth_handler.py ├── test_downloadutils.py ├── test_dump.py ├── test_fingerprintadapter.py ├── test_forgetfulcookiejar.py ├── test_formdata.py ├── test_host_header_ssl_adapter.py ├── test_multipart_decoder.py ├── test_multipart_encoder.py ├── test_multipart_monitor.py ├── test_proxy_digest_auth.py ├── test_sessions.py ├── test_socket_options_adapter.py ├── test_source_adapter.py ├── test_ssladapter.py ├── test_streaming_iterator.py ├── test_user_agent.py ├── test_x509_adapter.py └── threaded │ ├── __init__.py │ ├── test_api.py │ ├── test_pool.py │ └── test_thread.py └── tox.ini /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: [push, pull_request] 4 | 5 | defaults: 6 | run: 7 | shell: bash 8 | 9 | jobs: 10 | package: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: "Checkout repository" 15 | uses: "actions/checkout@v3" 16 | 17 | - name: "Setup Python" 18 | uses: "actions/setup-python@v4" 19 | with: 20 | python-version: "3.x" 21 | cache: "pip" 22 | cache-dependency-path: '**/setup.py' 23 | 24 | - name: "Check packages" 25 | run: | 26 | python -m pip install -U setuptools wheel twine 27 | python setup.py sdist bdist_wheel 28 | python -m twine check --strict dist/* 29 | 30 | test: 31 | strategy: 32 | fail-fast: false 33 | matrix: 34 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] 35 | os: 36 | - macos-latest 37 | - windows-latest 38 | - ubuntu-latest 39 | 40 | runs-on: ${{ matrix.os }} 41 | 
name: ${{ fromJson('{"macos-latest":"macOS","windows-latest":"Windows","ubuntu-latest":"Ubuntu"}')[matrix.os] }} ${{ matrix.python-version }} 42 | steps: 43 | - name: "Checkout repository" 44 | uses: "actions/checkout@v3" 45 | 46 | - name: "Setup Python ${{ matrix.python-version }}" 47 | uses: "actions/setup-python@v4" 48 | with: 49 | python-version: ${{ matrix.python-version }} 50 | # Fails on Python 2 + Windows 51 | # cache: "pip" 52 | # cache-dependency-path: '**/setup.py' 53 | 54 | - name: Install dependencies 55 | run: | 56 | python -m pip install --upgrade pip 57 | python -m pip install tox tox-gh-actions 58 | 59 | - name: Test with tox 60 | run: tox 61 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.swp 3 | docs/_build 4 | bin/ 5 | include/ 6 | lib/ 7 | lib64/ 8 | dist/ 9 | *.egg-info/ 10 | .coverage 11 | htmlcov/ 12 | .tox/ 13 | venv/ 14 | venv*/ 15 | build/ 16 | *.egg 17 | .env 18 | .cache/ 19 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file for Sphinx projects 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | # Required 5 | version: 2 6 | 7 | # Set the OS, Python version and other tools you might need 8 | build: 9 | os: ubuntu-22.04 10 | tools: 11 | python: "3.12" 12 | # You can also specify other tool versions: 13 | # nodejs: "20" 14 | # rust: "1.70" 15 | # golang: "1.20" 16 | 17 | # Build documentation in the "docs/" directory with Sphinx 18 | sphinx: 19 | configuration: docs/conf.py 20 | # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs 21 | # builder: "dirhtml" 22 | # Fail on all warnings to avoid broken references 23 | fail_on_warning: true 24 | 25 | # Optionally build your docs in additional formats such as PDF and ePub 26 | # formats: 27 | # - pdf 28 | # - epub 29 | 30 | # Optional but recommended, declare the Python requirements required 31 | # to build your documentation 32 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 33 | python: 34 | install: 35 | - requirements: docs/requirements.txt 36 | - method: pip 37 | path: . 
38 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | Requests-toolbelt is written and maintained by Ian Cordasco, Cory Benfield and 2 | various contributors: 3 | 4 | Development Lead 5 | ```````````````` 6 | 7 | - Ian Cordasco 8 | 9 | - Cory Benfield 10 | 11 | 12 | Requests 13 | ```````` 14 | 15 | - Kenneth Reitz and various contributors 16 | 17 | 18 | Urllib3 19 | ``````` 20 | 21 | - Andrey Petrov 22 | 23 | 24 | Patches and Suggestions 25 | ``````````````````````` 26 | 27 | - Jay De Lanoy 28 | 29 | - Zhaoyu Luo 30 | 31 | - Markus Unterwaditzer 32 | 33 | - Bryce Boe (@bboe) 34 | 35 | - Dan Lipsitt (https://github.com/DanLipsitt) 36 | 37 | - Cea Stapleton (http://www.ceastapleton.com) 38 | 39 | - Patrick Creech 40 | 41 | - Mike Lambert (@mikelambert) 42 | 43 | - Ryan Barrett (https://snarfed.org/) 44 | 45 | - Victor Grau Serrat (@lacabra) 46 | 47 | - Yorgos Pagles 48 | 49 | - Thomas Hauk 50 | 51 | - Achim Herwig 52 | 53 | - Ryan Ashley 54 | 55 | - Sam Bull (@greatestape) 56 | 57 | - Chris van Marle (https://github.com/qistoph) 58 | 59 | - Florence Blanc-Renaud (@flo-renaud) -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.rst: -------------------------------------------------------------------------------- 1 | Contributor Code of Conduct 2 | --------------------------- 3 | 4 | As contributors and maintainers of this project, and in the interest of 5 | fostering an open and welcoming community, we pledge to respect all 6 | people who contribute through reporting issues, posting feature 7 | requests, updating documentation, submitting pull requests or patches, 8 | and other activities. 9 | 10 | We are committed to making participation in this project a 11 | harassment-free experience for everyone, regardless of level of 12 | experience, gender, gender identity and expression, sexual orientation, 13 | disability, personal appearance, body size, race, ethnicity, age, 14 | religion, or nationality. 15 | 16 | Examples of unacceptable behavior by participants include: 17 | 18 | * The use of sexualized language or imagery 19 | * Personal attacks 20 | * Trolling or insulting/derogatory comments 21 | * Public or private harassment 22 | * Publishing other's private information, such as physical or electronic 23 | addresses, without explicit permission 24 | * Other unethical or unprofessional conduct 25 | 26 | Project maintainers have the right and responsibility to remove, edit, 27 | or reject comments, commits, code, wiki edits, issues, and other 28 | contributions that are not aligned to this Code of Conduct, or to ban 29 | temporarily or permanently any contributor for other behaviors that they 30 | deem inappropriate, threatening, offensive, or harmful. 31 | 32 | By adopting this Code of Conduct, project maintainers commit themselves 33 | to fairly and consistently applying these principles to every aspect of 34 | managing this project. Project maintainers who do not follow or enforce 35 | the Code of Conduct may be permanently removed from the project team. 36 | 37 | This code of conduct applies both within project spaces and in public 38 | spaces when an individual is representing the project or its community. 39 | 40 | Instances of abusive, harassing, or otherwise unacceptable behavior may 41 | be reported by contacting a project maintainer at graffatcolmingov@gmail.com. 
42 | All complaints will be reviewed and investigated and will 43 | result in a response that is deemed necessary and appropriate to the 44 | circumstances. Maintainers are obligated to maintain confidentiality 45 | with regard to the reporter of an incident. 46 | 47 | This Code of Conduct is adapted from the `Contributor Covenant`_, version 48 | 1.3.0, available at https://www.contributor-covenant.org/version/1/3/0/ 49 | 50 | .. _Contributor Covenant: https://www.contributor-covenant.org/ 51 | 52 | .. 53 | Re-formatted to reStructuredText from 54 | https://raw.githubusercontent.com/CoralineAda/contributor_covenant/master/CODE_OF_CONDUCT.md 55 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Please read the `documentation 2 | `_ to understand 3 | the suggested workflow to contribute to this project. 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2014 Ian Cordasco, Cory Benfield 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | https://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst 2 | include LICENSE 3 | include HISTORY.rst 4 | include AUTHORS.rst 5 | include CODE_OF_CONDUCT.rst 6 | include tox.ini 7 | include dev-requirements.txt 8 | 9 | recursive-include requests_toolbelt * 10 | recursive-include docs * 11 | recursive-include tests * 12 | 13 | prune docs/_build 14 | global-exclude *.py[cdo] __pycache__ *.so *.pyd 15 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | The Requests Toolbelt 2 | ===================== 3 | 4 | This is just a collection of utilities for `python-requests`_ that don't 5 | really belong in ``requests`` proper. The minimum tested requests version is 6 | ``2.1.0``. In reality, the toolbelt should work with ``2.0.1`` as well, but 7 | some idiosyncrasies prevent effective or sane testing on that version. 8 | 9 | ``pip install requests-toolbelt`` to get started! 10 | 11 | 12 | multipart/form-data Encoder 13 | --------------------------- 14 | 15 | The main attraction is a streaming multipart form-data object, ``MultipartEncoder``. 16 | Its API looks like this: 17 | 18 | ..
code-block:: python 19 | 20 | from requests_toolbelt import MultipartEncoder 21 | import requests 22 | 23 | m = MultipartEncoder( 24 | fields={'field0': 'value', 'field1': 'value', 25 | 'field2': ('filename', open('file.py', 'rb'), 'text/plain')} 26 | ) 27 | 28 | r = requests.post('http://httpbin.org/post', data=m, 29 | headers={'Content-Type': m.content_type}) 30 | 31 | 32 | You can also use ``multipart/form-data`` encoding for requests that don't 33 | require files: 34 | 35 | .. code-block:: python 36 | 37 | from requests_toolbelt import MultipartEncoder 38 | import requests 39 | 40 | m = MultipartEncoder(fields={'field0': 'value', 'field1': 'value'}) 41 | 42 | r = requests.post('http://httpbin.org/post', data=m, 43 | headers={'Content-Type': m.content_type}) 44 | 45 | 46 | Or, you can just create the string and examine the data: 47 | 48 | .. code-block:: python 49 | 50 | # Assuming `m` is one of the above 51 | m.to_string() # Always returns unicode 52 | 53 | 54 | User-Agent constructor 55 | ---------------------- 56 | 57 | You can easily construct a requests-style ``User-Agent`` string:: 58 | 59 | from requests_toolbelt import user_agent 60 | 61 | headers = { 62 | 'User-Agent': user_agent('my_package', '0.0.1') 63 | } 64 | 65 | r = requests.get('https://api.github.com/users', headers=headers) 66 | 67 | 68 | SSLAdapter 69 | ---------- 70 | 71 | The ``SSLAdapter`` was originally published on `Cory Benfield's blog`_. 72 | This adapter allows the user to choose one of the SSL protocols made available 73 | in Python's ``ssl`` module for outgoing HTTPS connections: 74 | 75 | .. code-block:: python 76 | 77 | from requests_toolbelt import SSLAdapter 78 | import requests 79 | import ssl 80 | 81 | s = requests.Session() 82 | s.mount('https://', SSLAdapter(ssl.PROTOCOL_TLSv1)) 83 | 84 | cookies/ForgetfulCookieJar 85 | -------------------------- 86 | 87 | The ``ForgetfulCookieJar`` prevents a particular requests session from storing 88 | cookies: 89 | 90 | .. code-block:: python 91 | 92 | from requests_toolbelt.cookies.forgetful import ForgetfulCookieJar 93 | 94 | session = requests.Session() 95 | session.cookies = ForgetfulCookieJar() 96 | 97 | Contributing 98 | ------------ 99 | 100 | Please read the `suggested workflow 101 | `_ for 102 | contributing to this project. 103 | 104 | Please report any bugs on the `issue tracker`_ 105 | 106 | .. _Cory Benfield's blog: https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/ 107 | .. _python-requests: https://github.com/kennethreitz/requests 108 | .. _issue tracker: https://github.com/requests/toolbelt/issues 109 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | See https://toolbelt.readthedocs.io/en/latest/security.html 2 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | mock;python_version<"3.3" 3 | pyopenssl 4 | git+https://github.com/betamaxpy/betamax 5 | trustme 6 | -------------------------------------------------------------------------------- /docs/authentication.rst: -------------------------------------------------------------------------------- 1 | .. _authentication: 2 | 3 | Authentication 4 | ============== 5 | 6 | requests supports Basic Authentication and HTTP Digest Authentication by 7 | default. 
There are also a number of third-party libraries for authentication 8 | with: 9 | 10 | - `OAuth `_ 11 | 12 | - `NTLM `_ 13 | 14 | - `Kerberos `_ 15 | 16 | The :mod:`requests_toolbelt.auth` module provides extra authentication features in 17 | addition to those. It provides the following authentication classes: 18 | 19 | - :class:`requests_toolbelt.auth.guess.GuessAuth` 20 | 21 | - :class:`requests_toolbelt.auth.http_proxy_digest.HTTPProxyDigestAuth` 22 | 23 | - :class:`requests_toolbelt.auth.handler.AuthHandler` 24 | 25 | AuthHandler 26 | ----------- 27 | 28 | The :class:`~requests_toolbelt.auth.handler.AuthHandler` is a way of using a 29 | single session with multiple websites that require authentication, provided you 30 | know which websites require a certain kind of authentication and what your 31 | credentials are. 32 | 33 | Take, for example, a session that needs to authenticate to GitHub's API and 34 | GitLab's API; you would set up and use your 35 | :class:`~requests_toolbelt.auth.handler.AuthHandler` like so: 36 | 37 | .. code-block:: python 38 | 39 | import requests 40 | from requests_toolbelt.auth.handler import AuthHandler 41 | 42 | def gitlab_auth(request): 43 | request.headers['PRIVATE-TOKEN'] = 'asecrettoken' 44 | return request 45 | 46 | handler = AuthHandler({ 47 | 'https://api.github.com': ('sigmavirus24', 'apassword'), 48 | 'https://gitlab.com': gitlab_auth, 49 | }) 50 | 51 | session = requests.Session() 52 | session.auth = handler 53 | r = session.get('https://api.github.com/user') 54 | # assert r.ok 55 | r2 = session.get('https://gitlab.com/api/v3/projects') 56 | # assert r2.ok 57 | 58 | .. note:: 59 | 60 | You **must** provide both the scheme and domain for authentication. The 61 | :class:`~requests_toolbelt.auth.handler.AuthHandler` class will check both 62 | the scheme and host to ensure your data is not accidentally exposed. 63 | 64 | .. autoclass:: requests_toolbelt.auth.handler.AuthHandler 65 | :members: 66 | 67 | GuessAuth 68 | --------- 69 | 70 | The :class:`~requests_toolbelt.auth.guess.GuessAuth` authentication class 71 | automatically detects whether to use basic auth or digest auth: 72 | 73 | .. code-block:: python 74 | 75 | import requests 76 | from requests_toolbelt.auth import GuessAuth 77 | 78 | requests.get('http://httpbin.org/basic-auth/user/passwd', 79 | auth=GuessAuth('user', 'passwd')) 80 | requests.get('http://httpbin.org/digest-auth/auth/user/passwd', 81 | auth=GuessAuth('user', 'passwd')) 82 | 83 | Detection of the auth type is done via the ``WWW-Authenticate`` header sent by 84 | the server. This requires an additional request in case of basic auth, as 85 | usually basic auth is sent preemptively. If the server didn't explicitly 86 | require authentication, no credentials are sent. 87 | 88 | .. autoclass:: requests_toolbelt.auth.guess.GuessAuth 89 | 90 | 91 | GuessProxyAuth 92 | -------------- 93 | 94 | The :class:`~requests_toolbelt.auth.guess.GuessProxyAuth` handler will 95 | automatically detect whether to use basic authentication or digest authentication 96 | when authenticating to the provided proxy. 97 | 98 | ..
code-block:: python 99 | 100 | import requests 101 | from requests_toolbelt.auth.guess import GuessProxyAuth 102 | 103 | proxies = { 104 | "http": "http://PROXYSERVER:PROXYPORT", 105 | "https": "http://PROXYSERVER:PROXYPORT", 106 | } 107 | requests.get('http://httpbin.org/basic-auth/user/passwd', 108 | auth=GuessProxyAuth('user', 'passwd', 'proxyusr', 'proxypass'), 109 | proxies=proxies) 110 | requests.get('http://httpbin.org/digest-auth/auth/user/passwd', 111 | auth=GuessProxyAuth('user', 'passwd', 'proxyusr', 'proxypass'), 112 | proxies=proxies) 113 | 114 | Detection of the auth type is done via the ``Proxy-Authenticate`` header sent by 115 | the server. This requires an additional request in case of basic auth, as 116 | usually basic auth is sent preemptively. If the server didn't explicitly 117 | require authentication, no credentials are sent. 118 | 119 | .. autoclass:: requests_toolbelt.auth.guess.GuessProxyAuth 120 | 121 | HTTPProxyDigestAuth 122 | ------------------- 123 | 124 | The ``HTTPProxyDigestAuth`` class uses digest authentication between the client and 125 | the proxy. 126 | 127 | .. code-block:: python 128 | 129 | import requests 130 | from requests_toolbelt.auth.http_proxy_digest import HTTPProxyDigestAuth 131 | 132 | 133 | proxies = { 134 | "http": "http://PROXYSERVER:PROXYPORT", 135 | "https": "https://PROXYSERVER:PROXYPORT", 136 | } 137 | url = "https://toolbelt.readthedocs.io/" 138 | auth = HTTPProxyDigestAuth("USERNAME", "PASSWORD") 139 | requests.get(url, proxies=proxies, auth=auth) 140 | 141 | The program will raise an error if the username or password is rejected by the proxy. 142 | 143 | .. autoclass:: requests_toolbelt.auth.http_proxy_digest.HTTPProxyDigestAuth 144 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | Contributing to this project 2 | ============================ 3 | 4 | Checklist 5 | --------- 6 | 7 | #. All potential contributors must read the :ref:`code-of-conduct` and follow 8 | it 9 | 10 | #. Fork the repository on `GitHub`_ or `GitLab`_ 11 | 12 | #. Create a new branch, e.g., ``git checkout -b bug/12345`` 13 | 14 | #. Fix the bug and add tests (if applicable [#]_, see :ref:`how-to-add-tests`) 15 | 16 | #. Run the tests (see :ref:`how-to-run-tests` below) 17 | 18 | #. Add documentation (as necessary) for your change 19 | 20 | #. Build the documentation to check for errors and formatting (see 21 | :ref:`how-to-build-the-docs` below) 22 | 23 | #. Add yourself to the :file:`AUTHORS.rst` (unless you're already there) 24 | 25 | #. Commit it. Follow these rules in your commit message: 26 | 27 | * Keep the subject line under 50 characters 28 | 29 | * Use an imperative verb to start the commit 30 | 31 | * Use an empty line between the subject and the message 32 | 33 | * Describe the *why* in detail in the message portion of the commit 34 | 35 | * Wrap the lines of the message at 72 characters 36 | 37 | * Add the appropriate "Closes #12345" syntax to autoclose the issue it 38 | fixed (if it closes an issue) 39 | 40 | * See :ref:`example-commit-message` below 41 | 42 | #. Push it to your fork 43 | 44 | #. Create a request for us to merge your contribution 45 | 46 | After this last step, it is possible that we may leave feedback in the form of 47 | review comments.
When addressing these comments, you can follow two 48 | strategies: 49 | 50 | * Amend/rebase your changes into an existing commit 51 | 52 | * Create a new commit with a different message [#]_ describing the changes in 53 | that commit and push it to your branch 54 | 55 | This project is not opinionated about which approach you should prefer. We 56 | only ask that you are aware of the following: 57 | 58 | * Neither GitHub nor GitLab notifies us that you have pushed new changes. A 59 | friendly ping is encouraged 60 | 61 | * If you continue to use the same branch that you created the request from, 62 | both GitHub and GitLab will update the request on the website. You do 63 | **not** need to create a new request for the new changes. 64 | 65 | 66 | .. _code-of-conduct: 67 | 68 | .. include:: ../CODE_OF_CONDUCT.rst 69 | 70 | .. _how-to-add-tests: 71 | 72 | How To Add Tests 73 | ---------------- 74 | 75 | We use `pytest`_ to run tests and to simplify how we write tests. If you're 76 | fixing a bug in an existing module, please find the tests for that module or feature and 77 | add to them. Most tests live in the ``tests`` directory. If you're adding a 78 | new feature in a new submodule, please create a new module of test code. For 79 | example, if you're adding a submodule named ``foo`` then you would create 80 | ``tests/test_foo.py`` which will contain the tests for the ``foo`` submodule. 81 | 82 | .. _how-to-run-tests: 83 | 84 | How To Run The Tests 85 | -------------------- 86 | 87 | Run the tests in this project using `tox`_. Before you run the tests, ensure 88 | you have installed tox either using your system package manager (e.g., apt, 89 | yum, etc.), or your preferred python installer (e.g., pip). 90 | 91 | Then run the tests on at least Python 2.7 and Python 3.x, e.g., 92 | 93 | .. code:: 94 | 95 | $ tox -e py27,py34 96 | 97 | Finally run one, or both, of the flake8 style enforcers, e.g., 98 | 99 | .. code:: 100 | 101 | $ tox -e py27-flake8 102 | # or 103 | $ tox -e py34-flake8 104 | 105 | It is preferable if you run both to catch syntax errors that might occur in 106 | Python 2 or Python 3 (based on how familiar you are with the common subset of 107 | the language from both). 108 | 109 | Tox will manage virtual environments and dependencies for you so it will be 110 | the only dependency you need to install to contribute to this project. 111 | 112 | .. _how-to-build-the-docs: 113 | 114 | How To Build The Documentation 115 | ------------------------------ 116 | 117 | To build the docs, you need to ensure tox is installed and then you may run 118 | 119 | .. code:: 120 | 121 | $ tox -e docs 122 | 123 | This will build the documentation into ``docs/_build/html``. If you then run 124 | 125 | .. code:: 126 | 127 | $ python2.7 -m SimpleHTTPServer 128 | # or 129 | $ python3.4 -m http.server 130 | 131 | from that directory, you can view the docs locally at http://localhost:8000/. 132 | 133 | .. _example-commit-message: 134 | 135 | Example Commit Message 136 | ---------------------- 137 | 138 | :: 139 | 140 | Allow users to use the frob when uploading data 141 | 142 | When uploading data with FooBar, users may need to use the frob method 143 | to ensure that pieces of data are not munged. 144 | 145 | Closes #1234567 146 | 147 | Releasing a new version 148 | ----------------------- 149 | 150 | #.
Prepare the release pull request 151 | 152 | * In ``requests_toolbelt/__init__.py``, bump the version number 153 | 154 | * In ``HISTORY.rst``, include noteworthy changes under the relevant sections 155 | 156 | * Open a pull request with the above changes, and make any changes required 157 | to get it approved 158 | 159 | #. "Squash and merge" the pull request in GitHub, which will give you a release 160 | commit 161 | 162 | #. Tag the release with that commit: 163 | 164 | * Create the tag: ``git tag -s -a $VERSION -m "Release v$VERSION"`` 165 | 166 | * Push it to the main repository (not your fork!): ``git push --tags`` 167 | 168 | #. Upload it to PyPI with ``tox -e release`` 169 | 170 | #. [Optional] Announce it where relevant (social media, GitHub issues...) 171 | 172 | Footnotes 173 | --------- 174 | 175 | .. [#] You might not need tests if you're updating documentation, fixing a 176 | typo, or updating a docstring. If you're fixing a bug, please add 177 | tests. 178 | 179 | .. [#] If each commit has the same message, the reviewer may ask you to 180 | squash your commits or may squash them for you and perform a manual 181 | merge. 182 | 183 | .. _GitHub: https://github.com/requests/toolbelt 184 | .. _GitLab: https://gitlab.com/sigmavirus24/toolbelt 185 | .. _tox: https://tox.readthedocs.io/ 186 | .. _pytest: https://docs.pytest.org/ 187 | -------------------------------------------------------------------------------- /docs/deprecated.rst: -------------------------------------------------------------------------------- 1 | .. _deprecated: 2 | 3 | Deprecated Requests Utilities 4 | ============================= 5 | 6 | Requests has `decided`_ to deprecate some utility functions in 7 | :mod:`requests.utils`. To ease users' lives, they've been moved to 8 | :mod:`requests_toolbelt.utils.deprecated`. 9 | 10 | .. automodule:: requests_toolbelt.utils.deprecated 11 | :members: 12 | 13 | .. _decided: https://github.com/kennethreitz/requests/issues/2266 14 | -------------------------------------------------------------------------------- /docs/downloadutils.rst: -------------------------------------------------------------------------------- 1 | .. _downloadutils: 2 | 3 | Utilities for Downloading Streaming Responses 4 | ============================================= 5 | 6 | .. autofunction:: 7 | requests_toolbelt.downloadutils.stream.stream_response_to_file 8 | 9 | .. autofunction:: 10 | requests_toolbelt.downloadutils.tee.tee 11 | 12 | .. autofunction:: 13 | requests_toolbelt.downloadutils.tee.tee_to_bytearray 14 | 15 | .. autofunction:: 16 | requests_toolbelt.downloadutils.tee.tee_to_file 17 | -------------------------------------------------------------------------------- /docs/dumputils.rst: -------------------------------------------------------------------------------- 1 | .. _dumputils: 2 | 3 | Utilities for Dumping Information About Responses 4 | ================================================= 5 | 6 | Occasionally, it is helpful to know almost exactly what data was sent to a 7 | server and what data was received. It can also be challenging at times to 8 | gather all of that data from requests because of all of the different places 9 | you may need to look to find it. In :mod:`requests_toolbelt.utils.dump` there 10 | are two functions that will return a :class:`bytearray` with the information 11 | retrieved from a response object. 12 | 13 | .. autofunction:: 14 | requests_toolbelt.utils.dump.dump_all 15 | 16 | ..
autofunction:: 17 | requests_toolbelt.utils.dump.dump_response 18 | -------------------------------------------------------------------------------- /docs/exceptions.rst: -------------------------------------------------------------------------------- 1 | .. _exceptions: 2 | 3 | Custom Toolbelt Exceptions 4 | ========================== 5 | 6 | Below are the exception classes used by the toolbelt to provide error details 7 | to the user of the toolbelt. 8 | 9 | .. automodule:: requests_toolbelt.exceptions 10 | :members: 11 | -------------------------------------------------------------------------------- /docs/formdata.rst: -------------------------------------------------------------------------------- 1 | .. _formdatautils: 2 | 3 | Utilities for Enhanced Form-Data Serialization 4 | ============================================== 5 | 6 | .. autofunction:: 7 | requests_toolbelt.utils.formdata.urlencode 8 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. requests_toolbelt documentation master file, created by 2 | sphinx-quickstart on Sun Jan 12 21:24:39 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | requests toolbelt 7 | ================= 8 | 9 | This is a collection of utilities that some users of python-requests might need 10 | but do not belong in requests proper. The library is actively maintained by 11 | members of the requests core development team, and so reflects the 12 | functionality most requested by users of the requests library. 13 | 14 | To get an overview of what the library contains, consult the :ref:`user ` 15 | documentation. 16 | 17 | Overview 18 | -------- 19 | 20 | .. toctree:: 21 | :maxdepth: 1 22 | 23 | user 24 | contributing 25 | security 26 | 27 | Full Documentation 28 | ------------------ 29 | 30 | .. toctree:: 31 | :maxdepth: 2 32 | 33 | adapters 34 | authentication 35 | deprecated 36 | downloadutils 37 | dumputils 38 | formdata 39 | exceptions 40 | sessions 41 | threading 42 | uploading-data 43 | user-agent 44 | 45 | Indices and tables 46 | ================== 47 | 48 | * :ref:`genindex` 49 | * :ref:`modindex` 50 | * :ref:`search` 51 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx>=1.3.0 2 | sphinx_rtd_theme 3 | pyopenssl 4 | -------------------------------------------------------------------------------- /docs/security.rst: -------------------------------------------------------------------------------- 1 | Security 2 | ======== 3 | 4 | We take the security of ``requests-toolbelt`` seriously. The following are a set of 5 | policies we have adopted to ensure that security issues are addressed in a 6 | timely fashion. 7 | 8 | Known vulnerabilities 9 | --------------------- 10 | 11 | A list of all known vulnerabilities in ``requests-toolbelt`` can be found on 12 | `osv.dev`_, as well as other ecosystem vulnerability databases. They can 13 | automatically be scanned for using tools such as `pip-audit`_ or `osv-scan`_. 14 | 15 | What is a security issue? 16 | ------------------------- 17 | 18 | Anytime it's possible to write code using ``requests-toolbelt``'s public API which 19 | does not provide the guarantees that a reasonable developer would expect it to 20 | based on our documentation. 
21 | 22 | That's a bit academic, but basically it means the scope of what we consider a 23 | vulnerability is broad, and we do not require a proof of concept or even a 24 | specific exploit, merely a reasonable threat model under which ``requests-toolbelt`` 25 | could be attacked. 26 | 27 | In general, if you're unsure, we request that you default to treating things 28 | as security issues and handling them sensitively; the worst thing that can 29 | happen is that we'll ask you to file a public issue. 30 | 31 | Reporting a security issue 32 | -------------------------- 33 | 34 | We ask that you do not report security issues to our normal GitHub issue 35 | tracker. 36 | 37 | If you believe you've identified a security issue with ``requests-toolbelt``, 38 | please report it via our `security advisory page`_. 39 | 40 | Once you've submitted an issue, you should receive an acknowledgment and, 41 | depending on the action to be taken, you may receive further follow-up. 42 | 43 | Supported Versions 44 | ------------------ 45 | 46 | At any given time, we will provide security support for the `default`_ branch 47 | as well as the most recent release. 48 | 49 | Disclosure Process 50 | ------------------ 51 | 52 | When we become aware of a security bug in ``requests-toolbelt``, we will endeavor to 53 | fix it and issue a release as quickly as possible. We will generally issue a new 54 | release for any security issue. 55 | 56 | Credits 57 | ------- 58 | 59 | This policy is largely borrowed from `pyca/cryptography`_ and edited to 60 | represent the guarantees provided by the ``requests-toolbelt`` maintainers. 61 | 62 | .. _`osv.dev`: https://osv.dev/list?ecosystem=PyPI&q=requests-toolbelt 63 | .. _`pip-audit`: https://pypi.org/project/pip-audit/ 64 | .. _`osv-scan`: https://google.github.io/osv-scanner/ 65 | .. _`security advisory page`: https://github.com/requests/toolbelt/security/advisories/new 66 | .. _`default`: https://github.com/requests/toolbelt 67 | .. _`pyca/cryptography`: https://github.com/pyca/cryptography 68 | -------------------------------------------------------------------------------- /docs/sessions.rst: -------------------------------------------------------------------------------- 1 | .. _sessions: 2 | 3 | Specialized Sessions 4 | ==================== 5 | 6 | The toolbelt provides specialized session classes in the 7 | :mod:`requests_toolbelt.sessions` module. 8 | 9 | .. automodule:: requests_toolbelt.sessions 10 | :members: 11 | 12 | 13 | BaseUrlSession 14 | -------------- 15 | 16 | .. versionadded:: 0.7.0 17 | 18 | Many people have written Session subclasses that allow a "base URL" to be 19 | specified so all future requests need not specify the complete URL. To create 20 | one simplified and easy to configure version, we've added the 21 | :class:`requests_toolbelt.sessions.BaseUrlSession` object to the Toolbelt. 22 | 23 | .. class:: requests_toolbelt.sessions.BaseUrlSession 24 | -------------------------------------------------------------------------------- /docs/threading.rst: -------------------------------------------------------------------------------- 1 | .. _threading: 2 | 3 | Using requests with Threading 4 | ============================= 5 | 6 | .. versionadded:: 0.4.0 7 | 8 | The toolbelt provides a simple API for using requests with threading. 9 | 10 | A requests Session is documented as threadsafe, but there are still a couple 11 | of corner cases where it isn't perfectly threadsafe. The best way to use a 12 | Session is to use one per thread.
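If you are managing threads yourself rather than using the toolbelt's pool, one way to follow that advice is to keep a lazily-created session per thread. The sketch below is only an illustration of the pattern, not part of the toolbelt (the names ``_thread_local``, ``get_session``, and ``fetch`` are made up for this example):

.. code-block:: python

    import threading

    import requests

    _thread_local = threading.local()

    def get_session():
        # Lazily create one Session per thread; each thread then reuses
        # its own Session (and its connection pool) on later calls.
        if not hasattr(_thread_local, 'session'):
            _thread_local.session = requests.Session()
        return _thread_local.session

    def fetch(url):
        return get_session().get(url)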
13 | 14 | The implementation provided by the toolbelt is naïve. This means that we use 15 | one session per thread and we make no effort to synchronize attributes (e.g., 16 | authentication, cookies, etc.). It also means that we make no attempt to 17 | direct a request to a session that has already handled a request to the same 18 | domain. In other words, if you're making requests to multiple domains, the 19 | toolbelt's Pool will not try to send requests to the same domain to the same 20 | thread. 21 | 22 | This module provides three classes: 23 | 24 | - :class:`~requests_toolbelt.threaded.pool.Pool` 25 | - :class:`~requests_toolbelt.threaded.pool.ThreadResponse` 26 | - :class:`~requests_toolbelt.threaded.pool.ThreadException` 27 | 28 | In 98% of the situations you'll want to just use a 29 | :class:`~requests_toolbelt.threaded.pool.Pool` and you'll treat a 30 | :class:`~requests_toolbelt.threaded.pool.ThreadResponse` as if it were a 31 | regular :class:`requests.Response`. 32 | 33 | Here's an example: 34 | 35 | .. code-block:: python 36 | 37 | # This example assumes Python 3 38 | import queue 39 | from requests_toolbelt.threaded import pool 40 | 41 | jobs = queue.Queue() 42 | urls = [ 43 | # My list of URLs to get 44 | ] 45 | 46 | for url in urls: 47 | jobs.put({'method': 'GET', 'url': url}) 48 | 49 | p = pool.Pool(job_queue=jobs) 50 | p.join_all() 51 | 52 | for response in p.responses(): 53 | print('GET {}. Returned {}.'.format(response.request_kwargs['url'], 54 | response.status_code)) 55 | 56 | This is clearly a bit underwhelming. This is why there's a short-cut class 57 | method to create a :class:`~requests_toolbelt.threaded.pool.Pool` from a list 58 | of URLs. 59 | 60 | .. code-block:: python 61 | 62 | from requests_toolbelt.threaded import pool 63 | 64 | urls = [ 65 | # My list of URLs to get 66 | ] 67 | 68 | p = pool.Pool.from_urls(urls) 69 | p.join_all() 70 | 71 | for response in p.responses(): 72 | print('GET {}. Returned {}.'.format(response.request_kwargs['url'], 73 | response.status_code)) 74 | 75 | If one of the URLs in your list throws an exception, it will be accessible 76 | from the :meth:`~Pool.exceptions` generator. 77 | 78 | .. code-block:: python 79 | 80 | from requests_toolbelt.threaded import pool 81 | 82 | urls = [ 83 | # My list of URLs to get 84 | ] 85 | 86 | p = pool.Pool.from_urls(urls) 87 | p.join_all() 88 | 89 | for exc in p.exceptions(): 90 | print('GET {}. Raised {}.'.format(exc.request_kwargs['url'], 91 | exc.message)) 92 | 93 | If instead, you want to retry the exceptions that have been raised you can do 94 | the following: 95 | 96 | .. code-block:: python 97 | 98 | from requests_toolbelt.threaded import pool 99 | 100 | urls = [ 101 | # My list of URLs to get 102 | ] 103 | 104 | p = pool.Pool.from_urls(urls) 105 | p.join_all() 106 | 107 | new_pool = pool.Pool.from_exceptions(p.exceptions()) 108 | new_pool.join_all() 109 | 110 | Not all requests are advisable to retry without checking if they should be 111 | retried. You would normally check if you want to retry it. 112 | 113 | The :class:`~Pool` object takes 4 other keyword arguments: 114 | 115 | - ``initializer`` 116 | 117 | This is a callback that will initialize things on every session created. The 118 | callback must return the session. 119 | 120 | - ``auth_generator`` 121 | 122 | This is a callback that is called *after* the initializer callback has 123 | modified the session. This callback must also return the session. 
124 | 125 | - ``num_processes`` 126 | 127 | Pass a positive integer to indicate how many threads to use. It is 128 | ``None`` by default, in which case the result of 129 | ``multiprocessing.cpu_count()`` is used. 130 | 131 | - ``session`` 132 | 133 | You can pass an alternative constructor or any callable that returns a 134 | :class:`requests.Session`-like object. It will not be passed any arguments 135 | because a :class:`requests.Session` does not accept any arguments. 136 | 137 | Finally, if you don't want to worry about Queue or Pool management, you can 138 | try the following: 139 | 140 | .. code-block:: python 141 | 142 | from requests_toolbelt import threaded 143 | 144 | requests = [{ 145 | 'method': 'GET', 146 | 'url': 'https://httpbin.org/get', 147 | # ... 148 | }, { 149 | # ... 150 | }, { 151 | # ... 152 | }] 153 | 154 | responses_generator, exceptions_generator = threaded.map(requests) 155 | for response in responses_generator: 156 | pass # Do something with each response 157 | 158 | API and Module Auto-Generated Documentation 159 | ------------------------------------------- 160 | 161 | .. automodule:: requests_toolbelt.threaded 162 | 163 | .. autoclass:: requests_toolbelt.threaded.pool.Pool 164 | :members: 165 | 166 | .. autoclass:: requests_toolbelt.threaded.pool.ThreadResponse 167 | :members: 168 | 169 | .. autoclass:: requests_toolbelt.threaded.pool.ThreadException 170 | :members: 171 | -------------------------------------------------------------------------------- /docs/uploading-data.rst: -------------------------------------------------------------------------------- 1 | .. _uploading-data: 2 | 3 | Uploading Data 4 | ============== 5 | 6 | Streaming Multipart Data Encoder 7 | -------------------------------- 8 | 9 | Requests has `support for multipart uploads`_, but the API means that using 10 | that functionality to build exactly the Multipart upload you want can be 11 | difficult or impossible. Additionally, when using Requests' Multipart upload 12 | functionality all the data must be read into memory before being sent to the 13 | server. In extreme cases, this can make it impossible to send a file as part of 14 | a ``multipart/form-data`` upload. 15 | 16 | The toolbelt contains a class that allows you to build multipart request bodies 17 | in exactly the format you need, and to avoid reading files into memory. An 18 | example of how to use it looks like this: 19 | 20 | .. code-block:: python 21 | 22 | import requests 23 | from requests_toolbelt.multipart.encoder import MultipartEncoder 24 | 25 | m = MultipartEncoder( 26 | fields={'field0': 'value', 'field1': 'value', 27 | 'field2': ('filename', open('file.py', 'rb'), 'text/plain')} 28 | ) 29 | 30 | r = requests.post('http://httpbin.org/post', data=m, 31 | headers={'Content-Type': m.content_type}) 32 | 33 | The :class:`~requests_toolbelt.multipart.encoder.MultipartEncoder` has the 34 | ``.to_string()`` convenience method, as well. This method renders the 35 | multipart body into a string. This is useful when developing your code, 36 | allowing you to confirm that the multipart body has the form you expect before 37 | you send it on. 38 | 39 | The toolbelt also provides a way to monitor your streaming uploads with 40 | the :class:`~requests_toolbelt.multipart.encoder.MultipartEncoderMonitor`. 41 | 42 | .. autoclass:: requests_toolbelt.multipart.encoder.MultipartEncoder 43 | 44 | ..
_support for multipart uploads: http://docs.python-requests.org/en/latest/user/quickstart/#post-a-multipart-encoded-file 45 | 46 | Monitoring Your Streaming Multipart Upload 47 | ------------------------------------------ 48 | 49 | If you need to stream your ``multipart/form-data`` upload then you're probably 50 | in the situation where it might take a while to upload the content. In these 51 | cases, it might make sense to be able to monitor the progress of the upload. 52 | For this reason, the toolbelt provides the 53 | :class:`~requests_toolbelt.multipart.encoder.MultipartEncoderMonitor`. The 54 | monitor wraps an instance of a 55 | :class:`~requests_toolbelt.multipart.encoder.MultipartEncoder` and is used 56 | exactly like the encoder. It provides a similar API with some additions: 57 | 58 | - The monitor accepts a function as a callback. The function is called every 59 | time ``requests`` calls ``read`` on the monitor and passes in the monitor as 60 | an argument. 61 | 62 | - The monitor tracks how many bytes have been read in the course of the 63 | upload. 64 | 65 | You might use the monitor to create a progress bar for the upload. Here is `an 66 | example using clint`_ which displays the progress bar. 67 | 68 | To use the monitor you would follow a pattern like this: 69 | 70 | .. code-block:: python 71 | 72 | import requests 73 | from requests_toolbelt.multipart import encoder 74 | 75 | def my_callback(monitor): 76 | # Your callback function 77 | pass 78 | 79 | e = encoder.MultipartEncoder( 80 | fields={'field0': 'value', 'field1': 'value', 81 | 'field2': ('filename', open('file.py', 'rb'), 'text/plain')} 82 | ) 83 | m = encoder.MultipartEncoderMonitor(e, my_callback) 84 | 85 | r = requests.post('http://httpbin.org/post', data=m, 86 | headers={'Content-Type': m.content_type}) 87 | 88 | If you have a very simple use case you can also do: 89 | 90 | .. code-block:: python 91 | 92 | import requests 93 | from requests_toolbelt.multipart.encoder import MultipartEncoderMonitor 94 | 95 | def my_callback(monitor): 96 | # Your callback function 97 | pass 98 | 99 | m = MultipartEncoderMonitor.from_fields( 100 | fields={'field0': 'value', 'field1': 'value', 101 | 'field2': ('filename', open('file.py', 'rb'), 'text/plain')}, 102 | callback=my_callback 103 | ) 104 | 105 | r = requests.post('http://httpbin.org/post', data=m, 106 | headers={'Content-Type': m.content_type}) 107 | 108 | 109 | .. autoclass:: requests_toolbelt.multipart.encoder.MultipartEncoderMonitor 110 | 111 | .. _an example using clint: 112 | https://github.com/requests/toolbelt/blob/master/examples/monitor/progress_bar.py 113 | 114 | Streaming Data from a Generator 115 | ------------------------------- 116 | 117 | There are cases where you, the user, have a generator of some large quantity 118 | of data and you already know the size of that data. If you pass the generator 119 | to ``requests`` via the ``data`` parameter, ``requests`` will assume that you 120 | want to upload the data in chunks and set a ``Transfer-Encoding`` header value 121 | of ``chunked``. Often times, this causes the server to behave poorly. If you 122 | want to avoid this, you can use the 123 | :class:`~requests.toolbelt.streaming_iterator.StreamingIterator`. You pass it 124 | the size of the data and the generator. 125 | 126 | .. 
code-block:: python 127 | 128 | import requests 129 | from requests_toolbelt.streaming_iterator import StreamingIterator 130 | 131 | generator = some_function() # Create your generator 132 | size = some_function_size() # Get your generator's size 133 | content_type = content_type() # Get the content-type of the data 134 | 135 | streamer = StreamingIterator(size, generator) 136 | r = requests.post('https://httpbin.org/post', data=streamer, 137 | headers={'Content-Type': content_type}) 138 | 139 | The streamer will handle your generator for you and buffer the data before 140 | passing it to ``requests``. 141 | 142 | .. versionchanged:: 0.4.0 143 | 144 | File-like objects can be passed instead of a generator. 145 | 146 | If, for example, you need to upload data being piped into standard in, you 147 | might otherwise do: 148 | 149 | .. code-block:: python 150 | 151 | import requests 152 | import sys 153 | 154 | r = requests.post(url, data=sys.stdin) 155 | 156 | This would stream the data but would use a chunked transfer-encoding. If 157 | instead, you know the length of the data that is being sent to ``stdin`` and 158 | you want to prevent the data from being uploaded in chunks, you can use the 159 | :class:`~requests_toolbelt.streaming_iterator.StreamingIterator` to stream the 160 | contents of the file without relying on chunking. 161 | 162 | .. code-block:: python 163 | 164 | import requests 165 | from requests_toolbelt.streaming_iterator import StreamingIterator 166 | import sys 167 | 168 | stream = StreamingIterator(size, sys.stdin) 169 | r = requests.post(url, data=stream, 170 | headers={'Content-Type': content_type}) 171 | 172 | .. autoclass:: requests_toolbelt.streaming_iterator.StreamingIterator 173 | -------------------------------------------------------------------------------- /docs/user-agent.rst: -------------------------------------------------------------------------------- 1 | .. _user-agent: 2 | 3 | User-Agent Constructor 4 | ====================== 5 | 6 | Having well-formed user-agent strings is important for the proper functioning 7 | of the web. Make server administrators happy by generating yourself a nice 8 | user-agent string, just like Requests does! The output of the user-agent 9 | generator looks like this:: 10 | 11 | >>> import requests_toolbelt 12 | >>> requests_toolbelt.user_agent('mypackage', '0.0.1') 13 | 'mypackage/0.0.1 CPython/2.7.5 Darwin/13.0.0' 14 | 15 | The Python type and version, and the platform type and version, will accurately 16 | reflect the system that your program is running on. You can drop this easily 17 | into your program like this:: 18 | 19 | from requests_toolbelt import user_agent 20 | from requests import Session 21 | 22 | s = Session() 23 | s.headers = { 24 | 'User-Agent': user_agent('my_package', '0.0.1') 25 | } 26 | 27 | r = s.get('https://api.github.com/users') 28 | 29 | This will override the default Requests user-agent string for all of your HTTP 30 | requests, replacing it with your own. 31 | 32 | Adding Extra Information to Your User-Agent String 33 | -------------------------------------------------- 34 | 35 | .. versionadded:: 0.5.0 36 | 37 | If you feel it necessary, you can also include versions for other things that 38 | your client is using. For example, if you were building a package and wanted to 39 | include the package name and version number as well as the version of requests 40 | and requests-toolbelt you were using you could do the following: 41 | 42 | ..
code-block:: python 43 | 44 | import requests 45 | import requests_toolbelt 46 | from requests_toolbelt.utils import user_agent as ua 47 | 48 | user_agent = ua.user_agent('mypackage', '0.0.1', 49 | extras=[('requests', requests.__version__), 50 | ('requests-toolbelt', requests_toolbelt.__version__)]) 51 | 52 | s = requests.Session() 53 | s.headers['User-Agent'] = user_agent 54 | 55 | 56 | Your user agent will now look like:: 57 | 58 | mypackage/0.0.1 requests/2.7.0 requests-toolbelt/0.5.0 CPython/2.7.10 Darwin/13.0.0 59 | 60 | Selecting Only What You Want 61 | ---------------------------- 62 | 63 | .. versionadded:: 0.8.0 64 | 65 | While most people will find the ``user_agent`` function sufficient for their 66 | usage, others will want to control exactly what information is included in the 67 | User-Agent. For those people, the 68 | :class:`~requests_toolbelt.utils.user_agent.UserAgentBuilder` is the correct 69 | tool. This is the tool that the toolbelt uses inside of 70 | :func:`~requests_toolbelt.utils.user_agent.user_agent`. For example, let's say 71 | you *only* want your package, its versions, and some extra information, in 72 | that case you would do: 73 | 74 | .. code-block:: python 75 | 76 | import requests 77 | from requests_toolbelt.utils import user_agent as ua 78 | 79 | s = requests.Session() 80 | s.headers['User-Agent'] = ua.UserAgentBuilder( 81 | 'mypackage', '0.0.1', 82 | ).include_extras([ 83 | ('requests', requests.__version__), 84 | ]).build() 85 | 86 | Your user agent will now look like:: 87 | 88 | mypackage/0.0.1 requests/2.7.0 89 | 90 | You can also optionally include the Python version information and System 91 | information the same way that our ``user_agent`` function does. 92 | 93 | .. autoclass:: requests_toolbelt.utils.user_agent.UserAgentBuilder 94 | :members: 95 | -------------------------------------------------------------------------------- /docs/user.rst: -------------------------------------------------------------------------------- 1 | .. _user: 2 | 3 | .. include:: ../README.rst 4 | -------------------------------------------------------------------------------- /examples/monitor/progress_bar.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # ############################################################################ 4 | # This example demonstrates how to use the MultipartEncoderMonitor to create a 5 | # progress bar using clint. 
6 | # ############################################################################ 7 | 8 | from clint.textui.progress import Bar as ProgressBar 9 | from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor 10 | 11 | import requests 12 | 13 | 14 | def create_callback(encoder): 15 | encoder_len = encoder.len 16 | bar = ProgressBar(expected_size=encoder_len, filled_char='=') 17 | 18 | def callback(monitor): 19 | bar.show(monitor.bytes_read) 20 | 21 | return callback 22 | 23 | 24 | def create_upload(): 25 | return MultipartEncoder({ 26 | 'form_field': 'value', 27 | 'another_form_field': 'another value', 28 | 'first_file': ('progress_bar.py', open(__file__, 'rb'), 'text/plain'), 29 | 'second_file': ('progress_bar.py', open(__file__, 'rb'), 30 | 'text/plain'), 31 | }) 32 | 33 | 34 | if __name__ == '__main__': 35 | encoder = create_upload() 36 | callback = create_callback(encoder) 37 | monitor = MultipartEncoderMonitor(encoder, callback) 38 | r = requests.post('https://httpbin.org/post', data=monitor, 39 | headers={'Content-Type': monitor.content_type}) 40 | print('\nUpload finished! (Returned status {} {})'.format( 41 | r.status_code, r.reason 42 | )) 43 | -------------------------------------------------------------------------------- /examples/threading/threaded.py: -------------------------------------------------------------------------------- 1 | try: 2 | import Queue as queue 3 | except ImportError: 4 | import queue 5 | 6 | from requests_toolbelt.threaded import pool 7 | 8 | q = queue.Queue() 9 | q.put({ 10 | 'method': 'GET', 11 | 'url': 'https://httpbin.org/get', 12 | 'params': {'foo': 'bar'} 13 | }) 14 | q.put({ 15 | 'method': 'POST', 16 | 'url': 'https://httpbin.org/post', 17 | 'json': {'foo': 'bar'} 18 | }) 19 | q.put({ 20 | 'method': 'POST', 21 | 'url': 'https://httpbin.org/post', 22 | 'data': {'foo': 'bar'} 23 | }) 24 | q.put({ 25 | 'method': 'PUT', 26 | 'url': 'https://httpbin.org/put', 27 | 'files': {'foo': ('', 'bar')} 28 | }) 29 | q.put({ 30 | 'method': 'GET', 31 | 'url': 'https://httpbin.org/stream/100', 32 | 'stream': True 33 | }) 34 | q.put({ 35 | 'method': 'GET', 36 | 'url': 'https://httpbin.org/delay/10', 37 | 'timeout': 5.0 38 | }) 39 | 40 | for i in range(30): 41 | q.put({ 42 | 'method': 'GET', 43 | 'url': 'https://httpbin.org/get', 44 | 'params': {'i': str(i)}, 45 | }) 46 | 47 | p = pool.Pool(q) 48 | p.join_all() 49 | 50 | responses = list(p.responses()) 51 | exceptions = list(p.exceptions()) 52 | -------------------------------------------------------------------------------- /examples/threading/threaded_simplified.py: -------------------------------------------------------------------------------- 1 | from requests_toolbelt import threaded 2 | 3 | requests = [{ 4 | 'method': 'GET', 5 | 'url': 'https://httpbin.org/get', 6 | 'params': {'foo': 'bar'} 7 | }, { 8 | 'method': 'POST', 9 | 'url': 'https://httpbin.org/post', 10 | 'json': {'foo': 'bar'} 11 | }, { 12 | 'method': 'POST', 13 | 'url': 'https://httpbin.org/post', 14 | 'data': {'foo': 'bar'} 15 | }, { 16 | 'method': 'PUT', 17 | 'url': 'https://httpbin.org/put', 18 | 'files': {'foo': ('', 'bar')} 19 | }, { 20 | 'method': 'GET', 21 | 'url': 'https://httpbin.org/stream/100', 22 | 'stream': True 23 | }, { 24 | 'method': 'GET', 25 | 'url': 'https://httpbin.org/delay/10', 26 | 'timeout': 2.0 27 | }] 28 | 29 | url = 'https://httpbin.org/get' 30 | requests.extend([ 31 | {'method': 'GET', 'url': url, 'params': {'i': str(i)}} 32 | for i in range(30) 33 | ]) 34 | 35 | responses, exceptions = threaded.map(requests) 
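# threaded.map returns two generators. One possible way to consume them
# (attribute names follow the examples in docs/threading.rst):
for response in responses:
    print('{} returned {}'.format(response.request_kwargs['url'],
                                  response.status_code))

for exc in exceptions:
    print('{} raised {}'.format(exc.request_kwargs['url'], exc.message))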
36 | -------------------------------------------------------------------------------- /requests_toolbelt/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | requests-toolbelt 4 | ================= 5 | 6 | See https://toolbelt.readthedocs.io/ for documentation 7 | 8 | :copyright: (c) 2014 by Ian Cordasco and Cory Benfield 9 | :license: Apache v2.0, see LICENSE for more details 10 | """ 11 | 12 | from .adapters import SSLAdapter, SourceAddressAdapter 13 | from .auth.guess import GuessAuth 14 | from .multipart import ( 15 | MultipartEncoder, MultipartEncoderMonitor, MultipartDecoder, 16 | ImproperBodyPartContentException, NonMultipartContentTypeException 17 | ) 18 | from .streaming_iterator import StreamingIterator 19 | from .utils.user_agent import user_agent 20 | 21 | __title__ = 'requests-toolbelt' 22 | __authors__ = 'Ian Cordasco, Cory Benfield' 23 | __license__ = 'Apache v2.0' 24 | __copyright__ = 'Copyright 2014 Ian Cordasco, Cory Benfield' 25 | __version__ = '1.0.0' 26 | __version_info__ = tuple(int(i) for i in __version__.split('.')) 27 | 28 | __all__ = [ 29 | 'GuessAuth', 'MultipartEncoder', 'MultipartEncoderMonitor', 30 | 'MultipartDecoder', 'SSLAdapter', 'SourceAddressAdapter', 31 | 'StreamingIterator', 'user_agent', 'ImproperBodyPartContentException', 32 | 'NonMultipartContentTypeException', '__title__', '__authors__', 33 | '__license__', '__copyright__', '__version__', '__version_info__', 34 | ] 35 | -------------------------------------------------------------------------------- /requests_toolbelt/adapters/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | requests-toolbelt.adapters 4 | ========================== 5 | 6 | See https://toolbelt.readthedocs.io/ for documentation 7 | 8 | :copyright: (c) 2014 by Ian Cordasco and Cory Benfield 9 | :license: Apache v2.0, see LICENSE for more details 10 | """ 11 | 12 | from .ssl import SSLAdapter 13 | from .source import SourceAddressAdapter 14 | 15 | __all__ = ['SSLAdapter', 'SourceAddressAdapter'] 16 | -------------------------------------------------------------------------------- /requests_toolbelt/adapters/fingerprint.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Submodule containing the implementation for the FingerprintAdapter. 3 | 4 | This file contains an implementation of a Transport Adapter that validates 5 | the fingerprints of SSL certificates presented upon connection. 6 | """ 7 | from requests.adapters import HTTPAdapter 8 | 9 | from .._compat import poolmanager 10 | 11 | 12 | class FingerprintAdapter(HTTPAdapter): 13 | """ 14 | A HTTPS Adapter for Python Requests that verifies certificate fingerprints, 15 | instead of certificate hostnames. 16 | 17 | Example usage: 18 | 19 | .. code-block:: python 20 | 21 | import requests 22 | import ssl 23 | from requests_toolbelt.adapters.fingerprint import FingerprintAdapter 24 | 25 | twitter_fingerprint = '...' 26 | s = requests.Session() 27 | s.mount( 28 | 'https://twitter.com', 29 | FingerprintAdapter(twitter_fingerprint) 30 | ) 31 | 32 | The fingerprint should be provided as a hexadecimal string, optionally 33 | containing colons. 
34 | """ 35 | 36 | __attrs__ = HTTPAdapter.__attrs__ + ['fingerprint'] 37 | 38 | def __init__(self, fingerprint, **kwargs): 39 | self.fingerprint = fingerprint 40 | 41 | super(FingerprintAdapter, self).__init__(**kwargs) 42 | 43 | def init_poolmanager(self, connections, maxsize, block=False): 44 | self.poolmanager = poolmanager.PoolManager( 45 | num_pools=connections, 46 | maxsize=maxsize, 47 | block=block, 48 | assert_fingerprint=self.fingerprint) 49 | -------------------------------------------------------------------------------- /requests_toolbelt/adapters/host_header_ssl.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | requests_toolbelt.adapters.host_header_ssl 4 | ========================================== 5 | 6 | This file contains an implementation of the HostHeaderSSLAdapter. 7 | """ 8 | 9 | from requests.adapters import HTTPAdapter 10 | 11 | 12 | class HostHeaderSSLAdapter(HTTPAdapter): 13 | """ 14 | A HTTPS Adapter for Python Requests that sets the hostname for certificate 15 | verification based on the Host header. 16 | 17 | This allows requesting the IP address directly via HTTPS without getting 18 | a "hostname doesn't match" exception. 19 | 20 | Example usage: 21 | 22 | >>> s.mount('https://', HostHeaderSSLAdapter()) 23 | >>> s.get("https://93.184.216.34", headers={"Host": "example.org"}) 24 | 25 | """ 26 | 27 | def send(self, request, **kwargs): 28 | # HTTP headers are case-insensitive (RFC 7230) 29 | host_header = None 30 | for header in request.headers: 31 | if header.lower() == "host": 32 | host_header = request.headers[header] 33 | break 34 | 35 | connection_pool_kwargs = self.poolmanager.connection_pool_kw 36 | 37 | if host_header: 38 | connection_pool_kwargs["assert_hostname"] = host_header 39 | connection_pool_kwargs["server_hostname"] = host_header 40 | elif "assert_hostname" in connection_pool_kwargs: 41 | # an assert_hostname from a previous request may have been left 42 | connection_pool_kwargs.pop("assert_hostname", None) 43 | connection_pool_kwargs.pop("server_hostname", None) 44 | 45 | return super(HostHeaderSSLAdapter, self).send(request, **kwargs) 46 | -------------------------------------------------------------------------------- /requests_toolbelt/adapters/socket_options.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """The implementation of the SocketOptionsAdapter.""" 3 | import socket 4 | import warnings 5 | import sys 6 | 7 | import requests 8 | from requests import adapters 9 | 10 | from .._compat import connection 11 | from .._compat import poolmanager 12 | from .. import exceptions as exc 13 | 14 | 15 | class SocketOptionsAdapter(adapters.HTTPAdapter): 16 | """An adapter for requests that allows users to specify socket options. 17 | 18 | Since version 2.4.0 of requests, it is possible to specify a custom list 19 | of socket options that need to be set before establishing the connection. 20 | 21 | Example usage:: 22 | 23 | >>> import socket 24 | >>> import requests 25 | >>> from requests_toolbelt.adapters import socket_options 26 | >>> s = requests.Session() 27 | >>> opts = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 0)] 28 | >>> adapter = socket_options.SocketOptionsAdapter(socket_options=opts) 29 | >>> s.mount('http://', adapter) 30 | 31 | You can also take advantage of the list of default options on this class 32 | to keep using the original options in addition to your custom options. 
In
 33 |     that case, ``opts`` might look like::
 34 | 
 35 |         >>> opts = socket_options.SocketOptionsAdapter.default_options + opts
 36 | 
 37 |     """
 38 | 
 39 |     if connection is not None:
 40 |         default_options = getattr(
 41 |             connection.HTTPConnection,
 42 |             'default_socket_options',
 43 |             [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
 44 |         )
 45 |     else:
 46 |         default_options = []
 47 |         warnings.warn("This version of Requests is only compatible with a "
 48 |                       "version of urllib3 which is too old to support "
 49 |                       "setting options on a socket. This adapter is "
 50 |                       "functionally useless.",
 51 |                       exc.RequestsVersionTooOld)
 52 | 
 53 |     def __init__(self, **kwargs):
 54 |         self.socket_options = kwargs.pop('socket_options',
 55 |                                          self.default_options)
 56 | 
 57 |         super(SocketOptionsAdapter, self).__init__(**kwargs)
 58 | 
 59 |     def init_poolmanager(self, connections, maxsize, block=False):
 60 |         if requests.__build__ >= 0x020400:
 61 |             # NOTE(Ian): Perhaps we should raise a warning
 62 |             self.poolmanager = poolmanager.PoolManager(
 63 |                 num_pools=connections,
 64 |                 maxsize=maxsize,
 65 |                 block=block,
 66 |                 socket_options=self.socket_options
 67 |             )
 68 |         else:
 69 |             super(SocketOptionsAdapter, self).init_poolmanager(
 70 |                 connections, maxsize, block
 71 |             )
 72 | 
 73 | 
 74 | class TCPKeepAliveAdapter(SocketOptionsAdapter):
 75 |     """An adapter for requests that turns on TCP Keep-Alive by default.
 76 | 
 77 |     The adapter sets 4 socket options:
 78 | 
 79 |     - ``SOL_SOCKET`` ``SO_KEEPALIVE`` - This turns on TCP Keep-Alive
 80 |     - ``IPPROTO_TCP`` ``TCP_KEEPINTVL`` 20 - Sets the keep alive interval
 81 |     - ``IPPROTO_TCP`` ``TCP_KEEPCNT`` 5 - Sets the number of keep alive probes
 82 |     - ``IPPROTO_TCP`` ``TCP_KEEPIDLE`` 60 - Sets the keep alive time if the
 83 |       socket library has the ``TCP_KEEPIDLE`` constant
 84 | 
 85 |     The latter three can be overridden by keyword arguments (respectively):
 86 | 
 87 |     - ``interval``
 88 |     - ``count``
 89 |     - ``idle``
 90 | 
 91 |     You can use this adapter like so::
 92 | 
 93 |         >>> from requests_toolbelt.adapters import socket_options
 94 |         >>> tcp = socket_options.TCPKeepAliveAdapter(idle=120, interval=10)
 95 |         >>> s = requests.Session()
 96 |         >>> s.mount('http://', tcp)
 97 | 
 98 |     """
 99 | 
 100 |     def __init__(self, **kwargs):
 101 |         socket_options = kwargs.pop('socket_options',
 102 |                                     SocketOptionsAdapter.default_options)
 103 |         idle = kwargs.pop('idle', 60)
 104 |         interval = kwargs.pop('interval', 20)
 105 |         count = kwargs.pop('count', 5)
 106 |         socket_options = socket_options + [
 107 |             (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
 108 |         ]
 109 | 
 110 |         # NOTE(Ian): OSX does not have these constants defined, so we
 111 |         # set them conditionally.
112 | if getattr(socket, 'TCP_KEEPINTVL', None) is not None: 113 | socket_options += [(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 114 | interval)] 115 | elif sys.platform == 'darwin': 116 | # On OSX, TCP_KEEPALIVE from netinet/tcp.h is not exported 117 | # by python's socket module 118 | TCP_KEEPALIVE = getattr(socket, 'TCP_KEEPALIVE', 0x10) 119 | socket_options += [(socket.IPPROTO_TCP, TCP_KEEPALIVE, interval)] 120 | 121 | if getattr(socket, 'TCP_KEEPCNT', None) is not None: 122 | socket_options += [(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, count)] 123 | 124 | if getattr(socket, 'TCP_KEEPIDLE', None) is not None: 125 | socket_options += [(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, idle)] 126 | 127 | super(TCPKeepAliveAdapter, self).__init__( 128 | socket_options=socket_options, **kwargs 129 | ) 130 | -------------------------------------------------------------------------------- /requests_toolbelt/adapters/source.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | requests_toolbelt.source_adapter 4 | ================================ 5 | 6 | This file contains an implementation of the SourceAddressAdapter originally 7 | demonstrated on the Requests GitHub page. 8 | """ 9 | from requests.adapters import HTTPAdapter 10 | 11 | from .._compat import poolmanager, basestring 12 | 13 | 14 | class SourceAddressAdapter(HTTPAdapter): 15 | """ 16 | A Source Address Adapter for Python Requests that enables you to choose the 17 | local address to bind to. This allows you to send your HTTP requests from a 18 | specific interface and IP address. 19 | 20 | Two address formats are accepted. The first is a string: this will set the 21 | local IP address to the address given in the string, and will also choose a 22 | semi-random high port for the local port number. 23 | 24 | The second is a two-tuple of the form (ip address, port): for example, 25 | ``('10.10.10.10', 8999)``. This will set the local IP address to the first 26 | element, and the local port to the second element. If ``0`` is used as the 27 | port number, a semi-random high port will be selected. 28 | 29 | .. warning:: Setting an explicit local port can have negative interactions 30 | with connection-pooling in Requests: in particular, it risks 31 | the possibility of getting "Address in use" errors. The 32 | string-only argument is generally preferred to the tuple-form. 33 | 34 | Example usage: 35 | 36 | .. 
code-block:: python 37 | 38 | import requests 39 | from requests_toolbelt.adapters.source import SourceAddressAdapter 40 | 41 | s = requests.Session() 42 | s.mount('http://', SourceAddressAdapter('10.10.10.10')) 43 | s.mount('https://', SourceAddressAdapter(('10.10.10.10', 8999))) 44 | """ 45 | def __init__(self, source_address, **kwargs): 46 | if isinstance(source_address, basestring): 47 | self.source_address = (source_address, 0) 48 | elif isinstance(source_address, tuple): 49 | self.source_address = source_address 50 | else: 51 | raise TypeError( 52 | "source_address must be IP address string or (ip, port) tuple" 53 | ) 54 | 55 | super(SourceAddressAdapter, self).__init__(**kwargs) 56 | 57 | def init_poolmanager(self, connections, maxsize, block=False): 58 | self.poolmanager = poolmanager.PoolManager( 59 | num_pools=connections, 60 | maxsize=maxsize, 61 | block=block, 62 | source_address=self.source_address) 63 | 64 | def proxy_manager_for(self, *args, **kwargs): 65 | kwargs['source_address'] = self.source_address 66 | return super(SourceAddressAdapter, self).proxy_manager_for( 67 | *args, **kwargs) 68 | -------------------------------------------------------------------------------- /requests_toolbelt/adapters/ssl.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | 4 | requests_toolbelt.ssl_adapter 5 | ============================= 6 | 7 | This file contains an implementation of the SSLAdapter originally demonstrated 8 | in this blog post: 9 | https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/ 10 | 11 | """ 12 | import requests 13 | 14 | from requests.adapters import HTTPAdapter 15 | 16 | from .._compat import poolmanager 17 | 18 | 19 | class SSLAdapter(HTTPAdapter): 20 | """ 21 | A HTTPS Adapter for Python Requests that allows the choice of the SSL/TLS 22 | version negotiated by Requests. This can be used either to enforce the 23 | choice of high-security TLS versions (where supported), or to work around 24 | misbehaving servers that fail to correctly negotiate the default TLS 25 | version being offered. 26 | 27 | Example usage: 28 | 29 | >>> import requests 30 | >>> import ssl 31 | >>> from requests_toolbelt import SSLAdapter 32 | >>> s = requests.Session() 33 | >>> s.mount('https://', SSLAdapter(ssl.PROTOCOL_TLSv1)) 34 | 35 | You can replace the chosen protocol with any that are available in the 36 | default Python SSL module. All subsequent requests that match the adapter 37 | prefix will use the chosen SSL version instead of the default. 38 | 39 | This adapter will also attempt to change the SSL/TLS version negotiated by 40 | Requests when using a proxy. However, this may not always be possible: 41 | prior to Requests v2.4.0 the adapter did not have access to the proxy setup 42 | code. In earlier versions of Requests, this adapter will not function 43 | properly when used with proxies. 44 | """ 45 | 46 | __attrs__ = HTTPAdapter.__attrs__ + ['ssl_version'] 47 | 48 | def __init__(self, ssl_version=None, **kwargs): 49 | self.ssl_version = ssl_version 50 | 51 | super(SSLAdapter, self).__init__(**kwargs) 52 | 53 | def init_poolmanager(self, connections, maxsize, block=False): 54 | self.poolmanager = poolmanager.PoolManager( 55 | num_pools=connections, 56 | maxsize=maxsize, 57 | block=block, 58 | ssl_version=self.ssl_version) 59 | 60 | if requests.__build__ >= 0x020400: 61 | # Earlier versions of requests either don't have this method or, worse, 62 | # don't allow passing arbitrary keyword arguments. 
As a result, only 63 | # conditionally define this method. 64 | def proxy_manager_for(self, *args, **kwargs): 65 | kwargs['ssl_version'] = self.ssl_version 66 | return super(SSLAdapter, self).proxy_manager_for(*args, **kwargs) 67 | -------------------------------------------------------------------------------- /requests_toolbelt/auth/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/requests/toolbelt/bcd5f7be229e14089052be7e3b527ebcea0ae7b8/requests_toolbelt/auth/__init__.py -------------------------------------------------------------------------------- /requests_toolbelt/auth/_digest_auth_compat.py: -------------------------------------------------------------------------------- 1 | """Provide a compatibility layer for requests.auth.HTTPDigestAuth.""" 2 | import requests 3 | 4 | 5 | class _ThreadingDescriptor(object): 6 | def __init__(self, prop, default): 7 | self.prop = prop 8 | self.default = default 9 | 10 | def __get__(self, obj, objtype=None): 11 | return getattr(obj._thread_local, self.prop, self.default) 12 | 13 | def __set__(self, obj, value): 14 | setattr(obj._thread_local, self.prop, value) 15 | 16 | 17 | class _HTTPDigestAuth(requests.auth.HTTPDigestAuth): 18 | init = _ThreadingDescriptor('init', True) 19 | last_nonce = _ThreadingDescriptor('last_nonce', '') 20 | nonce_count = _ThreadingDescriptor('nonce_count', 0) 21 | chal = _ThreadingDescriptor('chal', {}) 22 | pos = _ThreadingDescriptor('pos', None) 23 | num_401_calls = _ThreadingDescriptor('num_401_calls', 1) 24 | 25 | 26 | if requests.__build__ < 0x020800: 27 | HTTPDigestAuth = requests.auth.HTTPDigestAuth 28 | else: 29 | HTTPDigestAuth = _HTTPDigestAuth 30 | -------------------------------------------------------------------------------- /requests_toolbelt/auth/guess.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """The module containing the code for GuessAuth.""" 3 | from requests import auth 4 | from requests import cookies 5 | 6 | from . import _digest_auth_compat as auth_compat, http_proxy_digest 7 | 8 | 9 | class GuessAuth(auth.AuthBase): 10 | """Guesses the auth type by the WWW-Authentication header.""" 11 | def __init__(self, username, password): 12 | self.username = username 13 | self.password = password 14 | self.auth = None 15 | self.pos = None 16 | 17 | def _handle_basic_auth_401(self, r, kwargs): 18 | if self.pos is not None: 19 | r.request.body.seek(self.pos) 20 | 21 | # Consume content and release the original connection 22 | # to allow our new request to reuse the same one. 23 | r.content 24 | r.raw.release_conn() 25 | prep = r.request.copy() 26 | if not hasattr(prep, '_cookies'): 27 | prep._cookies = cookies.RequestsCookieJar() 28 | cookies.extract_cookies_to_jar(prep._cookies, r.request, r.raw) 29 | prep.prepare_cookies(prep._cookies) 30 | 31 | self.auth = auth.HTTPBasicAuth(self.username, self.password) 32 | prep = self.auth(prep) 33 | _r = r.connection.send(prep, **kwargs) 34 | _r.history.append(r) 35 | _r.request = prep 36 | 37 | return _r 38 | 39 | def _handle_digest_auth_401(self, r, kwargs): 40 | self.auth = auth_compat.HTTPDigestAuth(self.username, self.password) 41 | try: 42 | self.auth.init_per_thread_state() 43 | except AttributeError: 44 | # If we're not on requests 2.8.0+ this method does not exist and 45 | # is not relevant. 
46 | pass 47 | 48 | # Check that the attr exists because much older versions of requests 49 | # set this attribute lazily. For example: 50 | # https://github.com/kennethreitz/requests/blob/33735480f77891754304e7f13e3cdf83aaaa76aa/requests/auth.py#L59 51 | if (hasattr(self.auth, 'num_401_calls') and 52 | self.auth.num_401_calls is None): 53 | self.auth.num_401_calls = 1 54 | # Digest auth would resend the request by itself. We can take a 55 | # shortcut here. 56 | return self.auth.handle_401(r, **kwargs) 57 | 58 | def handle_401(self, r, **kwargs): 59 | """Resends a request with auth headers, if needed.""" 60 | 61 | www_authenticate = r.headers.get('www-authenticate', '').lower() 62 | 63 | if 'basic' in www_authenticate: 64 | return self._handle_basic_auth_401(r, kwargs) 65 | 66 | if 'digest' in www_authenticate: 67 | return self._handle_digest_auth_401(r, kwargs) 68 | 69 | def __call__(self, request): 70 | if self.auth is not None: 71 | return self.auth(request) 72 | 73 | try: 74 | self.pos = request.body.tell() 75 | except AttributeError: 76 | pass 77 | 78 | request.register_hook('response', self.handle_401) 79 | return request 80 | 81 | 82 | class GuessProxyAuth(GuessAuth): 83 | """ 84 | Guesses the auth type by WWW-Authentication and Proxy-Authentication 85 | headers 86 | """ 87 | def __init__(self, username=None, password=None, 88 | proxy_username=None, proxy_password=None): 89 | super(GuessProxyAuth, self).__init__(username, password) 90 | self.proxy_username = proxy_username 91 | self.proxy_password = proxy_password 92 | self.proxy_auth = None 93 | 94 | def _handle_basic_auth_407(self, r, kwargs): 95 | if self.pos is not None: 96 | r.request.body.seek(self.pos) 97 | 98 | r.content 99 | r.raw.release_conn() 100 | prep = r.request.copy() 101 | if not hasattr(prep, '_cookies'): 102 | prep._cookies = cookies.RequestsCookieJar() 103 | cookies.extract_cookies_to_jar(prep._cookies, r.request, r.raw) 104 | prep.prepare_cookies(prep._cookies) 105 | 106 | self.proxy_auth = auth.HTTPProxyAuth(self.proxy_username, 107 | self.proxy_password) 108 | prep = self.proxy_auth(prep) 109 | _r = r.connection.send(prep, **kwargs) 110 | _r.history.append(r) 111 | _r.request = prep 112 | 113 | return _r 114 | 115 | def _handle_digest_auth_407(self, r, kwargs): 116 | self.proxy_auth = http_proxy_digest.HTTPProxyDigestAuth( 117 | username=self.proxy_username, 118 | password=self.proxy_password) 119 | 120 | try: 121 | self.auth.init_per_thread_state() 122 | except AttributeError: 123 | pass 124 | 125 | return self.proxy_auth.handle_407(r, **kwargs) 126 | 127 | def handle_407(self, r, **kwargs): 128 | proxy_authenticate = r.headers.get('Proxy-Authenticate', '').lower() 129 | 130 | if 'basic' in proxy_authenticate: 131 | return self._handle_basic_auth_407(r, kwargs) 132 | 133 | if 'digest' in proxy_authenticate: 134 | return self._handle_digest_auth_407(r, kwargs) 135 | 136 | def __call__(self, request): 137 | if self.proxy_auth is not None: 138 | request = self.proxy_auth(request) 139 | 140 | try: 141 | self.pos = request.body.tell() 142 | except AttributeError: 143 | pass 144 | 145 | request.register_hook('response', self.handle_407) 146 | return super(GuessProxyAuth, self).__call__(request) 147 | -------------------------------------------------------------------------------- /requests_toolbelt/auth/handler.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | 4 | requests_toolbelt.auth.handler 5 | ============================== 6 | 7 | 
This holds all of the implementation details of the Authentication Handler. 8 | 9 | """ 10 | 11 | from requests.auth import AuthBase, HTTPBasicAuth 12 | from requests.compat import urlparse, urlunparse 13 | 14 | 15 | class AuthHandler(AuthBase): 16 | 17 | """ 18 | 19 | The ``AuthHandler`` object takes a dictionary of domains paired with 20 | authentication strategies and will use this to determine which credentials 21 | to use when making a request. For example, you could do the following: 22 | 23 | .. code-block:: python 24 | 25 | from requests import HTTPDigestAuth 26 | from requests_toolbelt.auth.handler import AuthHandler 27 | 28 | import requests 29 | 30 | auth = AuthHandler({ 31 | 'https://api.github.com': ('sigmavirus24', 'fakepassword'), 32 | 'https://example.com': HTTPDigestAuth('username', 'password') 33 | }) 34 | 35 | r = requests.get('https://api.github.com/user', auth=auth) 36 | # => 37 | r = requests.get('https://example.com/some/path', auth=auth) 38 | # => 39 | 40 | s = requests.Session() 41 | s.auth = auth 42 | r = s.get('https://api.github.com/user') 43 | # => 44 | 45 | .. warning:: 46 | 47 | :class:`requests.auth.HTTPDigestAuth` is not yet thread-safe. If you 48 | use :class:`AuthHandler` across multiple threads you should 49 | instantiate a new AuthHandler for each thread with a new 50 | HTTPDigestAuth instance for each thread. 51 | 52 | """ 53 | 54 | def __init__(self, strategies): 55 | self.strategies = dict(strategies) 56 | self._make_uniform() 57 | 58 | def __call__(self, request): 59 | auth = self.get_strategy_for(request.url) 60 | return auth(request) 61 | 62 | def __repr__(self): 63 | return ''.format(self.strategies) 64 | 65 | def _make_uniform(self): 66 | existing_strategies = list(self.strategies.items()) 67 | self.strategies = {} 68 | 69 | for (k, v) in existing_strategies: 70 | self.add_strategy(k, v) 71 | 72 | @staticmethod 73 | def _key_from_url(url): 74 | parsed = urlparse(url) 75 | return urlunparse((parsed.scheme.lower(), 76 | parsed.netloc.lower(), 77 | '', '', '', '')) 78 | 79 | def add_strategy(self, domain, strategy): 80 | """Add a new domain and authentication strategy. 81 | 82 | :param str domain: The domain you wish to match against. For example: 83 | ``'https://api.github.com'`` 84 | :param str strategy: The authentication strategy you wish to use for 85 | that domain. For example: ``('username', 'password')`` or 86 | ``requests.HTTPDigestAuth('username', 'password')`` 87 | 88 | .. code-block:: python 89 | 90 | a = AuthHandler({}) 91 | a.add_strategy('https://api.github.com', ('username', 'password')) 92 | 93 | """ 94 | # Turn tuples into Basic Authentication objects 95 | if isinstance(strategy, tuple): 96 | strategy = HTTPBasicAuth(*strategy) 97 | 98 | key = self._key_from_url(domain) 99 | self.strategies[key] = strategy 100 | 101 | def get_strategy_for(self, url): 102 | """Retrieve the authentication strategy for a specified URL. 103 | 104 | :param str url: The full URL you will be making a request against. For 105 | example, ``'https://api.github.com/user'`` 106 | :returns: Callable that adds authentication to a request. 107 | 108 | .. 
code-block:: python 109 | 110 | import requests 111 | a = AuthHandler({'example.com', ('foo', 'bar')}) 112 | strategy = a.get_strategy_for('http://example.com/example') 113 | assert isinstance(strategy, requests.auth.HTTPBasicAuth) 114 | 115 | """ 116 | key = self._key_from_url(url) 117 | return self.strategies.get(key, NullAuthStrategy()) 118 | 119 | def remove_strategy(self, domain): 120 | """Remove the domain and strategy from the collection of strategies. 121 | 122 | :param str domain: The domain you wish remove. For example, 123 | ``'https://api.github.com'``. 124 | 125 | .. code-block:: python 126 | 127 | a = AuthHandler({'example.com', ('foo', 'bar')}) 128 | a.remove_strategy('example.com') 129 | assert a.strategies == {} 130 | 131 | """ 132 | key = self._key_from_url(domain) 133 | if key in self.strategies: 134 | del self.strategies[key] 135 | 136 | 137 | class NullAuthStrategy(AuthBase): 138 | def __repr__(self): 139 | return '' 140 | 141 | def __call__(self, r): 142 | return r 143 | -------------------------------------------------------------------------------- /requests_toolbelt/auth/http_bearer.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """The module containing HTTPBearerAuth.""" 3 | 4 | from requests.auth import AuthBase 5 | 6 | 7 | class HTTPBearerAuth(AuthBase): 8 | """HTTP Bearer Token Authentication 9 | """ 10 | 11 | def __init__(self, token): 12 | self.token = token 13 | 14 | def __eq__(self, other): 15 | return self.token == getattr(other, 'token', None) 16 | 17 | def __ne__(self, other): 18 | return not self == other 19 | 20 | def __call__(self, r): 21 | r.headers['Authorization'] = 'Bearer ' + self.token 22 | return r 23 | -------------------------------------------------------------------------------- /requests_toolbelt/auth/http_proxy_digest.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """The module containing HTTPProxyDigestAuth.""" 3 | import re 4 | 5 | from requests import cookies, utils 6 | 7 | from . import _digest_auth_compat as auth 8 | 9 | 10 | class HTTPProxyDigestAuth(auth.HTTPDigestAuth): 11 | """HTTP digest authentication between proxy 12 | 13 | :param stale_rejects: The number of rejects indicate that: 14 | the client may wish to simply retry the request 15 | with a new encrypted response, without reprompting the user for a 16 | new username and password. 
i.e., retry build_digest_header 17 | :type stale_rejects: int 18 | """ 19 | _pat = re.compile(r'digest ', flags=re.IGNORECASE) 20 | 21 | def __init__(self, *args, **kwargs): 22 | super(HTTPProxyDigestAuth, self).__init__(*args, **kwargs) 23 | self.stale_rejects = 0 24 | 25 | self.init_per_thread_state() 26 | 27 | @property 28 | def stale_rejects(self): 29 | thread_local = getattr(self, '_thread_local', None) 30 | if thread_local is None: 31 | return self._stale_rejects 32 | return thread_local.stale_rejects 33 | 34 | @stale_rejects.setter 35 | def stale_rejects(self, value): 36 | thread_local = getattr(self, '_thread_local', None) 37 | if thread_local is None: 38 | self._stale_rejects = value 39 | else: 40 | thread_local.stale_rejects = value 41 | 42 | def init_per_thread_state(self): 43 | try: 44 | super(HTTPProxyDigestAuth, self).init_per_thread_state() 45 | except AttributeError: 46 | # If we're not on requests 2.8.0+ this method does not exist 47 | pass 48 | 49 | def handle_407(self, r, **kwargs): 50 | """Handle HTTP 407 only once, otherwise give up 51 | 52 | :param r: current response 53 | :returns: responses, along with the new response 54 | """ 55 | if r.status_code == 407 and self.stale_rejects < 2: 56 | s_auth = r.headers.get("proxy-authenticate") 57 | if s_auth is None: 58 | raise IOError( 59 | "proxy server violated RFC 7235:" 60 | "407 response MUST contain header proxy-authenticate") 61 | elif not self._pat.match(s_auth): 62 | return r 63 | 64 | self.chal = utils.parse_dict_header( 65 | self._pat.sub('', s_auth, count=1)) 66 | 67 | # if we present the user/passwd and still get rejected 68 | # https://tools.ietf.org/html/rfc2617#section-3.2.1 69 | if ('Proxy-Authorization' in r.request.headers and 70 | 'stale' in self.chal): 71 | if self.chal['stale'].lower() == 'true': # try again 72 | self.stale_rejects += 1 73 | # wrong user/passwd 74 | elif self.chal['stale'].lower() == 'false': 75 | raise IOError("User or password is invalid") 76 | 77 | # Consume content and release the original connection 78 | # to allow our new request to reuse the same one. 
79 | r.content 80 | r.close() 81 | prep = r.request.copy() 82 | cookies.extract_cookies_to_jar(prep._cookies, r.request, r.raw) 83 | prep.prepare_cookies(prep._cookies) 84 | 85 | prep.headers['Proxy-Authorization'] = self.build_digest_header( 86 | prep.method, prep.url) 87 | _r = r.connection.send(prep, **kwargs) 88 | _r.history.append(r) 89 | _r.request = prep 90 | 91 | return _r 92 | else: # give up authenticate 93 | return r 94 | 95 | def __call__(self, r): 96 | self.init_per_thread_state() 97 | # if we have nonce, then just use it, otherwise server will tell us 98 | if self.last_nonce: 99 | r.headers['Proxy-Authorization'] = self.build_digest_header( 100 | r.method, r.url 101 | ) 102 | r.register_hook('response', self.handle_407) 103 | return r 104 | -------------------------------------------------------------------------------- /requests_toolbelt/cookies/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/requests/toolbelt/bcd5f7be229e14089052be7e3b527ebcea0ae7b8/requests_toolbelt/cookies/__init__.py -------------------------------------------------------------------------------- /requests_toolbelt/cookies/forgetful.py: -------------------------------------------------------------------------------- 1 | """The module containing the code for ForgetfulCookieJar.""" 2 | from requests.cookies import RequestsCookieJar 3 | 4 | 5 | class ForgetfulCookieJar(RequestsCookieJar): 6 | def set_cookie(self, *args, **kwargs): 7 | return 8 | -------------------------------------------------------------------------------- /requests_toolbelt/downloadutils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/requests/toolbelt/bcd5f7be229e14089052be7e3b527ebcea0ae7b8/requests_toolbelt/downloadutils/__init__.py -------------------------------------------------------------------------------- /requests_toolbelt/downloadutils/stream.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Utilities for dealing with streamed requests.""" 3 | import os.path 4 | import re 5 | 6 | from .. import exceptions as exc 7 | 8 | # Regular expressions stolen from werkzeug/http.py 9 | # cd2c97bb0a076da2322f11adce0b2731f9193396 L62-L64 10 | _QUOTED_STRING_RE = r'"[^"\\]*(?:\\.[^"\\]*)*"' 11 | _OPTION_HEADER_PIECE_RE = re.compile( 12 | r';\s*(%s|[^\s;=]+)\s*(?:=\s*(%s|[^;]+))?\s*' % (_QUOTED_STRING_RE, 13 | _QUOTED_STRING_RE) 14 | ) 15 | _DEFAULT_CHUNKSIZE = 512 16 | 17 | 18 | def _get_filename(content_disposition): 19 | for match in _OPTION_HEADER_PIECE_RE.finditer(content_disposition): 20 | k, v = match.groups() 21 | if k == 'filename': 22 | # ignore any directory paths in the filename 23 | return os.path.split(v)[1] 24 | return None 25 | 26 | 27 | def get_download_file_path(response, path): 28 | """ 29 | Given a response and a path, return a file path for a download. 30 | 31 | If a ``path`` parameter is a directory, this function will parse the 32 | ``Content-Disposition`` header on the response to determine the name of the 33 | file as reported by the server, and return a file path in the specified 34 | directory. 35 | 36 | If ``path`` is empty or None, this function will return a path relative 37 | to the process' current working directory. 38 | 39 | If path is a full file path, return it. 
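    For example (a sketch; the URL and paths shown are hypothetical):

    .. code-block:: python

        r = requests.get(url, stream=True)
        get_download_file_path(r, '/tmp/downloads')    # directory: use the server-supplied name
        get_download_file_path(r, '/tmp/downloads/archive.zip')  # full path: returned unchanged
        get_download_file_path(r, None)                # None: server-supplied name in the CWD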
40 | 41 | :param response: A Response object from requests 42 | :type response: requests.models.Response 43 | :param str path: Directory or file path. 44 | :returns: full file path to download as 45 | :rtype: str 46 | :raises: :class:`requests_toolbelt.exceptions.StreamingError` 47 | """ 48 | path_is_dir = path and os.path.isdir(path) 49 | 50 | if path and not path_is_dir: 51 | # fully qualified file path 52 | filepath = path 53 | else: 54 | response_filename = _get_filename( 55 | response.headers.get('content-disposition', '') 56 | ) 57 | if not response_filename: 58 | raise exc.StreamingError('No filename given to stream response to') 59 | 60 | if path_is_dir: 61 | # directory to download to 62 | filepath = os.path.join(path, response_filename) 63 | else: 64 | # fallback to downloading to current working directory 65 | filepath = response_filename 66 | 67 | return filepath 68 | 69 | 70 | def stream_response_to_file(response, path=None, chunksize=_DEFAULT_CHUNKSIZE): 71 | """Stream a response body to the specified file. 72 | 73 | Either use the ``path`` provided or use the name provided in the 74 | ``Content-Disposition`` header. 75 | 76 | .. warning:: 77 | 78 | If you pass this function an open file-like object as the ``path`` 79 | parameter, the function will not close that file for you. 80 | 81 | .. warning:: 82 | 83 | This function will not automatically close the response object 84 | passed in as the ``response`` parameter. 85 | 86 | If a ``path`` parameter is a directory, this function will parse the 87 | ``Content-Disposition`` header on the response to determine the name of the 88 | file as reported by the server, and return a file path in the specified 89 | directory. If no ``path`` parameter is supplied, this function will default 90 | to the process' current working directory. 91 | 92 | .. code-block:: python 93 | 94 | import requests 95 | from requests_toolbelt import exceptions 96 | from requests_toolbelt.downloadutils import stream 97 | 98 | r = requests.get(url, stream=True) 99 | try: 100 | filename = stream.stream_response_to_file(r) 101 | except exceptions.StreamingError as e: 102 | # The toolbelt could not find the filename in the 103 | # Content-Disposition 104 | print(e.message) 105 | 106 | You can also specify the filename as a string. This will be passed to 107 | the built-in :func:`open` and we will read the content into the file. 108 | 109 | .. code-block:: python 110 | 111 | import requests 112 | from requests_toolbelt.downloadutils import stream 113 | 114 | r = requests.get(url, stream=True) 115 | filename = stream.stream_response_to_file(r, path='myfile') 116 | 117 | If the calculated download file path already exists, this function will 118 | raise a StreamingError. 119 | 120 | Instead, if you want to manage the file object yourself, you need to 121 | provide either a :class:`io.BytesIO` object or a file opened with the 122 | `'b'` flag. See the two examples below for more details. 123 | 124 | .. code-block:: python 125 | 126 | import requests 127 | from requests_toolbelt.downloadutils import stream 128 | 129 | with open('myfile', 'wb') as fd: 130 | r = requests.get(url, stream=True) 131 | filename = stream.stream_response_to_file(r, path=fd) 132 | 133 | print('{} saved to {}'.format(url, filename)) 134 | 135 | .. 
code-block:: python 136 | 137 | import io 138 | import requests 139 | from requests_toolbelt.downloadutils import stream 140 | 141 | b = io.BytesIO() 142 | r = requests.get(url, stream=True) 143 | filename = stream.stream_response_to_file(r, path=b) 144 | assert filename is None 145 | 146 | :param response: A Response object from requests 147 | :type response: requests.models.Response 148 | :param path: *(optional)*, Either a string with the path to the location 149 | to save the response content, or a file-like object expecting bytes. 150 | :type path: :class:`str`, or object with a :meth:`write` 151 | :param int chunksize: (optional), Size of chunk to attempt to stream 152 | (default 512B). 153 | :returns: The name of the file, if one can be determined, else None 154 | :rtype: str 155 | :raises: :class:`requests_toolbelt.exceptions.StreamingError` 156 | """ 157 | pre_opened = False 158 | fd = None 159 | filename = None 160 | if path and callable(getattr(path, 'write', None)): 161 | pre_opened = True 162 | fd = path 163 | filename = getattr(fd, 'name', None) 164 | else: 165 | filename = get_download_file_path(response, path) 166 | if os.path.exists(filename): 167 | raise exc.StreamingError("File already exists: %s" % filename) 168 | fd = open(filename, 'wb') 169 | 170 | for chunk in response.iter_content(chunk_size=chunksize): 171 | fd.write(chunk) 172 | 173 | if not pre_opened: 174 | fd.close() 175 | 176 | return filename 177 | -------------------------------------------------------------------------------- /requests_toolbelt/downloadutils/tee.py: -------------------------------------------------------------------------------- 1 | """Tee function implementations.""" 2 | import io 3 | 4 | _DEFAULT_CHUNKSIZE = 65536 5 | 6 | __all__ = ['tee', 'tee_to_file', 'tee_to_bytearray'] 7 | 8 | 9 | def _tee(response, callback, chunksize, decode_content): 10 | for chunk in response.raw.stream(amt=chunksize, 11 | decode_content=decode_content): 12 | callback(chunk) 13 | yield chunk 14 | 15 | 16 | def tee(response, fileobject, chunksize=_DEFAULT_CHUNKSIZE, 17 | decode_content=None): 18 | """Stream the response both to the generator and a file. 19 | 20 | This will stream the response body while writing the bytes to 21 | ``fileobject``. 22 | 23 | Example usage: 24 | 25 | .. code-block:: python 26 | 27 | resp = requests.get(url, stream=True) 28 | with open('save_file', 'wb') as save_file: 29 | for chunk in tee(resp, save_file): 30 | # do stuff with chunk 31 | 32 | .. code-block:: python 33 | 34 | import io 35 | 36 | resp = requests.get(url, stream=True) 37 | fileobject = io.BytesIO() 38 | 39 | for chunk in tee(resp, fileobject): 40 | # do stuff with chunk 41 | 42 | :param response: Response from requests. 43 | :type response: requests.Response 44 | :param fileobject: Writable file-like object. 45 | :type fileobject: file, io.BytesIO 46 | :param int chunksize: (optional), Size of chunk to attempt to stream. 47 | :param bool decode_content: (optional), If True, this will decode the 48 | compressed content of the response. 49 | :raises: TypeError if the fileobject wasn't opened with the right mode 50 | or isn't a BytesIO object. 51 | """ 52 | # We will be streaming the raw bytes from over the wire, so we need to 53 | # ensure that writing to the fileobject will preserve those bytes. On 54 | # Python3, if the user passes an io.StringIO, this will fail, so we need 55 | # to check for BytesIO instead. 
56 | if not ('b' in getattr(fileobject, 'mode', '') or 57 | isinstance(fileobject, io.BytesIO)): 58 | raise TypeError('tee() will write bytes directly to this fileobject' 59 | ', it must be opened with the "b" flag if it is a file' 60 | ' or inherit from io.BytesIO.') 61 | 62 | return _tee(response, fileobject.write, chunksize, decode_content) 63 | 64 | 65 | def tee_to_file(response, filename, chunksize=_DEFAULT_CHUNKSIZE, 66 | decode_content=None): 67 | """Stream the response both to the generator and a file. 68 | 69 | This will open a file named ``filename`` and stream the response body 70 | while writing the bytes to the opened file object. 71 | 72 | Example usage: 73 | 74 | .. code-block:: python 75 | 76 | resp = requests.get(url, stream=True) 77 | for chunk in tee_to_file(resp, 'save_file'): 78 | # do stuff with chunk 79 | 80 | :param response: Response from requests. 81 | :type response: requests.Response 82 | :param str filename: Name of file in which we write the response content. 83 | :param int chunksize: (optional), Size of chunk to attempt to stream. 84 | :param bool decode_content: (optional), If True, this will decode the 85 | compressed content of the response. 86 | """ 87 | with open(filename, 'wb') as fd: 88 | for chunk in tee(response, fd, chunksize, decode_content): 89 | yield chunk 90 | 91 | 92 | def tee_to_bytearray(response, bytearr, chunksize=_DEFAULT_CHUNKSIZE, 93 | decode_content=None): 94 | """Stream the response both to the generator and a bytearray. 95 | 96 | This will stream the response provided to the function, add them to the 97 | provided :class:`bytearray` and yield them to the user. 98 | 99 | .. note:: 100 | 101 | This uses the :meth:`bytearray.extend` by default instead of passing 102 | the bytearray into the ``readinto`` method. 103 | 104 | Example usage: 105 | 106 | .. code-block:: python 107 | 108 | b = bytearray() 109 | resp = requests.get(url, stream=True) 110 | for chunk in tee_to_bytearray(resp, b): 111 | # do stuff with chunk 112 | 113 | :param response: Response from requests. 114 | :type response: requests.Response 115 | :param bytearray bytearr: Array to add the streamed bytes to. 116 | :param int chunksize: (optional), Size of chunk to attempt to stream. 117 | :param bool decode_content: (optional), If True, this will decode the 118 | compressed content of the response. 119 | """ 120 | if not isinstance(bytearr, bytearray): 121 | raise TypeError('tee_to_bytearray() expects bytearr to be a ' 122 | 'bytearray') 123 | return _tee(response, bytearr.extend, chunksize, decode_content) 124 | -------------------------------------------------------------------------------- /requests_toolbelt/exceptions.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Collection of exceptions raised by requests-toolbelt.""" 3 | 4 | 5 | class StreamingError(Exception): 6 | """Used in :mod:`requests_toolbelt.downloadutils.stream`.""" 7 | pass 8 | 9 | 10 | class VersionMismatchError(Exception): 11 | """Used to indicate a version mismatch in the version of requests required. 12 | 13 | The feature in use requires a newer version of Requests to function 14 | appropriately but the version installed is not sufficient. 15 | """ 16 | pass 17 | 18 | 19 | class RequestsVersionTooOld(Warning): 20 | """Used to indicate that the Requests version is too old. 21 | 22 | If the version of Requests is too old to support a feature, we will issue 23 | this warning to the user. 
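    For example, a caller who never wants to run with such a setup could
    escalate the warning to an error (an illustrative sketch, not something
    the toolbelt does itself):

    .. code-block:: python

        import warnings
        from requests_toolbelt import exceptions

        warnings.simplefilter('error', exceptions.RequestsVersionTooOld)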
24 | """ 25 | pass 26 | -------------------------------------------------------------------------------- /requests_toolbelt/multipart/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | requests_toolbelt.multipart 3 | =========================== 4 | 5 | See https://toolbelt.readthedocs.io/ for documentation 6 | 7 | :copyright: (c) 2014 by Ian Cordasco and Cory Benfield 8 | :license: Apache v2.0, see LICENSE for more details 9 | """ 10 | 11 | from .encoder import MultipartEncoder, MultipartEncoderMonitor 12 | from .decoder import MultipartDecoder 13 | from .decoder import ImproperBodyPartContentException 14 | from .decoder import NonMultipartContentTypeException 15 | 16 | __title__ = 'requests-toolbelt' 17 | __authors__ = 'Ian Cordasco, Cory Benfield' 18 | __license__ = 'Apache v2.0' 19 | __copyright__ = 'Copyright 2014 Ian Cordasco, Cory Benfield' 20 | 21 | __all__ = [ 22 | 'MultipartEncoder', 23 | 'MultipartEncoderMonitor', 24 | 'MultipartDecoder', 25 | 'ImproperBodyPartContentException', 26 | 'NonMultipartContentTypeException', 27 | '__title__', 28 | '__authors__', 29 | '__license__', 30 | '__copyright__', 31 | ] 32 | -------------------------------------------------------------------------------- /requests_toolbelt/multipart/decoder.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | 4 | requests_toolbelt.multipart.decoder 5 | =================================== 6 | 7 | This holds all the implementation details of the MultipartDecoder 8 | 9 | """ 10 | 11 | import sys 12 | import email.parser 13 | from .encoder import encode_with 14 | from requests.structures import CaseInsensitiveDict 15 | 16 | 17 | def _split_on_find(content, bound): 18 | point = content.find(bound) 19 | return content[:point], content[point + len(bound):] 20 | 21 | 22 | class ImproperBodyPartContentException(Exception): 23 | pass 24 | 25 | 26 | class NonMultipartContentTypeException(Exception): 27 | pass 28 | 29 | 30 | def _header_parser(string, encoding): 31 | major = sys.version_info[0] 32 | if major == 3: 33 | string = string.decode(encoding) 34 | headers = email.parser.HeaderParser().parsestr(string).items() 35 | return ( 36 | (encode_with(k, encoding), encode_with(v, encoding)) 37 | for k, v in headers 38 | ) 39 | 40 | 41 | class BodyPart(object): 42 | """ 43 | 44 | The ``BodyPart`` object is a ``Response``-like interface to an individual 45 | subpart of a multipart response. It is expected that these will 46 | generally be created by objects of the ``MultipartDecoder`` class. 47 | 48 | Like ``Response``, there is a ``CaseInsensitiveDict`` object named headers, 49 | ``content`` to access bytes, ``text`` to access unicode, and ``encoding`` 50 | to access the unicode codec. 
51 | 52 | """ 53 | 54 | def __init__(self, content, encoding): 55 | self.encoding = encoding 56 | headers = {} 57 | # Split into header section (if any) and the content 58 | if b'\r\n\r\n' in content: 59 | first, self.content = _split_on_find(content, b'\r\n\r\n') 60 | if first != b'': 61 | headers = _header_parser(first.lstrip(), encoding) 62 | else: 63 | raise ImproperBodyPartContentException( 64 | 'content does not contain CR-LF-CR-LF' 65 | ) 66 | self.headers = CaseInsensitiveDict(headers) 67 | 68 | @property 69 | def text(self): 70 | """Content of the ``BodyPart`` in unicode.""" 71 | return self.content.decode(self.encoding) 72 | 73 | 74 | class MultipartDecoder(object): 75 | """ 76 | 77 | The ``MultipartDecoder`` object parses the multipart payload of 78 | a bytestring into a tuple of ``Response``-like ``BodyPart`` objects. 79 | 80 | The basic usage is:: 81 | 82 | import requests 83 | from requests_toolbelt import MultipartDecoder 84 | 85 | response = requests.get(url) 86 | decoder = MultipartDecoder.from_response(response) 87 | for part in decoder.parts: 88 | print(part.headers['content-type']) 89 | 90 | If the multipart content is not from a response, basic usage is:: 91 | 92 | from requests_toolbelt import MultipartDecoder 93 | 94 | decoder = MultipartDecoder(content, content_type) 95 | for part in decoder.parts: 96 | print(part.headers['content-type']) 97 | 98 | For both these usages, there is an optional ``encoding`` parameter. This is 99 | a string, which is the name of the unicode codec to use (default is 100 | ``'utf-8'``). 101 | 102 | """ 103 | def __init__(self, content, content_type, encoding='utf-8'): 104 | #: Original Content-Type header 105 | self.content_type = content_type 106 | #: Response body encoding 107 | self.encoding = encoding 108 | #: Parsed parts of the multipart response body 109 | self.parts = tuple() 110 | self._find_boundary() 111 | self._parse_body(content) 112 | 113 | def _find_boundary(self): 114 | ct_info = tuple(x.strip() for x in self.content_type.split(';')) 115 | mimetype = ct_info[0] 116 | if mimetype.split('/')[0].lower() != 'multipart': 117 | raise NonMultipartContentTypeException( 118 | "Unexpected mimetype in content-type: '{}'".format(mimetype) 119 | ) 120 | for item in ct_info[1:]: 121 | attr, value = _split_on_find( 122 | item, 123 | '=' 124 | ) 125 | if attr.lower() == 'boundary': 126 | self.boundary = encode_with(value.strip('"'), self.encoding) 127 | 128 | @staticmethod 129 | def _fix_first_part(part, boundary_marker): 130 | bm_len = len(boundary_marker) 131 | if boundary_marker == part[:bm_len]: 132 | return part[bm_len:] 133 | else: 134 | return part 135 | 136 | def _parse_body(self, content): 137 | boundary = b''.join((b'--', self.boundary)) 138 | 139 | def body_part(part): 140 | fixed = MultipartDecoder._fix_first_part(part, boundary) 141 | return BodyPart(fixed, self.encoding) 142 | 143 | def test_part(part): 144 | return (part != b'' and 145 | part != b'\r\n' and 146 | part[:4] != b'--\r\n' and 147 | part != b'--') 148 | 149 | parts = content.split(b''.join((b'\r\n', boundary))) 150 | self.parts = tuple(body_part(x) for x in parts if test_part(x)) 151 | 152 | @classmethod 153 | def from_response(cls, response, encoding='utf-8'): 154 | content = response.content 155 | content_type = response.headers.get('content-type', None) 156 | return cls(content, content_type, encoding) 157 | -------------------------------------------------------------------------------- /requests_toolbelt/sessions.py: 
-------------------------------------------------------------------------------- 1 | import requests 2 | 3 | from ._compat import urljoin 4 | 5 | 6 | class BaseUrlSession(requests.Session): 7 | """A Session with a URL that all requests will use as a base. 8 | 9 | Let's start by looking at a few examples: 10 | 11 | .. code-block:: python 12 | 13 | >>> from requests_toolbelt import sessions 14 | >>> s = sessions.BaseUrlSession( 15 | ... base_url='https://example.com/resource/') 16 | >>> r = s.get('sub-resource/', params={'foo': 'bar'}) 17 | >>> print(r.request.url) 18 | https://example.com/resource/sub-resource/?foo=bar 19 | 20 | Our call to the ``get`` method will make a request to the URL passed in 21 | when we created the Session and the partial resource name we provide. 22 | We implement this by overriding the ``request`` method of the Session. 23 | 24 | Likewise, we override the ``prepare_request`` method so you can construct 25 | a PreparedRequest in the same way: 26 | 27 | .. code-block:: python 28 | 29 | >>> from requests import Request 30 | >>> from requests_toolbelt import sessions 31 | >>> s = sessions.BaseUrlSession( 32 | ... base_url='https://example.com/resource/') 33 | >>> request = Request(method='GET', url='sub-resource/') 34 | >>> prepared_request = s.prepare_request(request) 35 | >>> r = s.send(prepared_request) 36 | >>> print(r.request.url) 37 | https://example.com/resource/sub-resource 38 | 39 | .. note:: 40 | 41 | The base URL that you provide and the path you provide are **very** 42 | important. 43 | 44 | Let's look at another *similar* example 45 | 46 | .. code-block:: python 47 | 48 | >>> from requests_toolbelt import sessions 49 | >>> s = sessions.BaseUrlSession( 50 | ... base_url='https://example.com/resource/') 51 | >>> r = s.get('/sub-resource/', params={'foo': 'bar'}) 52 | >>> print(r.request.url) 53 | https://example.com/sub-resource/?foo=bar 54 | 55 | The key difference here is that we called ``get`` with ``/sub-resource/``, 56 | i.e., there was a leading ``/``. This changes how we create the URL 57 | because we rely on :mod:`urllib.parse.urljoin`. 58 | 59 | To override how we generate the URL, sub-class this method and override the 60 | ``create_url`` method. 
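    For example (an illustrative sketch, not part of the toolbelt), a subclass
    could treat every path as relative to the base URL, leading slash or not:

    .. code-block:: python

        class ForcedBaseUrlSession(sessions.BaseUrlSession):
            def create_url(self, url):
                return self.base_url.rstrip('/') + '/' + url.lstrip('/')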
61 | 62 | Based on implementation from 63 | https://github.com/kennethreitz/requests/issues/2554#issuecomment-109341010 64 | """ 65 | 66 | base_url = None 67 | 68 | def __init__(self, base_url=None): 69 | if base_url: 70 | self.base_url = base_url 71 | super(BaseUrlSession, self).__init__() 72 | 73 | def request(self, method, url, *args, **kwargs): 74 | """Send the request after generating the complete URL.""" 75 | url = self.create_url(url) 76 | return super(BaseUrlSession, self).request( 77 | method, url, *args, **kwargs 78 | ) 79 | 80 | def prepare_request(self, request, *args, **kwargs): 81 | """Prepare the request after generating the complete URL.""" 82 | request.url = self.create_url(request.url) 83 | return super(BaseUrlSession, self).prepare_request( 84 | request, *args, **kwargs 85 | ) 86 | 87 | def create_url(self, url): 88 | """Create the URL based off this partial path.""" 89 | return urljoin(self.base_url, url) 90 | 91 | def __getstate__(self): 92 | """Save base URL as well during the pickle""" 93 | states = super(BaseUrlSession, self).__getstate__() 94 | states.update({"base_url": self.base_url}) 95 | return states 96 | 97 | def __setstate__(self, state): 98 | """Load base URL as well during the unpickle""" 99 | super(BaseUrlSession, self).__setstate__(state) 100 | if "base_url" in state: 101 | self.base_url = state["base_url"] 102 | -------------------------------------------------------------------------------- /requests_toolbelt/streaming_iterator.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | 4 | requests_toolbelt.streaming_iterator 5 | ==================================== 6 | 7 | This holds the implementation details for the :class:`StreamingIterator`. It 8 | is designed for the case where you, the user, know the size of the upload but 9 | need to provide the data as an iterator. This class will allow you to specify 10 | the size and stream the data without using a chunked transfer-encoding. 11 | 12 | """ 13 | from requests.utils import super_len 14 | 15 | from .multipart.encoder import CustomBytesIO, encode_with 16 | 17 | 18 | class StreamingIterator(object): 19 | 20 | """ 21 | This class provides a way of allowing iterators with a known size to be 22 | streamed instead of chunked. 23 | 24 | In requests, if you pass in an iterator it assumes you want to use 25 | chunked transfer-encoding to upload the data, which not all servers 26 | support well. Additionally, you may want to set the content-length 27 | yourself to avoid this but that will not work. The only way to preempt 28 | requests using a chunked transfer-encoding and forcing it to stream the 29 | uploads is to mimic a very specific interace. Instead of having to know 30 | these details you can instead just use this class. You simply provide the 31 | size and iterator and pass the instance of StreamingIterator to requests 32 | via the data parameter like so: 33 | 34 | .. code-block:: python 35 | 36 | from requests_toolbelt import StreamingIterator 37 | 38 | import requests 39 | 40 | # Let iterator be some generator that you already have and size be 41 | # the size of the data produced by the iterator 42 | 43 | r = requests.post(url, data=StreamingIterator(size, iterator)) 44 | 45 | You can also pass file-like objects to :py:class:`StreamingIterator` in 46 | case requests can't determize the filesize itself. This is the case with 47 | streaming file objects like ``stdin`` or any sockets. Wrapping e.g. 
files 48 | that are on disk with ``StreamingIterator`` is unnecessary, because 49 | requests can determine the filesize itself. 50 | 51 | Naturally, you should also set the `Content-Type` of your upload 52 | appropriately because the toolbelt will not attempt to guess that for you. 53 | """ 54 | 55 | def __init__(self, size, iterator, encoding='utf-8'): 56 | #: The expected size of the upload 57 | self.size = int(size) 58 | 59 | if self.size < 0: 60 | raise ValueError( 61 | 'The size of the upload must be a positive integer' 62 | ) 63 | 64 | #: Attribute that requests will check to determine the length of the 65 | #: body. See bug #80 for more details 66 | self.len = self.size 67 | 68 | #: Encoding the input data is using 69 | self.encoding = encoding 70 | 71 | #: The iterator used to generate the upload data 72 | self.iterator = iterator 73 | 74 | if hasattr(iterator, 'read'): 75 | self._file = iterator 76 | else: 77 | self._file = _IteratorAsBinaryFile(iterator, encoding) 78 | 79 | def read(self, size=-1): 80 | return encode_with(self._file.read(size), self.encoding) 81 | 82 | 83 | class _IteratorAsBinaryFile(object): 84 | def __init__(self, iterator, encoding='utf-8'): 85 | #: The iterator used to generate the upload data 86 | self.iterator = iterator 87 | 88 | #: Encoding the iterator is using 89 | self.encoding = encoding 90 | 91 | # The buffer we use to provide the correct number of bytes requested 92 | # during a read 93 | self._buffer = CustomBytesIO() 94 | 95 | def _get_bytes(self): 96 | try: 97 | return encode_with(next(self.iterator), self.encoding) 98 | except StopIteration: 99 | return b'' 100 | 101 | def _load_bytes(self, size): 102 | self._buffer.smart_truncate() 103 | amount_to_load = size - super_len(self._buffer) 104 | bytes_to_append = True 105 | 106 | while amount_to_load > 0 and bytes_to_append: 107 | bytes_to_append = self._get_bytes() 108 | amount_to_load -= self._buffer.append(bytes_to_append) 109 | 110 | def read(self, size=-1): 111 | size = int(size) 112 | if size == -1: 113 | return b''.join(self.iterator) 114 | 115 | self._load_bytes(size) 116 | return self._buffer.read(size) 117 | -------------------------------------------------------------------------------- /requests_toolbelt/threaded/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides the API for ``requests_toolbelt.threaded``. 3 | 4 | The module provides a clean and simple API for making requests via a thread 5 | pool. The thread pool will use sessions for increased performance. 6 | 7 | A simple use-case is: 8 | 9 | .. code-block:: python 10 | 11 | from requests_toolbelt import threaded 12 | 13 | urls_to_get = [{ 14 | 'url': 'https://api.github.com/users/sigmavirus24', 15 | 'method': 'GET', 16 | }, { 17 | 'url': 'https://api.github.com/repos/requests/toolbelt', 18 | 'method': 'GET', 19 | }, { 20 | 'url': 'https://google.com', 21 | 'method': 'GET', 22 | }] 23 | responses, errors = threaded.map(urls_to_get) 24 | 25 | By default, the threaded submodule will detect the number of CPUs your 26 | computer has and use that if no other number of processes is selected. To 27 | change this, always use the keyword argument ``num_processes``. Using the 28 | above example, we would expand it like so: 29 | 30 | .. code-block:: python 31 | 32 | responses, errors = threaded.map(urls_to_get, num_processes=10) 33 | 34 | You can also customize how a :class:`requests.Session` is initialized by 35 | creating a callback function: 36 | 37 | .. 
code-block:: python 38 | 39 | from requests_toolbelt import user_agent 40 | 41 | def initialize_session(session): 42 | session.headers['User-Agent'] = user_agent('my-scraper', '0.1') 43 | session.headers['Accept'] = 'application/json' 44 | 45 | responses, errors = threaded.map(urls_to_get, 46 | initializer=initialize_session) 47 | 48 | .. autofunction:: requests_toolbelt.threaded.map 49 | 50 | Inspiration is blatantly drawn from the standard library's multiprocessing 51 | library. See the following references: 52 | 53 | - multiprocessing's `pool source`_ 54 | 55 | - map and map_async `inspiration`_ 56 | 57 | .. _pool source: 58 | https://hg.python.org/cpython/file/8ef4f75a8018/Lib/multiprocessing/pool.py 59 | .. _inspiration: 60 | https://hg.python.org/cpython/file/8ef4f75a8018/Lib/multiprocessing/pool.py#l340 61 | """ 62 | from . import pool 63 | from .._compat import queue 64 | 65 | 66 | def map(requests, **kwargs): 67 | r"""Simple interface to the threaded Pool object. 68 | 69 | This function takes a list of dictionaries representing requests to make 70 | using Sessions in threads and returns a tuple where the first item is 71 | a generator of successful responses and the second is a generator of 72 | exceptions. 73 | 74 | :param list requests: 75 | Collection of dictionaries representing requests to make with the Pool 76 | object. 77 | :param \*\*kwargs: 78 | Keyword arguments that are passed to the 79 | :class:`~requests_toolbelt.threaded.pool.Pool` object. 80 | :returns: Tuple of responses and exceptions from the pool 81 | :rtype: (:class:`~requests_toolbelt.threaded.pool.ThreadResponse`, 82 | :class:`~requests_toolbelt.threaded.pool.ThreadException`) 83 | """ 84 | if not (requests and all(isinstance(r, dict) for r in requests)): 85 | raise ValueError('map expects a list of dictionaries.') 86 | 87 | # Build our queue of requests 88 | job_queue = queue.Queue() 89 | for request in requests: 90 | job_queue.put(request) 91 | 92 | # Ensure the user doesn't try to pass their own job_queue 93 | kwargs['job_queue'] = job_queue 94 | 95 | threadpool = pool.Pool(**kwargs) 96 | threadpool.join_all() 97 | return threadpool.responses(), threadpool.exceptions() 98 | -------------------------------------------------------------------------------- /requests_toolbelt/threaded/thread.py: -------------------------------------------------------------------------------- 1 | """Module containing the SessionThread class.""" 2 | import threading 3 | import uuid 4 | 5 | import requests.exceptions as exc 6 | 7 | from .._compat import queue 8 | 9 | 10 | class SessionThread(object): 11 | def __init__(self, initialized_session, job_queue, response_queue, 12 | exception_queue): 13 | self._session = initialized_session 14 | self._jobs = job_queue 15 | self._create_worker() 16 | self._responses = response_queue 17 | self._exceptions = exception_queue 18 | 19 | def _create_worker(self): 20 | self._worker = threading.Thread( 21 | target=self._make_request, 22 | name=uuid.uuid4(), 23 | ) 24 | self._worker.daemon = True 25 | self._worker._state = 0 26 | self._worker.start() 27 | 28 | def _handle_request(self, kwargs): 29 | try: 30 | response = self._session.request(**kwargs) 31 | except exc.RequestException as e: 32 | self._exceptions.put((kwargs, e)) 33 | else: 34 | self._responses.put((kwargs, response)) 35 | finally: 36 | self._jobs.task_done() 37 | 38 | def _make_request(self): 39 | while True: 40 | try: 41 | kwargs = self._jobs.get_nowait() 42 | except queue.Empty: 43 | break 44 | 45 | self._handle_request(kwargs) 
46 | 47 | def is_alive(self): 48 | """Proxy to the thread's ``is_alive`` method.""" 49 | return self._worker.is_alive() 50 | 51 | def join(self): 52 | """Join this thread to the master thread.""" 53 | self._worker.join() 54 | -------------------------------------------------------------------------------- /requests_toolbelt/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/requests/toolbelt/bcd5f7be229e14089052be7e3b527ebcea0ae7b8/requests_toolbelt/utils/__init__.py -------------------------------------------------------------------------------- /requests_toolbelt/utils/deprecated.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """A collection of functions deprecated in requests.utils.""" 3 | import re 4 | import sys 5 | 6 | from requests import utils 7 | 8 | find_charset = re.compile( 9 | br'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I 10 | ).findall 11 | 12 | find_pragma = re.compile( 13 | br'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I 14 | ).findall 15 | 16 | find_xml = re.compile( 17 | br'^<\?xml.*?encoding=["\']*(.+?)["\'>]' 18 | ).findall 19 | 20 | 21 | def get_encodings_from_content(content): 22 | """Return encodings from given content string. 23 | 24 | .. code-block:: python 25 | 26 | import requests 27 | from requests_toolbelt.utils import deprecated 28 | 29 | r = requests.get(url) 30 | encodings = deprecated.get_encodings_from_content(r) 31 | 32 | :param content: bytestring to extract encodings from 33 | :type content: bytes 34 | :return: encodings detected in the provided content 35 | :rtype: list(str) 36 | """ 37 | encodings = (find_charset(content) + find_pragma(content) 38 | + find_xml(content)) 39 | if (3, 0) <= sys.version_info < (4, 0): 40 | encodings = [encoding.decode('utf8') for encoding in encodings] 41 | return encodings 42 | 43 | 44 | def get_unicode_from_response(response): 45 | """Return the requested content back in unicode. 46 | 47 | This will first attempt to retrieve the encoding from the response 48 | headers. If that fails, it will use 49 | :func:`requests_toolbelt.utils.deprecated.get_encodings_from_content` 50 | to determine encodings from HTML elements. 51 | 52 | .. code-block:: python 53 | 54 | import requests 55 | from requests_toolbelt.utils import deprecated 56 | 57 | r = requests.get(url) 58 | text = deprecated.get_unicode_from_response(r) 59 | 60 | :param response: Response object to get unicode content from.
61 | :type response: requests.models.Response 62 | """ 63 | tried_encodings = set() 64 | 65 | # Try charset from content-type 66 | encoding = utils.get_encoding_from_headers(response.headers) 67 | 68 | if encoding: 69 | try: 70 | return str(response.content, encoding) 71 | except UnicodeError: 72 | tried_encodings.add(encoding.lower()) 73 | 74 | encodings = get_encodings_from_content(response.content) 75 | 76 | for _encoding in encodings: 77 | _encoding = _encoding.lower() 78 | if _encoding in tried_encodings: 79 | continue 80 | try: 81 | return str(response.content, _encoding) 82 | except UnicodeError: 83 | tried_encodings.add(_encoding) 84 | 85 | # Fall back: 86 | if encoding: 87 | try: 88 | return str(response.content, encoding, errors='replace') 89 | except TypeError: 90 | pass 91 | return response.text 92 | -------------------------------------------------------------------------------- /requests_toolbelt/utils/dump.py: -------------------------------------------------------------------------------- 1 | """This module provides functions for dumping information about responses.""" 2 | import collections 3 | 4 | from requests import compat 5 | 6 | 7 | __all__ = ('dump_response', 'dump_all') 8 | 9 | HTTP_VERSIONS = { 10 | 9: b'0.9', 11 | 10: b'1.0', 12 | 11: b'1.1', 13 | } 14 | 15 | _PrefixSettings = collections.namedtuple('PrefixSettings', 16 | ['request', 'response']) 17 | 18 | 19 | class PrefixSettings(_PrefixSettings): 20 | def __new__(cls, request, response): 21 | request = _coerce_to_bytes(request) 22 | response = _coerce_to_bytes(response) 23 | return super(PrefixSettings, cls).__new__(cls, request, response) 24 | 25 | 26 | def _get_proxy_information(response): 27 | if getattr(response.connection, 'proxy_manager', False): 28 | proxy_info = {} 29 | request_url = response.request.url 30 | if request_url.startswith('https://'): 31 | proxy_info['method'] = 'CONNECT' 32 | 33 | proxy_info['request_path'] = request_url 34 | return proxy_info 35 | return None 36 | 37 | 38 | def _format_header(name, value): 39 | return (_coerce_to_bytes(name) + b': ' + _coerce_to_bytes(value) + 40 | b'\r\n') 41 | 42 | 43 | def _build_request_path(url, proxy_info): 44 | uri = compat.urlparse(url) 45 | proxy_url = proxy_info.get('request_path') 46 | if proxy_url is not None: 47 | request_path = _coerce_to_bytes(proxy_url) 48 | return request_path, uri 49 | 50 | request_path = _coerce_to_bytes(uri.path) 51 | if uri.query: 52 | request_path += b'?' 
+ _coerce_to_bytes(uri.query) 53 | 54 | return request_path, uri 55 | 56 | 57 | def _dump_request_data(request, prefixes, bytearr, proxy_info=None): 58 | if proxy_info is None: 59 | proxy_info = {} 60 | 61 | prefix = prefixes.request 62 | method = _coerce_to_bytes(proxy_info.pop('method', request.method)) 63 | request_path, uri = _build_request_path(request.url, proxy_info) 64 | 65 | # HTTP/1.1 66 | bytearr.extend(prefix + method + b' ' + request_path + b' HTTP/1.1\r\n') 67 | 68 | # Host: OR host header specified by user 69 | headers = request.headers.copy() 70 | host_header = _coerce_to_bytes(headers.pop('Host', uri.netloc)) 71 | bytearr.extend(prefix + b'Host: ' + host_header + b'\r\n') 72 | 73 | for name, value in headers.items(): 74 | bytearr.extend(prefix + _format_header(name, value)) 75 | 76 | bytearr.extend(prefix + b'\r\n') 77 | if request.body: 78 | if isinstance(request.body, compat.basestring): 79 | bytearr.extend(prefix + _coerce_to_bytes(request.body)) 80 | else: 81 | # In the event that the body is a file-like object, let's not try 82 | # to read everything into memory. 83 | bytearr.extend(b'<< Request body is not a string-like type >>') 84 | bytearr.extend(b'\r\n') 85 | bytearr.extend(b'\r\n') 86 | 87 | 88 | def _dump_response_data(response, prefixes, bytearr): 89 | prefix = prefixes.response 90 | # Let's interact almost entirely with urllib3's response 91 | raw = response.raw 92 | 93 | # Let's convert the version int from httplib to bytes 94 | version_str = HTTP_VERSIONS.get(raw.version, b'?') 95 | 96 | # HTTP/ 97 | bytearr.extend(prefix + b'HTTP/' + version_str + b' ' + 98 | str(raw.status).encode('ascii') + b' ' + 99 | _coerce_to_bytes(response.reason) + b'\r\n') 100 | 101 | headers = raw.headers 102 | for name in headers.keys(): 103 | for value in headers.getlist(name): 104 | bytearr.extend(prefix + _format_header(name, value)) 105 | 106 | bytearr.extend(prefix + b'\r\n') 107 | 108 | bytearr.extend(response.content) 109 | 110 | 111 | def _coerce_to_bytes(data): 112 | if not isinstance(data, bytes) and hasattr(data, 'encode'): 113 | data = data.encode('utf-8') 114 | # Don't bail out with an exception if data is None 115 | return data if data is not None else b'' 116 | 117 | 118 | def dump_response(response, request_prefix=b'< ', response_prefix=b'> ', 119 | data_array=None): 120 | """Dump a single request-response cycle's information. 121 | 122 | This will take a response object and dump only the data that requests can 123 | see for that single request-response cycle. 124 | 125 | Example:: 126 | 127 | import requests 128 | from requests_toolbelt.utils import dump 129 | 130 | resp = requests.get('https://api.github.com/users/sigmavirus24') 131 | data = dump.dump_response(resp) 132 | print(data.decode('utf-8')) 133 | 134 | :param response: 135 | The response to format 136 | :type response: :class:`requests.Response` 137 | :param request_prefix: (*optional*) 138 | Bytes to prefix each line of the request data 139 | :type request_prefix: :class:`bytes` 140 | :param response_prefix: (*optional*) 141 | Bytes to prefix each line of the response data 142 | :type response_prefix: :class:`bytes` 143 | :param data_array: (*optional*) 144 | Bytearray to which we append the request-response cycle data 145 | :type data_array: :class:`bytearray` 146 | :returns: Formatted bytes of request and response information. 
147 | :rtype: :class:`bytearray` 148 | """ 149 | data = data_array if data_array is not None else bytearray() 150 | prefixes = PrefixSettings(request_prefix, response_prefix) 151 | 152 | if not hasattr(response, 'request'): 153 | raise ValueError('Response has no associated request') 154 | 155 | proxy_info = _get_proxy_information(response) 156 | _dump_request_data(response.request, prefixes, data, 157 | proxy_info=proxy_info) 158 | _dump_response_data(response, prefixes, data) 159 | return data 160 | 161 | 162 | def dump_all(response, request_prefix=b'< ', response_prefix=b'> '): 163 | """Dump all requests and responses including redirects. 164 | 165 | This takes the response returned by requests and will dump all 166 | request-response pairs in the redirect history in order followed by the 167 | final request-response. 168 | 169 | Example:: 170 | 171 | import requests 172 | from requests_toolbelt.utils import dump 173 | 174 | resp = requests.get('https://httpbin.org/redirect/5') 175 | data = dump.dump_all(resp) 176 | print(data.decode('utf-8')) 177 | 178 | :param response: 179 | The response to format 180 | :type response: :class:`requests.Response` 181 | :param request_prefix: (*optional*) 182 | Bytes to prefix each line of the request data 183 | :type request_prefix: :class:`bytes` 184 | :param response_prefix: (*optional*) 185 | Bytes to prefix each line of the response data 186 | :type response_prefix: :class:`bytes` 187 | :returns: Formatted bytes of request and response information. 188 | :rtype: :class:`bytearray` 189 | """ 190 | data = bytearray() 191 | 192 | history = list(response.history[:]) 193 | history.append(response) 194 | 195 | for response in history: 196 | dump_response(response, request_prefix, response_prefix, data) 197 | 198 | return data 199 | -------------------------------------------------------------------------------- /requests_toolbelt/utils/formdata.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Implementation of nested form-data encoding function(s).""" 3 | from .._compat import basestring 4 | from .._compat import urlencode as _urlencode 5 | 6 | 7 | __all__ = ('urlencode',) 8 | 9 | 10 | def urlencode(query, *args, **kwargs): 11 | """Handle nested form-data queries and serialize them appropriately. 12 | 13 | There are times when a website expects a nested form data query to be sent, 14 | but the standard library's urlencode function does not appropriately 15 | handle the nested structures. In that case, you need this function which 16 | will flatten the structure first and then properly encode it for you. 17 | 18 | When using this to send data in the body of a request, make sure you 19 | specify the appropriate Content-Type header for the request. 20 | 21 | .. code-block:: python 22 | 23 | import requests 24 | from requests_toolbelt.utils import formdata 25 | 26 | query = { 27 | 'my_dict': { 28 | 'foo': 'bar', 29 | 'biz': 'baz', 30 | }, 31 | 'a': 'b', 32 | } 33 | 34 | resp = requests.get(url, params=formdata.urlencode(query)) 35 | # or 36 | resp = requests.post( 37 | url, 38 | data=formdata.urlencode(query), 39 | headers={ 40 | 'Content-Type': 'application/x-www-form-urlencoded' 41 | }, 42 | ) 43 | 44 | Similarly, you can specify a list of nested tuples, e.g., 45 | 46 | ..
code-block:: python 47 | 48 | import requests 49 | from requests_toolbelt.utils import formdata 50 | 51 | query = [ 52 | ('my_list', [ 53 | ('foo', 'bar'), 54 | ('biz', 'baz'), 55 | ]), 56 | ('a', 'b'), 57 | ] 58 | 59 | resp = requests.get(url, params=formdata.urlencode(query)) 60 | # or 61 | resp = requests.post( 62 | url, 63 | data=formdata.urlencode(query), 64 | headers={ 65 | 'Content-Type': 'application/x-www-form-urlencoded' 66 | }, 67 | ) 68 | 69 | For additional parameter and return information, see the official 70 | `urlencode`_ documentation. 71 | 72 | .. _urlencode: 73 | https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode 74 | """ 75 | expand_classes = (dict, list, tuple) 76 | original_query_list = _to_kv_list(query) 77 | 78 | if not all(_is_two_tuple(i) for i in original_query_list): 79 | raise ValueError("Expected query to be able to be converted to a " 80 | "list comprised of length 2 tuples.") 81 | 82 | query_list = original_query_list 83 | while any(isinstance(v, expand_classes) for _, v in query_list): 84 | query_list = _expand_query_values(query_list) 85 | 86 | return _urlencode(query_list, *args, **kwargs) 87 | 88 | 89 | def _to_kv_list(dict_or_list): 90 | if hasattr(dict_or_list, 'items'): 91 | return list(dict_or_list.items()) 92 | return dict_or_list 93 | 94 | 95 | def _is_two_tuple(item): 96 | return isinstance(item, (list, tuple)) and len(item) == 2 97 | 98 | 99 | def _expand_query_values(original_query_list): 100 | query_list = [] 101 | for key, value in original_query_list: 102 | if isinstance(value, basestring): 103 | query_list.append((key, value)) 104 | else: 105 | key_fmt = key + '[%s]' 106 | value_list = _to_kv_list(value) 107 | query_list.extend((key_fmt % k, v) for k, v in value_list) 108 | return query_list 109 | -------------------------------------------------------------------------------- /requests_toolbelt/utils/user_agent.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import collections 3 | import platform 4 | import sys 5 | 6 | 7 | def user_agent(name, version, extras=None): 8 | """Return an internet-friendly user_agent string. 9 | 10 | The majority of this code has been wilfully stolen from the equivalent 11 | function in Requests. 12 | 13 | :param name: The intended name of the user-agent, e.g. "python-requests". 14 | :param version: The version of the user-agent, e.g. "0.0.1". 15 | :param extras: List of two-item tuples that are added to the user-agent 16 | string. 17 | :returns: Formatted user-agent string 18 | :rtype: str 19 | """ 20 | if extras is None: 21 | extras = [] 22 | 23 | return UserAgentBuilder( 24 | name, version 25 | ).include_extras( 26 | extras 27 | ).include_implementation( 28 | ).include_system().build() 29 | 30 | 31 | class UserAgentBuilder(object): 32 | """Class to provide a greater level of control than :func:`user_agent`. 33 | 34 | This is used by :func:`user_agent` to build its User-Agent string. 35 | 36 | .. code-block:: python 37 | 38 | user_agent_str = UserAgentBuilder( 39 | name='requests-toolbelt', 40 | version='17.4.0', 41 | ).include_implementation( 42 | ).include_system( 43 | ).include_extras([ 44 | ('requests', '2.14.2'), 45 | ('urllib3', '1.21.2'), 46 | ]).build() 47 | 48 | """ 49 | 50 | format_string = '%s/%s' 51 | 52 | def __init__(self, name, version): 53 | """Initialize our builder with the name and version of our user agent. 54 | 55 | :param str name: 56 | Name of our user-agent. 
57 | :param str version: 58 | The version string for user-agent. 59 | """ 60 | self._pieces = collections.deque([(name, version)]) 61 | 62 | def build(self): 63 | """Finalize the User-Agent string. 64 | 65 | :returns: 66 | Formatted User-Agent string. 67 | :rtype: 68 | str 69 | """ 70 | return " ".join([self.format_string % piece for piece in self._pieces]) 71 | 72 | def include_extras(self, extras): 73 | """Include extra portions of the User-Agent. 74 | 75 | :param list extras: 76 | list of tuples of extra-name and extra-version 77 | """ 78 | if any(len(extra) != 2 for extra in extras): 79 | raise ValueError('Extras should be a sequence of two item tuples.') 80 | 81 | self._pieces.extend(extras) 82 | return self 83 | 84 | def include_implementation(self): 85 | """Append the implementation string to the user-agent string. 86 | 87 | This adds the information that you're using CPython 2.7.13 to the 88 | User-Agent. 89 | """ 90 | self._pieces.append(_implementation_tuple()) 91 | return self 92 | 93 | def include_system(self): 94 | """Append the information about the Operating System.""" 95 | self._pieces.append(_platform_tuple()) 96 | return self 97 | 98 | 99 | def _implementation_tuple(): 100 | """Return the tuple of interpreter name and version. 101 | 102 | Returns a tuple that provides both the name and the version of the Python 103 | implementation currently running. For example, on CPython 2.7.5 it will 104 | return ("CPython", "2.7.5"). 105 | 106 | This function works best on CPython and PyPy: in particular, it probably 107 | doesn't work for Jython or IronPython. Future investigation should be done 108 | to work out the correct shape of the code for those platforms. 109 | """ 110 | implementation = platform.python_implementation() 111 | 112 | if implementation == 'CPython': 113 | implementation_version = platform.python_version() 114 | elif implementation == 'PyPy': 115 | implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, 116 | sys.pypy_version_info.minor, 117 | sys.pypy_version_info.micro) 118 | if sys.pypy_version_info.releaselevel != 'final': 119 | implementation_version = ''.join([ 120 | implementation_version, sys.pypy_version_info.releaselevel 121 | ]) 122 | elif implementation == 'Jython': 123 | implementation_version = platform.python_version() # Complete Guess 124 | elif implementation == 'IronPython': 125 | implementation_version = platform.python_version() # Complete Guess 126 | else: 127 | implementation_version = 'Unknown' 128 | 129 | return (implementation, implementation_version) 130 | 131 | 132 | def _implementation_string(): 133 | return "%s/%s" % _implementation_tuple() 134 | 135 | 136 | def _platform_tuple(): 137 | try: 138 | p_system = platform.system() 139 | p_release = platform.release() 140 | except IOError: 141 | p_system = 'Unknown' 142 | p_release = 'Unknown' 143 | return (p_system, p_release) 144 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal = 1 3 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import os 4 | import re 5 | import sys 6 | 7 | from setuptools import setup 8 | 9 | if sys.argv[-1].lower() in ("submit", "publish"): 10 | os.system("python setup.py bdist_wheel sdist upload") 11 | sys.exit() 12 | 13 | 14 | def
get_version(): 15 | version = '' 16 | with open('requests_toolbelt/__init__.py', 'r') as fd: 17 | reg = re.compile(r'__version__ = [\'"]([^\'"]*)[\'"]') 18 | for line in fd: 19 | m = reg.match(line) 20 | if m: 21 | version = m.group(1) 22 | break 23 | return version 24 | 25 | __version__ = get_version() 26 | 27 | if not __version__: 28 | raise RuntimeError('Cannot find version information') 29 | 30 | 31 | packages = [ 32 | 'requests_toolbelt', 33 | 'requests_toolbelt.adapters', 34 | 'requests_toolbelt.auth', 35 | 'requests_toolbelt.downloadutils', 36 | 'requests_toolbelt.multipart', 37 | 'requests_toolbelt.threaded', 38 | 'requests_toolbelt.utils', 39 | ] 40 | 41 | setup( 42 | name="requests-toolbelt", 43 | version=__version__, 44 | description="A utility belt for advanced users of python-requests", 45 | long_description="\n\n".join([open("README.rst").read(), 46 | open("HISTORY.rst").read()]), 47 | long_description_content_type="text/x-rst", 48 | license='Apache 2.0', 49 | author='Ian Cordasco, Cory Benfield', 50 | author_email="graffatcolmingov@gmail.com", 51 | url="https://toolbelt.readthedocs.io/", 52 | project_urls={ 53 | "Changelog": "https://github.com/requests/toolbelt/blob/master/HISTORY.rst", 54 | "Source": "https://github.com/requests/toolbelt", 55 | }, 56 | packages=packages, 57 | package_data={'': ['LICENSE', 'AUTHORS.rst']}, 58 | include_package_data=True, 59 | python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*', 60 | install_requires=['requests>=2.0.1,<3.0.0'], 61 | classifiers=[ 62 | 'Development Status :: 5 - Production/Stable', 63 | 'License :: OSI Approved :: Apache Software License', 64 | 'Intended Audience :: Developers', 65 | 'Programming Language :: Python', 66 | 'Programming Language :: Python :: 2', 67 | 'Programming Language :: Python :: 2.7', 68 | 'Programming Language :: Python :: 3', 69 | 'Programming Language :: Python :: 3.4', 70 | 'Programming Language :: Python :: 3.5', 71 | 'Programming Language :: Python :: 3.6', 72 | 'Programming Language :: Python :: 3.7', 73 | 'Programming Language :: Python :: 3.8', 74 | 'Programming Language :: Python :: 3.9', 75 | 'Programming Language :: Python :: 3.10', 76 | 'Programming Language :: Python :: 3.11', 77 | 'Programming Language :: Python :: Implementation :: CPython', 78 | 'Programming Language :: Python :: Implementation :: PyPy', 79 | ], 80 | ) 81 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import betamax 3 | 4 | 5 | def get_betamax(session): 6 | return betamax.Betamax( 7 | session, 8 | cassette_library_dir='tests/cassettes') 9 | -------------------------------------------------------------------------------- /tests/cassettes/file_for_download.json: -------------------------------------------------------------------------------- 1 | {"http_interactions": [{"request": {"uri": "https://stxnext.com/static/img/logo.830ebe551641.svg", "body": {"encoding": "utf-8", "string": ""}, "method": "GET", "headers": {"User-Agent": ["python-requests/2.2.1 CPython/3.5.2 Darwin/17.3.0"], "Accept-Encoding": ["gzip, deflate, compress"], "Accept": ["*/*"]}}, "recorded_at": "2018-01-04T23:00:12", "response": {"url": "https://stxnext.com/static/img/logo.830ebe551641.svg", "status": {"message": "OK", "code": 200}, "body": {"encoding": null, "string": ""}, "headers": {"date": ["Thu, 04 Jan 2018 23:00:15 GMT"], "strict-transport-security": ["max-age=0; 
includeSubdomains; preload"], "last-modified": ["Wed, 22 Nov 2017 09:22:00 GMT"], "content-type": ["image/svg+xml"], "content-length": ["5177"]}}}], "recorded_with": "betamax/0.8.0"} -------------------------------------------------------------------------------- /tests/cassettes/http2bin_cookies.json: -------------------------------------------------------------------------------- 1 | {"recorded_with": "betamax/0.5.1", "http_interactions": [{"response": {"status": {"code": 302, "message": "FOUND"}, "body": {"string": "\nRedirecting...\n

Redirecting...

\n

You should be redirected automatically to target URL: /cookies. If not click the link.", "encoding": "utf-8"}, "url": "https://httpbin.org/cookies/set?cookie0=value0", "headers": {"Location": ["/cookies"], "Content-Length": ["223"], "Date": ["Fri, 13 Nov 2015 00:23:20 GMT"], "Access-Control-Allow-Credentials": ["true"], "Access-Control-Allow-Origin": ["*"], "Connection": ["keep-alive"], "Server": ["nginx"], "Set-Cookie": ["cookie0=value0; Path=/"], "Content-Type": ["text/html; charset=utf-8"]}}, "recorded_at": "2015-11-13T00:23:19", "request": {"uri": "https://httpbin.org/cookies/set?cookie0=value0", "method": "GET", "body": {"string": "", "encoding": "utf-8"}, "headers": {"Connection": ["keep-alive"], "User-Agent": ["python-requests/2.8.1"], "Accept-Encoding": ["gzip, deflate"], "Accept": ["*/*"]}}}, {"response": {"status": {"code": 200, "message": "OK"}, "body": {"string": "{\n \"cookies\": {\n \"cookie0\": \"value0\"\n }\n}\n", "encoding": null}, "url": "https://httpbin.org/cookies", "headers": {"Access-Control-Allow-Credentials": ["true"], "Content-Length": ["47"], "Date": ["Fri, 13 Nov 2015 00:23:20 GMT"], "Content-Type": ["application/json"], "Connection": ["keep-alive"], "Server": ["nginx"], "Access-Control-Allow-Origin": ["*"]}}, "recorded_at": "2015-11-13T00:23:19", "request": {"uri": "https://httpbin.org/cookies", "method": "GET", "body": {"string": "", "encoding": "utf-8"}, "headers": {"Connection": ["keep-alive"], "User-Agent": ["python-requests/2.8.1"], "Accept-Encoding": ["gzip, deflate"], "Accept": ["*/*"], "Cookie": ["cookie0=value0"]}}}]} -------------------------------------------------------------------------------- /tests/cassettes/http2bin_fingerprint.json: -------------------------------------------------------------------------------- 1 | {"recorded_with": "betamax/0.4.1", "http_interactions": [{"response": {"status": {"message": "OK", "code": 200}, "body": {"string": "{\n \"args\": {}, \n \"headers\": {\n \"Accept\": \"*/*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Connection\": \"keep-alive\", \n \"Host\": \"http2bin.org\", \n \"User-Agent\": \"python-requests/2.5.3 CPython/2.7.9 Darwin/14.1.0\"\n }, \n \"origin\": \"77.99.146.203\", \n \"url\": \"https://http2bin.org/get\"\n}\n", "encoding": null}, "headers": {"access-control-allow-origin": ["*"], "date": ["Tue, 03 Mar 2015 21:29:55 GMT"], "server": ["h2o/1.0.2-alpha1"], "content-length": ["301"], "access-control-allow-credentials": ["true"], "connection": ["keep-alive"], "content-type": ["application/json"]}, "url": "https://http2bin.org/get"}, "recorded_at": "2015-03-03T21:29:55", "request": {"method": "GET", "uri": "https://http2bin.org/get", "body": {"string": "", "encoding": "utf-8"}, "headers": {"Accept": ["*/*"], "Accept-Encoding": ["gzip, deflate"], "Connection": ["keep-alive"], "User-Agent": ["python-requests/2.5.3 CPython/2.7.9 Darwin/14.1.0"]}}}]} -------------------------------------------------------------------------------- /tests/cassettes/httpbin_bearer_auth.json: -------------------------------------------------------------------------------- 1 | { 2 | "http_interactions": [ 3 | { 4 | "request": { 5 | "body": { 6 | "string": "", 7 | "encoding": "utf-8" 8 | }, 9 | "headers": { 10 | "Accept": [ 11 | "*/*" 12 | ], 13 | "Accept-Encoding": [ 14 | "gzip, deflate, compress" 15 | ], 16 | "Authorization": [ 17 | "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c=" 18 | ], 19 | 
"User-Agent": [ 20 | "python-requests/2.2.1 CPython/2.7.6 Linux/3.14.1-1-ARCH" 21 | ] 22 | }, 23 | "method": "GET", 24 | "uri": "http://httpbin.org/bearer-auth/" 25 | }, 26 | "response": { 27 | "body": { 28 | "string": "{\n \"user\": \"user\",\n \"authenticated\": true\n}", 29 | "encoding": null 30 | }, 31 | "headers": { 32 | "content-length": [ 33 | "45" 34 | ], 35 | "server": [ 36 | "gunicorn/0.17.4" 37 | ], 38 | "connection": [ 39 | "keep-alive" 40 | ], 41 | "date": [ 42 | "Sat, 03 May 2014 17:23:06 GMT" 43 | ], 44 | "access-control-allow-origin": [ 45 | "*" 46 | ], 47 | "content-type": [ 48 | "application/json" 49 | ] 50 | }, 51 | "status": { 52 | "message": "OK", 53 | "code": 200 54 | }, 55 | "url": "http://httpbin.org/bearer-auth/" 56 | }, 57 | "recorded_at": "2014-05-03T17:23:06" 58 | } 59 | ], 60 | "recorded_with": "betamax/{version}" 61 | } -------------------------------------------------------------------------------- /tests/cassettes/httpbin_guess_auth_basic.json: -------------------------------------------------------------------------------- 1 | {"http_interactions": [{"request": {"body": {"string": "", "encoding": "utf-8"}, "headers": {"Accept-Encoding": ["gzip, deflate, compress"], "Accept": ["*/*"], "User-Agent": ["python-requests/2.2.1 CPython/2.7.6 Linux/3.14.1-1-ARCH"]}, "method": "GET", "uri": "http://httpbin.org/basic-auth/user/passwd"}, "response": {"body": {"string": "", "encoding": null}, "headers": {"content-length": ["0"], "server": ["gunicorn/0.17.4"], "connection": ["keep-alive"], "date": ["Sat, 03 May 2014 17:23:06 GMT"], "access-control-allow-origin": ["*"], "www-authenticate": ["Basic realm=\"Fake Realm\""]}, "status": {"message": "UNAUTHORIZED", "code": 401}, "url": "http://httpbin.org/basic-auth/user/passwd"}, "recorded_at": "2014-05-03T17:23:06"}, {"request": {"body": {"string": "", "encoding": "utf-8"}, "headers": {"Accept": ["*/*"], "Accept-Encoding": ["gzip, deflate, compress"], "Authorization": ["Basic dXNlcjpwYXNzd2Q="], "User-Agent": ["python-requests/2.2.1 CPython/2.7.6 Linux/3.14.1-1-ARCH"]}, "method": "GET", "uri": "http://httpbin.org/basic-auth/user/passwd"}, "response": {"body": {"string": "{\n \"user\": \"user\",\n \"authenticated\": true\n}", "encoding": null}, "headers": {"content-length": ["45"], "server": ["gunicorn/0.17.4"], "connection": ["keep-alive"], "date": ["Sat, 03 May 2014 17:23:06 GMT"], "access-control-allow-origin": ["*"], "content-type": ["application/json"]}, "status": {"message": "OK", "code": 200}, "url": "http://httpbin.org/basic-auth/user/passwd"}, "recorded_at": "2014-05-03T17:23:06"}], "recorded_with": "betamax/{version}"} -------------------------------------------------------------------------------- /tests/cassettes/httpbin_guess_auth_digest.json: -------------------------------------------------------------------------------- 1 | {"http_interactions": [{"request": {"body": {"string": "", "encoding": "utf-8"}, "headers": {"Accept-Encoding": ["gzip, deflate, compress"], "Accept": ["*/*"], "User-Agent": ["python-requests/2.2.1 CPython/2.7.6 Linux/3.14.1-1-ARCH"]}, "method": "GET", "uri": "http://httpbin.org/digest-auth/auth/user/passwd"}, "response": {"body": {"string": "", "encoding": "utf-8"}, "headers": {"content-length": ["0"], "set-cookie": ["fake=fake_value"], "server": ["gunicorn/0.17.4"], "connection": ["keep-alive"], "date": ["Sat, 03 May 2014 17:23:07 GMT"], "access-control-allow-origin": ["*"], "content-type": ["text/html; charset=utf-8"], "www-authenticate": ["Digest qop=auth, 
nonce=\"713b4eb6d0ad0ac25d75b50c4d044d5e\", realm=\"me@kennethreitz.com\", opaque=\"d0033bc1960ca78a2fc4497c1e8a8cbd\""]}, "status": {"message": "UNAUTHORIZED", "code": 401}, "url": "http://httpbin.org/digest-auth/auth/user/passwd"}, "recorded_at": "2014-05-03T17:23:07"}, {"request": {"body": {"string": "", "encoding": "utf-8"}, "headers": {"Accept": ["*/*"], "Cookie": ["fake=fake_value"], "Accept-Encoding": ["gzip, deflate, compress"], "Authorization": ["Digest username=\"user\", realm=\"me@kennethreitz.com\", nonce=\"713b4eb6d0ad0ac25d75b50c4d044d5e\", uri=\"/digest-auth/auth/user/passwd\", response=\"30276b25ef0031e65e3bccc719031388\", opaque=\"d0033bc1960ca78a2fc4497c1e8a8cbd\", qop=\"auth\", nc=00000001, cnonce=\"e94e00be64d66bcb\""], "User-Agent": ["python-requests/2.2.1 CPython/2.7.6 Linux/3.14.1-1-ARCH"]}, "method": "GET", "uri": "http://httpbin.org/digest-auth/auth/user/passwd"}, "response": {"body": {"string": "{\n \"user\": \"user\",\n \"authenticated\": true\n}", "encoding": null}, "headers": {"content-length": ["45"], "server": ["gunicorn/0.17.4"], "connection": ["keep-alive"], "date": ["Sat, 03 May 2014 17:23:07 GMT"], "access-control-allow-origin": ["*"], "content-type": ["application/json"]}, "status": {"message": "OK", "code": 200}, "url": "http://httpbin.org/digest-auth/auth/user/passwd"}, "recorded_at": "2014-05-03T17:23:07"}], "recorded_with": "betamax/{version}"} -------------------------------------------------------------------------------- /tests/cassettes/httpbin_guess_auth_none.json: -------------------------------------------------------------------------------- 1 | {"http_interactions": [{"request": {"body": {"string": "", "encoding": "utf-8"}, "headers": {"Accept-Encoding": ["gzip, deflate, compress"], "Accept": ["*/*"], "User-Agent": ["python-requests/2.2.1 CPython/2.7.6 Linux/3.14.1-1-ARCH"]}, "method": "GET", "uri": "http://httpbin.org/get?a=1"}, "response": {"body": {"string": "{\n \"args\": {\n \"a\": \"1\"\n },\n \"url\": \"http://httpbin.org/get?a=1\",\n \"headers\": {\n \"Connection\": \"close\",\n \"Host\": \"httpbin.org\",\n \"Accept-Encoding\": \"gzip, deflate, compress\",\n \"X-Request-Id\": \"f9f71f12-5705-4a0f-85d4-3d63f9140b1f\",\n \"User-Agent\": \"python-requests/2.2.1 CPython/2.7.6 Linux/3.14.1-1-ARCH\",\n \"Accept\": \"*/*\"\n },\n \"origin\": \"62.47.252.115\"\n}", "encoding": null}, "headers": {"content-length": ["381"], "server": ["gunicorn/0.17.4"], "connection": ["keep-alive"], "date": ["Sat, 03 May 2014 17:23:07 GMT"], "access-control-allow-origin": ["*"], "content-type": ["application/json"]}, "status": {"message": "OK", "code": 200}, "url": "http://httpbin.org/get?a=1"}, "recorded_at": "2014-05-03T17:23:07"}], "recorded_with": "betamax/{version}"} -------------------------------------------------------------------------------- /tests/cassettes/klevas_vu_lt_ssl3.json: -------------------------------------------------------------------------------- 1 | {"http_interactions": [{"request": {"body": "", "headers": {"Accept-Encoding": "gzip, deflate, compress", "Accept": "*/*", "User-Agent": "python-requests/2.1.0 CPython/2.7.3 Linux/3.2.29"}, "method": "GET", "uri": "https://klevas.vu.lt/"}, "response": {"body": {"string": "\n\nKlevas\n\n\n\n\n\n\n", "encoding": "ISO-8859-1"}, "headers": {"content-length": "204", "accept-ranges": "bytes", "server": "Oracle-Application-Server-10g/10.1.3.1.0 Oracle-HTTP-Server", "last-modified": "Wed, 13 Apr 2011 05:00:23 GMT", "etag": "\"7f9b-cc-4da52de7\"", "date": "Sun, 05 Jan 2014 01:35:40 GMT", 
"content-type": "text/html"}, "url": "https://klevas.vu.lt/", "status_code": 200}, "recorded_at": "2014-01-05T01:34:40"}], "recorded_with": "betamax"} -------------------------------------------------------------------------------- /tests/cassettes/redirect_request_for_dump_all.json: -------------------------------------------------------------------------------- 1 | {"recorded_with": "betamax/0.5.1", "http_interactions": [{"recorded_at": "2015-11-14T22:53:20", "request": {"uri": "https://httpbin.org/redirect/5", "method": "GET", "body": {"string": "", "encoding": "utf-8"}, "headers": {"Connection": "keep-alive", "Accept": "*/*", "User-Agent": "python-requests/2.8.1", "Accept-Encoding": "gzip, deflate"}}, "response": {"url": "https://httpbin.org/redirect/5", "status": {"code": 302, "message": "FOUND"}, "body": {"string": "\nRedirecting...\n

Redirecting...

\n

You should be redirected automatically to target URL: /relative-redirect/4. If not click the link.", "encoding": "utf-8"}, "headers": {"Location": "/relative-redirect/4", "Access-Control-Allow-Credentials": "true", "Server": "nginx", "Date": "Sat, 14 Nov 2015 22:53:18 GMT", "Content-Length": "247", "Connection": "keep-alive", "Access-Control-Allow-Origin": "*", "Content-Type": "text/html; charset=utf-8"}}}, {"recorded_at": "2015-11-14T22:53:20", "request": {"uri": "https://httpbin.org/relative-redirect/4", "method": "GET", "body": {"string": "", "encoding": "utf-8"}, "headers": {"Connection": "keep-alive", "Accept": "*/*", "User-Agent": "python-requests/2.8.1", "Accept-Encoding": "gzip, deflate"}}, "response": {"url": "https://httpbin.org/relative-redirect/4", "status": {"code": 302, "message": "FOUND"}, "body": {"string": "", "encoding": "utf-8"}, "headers": {"Location": "/relative-redirect/3", "Access-Control-Allow-Credentials": "true", "Server": "nginx", "Date": "Sat, 14 Nov 2015 22:53:18 GMT", "Content-Length": "0", "Connection": "keep-alive", "Access-Control-Allow-Origin": "*", "Content-Type": "text/html; charset=utf-8"}}}, {"recorded_at": "2015-11-14T22:53:20", "request": {"uri": "https://httpbin.org/relative-redirect/3", "method": "GET", "body": {"string": "", "encoding": "utf-8"}, "headers": {"Connection": "keep-alive", "Accept": "*/*", "User-Agent": "python-requests/2.8.1", "Accept-Encoding": "gzip, deflate"}}, "response": {"url": "https://httpbin.org/relative-redirect/3", "status": {"code": 302, "message": "FOUND"}, "body": {"string": "", "encoding": "utf-8"}, "headers": {"Location": "/relative-redirect/2", "Access-Control-Allow-Credentials": "true", "Server": "nginx", "Date": "Sat, 14 Nov 2015 22:53:18 GMT", "Content-Length": "0", "Connection": "keep-alive", "Access-Control-Allow-Origin": "*", "Content-Type": "text/html; charset=utf-8"}}}, {"recorded_at": "2015-11-14T22:53:20", "request": {"uri": "https://httpbin.org/relative-redirect/2", "method": "GET", "body": {"string": "", "encoding": "utf-8"}, "headers": {"Connection": "keep-alive", "Accept": "*/*", "User-Agent": "python-requests/2.8.1", "Accept-Encoding": "gzip, deflate"}}, "response": {"url": "https://httpbin.org/relative-redirect/2", "status": {"code": 302, "message": "FOUND"}, "body": {"string": "", "encoding": "utf-8"}, "headers": {"Location": "/relative-redirect/1", "Access-Control-Allow-Credentials": "true", "Server": "nginx", "Date": "Sat, 14 Nov 2015 22:53:18 GMT", "Content-Length": "0", "Connection": "keep-alive", "Access-Control-Allow-Origin": "*", "Content-Type": "text/html; charset=utf-8"}}}, {"recorded_at": "2015-11-14T22:53:20", "request": {"uri": "https://httpbin.org/relative-redirect/1", "method": "GET", "body": {"string": "", "encoding": "utf-8"}, "headers": {"Connection": "keep-alive", "Accept": "*/*", "User-Agent": "python-requests/2.8.1", "Accept-Encoding": "gzip, deflate"}}, "response": {"url": "https://httpbin.org/relative-redirect/1", "status": {"code": 302, "message": "FOUND"}, "body": {"string": "", "encoding": "utf-8"}, "headers": {"Location": "/get", "Access-Control-Allow-Credentials": "true", "Server": "nginx", "Date": "Sat, 14 Nov 2015 22:53:18 GMT", "Content-Length": "0", "Connection": "keep-alive", "Access-Control-Allow-Origin": "*", "Content-Type": "text/html; charset=utf-8"}}}, {"recorded_at": "2015-11-14T22:53:20", "request": {"uri": "https://httpbin.org/get", "method": "GET", "body": {"string": "", "encoding": "utf-8"}, "headers": {"Connection": "keep-alive", "Accept": "*/*", 
"User-Agent": "python-requests/2.8.1", "Accept-Encoding": "gzip, deflate"}}, "response": {"url": "https://httpbin.org/get", "status": {"code": 200, "message": "OK"}, "body": {"string": "{\n \"args\": {}, \n \"headers\": {\n \"Accept\": \"*/*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"python-requests/2.8.1\"\n }, \n \"origin\": \"\", \n \"url\": \"https://httpbin.org/get\"\n}\n", "encoding": null}, "headers": {"Access-Control-Allow-Credentials": "true", "Server": "nginx", "Date": "Sat, 14 Nov 2015 22:53:18 GMT", "Content-Length": "234", "Connection": "keep-alive", "Access-Control-Allow-Origin": "*", "Content-Type": "application/json"}}}]} -------------------------------------------------------------------------------- /tests/cassettes/simple_get_request.json: -------------------------------------------------------------------------------- 1 | {"recorded_with": "betamax/0.5.1", "http_interactions": [{"request": {"body": {"encoding": "utf-8", "string": ""}, "uri": "https://httpbin.org/get", "headers": {"Connection": ["keep-alive"], "User-Agent": ["python-requests/2.8.1"], "Accept": ["*/*"], "Accept-Encoding": ["gzip, deflate"]}, "method": "GET"}, "recorded_at": "2015-11-14T22:33:32", "response": {"status": {"code": 200, "message": "OK"}, "url": "https://httpbin.org/get", "body": {"encoding": null, "string": "{\n \"args\": {}, \n \"headers\": {\n \"Accept\": \"*/*\", \n \"Accept-Encoding\": \"gzip, deflate\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\": \"python-requests/2.8.1\"\n }, \n \"origin\": \"\", \n \"url\": \"https://httpbin.org/get\"\n}\n"}, "headers": {"Content-Type": ["application/json"], "Date": ["Sat, 14 Nov 2015 22:33:30 GMT"], "Connection": ["keep-alive"], "Server": ["nginx"], "Access-Control-Allow-Credentials": ["true"], "Content-Length": ["234"], "Access-Control-Allow-Origin": ["*"]}}}]} 2 | -------------------------------------------------------------------------------- /tests/cassettes/stream_response_without_content_length_to_file.json: -------------------------------------------------------------------------------- 1 | {"recorded_with": "betamax/0.4.1", "http_interactions": [{"request": {"uri": "https://api.github.com/repos/sigmavirus24/github3.py/releases/assets/37944", "method": "GET", "headers": {"Accept": ["application/octet-stream"], "Accept-Encoding": ["gzip, deflate"], "Connection": ["keep-alive"], "User-Agent": ["python-requests/2.5.3 CPython/2.7.9 Darwin/14.1.0"]}, "body": {"base64_string": "", "encoding": "utf-8"}}, "response": {"status": {"code": 302, "message": "Found"}, "url": "https://api.github.com/repos/sigmavirus24/github3.py/releases/assets/37944", "headers": {"access-control-allow-credentials": ["true"], "x-xss-protection": ["1; mode=block"], "vary": ["Accept-Encoding"], "location": ["https://s3.amazonaws.com/github-cloud/releases/3710711/365425c2-4e46-11e3-86fb-bb0d50a886e7.whl?response-content-disposition=attachment%3B%20filename%3Dgithub3.py-0.7.1-py2.py3-none-any.whl&response-content-type=application/octet-stream&AWSAccessKeyId=AKIAISTNZFOVBIJMK3TQ&Expires=1426166613&Signature=78anFgNgXLm3TIbo%2FbTEEk7m%2F34%3D"], "x-content-type-options": ["nosniff"], "content-security-policy": ["default-src 'none'"], "x-ratelimit-limit": ["60"], "status": ["302 Found"], "x-frame-options": ["deny"], "x-served-by": ["8dd185e423974a7e13abbbe6e060031e"], "server": ["GitHub.com"], "access-control-allow-origin": ["*"], "strict-transport-security": ["max-age=31536000; includeSubdomains; preload"], 
"x-github-request-id": ["48A0C951:54E7:48B5311:55019319"], "date": ["Thu, 12 Mar 2015 13:22:33 GMT"], "access-control-expose-headers": ["ETag, Link, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval"], "x-ratelimit-remaining": ["58"], "content-type": ["text/html;charset=utf-8"], "x-ratelimit-reset": ["1426170017"]}, "body": {"base64_string": "", "encoding": "utf-8"}}, "recorded_at": "2015-03-12T13:22:33"}, {"request": {"uri": "https://s3.amazonaws.com/github-cloud/releases/3710711/365425c2-4e46-11e3-86fb-bb0d50a886e7.whl?response-content-disposition=attachment%3B%20filename%3Dgithub3.py-0.7.1-py2.py3-none-any.whl&response-content-type=application/octet-stream&AWSAccessKeyId=AKIAISTNZFOVBIJMK3TQ&Expires=1426166613&Signature=78anFgNgXLm3TIbo%2FbTEEk7m%2F34%3D", "method": "GET", "headers": {"Accept": ["application/octet-stream"], "Accept-Encoding": ["gzip, deflate"], "Connection": ["keep-alive"], "User-Agent": ["python-requests/2.5.3 CPython/2.7.9 Darwin/14.1.0"]}, "body": {"base64_string": "", "encoding": "utf-8"}}, "response": {"status": {"code": 200, "message": "OK"}, "url": "https://s3.amazonaws.com/github-cloud/releases/3710711/365425c2-4e46-11e3-86fb-bb0d50a886e7.whl?response-content-disposition=attachment%3B%20filename%3Dgithub3.py-0.7.1-py2.py3-none-any.whl&response-content-type=application/octet-stream&AWSAccessKeyId=AKIAISTNZFOVBIJMK3TQ&Expires=1426166613&Signature=78anFgNgXLm3TIbo%2FbTEEk7m%2F34%3D", "headers": {"accept-ranges": ["bytes"], "content-disposition": ["attachment; filename=github3.py-0.7.1-py2.py3-none-any.whl"], "x-amz-id-2": ["9+TuHhbd7y2BUJaEV+mFpaDgjl1g9uSAPiZxwc6b2cYydhlhZSyKSuB7PQyiPBPD"], "x-amz-meta-surrogate-key": ["repository-3710711 user-240830"], "x-amz-request-id": ["4B4BFE6BF5135B8D"], "last-modified": ["Fri, 15 Nov 2013 22:35:23 GMT"], "x-amz-meta-surrogate-control": ["max-age=31557600"], "etag": ["\"6550854f02f7bf10b944070b84f38313\""], "date": ["Thu, 12 Mar 2015 13:22:35 GMT"], "cache-control": ["max-age=31557600"], "content-type": ["application/octet-stream"], "server": ["AmazonS3"]}, "body": {"base64_string": "", "encoding": null}}, "recorded_at": "2015-03-12T13:22:34"}]} 2 | -------------------------------------------------------------------------------- /tests/cassettes/test_x509_adapter_der.json: -------------------------------------------------------------------------------- 1 | {"http_interactions": [{"request": {"body": {"encoding": "utf-8", "string": ""}, "headers": {"User-Agent": ["python-requests/2.21.0"], "Accept-Encoding": ["gzip, deflate"], "Accept": ["*/*"], "Connection": ["keep-alive"]}, "method": "GET", "uri": "https://pkiprojecttest01.dev.labs.internal/"}, "response": {"body": {"encoding": "ISO-8859-1", "base64_string": "H4sIAAAAAAAAA7NRdPF3DokMcFXIKMnNseOygVJJ+SmVdlxArqFdSGpxiY0+kAHkFoB5CsGlycmpxcU2+gUgQX2IYqAasBEAYvDs5FMAAAA=", "string": ""}, "headers": {"Server": ["nginx/1.10.3 (Ubuntu)"], "Date": ["Thu, 20 Dec 2018 20:02:30 GMT"], "Content-Type": ["text/html"], "Last-Modified": ["Mon, 19 Nov 2018 20:48:30 GMT"], "Transfer-Encoding": ["chunked"], "Connection": ["keep-alive"], "ETag": ["W/\"5bf3219e-53\""], "Content-Encoding": ["gzip"]}, "status": {"code": 200, "message": "OK"}, "url": "https://pkiprojecttest01.dev.labs.internal/"}, "recorded_at": "2018-12-20T20:02:30"}], "recorded_with": "betamax/0.8.1"} -------------------------------------------------------------------------------- /tests/cassettes/test_x509_adapter_pem.json: 
-------------------------------------------------------------------------------- 1 | {"http_interactions": [{"request": {"body": {"encoding": "utf-8", "string": ""}, "headers": {"User-Agent": ["python-requests/2.21.0"], "Accept-Encoding": ["gzip, deflate"], "Accept": ["*/*"], "Connection": ["keep-alive"]}, "method": "GET", "uri": "https://pkiprojecttest01.dev.labs.internal/"}, "response": {"body": {"encoding": "ISO-8859-1", "base64_string": "H4sIAAAAAAAAA7NRdPF3DokMcFXIKMnNseOygVJJ+SmVdlxArqFdSGpxiY0+kAHkFoB5CsGlycmpxcU2+gUgQX2IYqAasBEAYvDs5FMAAAA=", "string": ""}, "headers": {"Server": ["nginx/1.10.3 (Ubuntu)"], "Date": ["Thu, 20 Dec 2018 20:02:30 GMT"], "Content-Type": ["text/html"], "Last-Modified": ["Mon, 19 Nov 2018 20:48:30 GMT"], "Transfer-Encoding": ["chunked"], "Connection": ["keep-alive"], "ETag": ["W/\"5bf3219e-53\""], "Content-Encoding": ["gzip"]}, "status": {"code": 200, "message": "OK"}, "url": "https://pkiprojecttest01.dev.labs.internal/"}, "recorded_at": "2018-12-20T20:02:30"}], "recorded_with": "betamax/0.8.1"} -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | import sys 4 | 5 | import betamax 6 | 7 | sys.path.insert(0, '.') 8 | 9 | placeholders = { 10 | '': os.environ.get('IPADDR', '127.0.0.1'), 11 | } 12 | 13 | with betamax.Betamax.configure() as config: 14 | for placeholder, value in placeholders.items(): 15 | config.define_cassette_placeholder(placeholder, value) 16 | -------------------------------------------------------------------------------- /tests/test_auth.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import requests 3 | import unittest 4 | try: 5 | from unittest import mock 6 | except ImportError: 7 | import mock 8 | 9 | from requests_toolbelt.auth.guess import GuessAuth, GuessProxyAuth 10 | from . 
import get_betamax 11 | 12 | 13 | class TestGuessAuth(unittest.TestCase): 14 | def setUp(self): 15 | self.session = requests.Session() 16 | self.recorder = get_betamax(self.session) 17 | 18 | def cassette(self, name): 19 | return self.recorder.use_cassette( 20 | 'httpbin_guess_auth_' + name, 21 | match_requests_on=['method', 'uri', 'digest-auth'] 22 | ) 23 | 24 | def test_basic(self): 25 | with self.cassette('basic'): 26 | r = self.session.request( 27 | 'GET', 'http://httpbin.org/basic-auth/user/passwd', 28 | auth=GuessAuth('user', 'passwd')) 29 | 30 | assert r.json() == {'authenticated': True, 'user': 'user'} 31 | 32 | def test_digest(self): 33 | with self.cassette('digest'): 34 | r = self.session.request( 35 | 'GET', 'http://httpbin.org/digest-auth/auth/user/passwd', 36 | auth=GuessAuth('user', 'passwd')) 37 | 38 | assert r.json() == {'authenticated': True, 'user': 'user'} 39 | 40 | def test_no_auth(self): 41 | with self.cassette('none'): 42 | url = 'http://httpbin.org/get?a=1' 43 | r = self.session.request('GET', url, 44 | auth=GuessAuth('user', 'passwd')) 45 | 46 | j = r.json() 47 | assert j['args'] == {'a': '1'} 48 | assert j['url'] == url 49 | assert 'user' not in r.text 50 | assert 'passwd' not in r.text 51 | 52 | 53 | class TestGuessProxyAuth(unittest.TestCase): 54 | 55 | @mock.patch('requests_toolbelt.auth.http_proxy_digest.HTTPProxyDigestAuth.handle_407') 56 | def test_handle_407_header_digest(self, mock_handle_407): 57 | r = requests.Response() 58 | r.headers['Proxy-Authenticate'] = 'Digest nonce="d2b19757d3d656a283c99762cbd1097b", opaque="1c311ad1cc6e6183b83bc75f95a57893", realm="me@kennethreitz.com", qop=auth' 59 | 60 | guess_auth = GuessProxyAuth(None, None, "user", "passwd") 61 | guess_auth.handle_407(r) 62 | 63 | mock_handle_407.assert_called_with(r) 64 | 65 | @mock.patch('requests.auth.HTTPProxyAuth.__call__') 66 | @mock.patch('requests.cookies.extract_cookies_to_jar') 67 | def test_handle_407_header_basic(self, extract_cookies_to_jar, proxy_auth_call): 68 | req = mock.Mock() 69 | r = mock.Mock() 70 | r.headers = dict() 71 | r.request.copy.return_value = req 72 | 73 | proxy_auth_call.return_value = requests.Response() 74 | 75 | kwargs = {} 76 | r.headers['Proxy-Authenticate'] = 'Basic realm="Fake Realm"' 77 | guess_auth = GuessProxyAuth(None, None, "user", "passwd") 78 | guess_auth.handle_407(r, *kwargs) 79 | 80 | proxy_auth_call.assert_called_with(req) 81 | -------------------------------------------------------------------------------- /tests/test_auth_bearer.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import requests 3 | import unittest 4 | try: 5 | from unittest import mock 6 | except ImportError: 7 | import mock 8 | 9 | from requests_toolbelt.auth.http_bearer import HTTPBearerAuth 10 | from . 
import get_betamax 11 | 12 | 13 | class TestBearerAuth(unittest.TestCase): 14 | def setUp(self): 15 | self.session = requests.Session() 16 | self.recorder = get_betamax(self.session) 17 | 18 | def cassette(self): 19 | return self.recorder.use_cassette( 20 | 'httpbin_bearer_auth', 21 | match_requests_on=['method', 'uri'] 22 | ) 23 | 24 | def test_bearer(self): 25 | with self.cassette(): 26 | r = self.session.request( 27 | 'GET', 'http://httpbin.org/bearer-auth/', 28 | auth=HTTPBearerAuth('eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c')) 29 | 30 | assert r.json() == {'authenticated': True, 'user': 'user'} 31 | -------------------------------------------------------------------------------- /tests/test_auth_handler.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from requests.auth import HTTPBasicAuth 3 | from requests_toolbelt.auth.handler import AuthHandler 4 | from requests_toolbelt.auth.handler import NullAuthStrategy 5 | 6 | 7 | def test_turns_tuples_into_basic_auth(): 8 | a = AuthHandler({'http://example.com': ('foo', 'bar')}) 9 | strategy = a.get_strategy_for('http://example.com') 10 | assert not isinstance(strategy, NullAuthStrategy) 11 | assert isinstance(strategy, HTTPBasicAuth) 12 | 13 | 14 | def test_uses_null_strategy_for_non_matching_domains(): 15 | a = AuthHandler({'http://api.example.com': ('foo', 'bar')}) 16 | strategy = a.get_strategy_for('http://example.com') 17 | assert isinstance(strategy, NullAuthStrategy) 18 | 19 | 20 | def test_normalizes_domain_keys(): 21 | a = AuthHandler({'https://API.github.COM': ('foo', 'bar')}) 22 | assert 'https://api.github.com' in a.strategies 23 | assert 'https://API.github.COM' not in a.strategies 24 | 25 | 26 | def test_can_add_new_strategies(): 27 | a = AuthHandler({'https://example.com': ('foo', 'bar')}) 28 | a.add_strategy('https://api.github.com', ('fiz', 'baz')) 29 | assert isinstance( 30 | a.get_strategy_for('https://api.github.com'), 31 | HTTPBasicAuth 32 | ) 33 | 34 | 35 | def test_prepares_auth_correctly(): 36 | # Set up our Session and AuthHandler 37 | auth = AuthHandler({ 38 | 'https://api.example.com': ('bar', 'baz'), 39 | 'https://httpbin.org': ('biz', 'fiz'), 40 | }) 41 | s = requests.Session() 42 | s.auth = auth 43 | # Set up a valid GET request to https://api.example.com/users 44 | r1 = requests.Request('GET', 'https://api.example.com/users') 45 | p1 = s.prepare_request(r1) 46 | assert p1.headers['Authorization'] == 'Basic YmFyOmJheg==' 47 | 48 | # Set up a valid POST request to https://httpbin.org/post 49 | r2 = requests.Request('POST', 'https://httpbin.org/post', data='foo') 50 | p2 = s.prepare_request(r2) 51 | assert p2.headers['Authorization'] == 'Basic Yml6OmZpeg==' 52 | 53 | # Set up an *invalid* OPTIONS request to http://api.example.com 54 | # NOTE(sigmavirus24): This is not because of the verb but instead because 55 | # it is the wrong URI scheme. 
56 | r3 = requests.Request('OPTIONS', 'http://api.example.com/projects') 57 | p3 = s.prepare_request(r3) 58 | assert p3.headers.get('Authorization') is None 59 | -------------------------------------------------------------------------------- /tests/test_fingerprintadapter.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import requests 3 | import unittest 4 | 5 | from requests_toolbelt.adapters.fingerprint import FingerprintAdapter 6 | from . import get_betamax 7 | 8 | 9 | class TestFingerprintAdapter(unittest.TestCase): 10 | HTTP2BIN_FINGERPRINT = 'abf8683eeba8521ad2e8dc48e92a1cbea3ff8608f1417948fdad75d7b50eb264' 11 | 12 | def setUp(self): 13 | self.session = requests.Session() 14 | self.session.mount('https://http2bin.org', FingerprintAdapter(self.HTTP2BIN_FINGERPRINT)) 15 | self.recorder = get_betamax(self.session) 16 | 17 | def test_fingerprint(self): 18 | with self.recorder.use_cassette('http2bin_fingerprint'): 19 | r = self.session.get('https://http2bin.org/get') 20 | assert r.status_code == 200 21 | -------------------------------------------------------------------------------- /tests/test_forgetfulcookiejar.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import requests 3 | import unittest 4 | 5 | from requests_toolbelt.cookies.forgetful import ForgetfulCookieJar 6 | from . import get_betamax 7 | 8 | 9 | class TestForgetfulCookieJar(unittest.TestCase): 10 | 11 | def setUp(self): 12 | self.session = requests.Session() 13 | self.session.cookies = ForgetfulCookieJar() 14 | self.recorder = get_betamax(self.session) 15 | 16 | def test_cookies_are_ignored(self): 17 | with self.recorder.use_cassette('http2bin_cookies'): 18 | url = 'https://httpbin.org/cookies/set' 19 | cookies = { 20 | 'cookie0': 'value0', 21 | } 22 | r = self.session.request( 23 | 'GET', url, 24 | params=cookies 25 | ) 26 | assert 'cookie0' not in self.session.cookies 27 | -------------------------------------------------------------------------------- /tests/test_formdata.py: -------------------------------------------------------------------------------- 1 | """Test module for requests_toolbelt.utils.formdata.""" 2 | try: 3 | from urllib.parse import parse_qs 4 | except ImportError: 5 | from urlparse import parse_qs 6 | 7 | from requests_toolbelt.utils.formdata import urlencode 8 | 9 | import pytest 10 | 11 | dict_query = { 12 | 'first_nested': { 13 | 'second_nested': { 14 | 'third_nested': { 15 | 'fourth0': 'fourth_value0', 16 | 'fourth1': 'fourth_value1', 17 | }, 18 | 'third0': 'third_value0', 19 | }, 20 | 'second0': 'second_value0', 21 | }, 22 | 'outter': 'outter_value', 23 | } 24 | 25 | list_query = [ 26 | ('first_nested', [ 27 | ('second_nested', [ 28 | ('third_nested', [ 29 | ('fourth0', 'fourth_value0'), 30 | ('fourth1', 'fourth_value1'), 31 | ]), 32 | ('third0', 'third_value0'), 33 | ]), 34 | ('second0', 'second_value0'), 35 | ]), 36 | ('outter', 'outter_value'), 37 | ] 38 | 39 | mixed_dict_query = { 40 | 'first_nested': { 41 | 'second_nested': [ 42 | ('third_nested', { 43 | 'fourth0': 'fourth_value0', 44 | 'fourth1': 'fourth_value1', 45 | }), 46 | ('third0', 'third_value0'), 47 | ], 48 | 'second0': 'second_value0', 49 | }, 50 | 'outter': 'outter_value', 51 | } 52 | 53 | expected_parsed_query = { 54 | 'first_nested[second0]': ['second_value0'], 55 | 'first_nested[second_nested][third0]': ['third_value0'], 56 | 'first_nested[second_nested][third_nested][fourth0]': 
['fourth_value0'], 57 | 'first_nested[second_nested][third_nested][fourth1]': ['fourth_value1'], 58 | 'outter': ['outter_value'], 59 | } 60 | 61 | 62 | @pytest.mark.parametrize("query", [dict_query, list_query, mixed_dict_query]) 63 | def test_urlencode_flattens_nested_structures(query): 64 | """Show that when parsed, the structure is conveniently flat.""" 65 | parsed = parse_qs(urlencode(query)) 66 | 67 | assert parsed == expected_parsed_query 68 | 69 | 70 | def test_urlencode_catches_invalid_input(): 71 | """Show that queries are loosely validated.""" 72 | with pytest.raises(ValueError): 73 | urlencode(['fo']) 74 | 75 | with pytest.raises(ValueError): 76 | urlencode([('foo', 'bar', 'bogus')]) 77 | -------------------------------------------------------------------------------- /tests/test_host_header_ssl_adapter.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import requests 3 | 4 | from requests_toolbelt.adapters import host_header_ssl as hhssl 5 | 6 | 7 | @pytest.fixture 8 | def session(): 9 | """Create a session with our adapter mounted.""" 10 | session = requests.Session() 11 | session.mount('https://', hhssl.HostHeaderSSLAdapter()) 12 | 13 | 14 | @pytest.mark.skip 15 | class TestHostHeaderSSLAdapter(object): 16 | """Tests for our HostHeaderSNIAdapter.""" 17 | 18 | def test_ssladapter(self, session): 19 | # normal mode 20 | r = session.get('https://example.org') 21 | assert r.status_code == 200 22 | 23 | # accessing IP address directly 24 | r = session.get('https://93.184.216.34', 25 | headers={"Host": "example.org"}) 26 | assert r.status_code == 200 27 | 28 | # vHost 29 | r = session.get('https://93.184.216.34', 30 | headers={'Host': 'example.com'}) 31 | assert r.status_code == 200 32 | 33 | def test_stream(self): 34 | self.session.get('https://54.175.219.8/stream/20', 35 | headers={'Host': 'httpbin.org'}, 36 | stream=True) 37 | 38 | def test_case_insensitive_header(self): 39 | r = self.session.get('https://93.184.216.34', 40 | headers={'hOSt': 'example.org'}) 41 | assert r.status_code == 200 42 | 43 | def test_plain_requests(self): 44 | # test whether the reason for this adapter remains 45 | # (may be implemented into requests in the future) 46 | with pytest.raises(requests.exceptions.SSLError): 47 | requests.get(url='https://93.184.216.34', 48 | headers={'Host': 'example.org'}) 49 | -------------------------------------------------------------------------------- /tests/test_multipart_monitor.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import math 3 | import unittest 4 | from requests_toolbelt.multipart.encoder import ( 5 | IDENTITY, MultipartEncoder, MultipartEncoderMonitor 6 | ) 7 | 8 | 9 | class TestMultipartEncoderMonitor(unittest.TestCase): 10 | def setUp(self): 11 | self.fields = {'a': 'b'} 12 | self.boundary = 'thisisaboundary' 13 | self.encoder = MultipartEncoder(self.fields, self.boundary) 14 | self.monitor = MultipartEncoderMonitor(self.encoder) 15 | 16 | def test_content_type(self): 17 | assert self.monitor.content_type == self.encoder.content_type 18 | 19 | def test_length(self): 20 | assert self.encoder.len == self.monitor.len 21 | 22 | def test_read(self): 23 | new_encoder = MultipartEncoder(self.fields, self.boundary) 24 | assert new_encoder.read() == self.monitor.read() 25 | 26 | def test_callback_called_when_reading_everything(self): 27 | callback = Callback(self.monitor) 28 | self.monitor.callback = callback 29 | self.monitor.read() 
30 | assert callback.called == 1 31 | 32 | def test_callback(self): 33 | callback = Callback(self.monitor) 34 | self.monitor.callback = callback 35 | chunk_size = int(math.ceil(self.encoder.len / 4.0)) 36 | while self.monitor.read(chunk_size): 37 | pass 38 | assert callback.called == 5 39 | 40 | def test_bytes_read(self): 41 | bytes_to_read = self.encoder.len 42 | self.monitor.read() 43 | assert self.monitor.bytes_read == bytes_to_read 44 | 45 | def test_default_callable_is_the_identity(self): 46 | assert self.monitor.callback == IDENTITY 47 | assert IDENTITY(1) == 1 48 | 49 | def test_from_fields(self): 50 | monitor = MultipartEncoderMonitor.from_fields( 51 | self.fields, self.boundary 52 | ) 53 | assert isinstance(monitor, MultipartEncoderMonitor) 54 | assert isinstance(monitor.encoder, MultipartEncoder) 55 | assert monitor.encoder.boundary_value == self.boundary 56 | 57 | 58 | class Callback(object): 59 | def __init__(self, monitor): 60 | self.called = 0 61 | self.monitor = monitor 62 | 63 | def __call__(self, monitor): 64 | self.called += 1 65 | assert monitor == self.monitor 66 | -------------------------------------------------------------------------------- /tests/test_proxy_digest_auth.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Test proxy digest authentication.""" 3 | 4 | import unittest 5 | try: 6 | from unittest import mock 7 | except ImportError: 8 | import mock 9 | 10 | import requests 11 | from requests_toolbelt.auth import http_proxy_digest 12 | 13 | 14 | class TestProxyDigestAuth(unittest.TestCase): 15 | """Tests for the ProxyDigestAuth class.""" 16 | 17 | def setUp(self): 18 | """Set up variables for each test.""" 19 | self.username = "username" 20 | self.password = "password" 21 | self.auth = http_proxy_digest.HTTPProxyDigestAuth( 22 | self.username, self.password 23 | ) 24 | self.prepared_request = requests.Request( 25 | 'GET', 26 | 'http://host.org/index.html' 27 | ).prepare() 28 | 29 | def test_with_existing_nonce(self): 30 | """Test if it will generate Proxy-Auth header when nonce present. 31 | 32 | Digest authentication's correctness will not be tested here. 33 | """ 34 | self.auth.last_nonce = "bH3FVAAAAAAg74rL3X8AAI3CyBAAAAAA" 35 | self.auth.chal = { 36 | 'nonce': self.auth.last_nonce, 37 | 'realm': 'testrealm@host.org', 38 | 'qop': 'auth' 39 | } 40 | 41 | # prepared_request headers should be clear before calling auth 42 | assert self.prepared_request.headers.get('Proxy-Authorization') is None 43 | self.auth(self.prepared_request) 44 | assert self.prepared_request.headers['Proxy-Authorization'] is not None 45 | 46 | def test_no_challenge(self): 47 | """Test that a response containing no auth challenge is left alone.""" 48 | connection = MockConnection() 49 | first_response = connection.make_response(self.prepared_request) 50 | first_response.status_code = 404 51 | 52 | assert self.auth.last_nonce == '' 53 | final_response = self.auth.handle_407(first_response) 54 | headers = final_response.request.headers 55 | assert self.auth.last_nonce == '' 56 | assert first_response is final_response 57 | assert headers.get('Proxy-Authorization') is None 58 | 59 | def test_digest_challenge(self): 60 | """Test a response with a digest auth challenge causes a new request. 61 | 62 | This ensures that the auth class generates a new request with a 63 | Proxy-Authorization header. 64 | 65 | Digest authentication's correctness will not be tested here. 
66 | """ 67 | connection = MockConnection() 68 | first_response = connection.make_response(self.prepared_request) 69 | first_response.status_code = 407 70 | first_response.headers['Proxy-Authenticate'] = ( 71 | 'Digest' 72 | ' realm="Fake Realm", nonce="oS6WVgAAAABw698CAAAAAHAk/HUAAAAA",' 73 | ' qop="auth", stale=false' 74 | ) 75 | 76 | assert self.auth.last_nonce == '' 77 | final_response = self.auth.handle_407(first_response) 78 | headers = final_response.request.headers 79 | assert self.auth.last_nonce != '' 80 | assert first_response is not final_response 81 | assert headers.get('Proxy-Authorization') is not None 82 | 83 | def test_ntlm_challenge(self): 84 | """Test a response without a Digest auth challenge is left alone.""" 85 | connection = MockConnection() 86 | first_response = connection.make_response(self.prepared_request) 87 | first_response.status_code = 407 88 | first_response.headers['Proxy-Authenticate'] = 'NTLM' 89 | 90 | assert self.auth.last_nonce == '' 91 | final_response = self.auth.handle_407(first_response) 92 | headers = final_response.request.headers 93 | assert self.auth.last_nonce == '' 94 | assert first_response is final_response 95 | assert headers.get('Proxy-Authorization') is None 96 | 97 | 98 | class MockConnection(object): 99 | """Fake connection object.""" 100 | 101 | def send(self, request, **kwargs): 102 | """Mock out the send method.""" 103 | return self.make_response(request) 104 | 105 | def make_response(self, request): 106 | """Make a response for us based on the request.""" 107 | response = requests.Response() 108 | response.status_code = 200 109 | response.request = request 110 | response.raw = mock.MagicMock() 111 | response.connection = self 112 | return response 113 | 114 | if __name__ == '__main__': 115 | unittest.main() 116 | -------------------------------------------------------------------------------- /tests/test_sessions.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import unittest 3 | import pickle 4 | import pytest 5 | 6 | from requests_toolbelt import sessions 7 | from requests import Request 8 | from . 
import get_betamax 9 | 10 | 11 | class TestBasedSession(unittest.TestCase): 12 | def test_request_with_base(self): 13 | session = sessions.BaseUrlSession('https://httpbin.org/') 14 | recorder = get_betamax(session) 15 | with recorder.use_cassette('simple_get_request'): 16 | response = session.get('/get') 17 | response.raise_for_status() 18 | 19 | def test_request_without_base(self): 20 | session = sessions.BaseUrlSession() 21 | with pytest.raises(ValueError): 22 | session.get('/') 23 | 24 | def test_request_override_base(self): 25 | session = sessions.BaseUrlSession('https://www.google.com') 26 | recorder = get_betamax(session) 27 | with recorder.use_cassette('simple_get_request'): 28 | response = session.get('https://httpbin.org/get') 29 | response.raise_for_status() 30 | assert response.json()['headers']['Host'] == 'httpbin.org' 31 | 32 | def test_prepared_request_with_base(self): 33 | session = sessions.BaseUrlSession('https://httpbin.org') 34 | request = Request(method="GET", url="/get") 35 | prepared_request = session.prepare_request(request) 36 | recorder = get_betamax(session) 37 | with recorder.use_cassette('simple_get_request'): 38 | response = session.send(prepared_request) 39 | response.raise_for_status() 40 | 41 | def test_prepared_request_without_base(self): 42 | session = sessions.BaseUrlSession() 43 | request = Request(method="GET", url="/") 44 | with pytest.raises(ValueError): 45 | prepared_request = session.prepare_request(request) 46 | session.send(prepared_request) 47 | 48 | def test_prepared_request_override_base(self): 49 | session = sessions.BaseUrlSession('https://www.google.com') 50 | request = Request(method="GET", url="https://httpbin.org/get") 51 | prepared_request = session.prepare_request(request) 52 | recorder = get_betamax(session) 53 | with recorder.use_cassette('simple_get_request'): 54 | response = session.send(prepared_request) 55 | response.raise_for_status() 56 | assert response.json()['headers']['Host'] == 'httpbin.org' 57 | 58 | def test_pickle_unpickle_session(self): 59 | session = sessions.BaseUrlSession('https://www.google.com') 60 | pickled_session = pickle.dumps(session) 61 | unpickled_session = pickle.loads(pickled_session) 62 | assert session.base_url == unpickled_session.base_url 63 | -------------------------------------------------------------------------------- /tests/test_socket_options_adapter.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Tests for the SocketOptionsAdapter and TCPKeepAliveAdapter.""" 3 | import contextlib 4 | import platform 5 | import socket 6 | import sys 7 | 8 | import pytest 9 | try: 10 | from unittest import mock 11 | except ImportError: 12 | import mock 13 | import requests 14 | from requests_toolbelt._compat import poolmanager 15 | 16 | from requests_toolbelt.adapters import socket_options 17 | 18 | 19 | @contextlib.contextmanager 20 | def remove_keepidle(): 21 | """A context manager to remove TCP_KEEPIDLE from socket.""" 22 | TCP_KEEPIDLE = getattr(socket, 'TCP_KEEPIDLE', None) 23 | if TCP_KEEPIDLE is not None: 24 | del socket.TCP_KEEPIDLE 25 | 26 | yield 27 | 28 | if TCP_KEEPIDLE is not None: 29 | socket.TCP_KEEPIDLE = TCP_KEEPIDLE 30 | 31 | 32 | @contextlib.contextmanager 33 | def set_keepidle(value): 34 | """A context manager to set TCP_KEEPALIVE on socket always.""" 35 | TCP_KEEPIDLE = getattr(socket, 'TCP_KEEPIDLE', None) 36 | socket.TCP_KEEPIDLE = value 37 | 38 | yield 39 | 40 | if TCP_KEEPIDLE is not None: 41 | socket.TCP_KEEPIDLE = 
TCP_KEEPIDLE 42 | else: 43 | del socket.TCP_KEEPIDLE 44 | 45 | 46 | @mock.patch.object(requests, '__build__', 0x020500) 47 | @mock.patch.object(poolmanager, 'PoolManager') 48 | def test_options_passing_on_newer_requests(PoolManager): 49 | """Show that options are passed for a new enough version of requests.""" 50 | fake_opts = [('test', 'options', 'fake')] 51 | adapter = socket_options.SocketOptionsAdapter( 52 | socket_options=fake_opts, 53 | pool_connections=10, 54 | pool_maxsize=5, 55 | pool_block=True, 56 | ) 57 | PoolManager.assert_called_once_with( 58 | num_pools=10, maxsize=5, block=True, 59 | socket_options=fake_opts 60 | ) 61 | assert adapter.socket_options == fake_opts 62 | 63 | 64 | @mock.patch.object(requests, '__build__', 0x020300) 65 | @mock.patch.object(poolmanager, 'PoolManager') 66 | def test_options_not_passed_on_older_requests(PoolManager): 67 | """Show that options are not passed for older versions of requests.""" 68 | fake_opts = [('test', 'options', 'fake')] 69 | socket_options.SocketOptionsAdapter( 70 | socket_options=fake_opts, 71 | pool_connections=10, 72 | pool_maxsize=5, 73 | pool_block=True, 74 | ) 75 | assert PoolManager.called is False 76 | 77 | 78 | @pytest.mark.xfail(sys.version_info.major == 2 and platform.system() == "Windows", 79 | reason="Windows does not have TCP_KEEPINTVL in Python 2") 80 | @mock.patch.object(requests, '__build__', 0x020500) 81 | @mock.patch.object(poolmanager, 'PoolManager') 82 | def test_keep_alive_on_newer_requests_no_idle(PoolManager): 83 | """Show that options are generated correctly from kwargs.""" 84 | socket_opts = [ 85 | (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1), 86 | (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), 87 | (socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 10), 88 | (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 10), 89 | ] 90 | with remove_keepidle(): 91 | adapter = socket_options.TCPKeepAliveAdapter( 92 | idle=30, interval=10, count=10, 93 | pool_connections=10, 94 | pool_maxsize=5, 95 | pool_block=True, 96 | ) 97 | PoolManager.assert_called_once_with( 98 | num_pools=10, maxsize=5, block=True, 99 | socket_options=socket_opts 100 | ) 101 | assert adapter.socket_options == socket_opts 102 | 103 | 104 | @pytest.mark.xfail(sys.version_info.major == 2 and platform.system() == "Windows", 105 | reason="Windows does not have TCP_KEEPINTVL in Python 2") 106 | @mock.patch.object(requests, '__build__', 0x020500) 107 | @mock.patch.object(poolmanager, 'PoolManager') 108 | def test_keep_alive_on_newer_requests_with_idle(PoolManager): 109 | """Show that options are generated correctly from kwargs with KEEPIDLE.""" 110 | with set_keepidle(3000): 111 | socket_opts = [ 112 | (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1), 113 | (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), 114 | (socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 10), 115 | (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 10), 116 | (socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30), 117 | ] 118 | adapter = socket_options.TCPKeepAliveAdapter( 119 | idle=30, interval=10, count=10, 120 | pool_connections=10, 121 | pool_maxsize=5, 122 | pool_block=True, 123 | ) 124 | 125 | PoolManager.assert_called_once_with( 126 | num_pools=10, maxsize=5, block=True, 127 | socket_options=socket_opts 128 | ) 129 | assert adapter.socket_options == socket_opts 130 | -------------------------------------------------------------------------------- /tests/test_source_adapter.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from requests.adapters import 
DEFAULT_POOLSIZE, DEFAULT_POOLBLOCK 3 | try: 4 | from unittest.mock import patch 5 | except ImportError: 6 | from mock import patch 7 | from requests_toolbelt.adapters.source import SourceAddressAdapter 8 | 9 | import pytest 10 | 11 | 12 | @patch('requests_toolbelt.adapters.source.poolmanager') 13 | def test_source_address_adapter_string(poolmanager): 14 | SourceAddressAdapter('10.10.10.10') 15 | 16 | poolmanager.PoolManager.assert_called_once_with( 17 | num_pools=DEFAULT_POOLSIZE, 18 | maxsize=DEFAULT_POOLSIZE, 19 | block=DEFAULT_POOLBLOCK, 20 | source_address=('10.10.10.10', 0) 21 | ) 22 | 23 | 24 | @patch('requests_toolbelt.adapters.source.poolmanager') 25 | def test_source_address_adapter_tuple(poolmanager): 26 | SourceAddressAdapter(('10.10.10.10', 80)) 27 | 28 | poolmanager.PoolManager.assert_called_once_with( 29 | num_pools=DEFAULT_POOLSIZE, 30 | maxsize=DEFAULT_POOLSIZE, 31 | block=DEFAULT_POOLBLOCK, 32 | source_address=('10.10.10.10', 80) 33 | ) 34 | 35 | 36 | @patch('requests_toolbelt.adapters.source.poolmanager') 37 | def test_source_address_adapter_type_error(poolmanager): 38 | with pytest.raises(TypeError): 39 | SourceAddressAdapter({'10.10.10.10': 80}) 40 | 41 | assert not poolmanager.PoolManager.called 42 | -------------------------------------------------------------------------------- /tests/test_ssladapter.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | try: 3 | from unittest import mock 4 | except ImportError: 5 | import mock 6 | import pytest 7 | import requests 8 | import unittest 9 | 10 | from requests_toolbelt import SSLAdapter 11 | from . import get_betamax 12 | 13 | 14 | class TestSSLAdapter(unittest.TestCase): 15 | def setUp(self): 16 | self.session = requests.Session() 17 | self.session.mount('https://', SSLAdapter('SSLv3')) 18 | self.recorder = get_betamax(self.session) 19 | 20 | def test_klevas(self): 21 | with self.recorder.use_cassette('klevas_vu_lt_ssl3'): 22 | r = self.session.get('https://klevas.vu.lt/') 23 | assert r.status_code == 200 24 | 25 | @pytest.mark.skipif(requests.__build__ < 0x020400, 26 | reason="Requires Requests v2.4.0 or later") 27 | @mock.patch('requests.packages.urllib3.poolmanager.ProxyManager') 28 | def test_proxies(self, ProxyManager): 29 | a = SSLAdapter('SSLv3') 30 | a.proxy_manager_for('http://127.0.0.1:8888') 31 | 32 | assert ProxyManager.call_count == 1 33 | kwargs = ProxyManager.call_args_list[0][1] 34 | assert kwargs['ssl_version'] == 'SSLv3' 35 | -------------------------------------------------------------------------------- /tests/test_streaming_iterator.py: -------------------------------------------------------------------------------- 1 | import io 2 | 3 | from requests_toolbelt.streaming_iterator import StreamingIterator 4 | 5 | import pytest 6 | 7 | @pytest.fixture(params=[True, False]) 8 | def get_iterable(request): 9 | ''' 10 | When this fixture is used, the test is run twice -- once with the iterable 11 | being a file-like object, once being an iterator. 
12 | ''' 13 | is_file = request.param 14 | def inner(chunks): 15 | if is_file: 16 | return io.BytesIO(b''.join(chunks)) 17 | return iter(chunks) 18 | return inner 19 | 20 | 21 | class TestStreamingIterator(object): 22 | @pytest.fixture(autouse=True) 23 | def setup(self, get_iterable): 24 | self.chunks = [b'here', b'are', b'some', b'chunks'] 25 | self.size = 17 26 | self.uploader = StreamingIterator(self.size, get_iterable(self.chunks)) 27 | 28 | def test_read_returns_all_chunks_in_one(self): 29 | assert self.uploader.read() == b''.join(self.chunks) 30 | 31 | def test_read_returns_empty_string_after_exhausting_the_iterator(self): 32 | for i in range(0, 4): 33 | self.uploader.read(8192) 34 | 35 | assert self.uploader.read() == b'' 36 | assert self.uploader.read(8192) == b'' 37 | 38 | 39 | class TestStreamingIteratorWithLargeChunks(object): 40 | @pytest.fixture(autouse=True) 41 | def setup(self, get_iterable): 42 | self.letters = [b'a', b'b', b'c', b'd', b'e'] 43 | self.chunks = (letter * 2000 for letter in self.letters) 44 | self.size = 5 * 2000 45 | self.uploader = StreamingIterator(self.size, get_iterable(self.chunks)) 46 | 47 | def test_returns_the_amount_requested(self): 48 | chunk_size = 1000 49 | bytes_read = 0 50 | while True: 51 | b = self.uploader.read(chunk_size) 52 | if not b: 53 | break 54 | assert len(b) == chunk_size 55 | bytes_read += len(b) 56 | 57 | assert bytes_read == self.size 58 | 59 | def test_returns_all_of_the_bytes(self): 60 | chunk_size = 8192 61 | bytes_read = 0 62 | while True: 63 | b = self.uploader.read(chunk_size) 64 | if not b: 65 | break 66 | bytes_read += len(b) 67 | 68 | assert bytes_read == self.size 69 | -------------------------------------------------------------------------------- /tests/test_user_agent.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import unittest 3 | import sys 4 | 5 | try: 6 | from unittest.mock import patch 7 | except ImportError: 8 | from mock import patch 9 | import pytest 10 | 11 | from requests_toolbelt.utils import user_agent as ua 12 | 13 | 14 | class Object(object): 15 | """ 16 | A simple mock object that can have attributes added to it. 
17 | """ 18 | pass 19 | 20 | 21 | class TestUserAgentBuilder(unittest.TestCase): 22 | def test_only_user_agent_name(self): 23 | assert 'fake/1.0.0' == ua.UserAgentBuilder('fake', '1.0.0').build() 24 | 25 | def test_includes_extras(self): 26 | expected = 'fake/1.0.0 another-fake/2.0.1 yet-another-fake/17.1.0' 27 | actual = ua.UserAgentBuilder('fake', '1.0.0').include_extras([ 28 | ('another-fake', '2.0.1'), 29 | ('yet-another-fake', '17.1.0'), 30 | ]).build() 31 | assert expected == actual 32 | 33 | @patch('platform.python_implementation', return_value='CPython') 34 | @patch('platform.python_version', return_value='2.7.13') 35 | def test_include_implementation(self, *_): 36 | expected = 'fake/1.0.0 CPython/2.7.13' 37 | actual = ua.UserAgentBuilder('fake', '1.0.0').include_implementation( 38 | ).build() 39 | assert expected == actual 40 | 41 | @patch('platform.system', return_value='Linux') 42 | @patch('platform.release', return_value='4.9.5') 43 | def test_include_system(self, *_): 44 | expected = 'fake/1.0.0 Linux/4.9.5' 45 | actual = ua.UserAgentBuilder('fake', '1.0.0').include_system( 46 | ).build() 47 | assert expected == actual 48 | 49 | 50 | class TestUserAgent(unittest.TestCase): 51 | def test_user_agent_provides_package_name(self): 52 | assert "my-package" in ua.user_agent("my-package", "0.0.1") 53 | 54 | def test_user_agent_provides_package_version(self): 55 | assert "0.0.1" in ua.user_agent("my-package", "0.0.1") 56 | 57 | def test_user_agent_builds_extras_appropriately(self): 58 | assert "extra/1.0.0" in ua.user_agent( 59 | "my-package", "0.0.1", extras=[("extra", "1.0.0")] 60 | ) 61 | 62 | def test_user_agent_checks_extras_for_tuples_of_incorrect_length(self): 63 | with pytest.raises(ValueError): 64 | ua.user_agent("my-package", "0.0.1", extras=[ 65 | ("extra", "1.0.0", "oops") 66 | ]) 67 | 68 | with pytest.raises(ValueError): 69 | ua.user_agent("my-package", "0.0.1", extras=[ 70 | ("extra",) 71 | ]) 72 | 73 | 74 | class TestImplementationString(unittest.TestCase): 75 | @patch('platform.python_implementation') 76 | @patch('platform.python_version') 77 | def test_cpython_implementation(self, mock_version, mock_implementation): 78 | mock_implementation.return_value = 'CPython' 79 | mock_version.return_value = '2.7.5' 80 | assert 'CPython/2.7.5' == ua._implementation_string() 81 | 82 | @patch('platform.python_implementation') 83 | def test_pypy_implementation_final(self, mock_implementation): 84 | mock_implementation.return_value = 'PyPy' 85 | sys.pypy_version_info = Object() 86 | sys.pypy_version_info.major = 2 87 | sys.pypy_version_info.minor = 0 88 | sys.pypy_version_info.micro = 1 89 | sys.pypy_version_info.releaselevel = 'final' 90 | 91 | assert 'PyPy/2.0.1' == ua._implementation_string() 92 | 93 | @patch('platform.python_implementation') 94 | def test_pypy_implementation_non_final(self, mock_implementation): 95 | mock_implementation.return_value = 'PyPy' 96 | sys.pypy_version_info = Object() 97 | sys.pypy_version_info.major = 2 98 | sys.pypy_version_info.minor = 0 99 | sys.pypy_version_info.micro = 1 100 | sys.pypy_version_info.releaselevel = 'beta2' 101 | 102 | assert 'PyPy/2.0.1beta2' == ua._implementation_string() 103 | 104 | @patch('platform.python_implementation') 105 | def test_unknown_implementation(self, mock_implementation): 106 | mock_implementation.return_value = "Lukasa'sSuperPython" 107 | 108 | assert "Lukasa'sSuperPython/Unknown" == ua._implementation_string() 109 | -------------------------------------------------------------------------------- 
/tests/test_x509_adapter.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import requests 3 | import unittest 4 | import pytest 5 | 6 | try: 7 | import OpenSSL 8 | except ImportError: 9 | PYOPENSSL_AVAILABLE = False 10 | else: 11 | PYOPENSSL_AVAILABLE = True 12 | from requests_toolbelt.adapters.x509 import X509Adapter 13 | from cryptography import x509 14 | from cryptography.hazmat.primitives.serialization import ( 15 | Encoding, 16 | PrivateFormat, 17 | BestAvailableEncryption, 18 | load_pem_private_key, 19 | ) 20 | import trustme 21 | 22 | from requests_toolbelt import exceptions as exc 23 | from . import get_betamax 24 | 25 | REQUESTS_SUPPORTS_SSL_CONTEXT = requests.__build__ >= 0x021200 26 | 27 | pytestmark = pytest.mark.filterwarnings( 28 | "ignore:'urllib3.contrib.pyopenssl' module is deprecated:DeprecationWarning") 29 | 30 | 31 | class TestX509Adapter(unittest.TestCase): 32 | """Tests a simple requests.get() call using a .p12 cert. 33 | """ 34 | def setUp(self): 35 | self.pkcs12_password_bytes = "test".encode('utf8') 36 | self.session = requests.Session() 37 | 38 | @pytest.mark.skipif(not REQUESTS_SUPPORTS_SSL_CONTEXT, 39 | reason="Requires Requests v2.12.0 or later") 40 | @pytest.mark.skipif(not PYOPENSSL_AVAILABLE, 41 | reason="Requires OpenSSL") 42 | def test_x509_pem(self): 43 | ca = trustme.CA() 44 | cert = ca.issue_cert(u'pkiprojecttest01.dev.labs.internal') 45 | cert_bytes = cert.cert_chain_pems[0].bytes() 46 | pk_bytes = cert.private_key_pem.bytes() 47 | 48 | adapter = X509Adapter(max_retries=3, cert_bytes=cert_bytes, pk_bytes=pk_bytes) 49 | self.session.mount('https://', adapter) 50 | recorder = get_betamax(self.session) 51 | with recorder.use_cassette('test_x509_adapter_pem'): 52 | r = self.session.get('https://pkiprojecttest01.dev.labs.internal/', verify=False) 53 | 54 | assert r.status_code == 200 55 | assert r.text 56 | 57 | @pytest.mark.skipif(not REQUESTS_SUPPORTS_SSL_CONTEXT, 58 | reason="Requires Requests v2.12.0 or later") 59 | @pytest.mark.skipif(not PYOPENSSL_AVAILABLE, 60 | reason="Requires OpenSSL") 61 | def test_x509_der_and_password(self): 62 | ca = trustme.CA() 63 | cert = ca.issue_cert(u'pkiprojecttest01.dev.labs.internal') 64 | cert_bytes = x509.load_pem_x509_certificate( 65 | cert.cert_chain_pems[0].bytes()).public_bytes(Encoding.DER) 66 | pem_pk = load_pem_private_key(cert.private_key_pem.bytes(), password=None) 67 | pk_bytes = pem_pk.private_bytes(Encoding.DER, PrivateFormat.PKCS8, 68 | BestAvailableEncryption(self.pkcs12_password_bytes)) 69 | 70 | adapter = X509Adapter(max_retries=3, cert_bytes=cert_bytes, pk_bytes=pk_bytes, 71 | password=self.pkcs12_password_bytes, encoding=Encoding.DER) 72 | self.session.mount('https://', adapter) 73 | recorder = get_betamax(self.session) 74 | with recorder.use_cassette('test_x509_adapter_der'): 75 | r = self.session.get('https://pkiprojecttest01.dev.labs.internal/', verify=False) 76 | 77 | assert r.status_code == 200 78 | assert r.text 79 | 80 | @pytest.mark.skipif(REQUESTS_SUPPORTS_SSL_CONTEXT, reason="Will not raise exc") 81 | def test_requires_new_enough_requests(self): 82 | with pytest.raises(exc.VersionMismatchError): 83 | X509Adapter() 84 | -------------------------------------------------------------------------------- /tests/threaded/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/requests/toolbelt/bcd5f7be229e14089052be7e3b527ebcea0ae7b8/tests/threaded/__init__.py -------------------------------------------------------------------------------- /tests/threaded/test_api.py: -------------------------------------------------------------------------------- 1 | """Module containing tests for requests_toolbelt.threaded API.""" 2 | 3 | try: 4 | from unittest import mock 5 | except ImportError: 6 | import mock 7 | import pytest 8 | 9 | from requests_toolbelt._compat import queue 10 | from requests_toolbelt import threaded 11 | 12 | 13 | def test_creates_a_pool_for_the_user(): 14 | """Assert a Pool object is used correctly and as we expect. 15 | 16 | This just ensures that we're not jumping through any extra hoops with our 17 | internal usage of a Pool object. 18 | """ 19 | mocked_pool = mock.Mock(spec=['join_all', 'responses', 'exceptions']) 20 | with mock.patch('requests_toolbelt.threaded.pool.Pool') as Pool: 21 | Pool.return_value = mocked_pool 22 | threaded.map([{}, {}]) 23 | 24 | assert Pool.called is True 25 | _, kwargs = Pool.call_args 26 | assert 'job_queue' in kwargs 27 | assert isinstance(kwargs['job_queue'], queue.Queue) 28 | mocked_pool.join_all.assert_called_once_with() 29 | mocked_pool.responses.assert_called_once_with() 30 | mocked_pool.exceptions.assert_called_once_with() 31 | 32 | 33 | def test_raises_a_value_error_for_non_dictionaries(): 34 | """Exercise our lazy valdation.""" 35 | with pytest.raises(ValueError): 36 | threaded.map([[], []]) 37 | 38 | 39 | def test_raises_a_value_error_for_falsey_requests(): 40 | """Assert that the requests param is truthy.""" 41 | with pytest.raises(ValueError): 42 | threaded.map([]) 43 | 44 | with pytest.raises(ValueError): 45 | threaded.map(None) 46 | 47 | 48 | def test_passes_on_kwargs(): 49 | """Verify that we pass on kwargs to the Pool constructor.""" 50 | mocked_pool = mock.Mock(spec=['join_all', 'responses', 'exceptions']) 51 | with mock.patch('requests_toolbelt.threaded.pool.Pool') as Pool: 52 | Pool.return_value = mocked_pool 53 | threaded.map([{}, {}], num_processes=1000, 54 | initializer=test_passes_on_kwargs) 55 | 56 | _, kwargs = Pool.call_args 57 | assert 'job_queue' in kwargs 58 | assert 'num_processes' in kwargs 59 | assert 'initializer' in kwargs 60 | 61 | assert kwargs['num_processes'] == 1000 62 | assert kwargs['initializer'] == test_passes_on_kwargs 63 | -------------------------------------------------------------------------------- /tests/threaded/test_thread.py: -------------------------------------------------------------------------------- 1 | """Module containing the tests for requests_toolbelt.threaded.thread.""" 2 | try: 3 | import queue # Python 3 4 | except ImportError: 5 | import Queue as queue 6 | import threading 7 | import unittest 8 | import uuid 9 | 10 | try: 11 | from unittest import mock 12 | except ImportError: 13 | import mock 14 | import requests.exceptions 15 | 16 | from requests_toolbelt.threaded import thread 17 | 18 | 19 | def _make_mocks(): 20 | return (mock.MagicMock() for _ in range(4)) 21 | 22 | 23 | def _initialize_a_session_thread(session=None, job_queue=None, 24 | response_queue=None, exception_queue=None): 25 | if job_queue is None: 26 | job_queue = queue.Queue() 27 | with mock.patch.object(threading, 'Thread') as Thread: 28 | thread_instance = mock.MagicMock() 29 | Thread.return_value = thread_instance 30 | st = thread.SessionThread( 31 | initialized_session=session, 32 | job_queue=job_queue, 33 | response_queue=response_queue, 34 | 
exception_queue=exception_queue, 35 | ) 36 | 37 | return (st, thread_instance, Thread) 38 | 39 | 40 | class TestSessionThread(unittest.TestCase): 41 | 42 | """Tests for requests_toolbelt.threaded.thread.SessionThread.""" 43 | 44 | def test_thread_initialization(self): 45 | """Test the way a SessionThread is initialized. 46 | 47 | We want to ensure that we creat a thread with a name generated by the 48 | uuid module, and that we pass the right method to use as a target. 49 | """ 50 | with mock.patch.object(uuid, 'uuid4', return_value='test'): 51 | (st, thread_instance, Thread) = _initialize_a_session_thread() 52 | 53 | Thread.assert_called_once_with(target=st._make_request, name='test') 54 | assert thread_instance.daemon is True 55 | assert thread_instance._state is 0 56 | thread_instance.start.assert_called_once_with() 57 | 58 | def test_is_alive_proxies_to_worker(self): 59 | """Test that we proxy the is_alive method to the Thread.""" 60 | job_queue = queue.Queue() 61 | with mock.patch.object(threading, 'Thread') as Thread: 62 | thread_instance = mock.MagicMock() 63 | Thread.return_value = thread_instance 64 | st = thread.SessionThread(None, job_queue, None, None) 65 | 66 | st.is_alive() 67 | thread_instance.is_alive.assert_called_once_with() 68 | 69 | def test_join_proxies_to_worker(self): 70 | """Test that we proxy the join method to the Thread.""" 71 | st, thread_instance, _ = _initialize_a_session_thread() 72 | 73 | st.join() 74 | thread_instance.join.assert_called_once_with() 75 | 76 | def test_handle_valid_request(self): 77 | """Test that a response is added to the right queue.""" 78 | session, job_queue, response_queue, exception_queue = _make_mocks() 79 | response = mock.MagicMock() 80 | session.request.return_value = response 81 | 82 | st, _, _ = _initialize_a_session_thread( 83 | session, job_queue, response_queue, exception_queue) 84 | 85 | st._handle_request({'method': 'GET', 'url': 'http://example.com'}) 86 | session.request.assert_called_once_with( 87 | method='GET', 88 | url='http://example.com' 89 | ) 90 | 91 | response_queue.put.assert_called_once_with( 92 | ({'method': 'GET', 'url': 'http://example.com'}, response) 93 | ) 94 | assert exception_queue.put.called is False 95 | assert job_queue.get.called is False 96 | assert job_queue.get_nowait.called is False 97 | assert job_queue.get_nowait.called is False 98 | assert job_queue.task_done.called is True 99 | 100 | def test_handle_invalid_request(self): 101 | """Test that exceptions from requests are added to the right queue.""" 102 | session, job_queue, response_queue, exception_queue = _make_mocks() 103 | exception = requests.exceptions.InvalidURL() 104 | 105 | def _side_effect(*args, **kwargs): 106 | raise exception 107 | 108 | # Make the request raise an exception 109 | session.request.side_effect = _side_effect 110 | 111 | st, _, _ = _initialize_a_session_thread( 112 | session, job_queue, response_queue, exception_queue) 113 | 114 | st._handle_request({'method': 'GET', 'url': 'http://example.com'}) 115 | session.request.assert_called_once_with( 116 | method='GET', 117 | url='http://example.com' 118 | ) 119 | 120 | exception_queue.put.assert_called_once_with( 121 | ({'method': 'GET', 'url': 'http://example.com'}, exception) 122 | ) 123 | assert response_queue.put.called is False 124 | assert job_queue.get.called is False 125 | assert job_queue.get_nowait.called is False 126 | assert job_queue.get_nowait.called is False 127 | assert job_queue.task_done.called is True 128 | 129 | def test_make_request(self): 130 | """Test 
that _make_request exits when the queue is Empty.""" 131 | job_queue = next(_make_mocks()) 132 | job_queue.get_nowait.side_effect = queue.Empty() 133 | 134 | st, _, _ = _initialize_a_session_thread(job_queue=job_queue) 135 | st._make_request() 136 | 137 | job_queue.get_nowait.assert_called_once_with() 138 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py{27,37,38,39,310,311,py,py3},py{27,37}-flake8,noopenssl,docstrings 3 | 4 | [gh-actions] 5 | python = 6 | 2.7: py27 7 | 3.7: py37, py37-flake8, noopenssl 8 | 3.8: py38 9 | 3.9: py39 10 | 3.10: py310 11 | 3.11: py311 12 | 13 | [testenv] 14 | pip_pre = False 15 | deps = 16 | requests{env:REQUESTS_VERSION:>=2.0.1,<3.0.0} 17 | pytest 18 | mock;python_version<"3.3" 19 | pyopenssl 20 | ndg-httpsclient 21 | betamax>0.5.0 22 | trustme 23 | commands = 24 | pytest -W error::DeprecationWarning {posargs} 25 | 26 | [testenv:noopenssl] 27 | basepython = python3.7 28 | pip_pre = False 29 | deps = 30 | requests{env:REQUESTS_VERSION:>=2.0.1,<3.0.0} 31 | pytest 32 | mock;python_version<"3.3" 33 | betamax>0.5.0 34 | commands = 35 | pytest -W error::DeprecationWarning {posargs} 36 | 37 | [testenv:py27-flake8] 38 | basepython = python2.7 39 | deps = 40 | flake8 41 | commands = flake8 {posargs} requests_toolbelt 42 | 43 | [testenv:py37-flake8] 44 | basepython = python3.7 45 | deps = 46 | flake8 47 | commands = flake8 {posargs} requests_toolbelt 48 | 49 | [testenv:docstrings] 50 | deps = 51 | flake8 52 | flake8-docstrings 53 | commands = flake8 {posargs} requests_toolbelt 54 | 55 | [testenv:docs] 56 | deps = 57 | -rdocs/requirements.txt 58 | . 59 | commands = 60 | sphinx-build -E -c docs -b html docs/ docs/_build/html 61 | 62 | [testenv:readme] 63 | deps = 64 | readme_renderer 65 | commands = 66 | python setup.py check -m -r -s 67 | 68 | [testenv:release] 69 | deps = 70 | twine >= 1.4.0 71 | wheel 72 | commands = 73 | python setup.py sdist bdist_wheel 74 | twine upload --skip-existing dist/* 75 | 76 | [pytest] 77 | addopts = -q 78 | norecursedirs = *.egg .git .* _* 79 | xfail_strict = true 80 | --------------------------------------------------------------------------------
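# Note: xfail_strict above turns unexpectedly passing xfail tests (for example
# the Windows/Python 2 keep-alive cases in tests/test_socket_options_adapter.py)
# into reported failures rather than silent XPASS results.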