├── .gitignore
├── .pre-commit-config.yaml
├── .pyup.yml
├── .travis.yml
├── AUTHORS.md
├── CHANGELOG.md
├── LICENSE
├── MANIFEST.in
├── README.md
├── bootstrap.sh
├── curequests
│   ├── __about__.py
│   ├── __init__.py
│   ├── adapters.py
│   ├── api.py
│   ├── connection_pool.py
│   ├── cuhttp.py
│   ├── future.py
│   ├── models.py
│   ├── network.py
│   ├── resource_pool.py
│   ├── sessions.py
│   └── utils.py
├── invoke.yaml
├── requirements.txt
├── setup.cfg
├── setup.py
├── tasks.py
├── tests
│   ├── conftest.py
│   ├── test_api.py
│   ├── test_connection_pool.py
│   ├── test_redirect.py
│   ├── test_resource_pool.py
│   ├── test_response.py
│   ├── test_upload.py
│   ├── test_verify_cert.py
│   ├── upload.txt
│   └── utils.py
└── tox.ini

/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 |
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 |
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *.cover
46 | .hypothesis/
47 |
48 | # Translations
49 | *.mo
50 | *.pot
51 |
52 | # Django stuff:
53 | *.log
54 | local_settings.py
55 |
56 | # Flask stuff:
57 | instance/
58 | .webassets-cache
59 |
60 | # Scrapy stuff:
61 | .scrapy
62 |
63 | # Sphinx documentation
64 | docs/_build/
65 |
66 | # PyBuilder
67 | target/
68 |
69 | # Jupyter Notebook
70 | .ipynb_checkpoints
71 |
72 | # pyenv
73 | .python-version
74 |
75 | # celery beat schedule file
76 | celerybeat-schedule
77 |
78 | # SageMath parsed files
79 | *.sage.py
80 |
81 | # Environments
82 | .env
83 | .venv
84 | env/
85 | venv/
86 | ENV/
87 |
88 | # Spyder project settings
89 | .spyderproject
90 | .spyproject
91 |
92 | # Rope project settings
93 | .ropeproject
94 |
95 | # mkdocs documentation
96 | /site
97 |
98 | # mypy
99 | .mypy_cache/
100 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 |   - repo: git://github.com/pre-commit/pre-commit-hooks
3 |     sha: v1.1.1
4 |     hooks:
5 |       - id: trailing-whitespace
6 |         files: \.(py|pyx|sh|md|txt|in|ini|json|yaml|yml)$
7 |       - id: end-of-file-fixer
8 |         files: \.(py|pyx|sh|md|txt|in|ini|json|yaml|yml)$
9 |       - id: check-byte-order-marker
10 |         files: \.(py|pyx)$
11 |       - id: check-case-conflict
12 |       - id: check-added-large-files
13 |         args:
14 |           - '--maxkb=2000'
15 |       - id: check-merge-conflict
16 |       - id: check-symlinks
17 |       - id: check-json
18 |       - id: check-yaml
19 |       - id: debug-statements
20 |       - id: check-docstring-first
21 |         files: \.(py|pyx)$
22 |       - id: double-quote-string-fixer
23 |         files: \.(py|pyx)$
24 |       - id: fix-encoding-pragma
25 |         files: \.(py|pyx)$
26 |         args:
27 |           - '--remove'
28 |       - id: flake8
29 |         args:
30 |           - '--max-line-length=119'
31 |           - '--ignore=E722,W504'
32 |
--------------------------------------------------------------------------------
/.pyup.yml:
--------------------------------------------------------------------------------
1 | schedule: "every month"
2 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 |
3 | python:
4 |   - "3.6"
5 |   - "3.7"
6 |   - "nightly"
7 |
8 | install:
9 |   - ./bootstrap.sh
10 |   - pip freeze
11 |
12 | script:
13 |   - inv lint
14 |   - inv test --cov
15 |
16 | after_success:
17 |   - codecov
18 |
--------------------------------------------------------------------------------
/AUTHORS.md:
--------------------------------------------------------------------------------
1 | CuRequests is written and maintained by guyskk and
2 | various contributors:
3 |
4 | # The Maintainers
5 |
6 | - guyskk / https://github.com/guyskk
7 |
8 | ## Patches and Suggestions
9 |
10 | - Your name :)
11 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 | All notable changes to this project will be documented in this file.
3 |
4 | The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
5 | and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
6 |
7 | ## [0.4.0] - 2017-12-09
8 | ### Added
9 | - Support Redirection and History
10 |
11 | ## [0.3.0] - 2017-11-27
12 | ### Added
13 | - Support POST Multiple Multipart-Encoded Files #8
14 |
15 | ## [0.2.0] - 2017-11-22
16 | ### Added
17 | - Support HTTP Proxy #6
18 |
19 | ## [0.1.0] - 2017-11-12
20 | ### Added
21 | - Connection pool #5
22 | - Support SSL CA Certificates and Client Side Certificates
23 | - Support keep-alive
24 |
25 | ### Fixed
26 | - A bunch of bugs
27 |
28 | ## [0.0.1] - 2017-10-29
29 | ### Added
30 | - The first release of CuRequests
31 |
32 | ## [0.0.0] - 2017-10-01
33 | ### Added
34 | - The idea of CuRequests
35 | - Some basic features
36 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2017 guyskk
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of
6 | this software and associated documentation files (the "Software"), to deal in
7 | the Software without restriction, including without limitation the rights to
8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9 | the Software, and to permit persons to whom the Software is furnished to do so,
10 | subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md LICENSE 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Curio + Requests: Async HTTP for Humans 2 | 3 | [![PyPI](https://img.shields.io/pypi/pyversions/curequests.svg)](https://pypi.python.org/pypi/curequests) 4 | [![travis-ci](https://api.travis-ci.org/guyskk/curequests.svg?branch=master)](https://travis-ci.org/guyskk/curequests) [![codecov](https://codecov.io/gh/guyskk/curequests/branch/master/graph/badge.svg)](https://codecov.io/gh/guyskk/curequests) 5 | > The same taste as Requests! 6 | 7 | ## Overview 8 | 9 | ```python 10 | from curio import run 11 | from curequests import get, post 12 | 13 | async def main(): 14 | r = await get('https://httpbin.org/get') 15 | print(r.json()) 16 | r = await post('https://httpbin.org/post', json={'hello': 'world'}) 17 | print(r.json()) 18 | 19 | run(main) 20 | ``` 21 | 22 | ## Install 23 | 24 | Python 3.6+ is required. 25 | 26 | ```bash 27 | pip install curequests 28 | ``` 29 | 30 | ## Features 31 | 32 | Follow http://docs.python-requests.org/en/master/#the-user-guide 33 | 34 | > Work in progress, Not production ready! 35 | 36 | ### Quickstart 37 | 38 | - [x] Make a Request 39 | - [x] Passing Parameters In URLs 40 | - [x] Response Content 41 | - [x] Binary Response Content 42 | - [x] JSON Response Content 43 | - [x] Custom Headers 44 | - [x] POST a Multipart-Encoded File 45 | - [x] Response Status Codes 46 | - [x] Response Headers 47 | - [x] Cookies 48 | - [x] Redirection and History 49 | - [x] Timeouts 50 | - [x] Errors and Exceptions 51 | 52 | ### Advanced Usage 53 | 54 | - [x] Session Objects [CuSession] 55 | - [x] Request and Response Objects [CuResponse] 56 | - [x] Prepared Requests [CuRequest, CuPreparedRequest] 57 | - [x] SSL Cert Verification 58 | - [x] Client Side Certificates 59 | - [x] CA Certificates 60 | - [x] Body Content Workflow 61 | - [x] Keep-Alive 62 | - [x] Streaming Uploads 63 | - [x] Chunk-Encoded Requests [Generator / Async Generator] 64 | - [x] POST Multiple Multipart-Encoded Files 65 | - [x] Event Hooks 66 | - [x] Custom Authentication 67 | - [x] Streaming Requests [Async Generator] 68 | - [x] Proxies [HTTP&HTTPS, not support SOCKS currently] 69 | - [x] Compliance 70 | - [x] HTTP Verbs 71 | - [x] Custom Verbs 72 | - [x] Link Headers 73 | - [x] Transport Adapters [CuHTTPAdapter] 74 | - [x] Blocking Or Non-Blocking? 75 | - [x] Header Ordering 76 | - [x] Timeouts 77 | - [x] Authentication 78 | 79 | ### How to Contribute 80 | 81 | 1. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a *Contributor Friendly* tag for issues that should be ideal for people who are not very familiar with the codebase yet. 82 | 2. Fork the repository on GitHub to start making your changes to the **master** branch (or branch off of it). 83 | 3. Write a test which shows that the bug was fixed or that the feature works as expected. 84 | 4. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to *AUTHORS.md*. 
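
### Session and streaming example

A minimal sketch of two of the advanced features listed above, session reuse (keep-alive) and streaming response content. It assumes `CuSession` and `CuResponse` mirror the `requests` Session/Response API in async form, as the checklists indicate; the httpbin URL is only illustrative.

```python
from curio import run
from curequests import CuSession

async def main():
    # One session reuses pooled connections across requests (Keep-Alive).
    async with CuSession() as session:
        r = await session.get('https://httpbin.org/stream/3', stream=True)
        # stream=True defers the body download; consume it chunk by chunk.
        async for chunk in r.iter_content(chunk_size=1024):
            print(len(chunk))

run(main)
```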
85 |
86 | ### Similar projects
87 |
88 | - https://github.com/littlecodersh/trip
89 |   Async HTTP for Humans, Tornado & Requests In Pair
90 | - https://github.com/theelous3/asks
91 |   Async requests-like httplib for python
92 |
--------------------------------------------------------------------------------
/bootstrap.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -ex
3 |
4 | pip install --upgrade setuptools
5 | pip install -r requirements.txt
6 | pip install -e .
7 | pre-commit install
8 |
--------------------------------------------------------------------------------
/curequests/__about__.py:
--------------------------------------------------------------------------------
1 | __title__ = 'curequests'
2 | __description__ = 'Curio + Requests: Async HTTP for Humans.'
3 | __url__ = 'https://github.com/guyskk/curequests'
4 | __version__ = '0.4.1'
5 | __author__ = 'guyskk'
6 | __author_email__ = 'guyskk@qq.com'
7 | __license__ = 'MIT'
8 |
--------------------------------------------------------------------------------
/curequests/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | """Concurrent requests by curio
3 |
4 | api:
5 |     - request, use curio Session
6 | Session:
7 |     - __init__, mount curio HTTPAdapter
8 |     - send, almost the same as requests, with some calls changed to async/await style
9 |     - close, async/await style
10 | HTTPAdapter:
11 |     - implemented with httptools and curio
12 |     - send, return curio Response
13 |     - close
14 | Response:
15 |     - iter_content
16 |     - content
17 |     - ...
18 | """
19 | from requests import *
20 | from .sessions import *
21 | from .models import *
22 | from .api import *
23 | from .__about__ import (
24 |     __title__,
25 |     __description__,
26 |     __url__,
27 |     __version__,
28 |     __author__,
29 |     __author_email__,
30 |     __license__,
31 | )
32 |
--------------------------------------------------------------------------------
/curequests/adapters.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from os.path import isdir, exists
3 |
4 | import yarl
5 | import curio
6 | from curio import ssl
7 | from requests.adapters import BaseAdapter
8 | from requests.adapters import (
9 |     CaseInsensitiveDict, get_encoding_from_headers, extract_cookies_to_jar)
10 | from requests.exceptions import ConnectionError
11 |
12 | from .models import CuResponse, MultipartBody, StreamBody
13 | from .utils import select_proxy, normalize_timeout
14 | from .cuhttp import ResponseParser, RequestSerializer
15 | from .connection_pool import ConnectionPool
16 |
17 | DEFAULT_CONNS_PER_NETLOC = 10
18 | DEFAULT_CONNS_TOTAL = 100
19 | CONTENT_CHUNK_SIZE = 16 * 1024
20 |
21 | logger = logging.getLogger(__name__)
22 |
23 |
24 | class CuHTTPAdapter(BaseAdapter):
25 |     """The built-in HTTP Adapter for curio.
26 |
27 |     Provides a general-case interface for Requests sessions to contact HTTP and
28 |     HTTPS urls by implementing the Transport Adapter interface. This class will
29 |     usually be created by the :class:`CuSession <CuSession>` class under the
30 |     covers.
31 |
32 |     :param max_conns_per_netloc: The maximum number of connections to keep
33 |         per netloc, i.e. per (scheme, host, port).
34 |     :param max_conns_total: The maximum number of connections to keep in
35 |         total across all netlocs.
36 |
37 |     When both limits are reached, new requests wait until a connection is
38 |     released back to the pool; if the total limit is reached, an idle
39 |     connection is closed to make room for a new netloc. Idle connections
40 |     are probed for peer close before being reused, and connections are
41 |     kept alive after each response when the server allows it.
42 |
43 |     Usage::
44 |
45 |         >>> import curequests
46 |         >>> s = curequests.CuSession()
47 |         >>> a = curequests.adapters.CuHTTPAdapter(max_conns_per_netloc=10)
48 |         >>> s.mount('http://', a)
49 |     """
50 |
51 |     def __init__(self, *,
52 |                  max_conns_per_netloc=DEFAULT_CONNS_PER_NETLOC,
53 |                  max_conns_total=DEFAULT_CONNS_TOTAL,
54 |                  ):
55 |         super().__init__()
56 |         self._pool = ConnectionPool(
57 |             max_conns_per_netloc=max_conns_per_netloc,
58 |             max_conns_total=max_conns_total,
59 |         )
60 |
61 |     def get_ssl_params(self, url, verify, cert):
62 |         if url.scheme != 'https' or (not verify and not cert):
63 |             return {'ssl_context': None}
64 |
65 |         ssl_params = {}
66 |         ssl_context = ssl.create_default_context()
67 |
68 |         if verify:
69 |             if isinstance(verify, str):
70 |                 if not exists(verify):
71 |                     raise FileNotFoundError(
72 |                         f'Could not find a suitable TLS CA certificate bundle, '
73 |                         f'invalid path: {verify}')
74 |                 if isdir(verify):
75 |                     ssl_context.load_verify_locations(capath=verify)
76 |                 else:
77 |                     ssl_context.load_verify_locations(cafile=verify)
78 |             ssl_params['server_hostname'] = url.raw_host
79 |             ssl_context.verify_mode = ssl.CERT_REQUIRED
80 |
81 |         if cert:
82 |             if isinstance(cert, str):
83 |                 cert_file = cert
84 |                 key_file = None
85 |             else:
86 |                 cert_file, key_file = cert
87 |             if cert_file and not exists(cert_file):
88 |                 raise FileNotFoundError(
89 |                     f'Could not find the TLS certificate file, '
90 |                     f'invalid path: {cert_file}')
91 |             if key_file and not exists(key_file):
92 |                 raise FileNotFoundError(
93 |                     f'Could not find the TLS key file, '
94 |                     f'invalid path: {key_file}')
95 |             ssl_context.load_cert_chain(cert_file, key_file)
96 |
97 |         ssl_params['ssl_context'] = ssl_context
98 |         return ssl_params
99 |
100 |     async def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
101 |         """Sends PreparedRequest object. Returns Response object.
102 |
103 |         :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
104 |         :param stream: (optional) Whether to stream the request content.
105 |         :param timeout: (optional) How long to wait for the server to send
106 |             data before giving up, as a float, or a :ref:`(connect timeout,
107 |             read timeout) <timeouts>` tuple.
108 |         :type timeout: float or tuple
109 |         :param verify: (optional) Either a boolean, in which case it controls whether
110 |             we verify the server's TLS certificate, or a string, in which case it
111 |             must be a path to a CA bundle to use
112 |         :param cert: (optional) Any user-provided SSL certificate to be trusted.
113 |         :param proxies: (optional) The proxies dictionary to apply to the request.
114 |         :rtype: requests.Response
115 |         """
116 |         logger.debug(f'Send request: {request.method} {request.url}')
117 |         url = yarl.URL(request.url)
118 |         request.headers.setdefault('Host', url.raw_host)
119 |
120 |         ssl_params = self.get_ssl_params(url, verify, cert)
121 |         timeout = normalize_timeout(timeout)
122 |         proxy = select_proxy(
123 |             url.scheme, host=url.raw_host, port=url.port, proxies=proxies)
124 |         conn = await self._pool.get(
125 |             scheme=url.scheme,
126 |             host=url.raw_host,
127 |             port=url.port,
128 |             timeout=timeout.connect,
129 |             proxy=proxy,
130 |             **ssl_params,
131 |         )
132 |
133 |         request_path = url.raw_path
134 |         if url.raw_query_string:
135 |             request_path += '?' + url.raw_query_string
136 |         if conn.proxy and conn.proxy.scheme == 'http' and url.scheme == 'http':
137 |             origin = f'{url.scheme}://{url.raw_host}:{url.port}'
138 |             request_path = origin + request_path
139 |         body = body_stream = None
140 |         if isinstance(request.body, (MultipartBody, StreamBody)):
141 |             body_stream = request.body
142 |         else:
143 |             body = request.body
144 |         serializer = RequestSerializer(
145 |             path=request_path,
146 |             method=request.method,
147 |             headers=request.headers,
148 |             body=body,
149 |             body_stream=body_stream,
150 |         )
151 |
152 |         sock = conn.sock
153 |         try:
154 |             try:
155 |                 async for bytes_to_send in serializer:
156 |                     await sock.sendall(bytes_to_send)
157 |                 raw = await ResponseParser(sock, timeout=timeout.read).parse()
158 |             except curio.socket.error as err:
159 |                 raise ConnectionError(err, request=request)
160 |         except:
161 |             await conn.close()
162 |             raise
163 |
164 |         response = self.build_response(request, raw, conn)
165 |         logger.debug(f'Receive response: {response}')
166 |         if not stream:
167 |             content = []
168 |             async for chunk in raw.stream(CONTENT_CHUNK_SIZE):
169 |                 content.append(chunk)
170 |             content = b''.join(content)
171 |             logger.debug(f'Read response body, length {len(content)}')
172 |             if raw.keep_alive:
173 |                 await conn.release()
174 |             else:
175 |                 await conn.close()
176 |             response._content = content
177 |             response._content_consumed = True
178 |         return response
179 |
180 |     def build_response(self, req, resp, conn):
181 |         """Builds a :class:`Response <Response>` object from a cuhttp
182 |         response. This should not be called from user code, and is only exposed
183 |         for use when subclassing the
184 |         :class:`CuHTTPAdapter <CuHTTPAdapter>`
185 |
186 |         :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
187 |         :param resp: The cuhttp response object.
188 |         :rtype: requests.Response
189 |         """
190 |         response = CuResponse()
191 |
192 |         # Fallback to None if there's no status_code, for whatever reason.
193 |         response.status_code = getattr(resp, 'status', None)
194 |
195 |         # Make headers case-insensitive.
196 |         response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
197 |
198 |         # Set encoding.
199 |         response.encoding = get_encoding_from_headers(response.headers)
200 |         response.raw = resp
201 |         response.reason = response.raw.reason
202 |
203 |         if isinstance(req.url, bytes):
204 |             response.url = req.url.decode('utf-8')
205 |         else:
206 |             response.url = req.url
207 |
208 |         # Add new cookies from the server.
209 |         extract_cookies_to_jar(response.cookies, req, resp)
210 |
211 |         # Give the Response some context.
212 |         response.request = req
213 |         response.connection = conn
214 |
215 |         return response
216 |
217 |     async def close(self):
218 |         """Disposes of any internal state.
219 |
220 |         Currently, this closes the underlying connection pool, which
221 |         closes any pooled connections.
222 |         """
223 |         logger.debug(f'Close adapter {self}')
224 |         await self._pool.close()
225 |
--------------------------------------------------------------------------------
/curequests/api.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | """
3 | curequests.api
4 | ~~~~~~~~~~~~~~
5 |
6 | This module implements the Requests API in async/await style.
7 |
8 | :copyright: (c) 2012 by Kenneth Reitz.
9 | :license: Apache2, see LICENSE for more details.
10 | """
11 |
12 | from . import sessions
13 |
14 |
15 | async def request(method, url, **kwargs):
16 |     """Constructs and sends a :class:`Request <Request>`.
17 |
18 |     :param method: method for the new :class:`Request` object.
19 |     :param url: URL for the new :class:`Request` object.
20 |     :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
21 |     :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
22 |     :param json: (optional) json data to send in the body of the :class:`Request`.
23 |     :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
24 |     :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
25 |     :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
26 |         ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
27 |         or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
28 |         defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
29 |         to add for the file.
30 |     :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
31 |     :param timeout: (optional) How many seconds to wait for the server to send data
32 |         before giving up, as a float, or a :ref:`(connect timeout, read
33 |         timeout) <timeouts>` tuple.
34 |     :type timeout: float or tuple
35 |     :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
36 |     :type allow_redirects: bool
37 |     :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
38 |     :param verify: (optional) Either a boolean, in which case it controls whether we verify
39 |         the server's TLS certificate, or a string, in which case it must be a path
40 |         to a CA bundle to use. Defaults to ``True``.
41 |     :param stream: (optional) if ``False``, the response content will be immediately downloaded.
42 |     :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
43 |     :return: :class:`Response <Response>` object
44 |     :rtype: requests.Response
45 |
46 |     Usage::
47 |
48 |         >>> import curequests
49 |         >>> req = await curequests.request('GET', 'http://httpbin.org/get')
50 |
51 |     """
52 |
53 |     # By using the 'with' statement we are sure the session is closed, thus we
54 |     # avoid leaving sockets open which can trigger a ResourceWarning in some
55 |     # cases, and look like a memory leak in others.
56 |     async with sessions.CuSession() as session:
57 |         return await session.request(method=method, url=url, **kwargs)
58 |
59 |
60 | def get(url, params=None, **kwargs):
61 |     r"""Sends a GET request.
62 | 63 | :param url: URL for the new :class:`Request` object. 64 | :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. 65 | :param \*\*kwargs: Optional arguments that ``request`` takes. 66 | :return: :class:`Response ` object 67 | :rtype: requests.Response 68 | """ 69 | 70 | kwargs.setdefault('allow_redirects', True) 71 | return request('get', url, params=params, **kwargs) 72 | 73 | 74 | def options(url, **kwargs): 75 | r"""Sends an OPTIONS request. 76 | 77 | :param url: URL for the new :class:`Request` object. 78 | :param \*\*kwargs: Optional arguments that ``request`` takes. 79 | :return: :class:`Response ` object 80 | :rtype: requests.Response 81 | """ 82 | 83 | kwargs.setdefault('allow_redirects', True) 84 | return request('options', url, **kwargs) 85 | 86 | 87 | def head(url, **kwargs): 88 | r"""Sends a HEAD request. 89 | 90 | :param url: URL for the new :class:`Request` object. 91 | :param \*\*kwargs: Optional arguments that ``request`` takes. 92 | :return: :class:`Response ` object 93 | :rtype: requests.Response 94 | """ 95 | 96 | kwargs.setdefault('allow_redirects', False) 97 | return request('head', url, **kwargs) 98 | 99 | 100 | def post(url, data=None, json=None, **kwargs): 101 | r"""Sends a POST request. 102 | 103 | :param url: URL for the new :class:`Request` object. 104 | :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. 105 | :param json: (optional) json data to send in the body of the :class:`Request`. 106 | :param \*\*kwargs: Optional arguments that ``request`` takes. 107 | :return: :class:`Response ` object 108 | :rtype: requests.Response 109 | """ 110 | 111 | return request('post', url, data=data, json=json, **kwargs) 112 | 113 | 114 | def put(url, data=None, **kwargs): 115 | r"""Sends a PUT request. 116 | 117 | :param url: URL for the new :class:`Request` object. 118 | :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. 119 | :param json: (optional) json data to send in the body of the :class:`Request`. 120 | :param \*\*kwargs: Optional arguments that ``request`` takes. 121 | :return: :class:`Response ` object 122 | :rtype: requests.Response 123 | """ 124 | 125 | return request('put', url, data=data, **kwargs) 126 | 127 | 128 | def patch(url, data=None, **kwargs): 129 | r"""Sends a PATCH request. 130 | 131 | :param url: URL for the new :class:`Request` object. 132 | :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. 133 | :param json: (optional) json data to send in the body of the :class:`Request`. 134 | :param \*\*kwargs: Optional arguments that ``request`` takes. 135 | :return: :class:`Response ` object 136 | :rtype: requests.Response 137 | """ 138 | 139 | return request('patch', url, data=data, **kwargs) 140 | 141 | 142 | def delete(url, **kwargs): 143 | r"""Sends a DELETE request. 144 | 145 | :param url: URL for the new :class:`Request` object. 146 | :param \*\*kwargs: Optional arguments that ``request`` takes. 
147 | :return: :class:`Response ` object 148 | :rtype: requests.Response 149 | """ 150 | 151 | return request('delete', url, **kwargs) 152 | -------------------------------------------------------------------------------- /curequests/connection_pool.py: -------------------------------------------------------------------------------- 1 | """ConnectionPool 2 | 3 | Usage: 4 | 5 | pool = ConnectionPool() 6 | conn = await pool.get('http', 'httpbin.org', 80) 7 | async with conn: 8 | ... 9 | # connection will close if exception raised 10 | # else connection will release to pool 11 | """ 12 | import logging 13 | from base64 import b64encode 14 | from yarl import URL 15 | from curio.io import WantRead, WantWrite 16 | from requests.exceptions import ProxyError 17 | 18 | from .resource_pool import ResourcePool, ResourcePoolClosedError 19 | from .future import Future 20 | from .cuhttp import RequestSerializer, ResponseParser 21 | from .network import open_connection, ssl_wrap_socket 22 | 23 | logger = logging.getLogger(__name__) 24 | 25 | 26 | def _basic_auth_str(username, password): 27 | """Returns a Basic Auth string.""" 28 | auth = ('%s:%s' % (username, password)).encode('utf-8') 29 | return 'Basic ' + b64encode(auth).decode('utf-8').strip() 30 | 31 | 32 | class ConnectionPoolClosedError(ResourcePoolClosedError): 33 | """Connection pool closed""" 34 | 35 | 36 | async def _close_connection_if_need(resource): 37 | if resource is not None: 38 | conn = resource.connection 39 | conn._closed = True 40 | await conn.sock.close() 41 | logger.debug(f'Connection {conn} closed') 42 | 43 | 44 | class Connection: 45 | """Connection 46 | 47 | Attrs: 48 | scheme (str): scheme 49 | host (str): host 50 | port (int): port 51 | sock (curio.Socket): socket 52 | proxy (yarl.URL): proxy url 53 | closed (bool): connection closed or not 54 | released (bool): connection released or not 55 | """ 56 | 57 | def __init__(self, resource_pool, resource, sock, proxy=None): 58 | self._resource_pool = resource_pool 59 | self._resource = resource 60 | resource.connection = self # bind resource & connection 61 | self.scheme, self.host, self.port = resource.key 62 | self.sock = sock 63 | self.proxy = proxy 64 | self._closed = False 65 | self._released = False 66 | 67 | @property 68 | def closed(self): 69 | return self._closed 70 | 71 | @property 72 | def released(self): 73 | return self._released 74 | 75 | def _is_peer_closed(self): 76 | """check if socket in close-wait state""" 77 | # the socket is non-blocking mode, read 1 bytes will return EOF 78 | # which means peer closed, or raise exception means alive 79 | try: 80 | r = self.sock._socket_recv(1) # FIXME: I use a private method, bad! 81 | except WantRead: 82 | return False 83 | except WantWrite: 84 | return False 85 | assert r == b'', "is_peer_closed shouldn't be called at this time!" 
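# Why the private call works: curio's sock.recv() is a coroutine that would
# suspend this task until data arrives, while the wrapped socket's raw
# non-blocking recv either raises WantRead/WantWrite (peer still alive) or
# returns b'' immediately (peer sent FIN), which is exactly the probe needed.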
86 | return True 87 | 88 | async def _close_or_release(self, close=False): 89 | if self._closed or self._released: 90 | return 91 | action = 'Close' if close else 'Release' 92 | logger.debug(f'{action} connection {self}') 93 | pool_ret = self._resource_pool.put(self._resource, close=close) 94 | self._released = True 95 | await _close_connection_if_need(pool_ret.need_close) 96 | if pool_ret.need_notify is not None: 97 | fut, result = pool_ret.need_notify 98 | await fut.set_result(result) 99 | 100 | async def close(self): 101 | """Close the connection""" 102 | await self._close_or_release(close=True) 103 | 104 | async def release(self): 105 | """Release the connection to connection pool""" 106 | await self._close_or_release(close=False) 107 | 108 | async def __aenter__(self): 109 | return self 110 | 111 | async def __aexit__(self, exc_type, exc_value, traceback): 112 | if exc_value is None: 113 | await self.release() 114 | else: 115 | await self.close() 116 | 117 | def __repr__(self): 118 | scheme, host, port = self.scheme, self.host, self.port 119 | if self.proxy: 120 | proxy = self.proxy 121 | if proxy.password: 122 | proxy = proxy.with_password('***') 123 | proxy = ' proxy={}'.format(proxy) 124 | else: 125 | proxy = '' 126 | status = 'busy' 127 | if self.released: 128 | status = 'idle' 129 | if self.closed: 130 | status = 'closed' 131 | return f'<{type(self).__name__} {scheme}://{host}:{port}{proxy} [{status}]>' 132 | 133 | 134 | class ConnectionPool: 135 | """Connection Pool 136 | 137 | Attrs: 138 | max_conns_per_netloc (int): max connections per netloc 139 | max_conns_total (int): max connections in total 140 | """ 141 | 142 | def __init__(self, max_conns_per_netloc=10, max_conns_total=100): 143 | self.max_conns_per_netloc = max_conns_per_netloc 144 | self.max_conns_total = max_conns_total 145 | self._pool = ResourcePool( 146 | future_class=Future, 147 | max_items_per_key=max_conns_per_netloc, 148 | max_items_total=max_conns_total, 149 | ) 150 | 151 | async def _open_connection(self, resource, proxy=None, timeout=None, **ssl_params): 152 | scheme, host, port = resource.key 153 | if not proxy: 154 | logger.debug(f'Connect to {host}:{port}') 155 | sock = await open_connection( 156 | host=host, 157 | port=port, 158 | timeout=timeout, 159 | **ssl_params 160 | ) 161 | return Connection(self._pool, resource, sock) 162 | proxy = URL(proxy) 163 | logger.debug(f'Connect to proxy {proxy}') 164 | sock = await open_connection( 165 | host=proxy.raw_host, 166 | port=proxy.port, 167 | timeout=timeout, 168 | ) 169 | conn = Connection(self._pool, resource, sock, proxy) 170 | return await self._setup_proxy(conn, proxy, **ssl_params) 171 | 172 | async def _setup_proxy(self, conn, proxy, **ssl_params): 173 | if not ssl_params.get('ssl_context'): 174 | logger.debug(f'Forward HTTP request to {proxy}') 175 | return conn 176 | headers = {} 177 | if proxy.raw_user: 178 | auth = _basic_auth_str(proxy.raw_user, proxy.password) 179 | headers['Proxy-Authorization'] = auth 180 | path = f'{conn.host}:{conn.port}' 181 | logger.debug(f'Setup HTTP tunnel {proxy}') 182 | request = RequestSerializer(path, method='CONNECT', headers=headers) 183 | async for chunk in request: 184 | await conn.sock.sendall(chunk) 185 | response = await ResponseParser(conn.sock).parse() 186 | if response.status != 200: 187 | raise ProxyError(response) 188 | conn.sock = await ssl_wrap_socket(conn.sock, **ssl_params) 189 | return conn 190 | 191 | @property 192 | def num_idle(self): 193 | """Number of idle connections""" 194 | return 
self._pool.num_idle 195 | 196 | @property 197 | def num_busy(self): 198 | """Number of busy connections""" 199 | return self._pool.num_busy 200 | 201 | @property 202 | def num_total(self): 203 | """Number of total connections""" 204 | return self._pool.num_total 205 | 206 | def __repr__(self): 207 | return f'<{type(self).__name__} idle:{self.num_idle} total:{self.num_total}>' 208 | 209 | async def get(self, scheme, host, port, **kwargs): 210 | """Get a connection 211 | 212 | Params: 213 | scheme (str): connection scheme 214 | host (str): connection host 215 | port (int): connection port 216 | timeout (int): connection timeout in seconds 217 | **kwargs: see curio.open_connection 218 | """ 219 | while True: 220 | conn = await self._get(scheme, host, port, **kwargs) 221 | if conn._is_peer_closed(): 222 | logger.info(f"Detected connection's peer closed, will close the connection: {conn}") 223 | await conn.close() 224 | else: 225 | return conn 226 | 227 | async def _get(self, scheme, host, port, **kwargs): 228 | try: 229 | pool_ret = self._pool.get((scheme, host, port)) 230 | except ResourcePoolClosedError as ex: 231 | raise ConnectionPoolClosedError('Connection pool closed') from ex 232 | await _close_connection_if_need(pool_ret.need_close) 233 | if pool_ret.need_wait is not None: 234 | pool_ret = await pool_ret.need_wait 235 | 236 | if pool_ret.need_open is not None: 237 | conn = await self._open_connection(pool_ret.need_open, **kwargs) 238 | logger.debug(f'Get new connection: {conn}') 239 | else: 240 | conn = pool_ret.idle.connection 241 | conn._released = False 242 | logger.debug(f'Get an idle connection: {conn}') 243 | return conn 244 | 245 | async def close(self, force=False): 246 | """Close the connection pool 247 | 248 | Params: 249 | force (bool): close busy connections or not 250 | """ 251 | logger.debug(f'Close connection pool: {self}') 252 | need_close, need_wait = self._pool.close(force=force) 253 | ex = ConnectionPoolClosedError('Connection pool closed') 254 | for resource in need_close: 255 | await _close_connection_if_need(resource) 256 | for fut in need_wait: 257 | await fut.set_exception(ex) 258 | -------------------------------------------------------------------------------- /curequests/cuhttp.py: -------------------------------------------------------------------------------- 1 | import zlib 2 | from collections import namedtuple 3 | import httptools 4 | from curio import timeout_after, TaskTimeout 5 | from curio.io import StreamBase 6 | from requests.structures import CaseInsensitiveDict 7 | from requests import ReadTimeout as ReadTimeoutError 8 | from urllib3.response import GzipDecoder as GzipDecoderBase 9 | from urllib3.response import DeflateDecoder as DeflateDecoderBase 10 | from urllib3.exceptions import DecodeError 11 | 12 | 13 | class ProtocolError(httptools.HttpParserError): 14 | """ProtocolError""" 15 | 16 | 17 | class _Decoder: 18 | 19 | def decompress(self, *args, **kwargs): 20 | try: 21 | return super().decompress(*args, **kwargs) 22 | except zlib.error as ex: 23 | msg = 'failed to decode response with {}'.format( 24 | type(self).__name__) 25 | raise DecodeError(msg) from ex 26 | 27 | 28 | class GzipDecoder(_Decoder, GzipDecoderBase): 29 | """GzipDecoder""" 30 | 31 | 32 | class DeflateDecoder(_Decoder, DeflateDecoderBase): 33 | """DeflateDecoder""" 34 | 35 | 36 | Response = namedtuple('Response', [ 37 | 'status', 38 | 'reason', 39 | 'version', 40 | 'keep_alive', 41 | 'headers', 42 | 'stream', 43 | ]) 44 | 45 | MAX_BUFFER_SIZE = 64 * 1024 46 | 
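# DEFAULT_BUFFER_SIZE is the recv size used while parsing headers and the
# default chunk size for body streams; MAX_BUFFER_SIZE caps a single read
# when ResponseStream is asked for an unbounded amount (maxbytes=-1).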
DEFAULT_BUFFER_SIZE = 4 * 1024 47 | 48 | 49 | class ResponseStream(StreamBase): 50 | """Response stream as file object""" 51 | 52 | def __init__(self, sock, gen, buffer_size_setter): 53 | super().__init__(sock) 54 | self._gen = gen 55 | self._set_buffer_size = buffer_size_setter 56 | 57 | async def _read(self, maxbytes=-1): 58 | maxbytes = maxbytes if maxbytes > 0 else MAX_BUFFER_SIZE 59 | self._set_buffer_size(maxbytes) 60 | try: 61 | return await self._gen.__anext__() 62 | except StopAsyncIteration: 63 | return b'' 64 | 65 | 66 | class ResponseParser: 67 | """ 68 | Attrs: 69 | version 70 | status 71 | reason 72 | keep_alive 73 | 74 | headers 75 | body_stream 76 | 77 | started 78 | headers_completed 79 | completed 80 | """ 81 | 82 | def __init__(self, sock, *, buffer_size=DEFAULT_BUFFER_SIZE, timeout=None): 83 | self._sock = sock 84 | self._parser = httptools.HttpResponseParser(self) 85 | 86 | # options 87 | self.buffer_size = buffer_size 88 | self.timeout = timeout 89 | 90 | # primary attrs 91 | self.version = None 92 | self.status = None 93 | self.reason = b'' 94 | self.headers = [] 95 | 96 | # temp attrs 97 | self.current_buffer_size = self.buffer_size 98 | self.header_name = b'' 99 | self.body_chunks = [] 100 | 101 | # state 102 | self.started = False 103 | self.headers_completed = False 104 | self.completed = False 105 | 106 | # ========= httptools callbacks ======== 107 | def on_message_begin(self): 108 | self.started = True 109 | 110 | def on_status(self, status: bytes): 111 | self.reason += status 112 | 113 | def on_header(self, name: bytes, value: bytes or None): 114 | self.header_name += name 115 | if value is not None: 116 | self.headers.append((self.header_name.decode(), value.decode())) 117 | self.header_name = b'' 118 | 119 | def on_headers_complete(self): 120 | self.version = self._parser.get_http_version() 121 | self.status = self._parser.get_status_code() 122 | self.reason = self.reason.decode() 123 | self.keep_alive = self._parser.should_keep_alive() 124 | self.headers = CaseInsensitiveDict(self.headers) 125 | self.headers_completed = True 126 | 127 | def on_body(self, body: bytes): 128 | # Implement Note: a `feed_data` can cause multi `on_body` when data 129 | # is large, eg: len(data) > 8192, so we should store `body` in a list 130 | self.body_chunks.append(body) 131 | 132 | def on_message_complete(self): 133 | self.completed = True 134 | # ========= end httptools callbacks ======== 135 | 136 | async def recv(self): 137 | if not self.timeout or self.timeout <= 0: 138 | return await self._sock.recv(self.current_buffer_size) 139 | else: 140 | try: 141 | return await timeout_after( 142 | self.timeout, 143 | self._sock.recv(self.current_buffer_size) 144 | ) 145 | except TaskTimeout as ex: 146 | raise ReadTimeoutError(str(ex)) from None 147 | 148 | def _set_current_buffer_size(self, buffer_size): 149 | self.current_buffer_size = buffer_size 150 | 151 | def _get_decoder(self): 152 | mode = self.headers.get('Content-Encoding', '').lower() 153 | if mode == 'gzip': 154 | return GzipDecoder() 155 | elif mode == 'deflate': 156 | return DeflateDecoder() 157 | return None 158 | 159 | async def parse(self): 160 | while not self.headers_completed: 161 | data = await self.recv() 162 | self._parser.feed_data(data) 163 | if not data: 164 | break 165 | if not self.headers_completed: 166 | raise ProtocolError('incomplete response headers') 167 | body_stream = self.body_stream() 168 | decoder = self._get_decoder() 169 | if decoder: 170 | body_stream = _decompress(body_stream, decoder) 171 
| 172 | def stream(chunk_size=DEFAULT_BUFFER_SIZE): 173 | self._set_current_buffer_size(chunk_size) 174 | return body_stream 175 | 176 | environ = dict( 177 | version=self.version, 178 | status=self.status, 179 | reason=self.reason, 180 | keep_alive=self.keep_alive, 181 | headers=self.headers, 182 | stream=stream, 183 | ) 184 | return Response(**environ) 185 | 186 | async def body_stream(self): 187 | while self.body_chunks: 188 | yield self.body_chunks.pop(0) 189 | while not self.completed: 190 | data = await self.recv() 191 | # feed data even when data is empty, so parser will completed 192 | self._parser.feed_data(data) 193 | while self.body_chunks: 194 | yield self.body_chunks.pop(0) 195 | if not data: 196 | break 197 | if not self.completed: 198 | raise ProtocolError('incomplete response body') 199 | 200 | 201 | class RequestSerializer: 202 | def __init__(self, path, method='GET', *, version='HTTP/1.1', headers=None, 203 | body=b'', body_stream=None): 204 | self.path = path 205 | self.method = method 206 | self.version = version 207 | if headers is None: 208 | self.headers = {} 209 | else: 210 | self.headers = headers 211 | self.body = body if body is not None else b'' 212 | self.body_stream = body_stream 213 | 214 | def _format_headers(self): 215 | headers = [f'{self.method} {self.path} {self.version}'] 216 | for k, v in self.headers.items(): 217 | headers.append(f'{k}: {v}') 218 | return '\r\n'.join(headers).encode() + b'\r\n\r\n' 219 | 220 | def _format_chunk(self, chunk): 221 | return format(len(chunk), 'X').encode() + b'\r\n' + chunk + b'\r\n' 222 | 223 | def _is_chunked(self): 224 | return self.headers.get('Transfer-Encoding', '').lower() == 'chunked' 225 | 226 | async def __aiter__(self): 227 | if self.body_stream is None: 228 | # one-off request 229 | if self.method in {'POST', 'PUT', 'PATCH'}: 230 | self.headers['Content-Length'] = len(self.body) 231 | yield self._format_headers() 232 | if self.body: 233 | yield self.body 234 | else: 235 | # stream request 236 | if self._is_chunked(): 237 | yield self._format_headers() 238 | async for chunk in self.body_stream: 239 | yield self._format_chunk(chunk) 240 | yield b'0\r\n\r\n' 241 | else: 242 | if 'Content-Length' not in self.headers: 243 | raise ValueError('Content-Length not set') 244 | yield self._format_headers() 245 | async for chunk in self.body_stream: 246 | yield chunk 247 | 248 | 249 | async def _decompress(body_stream, decoder): 250 | async for chunk in body_stream: 251 | yield decoder.decompress(chunk) 252 | buf = decoder.decompress(b'') 253 | yield buf + decoder.flush() 254 | -------------------------------------------------------------------------------- /curequests/future.py: -------------------------------------------------------------------------------- 1 | from curio import Event 2 | 3 | 4 | class Future: 5 | 6 | def __init__(self): 7 | self._event = Event() 8 | self._result = None 9 | self._exception = None 10 | 11 | async def set_result(self, result): 12 | self._result = result 13 | await self._event.set() 14 | 15 | async def set_exception(self, exception): 16 | self._exception = exception 17 | await self._event.set() 18 | 19 | async def _get_result(self): 20 | await self._event.wait() 21 | 22 | if self._exception is not None: 23 | raise self._exception 24 | 25 | return self._result 26 | 27 | def __await__(self): 28 | """Future is awaitable 29 | 30 | PS: I don't know how to implement __await__, but I know coroutine 31 | implemented it, so just forward the call! 
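        (This works because __await__ only has to return an iterator; a
        coroutine's __await__ supplies one, so `await future` drives
        _get_result, which waits on the event until set_result or
        set_exception is called.)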
32 | """ 33 | return self._get_result().__await__() 34 | -------------------------------------------------------------------------------- /curequests/models.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import logging 3 | import mimetypes 4 | from uuid import uuid4 5 | from os.path import basename 6 | from urllib.parse import quote 7 | 8 | from curio.meta import finalize 9 | from curio.file import AsyncFile 10 | from requests.models import Request, Response, PreparedRequest 11 | from requests.utils import super_len, to_key_val_list 12 | from requests.exceptions import ( 13 | ChunkedEncodingError, ContentDecodingError, 14 | ConnectionError, StreamConsumedError, UnrewindableBodyError) 15 | from requests.models import ITER_CHUNK_SIZE 16 | 17 | from .utils import stream_decode_response_unicode, iter_slices 18 | from .cuhttp import DecodeError, ProtocolError, ReadTimeoutError 19 | 20 | logger = logging.getLogger(__name__) 21 | 22 | EOL = '\r\n' 23 | bEOL = b'\r\n' 24 | 25 | 26 | class CuResponse(Response): 27 | """The :class:`CuResponse ` object, which contains a 28 | server's response to an async HTTP request. 29 | """ 30 | 31 | def __enter__(self): 32 | raise AttributeError( 33 | f'{type(self).__name__} not support synchronous context ' 34 | 'manager, use asynchronous context manager instead.') 35 | 36 | def __exit__(self, *args): 37 | raise AttributeError( 38 | f'{type(self).__name__} not support synchronous context ' 39 | 'manager, use asynchronous context manager instead.') 40 | 41 | async def __aenter__(self): 42 | return self 43 | 44 | async def __aexit__(self, *args): 45 | await self.close() 46 | 47 | def __iter__(self): 48 | raise AttributeError( 49 | f'{type(self).__name__} not support synchronous iter, ' 50 | 'use asynchronous iter instead.') 51 | 52 | def __aiter__(self): 53 | """Allows you to use a response as an iterator.""" 54 | return self.iter_content(128) 55 | 56 | def iter_content(self, chunk_size=1, decode_unicode=False): 57 | """Iterates over the response data. When stream=True is set on the 58 | request, this avoids reading the content at once into memory for 59 | large responses. The chunk size is the number of bytes it should 60 | read into memory. This is not necessarily the length of each item 61 | returned as decoding can take place. 62 | 63 | chunk_size must be of type int or None. A value of None will 64 | function differently depending on the value of `stream`. 65 | stream=True will read data as it arrives in whatever size the 66 | chunks are received. If stream=False, data is returned as 67 | a single chunk. 68 | 69 | If decode_unicode is True, content will be decoded using the best 70 | available encoding based on the response. 71 | """ 72 | if self._content_consumed and isinstance(self._content, bool): 73 | raise StreamConsumedError() 74 | elif chunk_size is not None and not isinstance(chunk_size, int): 75 | raise TypeError('chunk_size must be an int, it is instead a %s.' 
% type(chunk_size)) 76 | 77 | async def generate(): 78 | async with self: 79 | async with finalize(self.raw.stream(chunk_size)) as gen: 80 | logger.debug(f'Iterate response body stream: {self}') 81 | try: 82 | async for trunk in gen: 83 | yield trunk 84 | except ProtocolError as e: 85 | raise ChunkedEncodingError(e) 86 | except DecodeError as e: 87 | raise ContentDecodingError(e) 88 | except ReadTimeoutError as e: 89 | raise ConnectionError(e) 90 | self._content_consumed = True 91 | 92 | if self._content_consumed: 93 | # simulate reading small chunks of the content 94 | chunks = iter_slices(self._content, chunk_size) 95 | else: 96 | chunks = generate() 97 | 98 | if decode_unicode: 99 | chunks = stream_decode_response_unicode(chunks, self) 100 | 101 | return chunks 102 | 103 | async def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None): 104 | """Iterates over the response data, one line at a time. When 105 | stream=True is set on the request, this avoids reading the 106 | content at once into memory for large responses. 107 | 108 | .. note:: This method is not reentrant safe. 109 | """ 110 | 111 | pending = None 112 | 113 | gen = self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode) 114 | 115 | async with finalize(gen) as gen: 116 | async for chunk in gen: 117 | 118 | if pending is not None: 119 | chunk = pending + chunk 120 | 121 | if delimiter: 122 | lines = chunk.split(delimiter) 123 | else: 124 | lines = chunk.splitlines() 125 | 126 | if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: 127 | pending = lines.pop() 128 | else: 129 | pending = None 130 | 131 | for line in lines: 132 | yield line 133 | 134 | if pending is not None: 135 | yield pending 136 | 137 | @property 138 | def content(self): 139 | """Content of the response, in bytes.""" 140 | 141 | if self._content is False: 142 | # Read the contents. 143 | if self._content_consumed: 144 | raise RuntimeError( 145 | 'The content for this response was already consumed') 146 | 147 | if self.status_code == 0 or self.raw is None: 148 | self._content = None 149 | else: 150 | raise RuntimeError( 151 | 'The content for this response was not readed') 152 | 153 | self._content_consumed = True 154 | # don't need to release the connection; that's been handled by urllib3 155 | # since we exhausted the data. 156 | return self._content 157 | 158 | async def close(self): 159 | if self._content_consumed: 160 | if self.raw.keep_alive: 161 | await self.connection.release() 162 | else: 163 | await self.connection.close() 164 | else: 165 | logger.info(f'Response body not consumed, will close the connection: {self.connection}') 166 | await self.connection.close() 167 | 168 | 169 | def encode_headers(headers): 170 | ret = [] 171 | for k, v in headers.items(): 172 | ret.append('{}: {}'.format(k, v)) 173 | return EOL.join(ret).encode('ascii') 174 | 175 | 176 | def safe_tell(f): 177 | # Record the current file position before reading. 178 | # This will allow us to rewind a file in the event 179 | # of a redirect. 
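    # A None result means the position is unknown; rewind_file() will then
    # raise UnrewindableBodyError instead of silently resending a partial body.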
180 | if not hasattr(f, 'tell'): 181 | return None 182 | try: 183 | return f.tell() 184 | except (IOError, OSError): 185 | return None 186 | 187 | 188 | def rewind_file(file, position): 189 | body_seek = getattr(file, 'seek', None) 190 | if body_seek is not None and position is not None: 191 | try: 192 | body_seek(position) 193 | except (IOError, OSError): 194 | raise UnrewindableBodyError( 195 | 'An error occurred when rewinding request ' 196 | 'body for redirect.') 197 | else: 198 | raise UnrewindableBodyError('Unable to rewind request body for redirect.') 199 | 200 | 201 | class Field: 202 | 203 | __slots__ = ( 204 | 'name', 'filename', 'content', 'file', 'headers', 'content_length', 205 | 'encoded_headers', '_should_close_file', '_body_position', 206 | ) 207 | 208 | def __init__(self, name, *, filename=None, headers=None, content_type=None, 209 | file=None, filepath=None, content=None, encoding='utf-8'): 210 | self.name = quote(name, safe='') 211 | self.headers = headers or {} 212 | self.content_length = None 213 | self._should_close_file = False 214 | 215 | if content is not None: 216 | if isinstance(content, str): 217 | content = content.encode(encoding) 218 | self.content = content 219 | 220 | if filepath is not None: 221 | file = open(filepath, 'rb') 222 | self._should_close_file = True 223 | if file is not None: 224 | if not isinstance(file, AsyncFile): 225 | file = AsyncFile(file) 226 | self.file = file 227 | 228 | if content is None and file is None: 229 | raise ValueError('Field data must be provided.') 230 | if content is not None and file is not None: 231 | raise ValueError("Can't provide both content and file.") 232 | 233 | if content is not None: 234 | self.content_length = len(content) 235 | self._body_position = None 236 | else: 237 | with file.blocking() as f: 238 | self.content_length = super_len(f) 239 | self._body_position = safe_tell(f) 240 | 241 | if filename is None: 242 | if filepath is None and file is not None: 243 | filepath = getattr(file, 'name') 244 | if filepath is not None: 245 | filename = basename(filepath) 246 | if filename is not None: 247 | filename = quote(filename, safe='') 248 | self.filename = filename 249 | 250 | if content_type is None and filename is not None: 251 | content_type = mimetypes.guess_type(filename)[0] 252 | if content_type is not None: 253 | self.headers['Content-Type'] = content_type 254 | 255 | disposition = ['form-data', f'name="{self.name}"'] 256 | if self.filename is not None: 257 | disposition.append(f'filename="{self.filename}"') 258 | self.headers['Content-Disposition'] = '; '.join(disposition) 259 | 260 | self.encoded_headers = encode_headers(self.headers) 261 | 262 | def __len__(self): 263 | return self.content_length 264 | 265 | async def close(self): 266 | if self._should_close_file: 267 | await self.file.close() 268 | 269 | def rewind(self): 270 | """Move file pointer back to its recorded starting position 271 | so it can be read again on redirect. 
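        MultipartBody.rewind() calls this for every field and then rebuilds
        its generator, so the whole multipart body can be sent again.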
272 | """ 273 | if self.file is None: 274 | return 275 | with self.file.blocking() as f: 276 | rewind_file(f, self._body_position) 277 | 278 | 279 | class MultipartBody: 280 | 281 | def __init__(self, fields, boundary=None): 282 | self.fields = fields 283 | if not boundary: 284 | boundary = uuid4().hex 285 | self.boundary = boundary 286 | self.encoded_boundary = boundary.encode('ascii') 287 | self.content_type = 'multipart/form-data; boundary={}'.format(boundary) 288 | self.content_length = self._compute_content_length() 289 | self._gen = self._generator() 290 | 291 | def rewind(self): 292 | for f in self.fields: 293 | f.rewind() 294 | self._gen = self._generator() 295 | 296 | def _compute_content_length(self): 297 | eol_len = len(bEOL) 298 | boundary_len = len(self.encoded_boundary) 299 | length = 0 300 | for field in self.fields: 301 | length += 2 + boundary_len + eol_len 302 | length += len(field.encoded_headers) + eol_len 303 | length += eol_len 304 | length += field.content_length + eol_len 305 | length += 2 + boundary_len + 2 + eol_len 306 | return length 307 | 308 | def __len__(self): 309 | return self.content_length 310 | 311 | async def __aiter__(self): 312 | async for chunk in self._gen: 313 | yield chunk 314 | 315 | async def _generator(self): 316 | chunk_size = 16 * 1024 317 | sep = b'--' + self.encoded_boundary + bEOL 318 | for field in self.fields: 319 | yield sep + field.encoded_headers + bEOL + bEOL 320 | if field.content is not None: 321 | yield field.content 322 | else: 323 | while True: 324 | chunk = await field.file.read(chunk_size) 325 | if not chunk: 326 | break 327 | yield chunk 328 | yield bEOL 329 | yield b'--' + self.encoded_boundary + b'--' + bEOL 330 | 331 | 332 | class StreamBody: 333 | 334 | def __init__(self, data): 335 | self._data = data 336 | self._body_position = safe_tell(data) 337 | 338 | async def __aiter__(self): 339 | if not inspect.isasyncgen(self._data): 340 | for chunk in self._data: 341 | yield chunk 342 | else: 343 | async for chunk in self._data: 344 | yield chunk 345 | 346 | def rewind(self): 347 | rewind_file(self._data, self._body_position) 348 | 349 | 350 | class CuPreparedRequest(PreparedRequest): 351 | 352 | def prepare_body(self, data, files, json=None): 353 | """Prepares the given HTTP body data.""" 354 | if not files: 355 | super().prepare_body(data, files, json) 356 | if self.body and not isinstance(self.body, bytes): 357 | self.body = StreamBody(self.body) 358 | return 359 | 360 | fields = [] 361 | for key, value in to_key_val_list(data or {}): 362 | fields.append(Field(key, content=value)) 363 | for (k, v) in to_key_val_list(files or {}): 364 | # support for explicit filename 365 | ft = None 366 | fh = None 367 | if isinstance(v, (tuple, list)): 368 | if len(v) == 2: 369 | fn, fp = v 370 | elif len(v) == 3: 371 | fn, fp, ft = v 372 | else: 373 | fn, fp, ft, fh = v 374 | else: 375 | fn = None 376 | fp = v 377 | 378 | if isinstance(fp, (str, bytes, bytearray)): 379 | content = fp 380 | fp = None 381 | else: 382 | content = None 383 | 384 | fields.append(Field( 385 | k, filename=fn, file=fp, content=content, 386 | content_type=ft, headers=fh)) 387 | 388 | self.body = MultipartBody(fields) 389 | self.headers.setdefault('Content-Type', self.body.content_type) 390 | self.prepare_content_length(self.body) 391 | 392 | 393 | class CuRequest(Request): 394 | def prepare(self): 395 | """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" 396 | p = CuPreparedRequest() 397 | p.prepare( 398 | method=self.method, 399 | 
url=self.url, 400 | headers=self.headers, 401 | files=self.files, 402 | data=self.data, 403 | json=self.json, 404 | params=self.params, 405 | auth=self.auth, 406 | cookies=self.cookies, 407 | hooks=self.hooks, 408 | ) 409 | return p 410 | -------------------------------------------------------------------------------- /curequests/network.py: -------------------------------------------------------------------------------- 1 | from curio import ssl, socket 2 | 3 | 4 | async def ssl_wrap_socket( 5 | sock, ssl_context, 6 | do_handshake_on_connect=True, 7 | server_hostname=None, 8 | alpn_protocols=None, 9 | ): 10 | if not server_hostname: 11 | ssl_context.check_hostname = False 12 | ssl_context.verify_mode = ssl.CERT_NONE 13 | if alpn_protocols: 14 | ssl_context.set_alpn_protocols(alpn_protocols) 15 | sock = await ssl_context.wrap_socket( 16 | sock, 17 | do_handshake_on_connect=do_handshake_on_connect, 18 | server_hostname=server_hostname) 19 | return sock 20 | 21 | 22 | async def open_connection( 23 | # socket.create_connection params 24 | host, port, 25 | timeout=None, 26 | source_addr=None, 27 | # SSLContext.wrap_socket params 28 | ssl_context=None, 29 | do_handshake_on_connect=True, 30 | server_hostname=None, 31 | alpn_protocols=None, 32 | ): 33 | sock = await socket.create_connection( 34 | (host, port), timeout, source_addr) 35 | if not ssl_context: 36 | return sock 37 | return await ssl_wrap_socket( 38 | sock, ssl_context, 39 | do_handshake_on_connect=do_handshake_on_connect, 40 | server_hostname=server_hostname, 41 | alpn_protocols=alpn_protocols, 42 | ) 43 | -------------------------------------------------------------------------------- /curequests/resource_pool.py: -------------------------------------------------------------------------------- 1 | from itertools import chain 2 | from threading import RLock 3 | from namedlist import namedlist 4 | 5 | 6 | class ResourcePoolClosedError(Exception): 7 | """Resource pool closed""" 8 | 9 | 10 | class Resource: 11 | def __init__(self, key): 12 | self.key = key 13 | 14 | def __repr__(self): 15 | return f'<{type(self).__name__} {self.key}>' 16 | 17 | 18 | ResourcePoolResult = namedlist( 19 | 'ResourcePoolResult', 20 | [ 21 | 'idle', # idle resource 22 | 'need_open', # resource need open 23 | 'need_close', # resource need close 24 | 'need_notify', # (future, ResourcePoolResult) 25 | 'need_wait', # future need wait 26 | ], 27 | default=None) 28 | 29 | 30 | class ResourcePool: 31 | """A general resource pool algorithm, it's thread safe 32 | 33 | Params: 34 | future_class: a future class 35 | max_items_per_key (int): max items pre key 36 | max_items_total (int): max items total 37 | 38 | Note: All resource's open/close/await operations are caller's business 39 | """ 40 | 41 | def __init__(self, future_class, max_items_per_key=10, max_items_total=100): 42 | self._closed = False 43 | self.future_class = future_class 44 | self.max_items_per_key = max_items_per_key 45 | self.max_items_total = max_items_total 46 | self._idle_resources = {} # key: [item, ...] 47 | self._busy_resources = {} # key: [item, ...] 48 | self._waitings = {} # key: [promise, ...] 
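        # _waitings holds future_class instances in FIFO order; _put() fulfils
        # the oldest waiter first, either with an idle item or with permission
        # to open a new one. Simplified caller protocol (a sketch, where
        # open_resource is the caller's own opener; real callers such as
        # ConnectionPool also handle ret.need_close at every step):
        #
        #     ret = pool.get(key)
        #     if ret.need_wait is not None:
        #         ret = await ret.need_wait   # resolves to another result
        #     item = ret.idle or open_resource(ret.need_open)
        #     ... use item ...
        #     ret = pool.put(item)
        #     if ret.need_notify is not None:
        #         fut, result = ret.need_notify
        #         await fut.set_result(result)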
49 | # the two numbers are for better performance
50 | self._num_idle = 0
51 | self._num_total = 0
52 | # keep thread safe
53 | self._lock = RLock()
54 |
55 | @property
56 | def num_idle(self):
57 | """Number of idle resources"""
58 | return self._num_idle
59 |
60 | @property
61 | def num_busy(self):
62 | """Number of busy resources"""
63 | return self._num_total - self._num_idle
64 |
65 | @property
66 | def num_total(self):
67 | """Number of total resources"""
68 | return self._num_total
69 |
70 | def size(self, key):
71 | """Number of resources with the given key"""
72 | r = [self._idle_resources, self._busy_resources]
73 | return sum(len(x.get(key, [])) for x in r)
74 |
75 | def __repr__(self):
76 | return f'<{type(self).__name__} idle:{self.num_idle} total:{self.num_total}>'
77 |
78 | def put(self, *args, **kwargs):
79 | """Put back a resource
80 |
81 | Params:
82 | item (Resource): the resource to put back
83 | close (bool): close the resource or not
84 | Returns:
85 | ResourcePoolResult
86 | """
87 | with self._lock:
88 | return self._put(*args, **kwargs)
89 |
90 | def _put(self, item, close=False):
91 | ret = ResourcePoolResult()
92 | if self._closed:
93 | ret.need_close = item
94 | return ret
95 |
96 | self._busy_resources[item.key].remove(item)
97 | if not close:
98 | waitings = self._waitings.get(item.key)
99 | if waitings:
100 | self._busy_resources[item.key].append(item)
101 | # just notify a future in the fastest way
102 | ret.need_notify = (waitings.pop(0), ResourcePoolResult(idle=item))
103 | return ret
104 | self._idle_resources.setdefault(item.key, []).append(item)
105 | self._num_idle += 1
106 | else:
107 | ret.need_close = item
108 | self._num_total -= 1
109 |
110 | for key, waitings in self._waitings.items():
111 | if not waitings:
112 | continue
113 | need_close, need_open = self._open_new_resource_if_permit(key)
114 | if need_open:
115 | ret.need_notify = (waitings.pop(0), ResourcePoolResult(need_open=need_open))
116 | assert not (need_close and ret.need_close), \
117 | "shouldn't close two resources at once, it's a bug!"
118 | ret.need_close = need_close
119 | break
120 |
121 | return ret
122 |
123 | def _close_an_idle_resource(self):
124 | for key, idles in self._idle_resources.items():
125 | if idles:
126 | self._num_idle -= 1
127 | self._num_total -= 1
128 | return idles.pop(0)
129 |
130 | def _open_new_resource(self, key):
131 | need_open = Resource(key)
132 | self._busy_resources.setdefault(key, []).append(need_open)
133 | self._num_total += 1
134 | return need_open
135 |
136 | def _open_new_resource_if_permit(self, key):
137 | can_open_key = self.size(key) < self.max_items_per_key
138 | can_open_total = self._num_total < self.max_items_total
139 | can_close = self._num_idle > 0
140 | if can_open_key and can_open_total:
141 | # open new resource
142 | need_open = self._open_new_resource(key)
143 | return None, need_open
144 | elif can_open_key and not can_open_total and can_close:
145 | # close an idle resource then open new resource
146 | need_close = self._close_an_idle_resource()
147 | assert need_close and self._num_total < self.max_items_total, \
148 | "pool still full after closing an idle resource, it's a bug!"
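Each field of `ResourcePoolResult` is an instruction back to the caller, as the `put` logic above shows. A hedged sketch of the acquire/release protocol a caller could build on top of it; `open_resource` and `close_resource` are hypothetical callbacks, not repository code, and `tests/test_resource_pool.py` later drives the same protocol by hand:

from curequests.future import Future
from curequests.resource_pool import ResourcePool

pool = ResourcePool(Future, max_items_per_key=10, max_items_total=100)


async def acquire(key, open_resource, close_resource):
    ret = pool.get(key)
    if ret.need_wait is not None:
        # pool is at capacity for now; put() will notify us with a new result
        ret = await ret.need_wait
    if ret.need_close is not None:
        await close_resource(ret.need_close)  # an evicted idle resource
    if ret.need_open is not None:
        await open_resource(ret.need_open)  # a freshly created Resource slot
        return ret.need_open
    return ret.idle  # a reusable idle resource


async def release(item, close_resource, close=False):
    ret = pool.put(item, close=close)
    if ret.need_close is not None:
        await close_resource(ret.need_close)
    if ret.need_notify is not None:
        fut, result = ret.need_notify
        await fut.set_result(result)  # wake exactly one waiting getter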
149 | need_open = self._open_new_resource(key)
150 | return need_close, need_open
151 | else:
152 | return None, None
153 |
154 | def get(self, *args, **kwargs):
155 | """Get a resource
156 |
157 | Params:
158 | key (hashable): resource key
159 | Returns:
160 | ResourcePoolResult
161 | """
162 | with self._lock:
163 | return self._get(*args, **kwargs)
164 |
165 | def _get(self, key):
166 | if self._closed:
167 | raise ResourcePoolClosedError('The resource pool was closed')
168 | ret = ResourcePoolResult()
169 | idles = self._idle_resources.get(key)
170 | if idles:
171 | item = idles.pop()
172 | self._busy_resources.setdefault(key, []).append(item)
173 | self._num_idle -= 1
174 | ret.idle = item
175 | else:
176 | need_close, need_open = self._open_new_resource_if_permit(key)
177 | if need_open is None:
178 | fut = self.future_class()
179 | self._waitings.setdefault(key, []).append(fut)
180 | ret.need_wait = fut
181 | else:
182 | ret.need_close = need_close
183 | ret.need_open = need_open
184 | return ret
185 |
186 | def close(self, *args, **kwargs):
187 | """Close resource pool
188 |
189 | Params:
190 | force (bool): close busy resources or not
191 | Returns:
192 | tuple(need_close, need_wait):
193 | need_close: list of resources to close
194 | need_wait: list of futures to wait on
195 | """
196 | with self._lock:
197 | return self._close(*args, **kwargs)
198 |
199 | def _close(self, force=False):
200 | need_close = []
201 | need_wait = []
202 | self._closed = True
203 |
204 | for fut in chain.from_iterable(self._waitings.values()):
205 | need_wait.append(fut)
206 | self._waitings.clear()
207 |
208 | for item in chain.from_iterable(self._idle_resources.values()):
209 | need_close.append(item)
210 | self._idle_resources.clear()
211 | self._num_idle = 0
212 |
213 | if force:
214 | for item in chain.from_iterable(self._busy_resources.values()):
215 | need_close.append(item)
216 | self._busy_resources.clear()
217 |
218 | self._num_total = 0
219 | return need_close, need_wait
220 |
--------------------------------------------------------------------------------
/curequests/sessions.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from urllib.parse import urlparse, urljoin
3 |
4 | from requests.utils import requote_uri
5 | from requests.sessions import (
6 | Session, Request, preferred_clock,
7 | timedelta, dispatch_hook, extract_cookies_to_jar
8 | )
9 | from requests.exceptions import TooManyRedirects
10 | from requests.sessions import (
11 | cookielib,
12 | cookiejar_from_dict,
13 | merge_cookies,
14 | RequestsCookieJar,
15 | get_netrc_auth,
16 | merge_setting,
17 | CaseInsensitiveDict,
18 | merge_hooks)
19 | from .adapters import CuHTTPAdapter
20 | from .models import CuPreparedRequest
21 | from .models import MultipartBody, StreamBody
22 |
23 | logger = logging.getLogger(__name__)
24 |
25 |
26 | class CuSession(Session):
27 |
28 | def __init__(self):
29 | super().__init__()
30 | self.mount('https://', CuHTTPAdapter())
31 | self.mount('http://', CuHTTPAdapter())
32 |
33 | def __enter__(self):
34 | raise AttributeError(
35 | f'{type(self).__name__} does not support the synchronous context '
36 | 'manager, use the asynchronous context manager instead.')
37 |
38 | def __exit__(self, *args):
39 | raise AttributeError(
40 | f'{type(self).__name__} does not support the synchronous context '
41 | 'manager, use the asynchronous context manager instead.')
42 |
43 | async def __aenter__(self):
44 | return self
45 |
46 | async def __aexit__(self, *args):
47 | await
self.close() 48 | 49 | def prepare_request(self, request): 50 | """Constructs a :class:`PreparedRequest ` for 51 | transmission and returns it. The :class:`PreparedRequest` has settings 52 | merged from the :class:`Request ` instance and those of the 53 | :class:`Session`. 54 | 55 | :param request: :class:`Request` instance to prepare with this 56 | session's settings. 57 | :rtype: requests.PreparedRequest 58 | """ 59 | cookies = request.cookies or {} 60 | 61 | # Bootstrap CookieJar. 62 | if not isinstance(cookies, cookielib.CookieJar): 63 | cookies = cookiejar_from_dict(cookies) 64 | 65 | # Merge with session cookies 66 | merged_cookies = merge_cookies( 67 | merge_cookies(RequestsCookieJar(), self.cookies), cookies) 68 | 69 | # Set environment's basic authentication if not explicitly set. 70 | auth = request.auth 71 | if self.trust_env and not auth and not self.auth: 72 | auth = get_netrc_auth(request.url) 73 | 74 | p = CuPreparedRequest() 75 | p.prepare( 76 | method=request.method.upper(), 77 | url=request.url, 78 | files=request.files, 79 | data=request.data, 80 | json=request.json, 81 | headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), 82 | params=merge_setting(request.params, self.params), 83 | auth=merge_setting(auth, self.auth), 84 | cookies=merged_cookies, 85 | hooks=merge_hooks(request.hooks, self.hooks), 86 | ) 87 | return p 88 | 89 | async def _send(self, request, **kwargs): 90 | """Send a given PreparedRequest. 91 | 92 | :rtype: requests.Response 93 | """ 94 | logger.debug(f'Send request: {request}') 95 | # Set defaults that the hooks can utilize to ensure they always have 96 | # the correct parameters to reproduce the previous request. 97 | kwargs.setdefault('stream', self.stream) 98 | kwargs.setdefault('verify', self.verify) 99 | kwargs.setdefault('cert', self.cert) 100 | kwargs.setdefault('proxies', self.proxies) 101 | 102 | # It's possible that users might accidentally send a Request object. 103 | # Guard against that specific failure case. 104 | if isinstance(request, Request): 105 | raise ValueError('You can only send PreparedRequests.') 106 | 107 | hooks = request.hooks 108 | 109 | # Get the appropriate adapter to use 110 | adapter = self.get_adapter(url=request.url) 111 | 112 | # Start time (approximately) of the request 113 | start = preferred_clock() 114 | 115 | # Send the request 116 | r = await adapter.send(request, **kwargs) 117 | 118 | # Total elapsed time of the request (approximately) 119 | elapsed = preferred_clock() - start 120 | logger.debug(f'Request {request} elapsed {elapsed:.3f} seconds') 121 | r.elapsed = timedelta(seconds=elapsed) 122 | 123 | # Response manipulation hooks 124 | r = dispatch_hook('response', hooks, r, **kwargs) 125 | 126 | extract_cookies_to_jar(self.cookies, request, r.raw) 127 | 128 | return r 129 | 130 | def _get_next_url(self, resp): 131 | url = resp.headers['location'] or '' 132 | 133 | # Handle redirection without scheme (see: RFC 1808 Section 4) 134 | if url.startswith('//'): 135 | scheme = urlparse(resp.url).scheme 136 | url = f'{scheme}:{url}' 137 | 138 | # The scheme should be lower case... 139 | parsed = urlparse(url) 140 | url = parsed.geturl() 141 | 142 | # Facilitate relative 'location' headers, as allowed by RFC 7231. 143 | # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') 144 | # Compliant with RFC3986, we percent encode the url. 
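The `__aenter__`/`__aexit__` pair above means a `CuSession` is used with `async with`, never a plain `with`. A minimal usage sketch under curio; the httpbin URL is illustrative:

import curio
from curequests import session


async def main():
    # a plain `with` raises AttributeError by design (see __enter__ above)
    async with session() as s:
        r = await s.get('https://httpbin.org/get')
        print(r.status_code, r.elapsed)


curio.run(main)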
145 | if not parsed.netloc: 146 | url = urljoin(resp.url, requote_uri(url)) 147 | else: 148 | url = requote_uri(url) 149 | return url 150 | 151 | def _get_next_method(self, resp): 152 | """When being redirected we may want to change the method of the request 153 | based on certain specs or browser behavior. 154 | """ 155 | method = resp.request.method 156 | 157 | # http://tools.ietf.org/html/rfc7231#section-6.4.4 158 | if resp.status_code == 303 and method != 'HEAD': 159 | method = 'GET' 160 | 161 | # Do what the browsers do, despite standards... 162 | # First, turn 302s into GETs. 163 | if resp.status_code == 302 and method != 'HEAD': 164 | method = 'GET' 165 | 166 | # Second, if a POST is responded to with a 301, turn it into a GET. 167 | # This bizarre behaviour is explained in Issue 1704. 168 | if resp.status_code == 301 and method == 'POST': 169 | method = 'GET' 170 | 171 | return method 172 | 173 | async def send(self, request, **kwargs): 174 | """Send a given PreparedRequest. 175 | 176 | :rtype: requests.Response 177 | """ 178 | allow_redirects = kwargs.pop('allow_redirects', True) 179 | if not allow_redirects: 180 | return await self._send(request, **kwargs) 181 | 182 | history = [] 183 | while True: 184 | resp = await self._send(request, **kwargs) 185 | resp.history = history[:] 186 | history.append(resp) 187 | if not resp.is_redirect: 188 | return resp 189 | 190 | # Release the connection back into the pool. 191 | await resp.close() 192 | 193 | if len(history) > self.max_redirects: 194 | raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp) 195 | 196 | next_request = request.copy() 197 | next_request.url = self._get_next_url(resp) 198 | next_request.method = self._get_next_method(resp) 199 | logger.debug(f'Redirect to: {next_request.method} {next_request.url}') 200 | headers = next_request.headers 201 | 202 | # https://github.com/requests/requests/issues/1084 203 | if resp.status_code not in (307, 308): 204 | # https://github.com/requests/requests/issues/3490 205 | purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') 206 | for header in purged_headers: 207 | next_request.headers.pop(header, None) 208 | next_request.body = None 209 | 210 | # Attempt to rewind consumed file-like object. 211 | should_rewind = ( 212 | ('Content-Length' in headers or 'Transfer-Encoding' in headers) and 213 | isinstance(next_request.body, (MultipartBody, StreamBody))) 214 | if should_rewind: 215 | logger.debug(f'Rewind request body for redirection: {next_request}') 216 | next_request.body.rewind() 217 | 218 | try: 219 | del headers['Cookie'] 220 | except KeyError: 221 | pass 222 | 223 | # Extract any cookies sent on the response to the cookiejar 224 | # in the new request. Because we've mutated our copied prepared 225 | # request, use the old one that we haven't yet touched. 226 | extract_cookies_to_jar(next_request._cookies, request, resp.raw) 227 | merge_cookies(next_request._cookies, self.cookies) 228 | next_request.prepare_cookies(next_request._cookies) 229 | 230 | self.rebuild_auth(next_request, resp) 231 | 232 | # Override the original request. 233 | request = next_request 234 | 235 | def rebuild_auth(self, prepared_request, response): 236 | """When being redirected we may want to strip authentication from the 237 | request to avoid leaking credentials. This method intelligently removes 238 | and reapplies authentication where possible to avoid credential loss. 
239 | """ 240 | headers = prepared_request.headers 241 | url = prepared_request.url 242 | 243 | if 'Authorization' in headers: 244 | # If we get redirected to a new host, we should strip out any 245 | # authentication headers. 246 | original_parsed = urlparse(response.request.url) 247 | redirect_parsed = urlparse(url) 248 | 249 | if (original_parsed.hostname != redirect_parsed.hostname): 250 | del headers['Authorization'] 251 | 252 | # .netrc might have more auth for us on our new host. 253 | new_auth = get_netrc_auth(url) if self.trust_env else None 254 | if new_auth is not None: 255 | prepared_request.prepare_auth(new_auth) 256 | 257 | return 258 | 259 | async def close(self): 260 | """Closes all adapters and as such the session""" 261 | logger.debug(f'Close session {self}') 262 | for v in self.adapters.values(): 263 | await v.close() 264 | 265 | 266 | def session(): 267 | """ 268 | Returns a :class:`CuSession` for context-management. 269 | 270 | :rtype: CuSession 271 | """ 272 | 273 | return CuSession() 274 | -------------------------------------------------------------------------------- /curequests/utils.py: -------------------------------------------------------------------------------- 1 | import codecs 2 | from collections import namedtuple 3 | 4 | from curio.meta import finalize 5 | from requests.adapters import TimeoutSauce 6 | 7 | 8 | async def stream_decode_response_unicode(iterator, r): 9 | """Stream decodes a iterator.""" 10 | 11 | async with finalize(iterator) as iterator: 12 | if r.encoding is None: 13 | async for item in iterator: 14 | yield item 15 | return 16 | 17 | decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') 18 | async for chunk in iterator: 19 | rv = decoder.decode(chunk) 20 | if rv: 21 | yield rv 22 | rv = decoder.decode(b'', final=True) 23 | if rv: 24 | yield rv 25 | 26 | 27 | async def iter_slices(string, slice_length): 28 | """Iterate over slices of a string.""" 29 | pos = 0 30 | if slice_length is None or slice_length <= 0: 31 | slice_length = len(string) 32 | while pos < len(string): 33 | yield string[pos:pos + slice_length] 34 | pos += slice_length 35 | 36 | TimeoutValue = namedtuple('TimeoutValue', 'connect read') 37 | 38 | 39 | def normalize_timeout(timeout): 40 | if isinstance(timeout, tuple): 41 | try: 42 | connect, read = timeout 43 | timeout = TimeoutValue(connect=connect, read=read) 44 | except ValueError: 45 | # this may raise a string formatting error. 46 | err = ('Invalid timeout {0}. Pass a (connect, read) ' 47 | 'timeout tuple, or a single float to set ' 48 | 'both timeouts to the same value'.format(timeout)) 49 | raise ValueError(err) 50 | elif isinstance(timeout, TimeoutSauce): 51 | raise ValueError('Not support urllib3 Timeout object') 52 | else: 53 | timeout = TimeoutValue(connect=timeout, read=timeout) 54 | return timeout 55 | 56 | 57 | def select_proxy(scheme, host, port, proxies): 58 | """Select a proxy for the url, if applicable. 
59 | 60 | :param scheme, host, port: The url being for the request 61 | :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs 62 | """ 63 | proxies = proxies or {} 64 | if host is None: 65 | return proxies.get(scheme, proxies.get('all')) 66 | 67 | proxy_keys = [ 68 | scheme + '://' + host, 69 | scheme, 70 | 'all://' + host, 71 | 'all', 72 | ] 73 | proxy = None 74 | for proxy_key in proxy_keys: 75 | if proxy_key in proxies: 76 | proxy = proxies[proxy_key] 77 | break 78 | 79 | return proxy 80 | -------------------------------------------------------------------------------- /invoke.yaml: -------------------------------------------------------------------------------- 1 | run: 2 | pty: true 3 | echo: true 4 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | aspy.yaml==1.3.0 2 | atomicwrites==1.3.0 3 | attrs==19.2.0 4 | bleach==3.1.0 5 | blinker==1.4 6 | brotlipy==0.7.0 7 | cached-property==1.5.1 8 | certifi==2019.9.11 9 | cffi==1.12.3 10 | cfgv==2.0.1 11 | chardet==3.0.4 12 | Click==7.0 13 | codecov==2.0.15 14 | coverage==4.5.4 15 | curio==0.9 16 | decorator==4.4.0 17 | docutils==0.15.2 18 | entrypoints==0.3 19 | filelock==3.0.12 20 | flake8==3.7.8 21 | Flask==1.1.1 22 | httpbin==0.7.0 23 | httptools==0.0.13 24 | identify==1.4.7 25 | idna==2.8 26 | importlib-metadata==0.23 27 | invoke==1.3.0 28 | itsdangerous==1.1.0 29 | Jinja2==2.10.1 30 | MarkupSafe==1.1.1 31 | mccabe==0.6.1 32 | more-itertools==7.2.0 33 | multidict==4.5.2 34 | namedlist==1.7 35 | nodeenv==1.3.3 36 | packaging==19.2 37 | pkginfo==1.5.0.1 38 | pluggy==0.13.0 39 | pre-commit==1.18.3 40 | py==1.8.0 41 | pycodestyle==2.5.0 42 | pycparser==2.19 43 | pyflakes==2.1.1 44 | Pygments==2.4.2 45 | pyparsing==2.4.2 46 | pytest==5.2.0 47 | pytest-cov==2.7.1 48 | pytest-httpbin==1.0.0 49 | PyYAML==5.1.2 50 | raven==6.10.0 51 | readme-renderer==24.0 52 | requests==2.22.0 53 | requests-toolbelt==0.9.1 54 | six==1.12.0 55 | toml==0.10.0 56 | tox==3.14.0 57 | tqdm==4.36.1 58 | twine==2.0.0 59 | urllib3==1.25.6 60 | virtualenv==16.7.5 61 | wcwidth==0.1.7 62 | webencodings==0.5.1 63 | Werkzeug==0.16.0 64 | yarl==1.3.0 65 | zipp==0.6.0 66 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal = 1 3 | 4 | [coverage:run] 5 | branch = True 6 | source = curequests, tests 7 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | from setuptools import setup 3 | 4 | here = os.path.abspath(os.path.dirname(__file__)) 5 | 6 | about = {} 7 | with open(os.path.join(here, 'curequests', '__about__.py')) as f: 8 | exec(f.read(), about) 9 | 10 | setup( 11 | name=about['__title__'], 12 | version=about['__version__'], 13 | description=about['__description__'], 14 | long_description=__doc__, 15 | author=about['__author__'], 16 | author_email=about['__author_email__'], 17 | url=about['__url__'], 18 | license=about['__license__'], 19 | packages=['curequests'], 20 | install_requires=[ 21 | 'httptools', 22 | 'yarl', 23 | 'curio', 24 | 'requests', 25 | 'namedlist', 26 | ], 27 | zip_safe=False, 28 | classifiers=[ 29 | 'Intended Audience :: Developers', 30 | 'License :: OSI Approved :: MIT License', 31 | 'Operating System :: 
OS Independent', 32 | 'Programming Language :: Python :: 3 :: Only', 33 | 'Programming Language :: Python :: 3.6', 34 | 'Topic :: Software Development :: Libraries :: Python Modules' 35 | ], 36 | ) 37 | -------------------------------------------------------------------------------- /tasks.py: -------------------------------------------------------------------------------- 1 | from invoke import task 2 | 3 | 4 | @task 5 | def lint(ctx): 6 | ctx.run('pre-commit run --all-files') 7 | 8 | 9 | @task 10 | def test(ctx, cov=False, verbose=False): 11 | cov = ' --cov=curequests --cov-report=term-missing' if cov else '' 12 | verbose = ' -v -x --log-level=debug' if verbose else '' 13 | cmd = (f'REQUESTS_CA_BUNDLE=`python -m pytest_httpbin.certs` ' 14 | f'pytest --tb=short{cov}{verbose} tests') 15 | ctx.run(cmd) 16 | 17 | 18 | @task 19 | def dist(ctx, upload=False): 20 | cmds = [ 21 | 'rm -f dist/*', 22 | 'python setup.py bdist_wheel', 23 | ] 24 | if upload: 25 | cmds.append('twine upload dist/*') 26 | for cmd in cmds: 27 | ctx.run(cmd, echo=True) 28 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/guyskk/curequests/731e1996ebd57aec4bd36e728a5a0f7edb83933e/tests/conftest.py -------------------------------------------------------------------------------- /tests/test_api.py: -------------------------------------------------------------------------------- 1 | import json 2 | from curio.meta import finalize 3 | from curequests import get, post 4 | from utils import run_with_curio 5 | 6 | 7 | @run_with_curio 8 | async def test_get(httpbin_both): 9 | r = await get(httpbin_both + '/get') 10 | assert r.status_code == 200 11 | 12 | 13 | @run_with_curio 14 | async def test_post(httpbin_both): 15 | data = {'hello': 'world'} 16 | r = await post(httpbin_both + '/post', json=data) 17 | assert r.status_code == 200 18 | assert r.json()['json'] == data 19 | 20 | 21 | @run_with_curio 22 | async def test_gzip(httpbin_both): 23 | r = await get(httpbin_both + '/gzip') 24 | assert r.status_code == 200 25 | assert r.json() 26 | 27 | 28 | @run_with_curio 29 | async def test_chunked(httpbin_both): 30 | r = await get(httpbin_both + '/stream/1', stream=True) 31 | assert r.status_code == 200 32 | body = [] 33 | async with finalize(r.iter_content()) as gen: 34 | async for chunk in gen: 35 | body.append(chunk) 36 | body = b''.join(body).decode('utf-8') 37 | assert json.loads(body) 38 | -------------------------------------------------------------------------------- /tests/test_connection_pool.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/guyskk/curequests/731e1996ebd57aec4bd36e728a5a0f7edb83933e/tests/test_connection_pool.py -------------------------------------------------------------------------------- /tests/test_redirect.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from curequests import session 3 | from requests.exceptions import TooManyRedirects 4 | from utils import run_with_curio 5 | 6 | 7 | @run_with_curio 8 | async def test_redirect(httpbin_both): 9 | s = session() 10 | s.max_redirects = 3 11 | r = await s.get(httpbin_both + '/redirect/3') 12 | assert len(r.history) == 3 13 | with pytest.raises(TooManyRedirects) as exc_info: 14 | r = await s.get(httpbin_both + '/redirect/4') 15 | assert 
len(exc_info.value.response.history) == 3
16 |
--------------------------------------------------------------------------------
/tests/test_resource_pool.py:
--------------------------------------------------------------------------------
1 | from curequests.resource_pool import ResourcePool
2 | from curequests.future import Future
3 | from utils import run_with_curio
4 |
5 |
6 | @run_with_curio
7 | async def test_resource_pool_idle():
8 | pool = ResourcePool(Future, max_items_total=1)
9 | # get resource A
10 | ga1 = pool.get('A')
11 | assert ga1.need_open
12 | A = ga1.need_open
13 | assert pool.num_total == 1
14 | assert pool.num_idle == 0
15 |
16 | # put back A
17 | pa1 = pool.put(A)
18 | assert not pa1.need_close
19 | assert not pa1.need_notify
20 | assert pool.num_total == 1
21 | assert pool.num_idle == 1
22 |
23 | # get A again
24 | ga2 = pool.get('A')
25 | assert ga2.idle == A
26 | assert pool.num_total == 1
27 | assert pool.num_idle == 0
28 |
29 |
30 | @run_with_curio
31 | async def test_resource_pool_wait_and_notify_same_key():
32 | pool = ResourcePool(Future, max_items_total=1)
33 | # open a resource
34 | ga1 = pool.get('A')
35 | assert ga1.need_open
36 | A = ga1.need_open
37 | assert pool.num_total == 1
38 | assert pool.num_idle == 0
39 |
40 | # get A again, need wait
41 | ga2 = pool.get('A')
42 | assert not ga2.idle
43 | assert ga2.need_wait
44 | assert pool.num_total == 1
45 | assert pool.num_idle == 0
46 |
47 | # put back A
48 | pa1 = pool.put(A)
49 | assert not pa1.need_close
50 | assert pa1.need_notify
51 | fut, result = pa1.need_notify
52 | await fut.set_result(result)
53 |
54 | ga2 = await ga2.need_wait
55 | assert ga2.idle == A
56 | assert pool.num_total == 1
57 | assert pool.num_idle == 0
58 |
59 |
60 | @run_with_curio
61 | async def test_resource_pool_wait_and_notify_diff_key():
62 | pool = ResourcePool(Future, max_items_per_key=2, max_items_total=2)
63 | # open two resources
64 | ga1 = pool.get('A')
65 | assert ga1.need_open
66 | A = ga1.need_open
67 | gb1 = pool.get('B')
68 | assert gb1.need_open
69 | B = gb1.need_open
70 |
71 | # pool is full
72 | assert pool.num_busy == 2
73 | assert pool.num_total == 2
74 | assert pool.size('A') == 1
75 | assert pool.size('B') == 1
76 |
77 | # get A again, need wait
78 | ga2 = pool.get('A')
79 | assert not ga2.need_open
80 | assert not ga2.idle
81 | assert ga2.need_wait
82 |
83 | # put back B, should close B
84 | pb1 = pool.put(B)
85 | assert pb1.need_close == B
86 | assert pb1.need_notify
87 | fut, result = pb1.need_notify
88 | await fut.set_result(result)
89 |
90 | # open a new A
91 | ga2 = await ga2.need_wait
92 | assert not ga2.idle
93 | assert not ga2.need_close
94 | assert ga2.need_open
95 | assert ga2.need_open.key == 'A'
96 | assert ga2.need_open != A
97 |
98 | assert pool.num_busy == 2
99 | assert pool.num_total == 2
100 | assert pool.size('A') == 2
101 | assert pool.size('B') == 0
102 |
103 |
104 | @run_with_curio
105 | def test_put_when_pool_closed():
106 | pool = ResourcePool(Future)
107 | ga = pool.get('A')
108 | pool.close()
109 | ret = pool.put(ga.need_open)
110 | assert ret.need_close
111 |
112 |
113 | @run_with_curio
114 | async def test_close():
115 | pool = ResourcePool(Future, max_items_per_key=2, max_items_total=3)
116 | # make an idle resource
117 | ga = pool.get('A')
118 | pool.put(ga.need_open)
119 | # open two new resources
120 | pool.get('B')
121 | pool.get('C')
122 |
123 | assert pool.num_idle == 1
124 | assert pool.num_total == 3
125 |
126 | need_close, need_wait = pool.close(force=True)
127 | assert
len(need_close) == 3 128 | assert len(need_wait) == 0 129 | -------------------------------------------------------------------------------- /tests/test_response.py: -------------------------------------------------------------------------------- 1 | from curio.meta import finalize 2 | from curequests import get 3 | from utils import run_with_curio 4 | 5 | 6 | @run_with_curio 7 | async def test_response_iter_stream(httpbin): 8 | r = await get(httpbin + f'/bytes/{80*1024}', stream=True) 9 | body = [] 10 | async with finalize(r.__aiter__()) as gen: 11 | async for chunk in gen: 12 | body.append(chunk) 13 | assert r.connection.closed 14 | assert len(b''.join(body)) == 80 * 1024 15 | 16 | 17 | @run_with_curio 18 | async def test_response_iter_not_stream(httpbin): 19 | r = await get(httpbin + f'/bytes/{80*1024}') 20 | body = [] 21 | async with finalize(r.__aiter__()) as gen: 22 | async for chunk in gen: 23 | body.append(chunk) 24 | assert r.connection.closed 25 | assert len(b''.join(body)) == 80 * 1024 26 | 27 | 28 | @run_with_curio 29 | async def test_response_iter_content(httpbin): 30 | r = await get(httpbin + f'/bytes/{80*1024}', stream=True) 31 | body = [] 32 | async with finalize(r.iter_content(1024)) as gen: 33 | async for chunk in gen: 34 | body.append(chunk) 35 | assert r.connection.closed 36 | assert len(b''.join(body)) == 80 * 1024 37 | 38 | 39 | @run_with_curio 40 | async def test_response_iter_lines(httpbin): 41 | r = await get(httpbin + f'/get', stream=True) 42 | body = [] 43 | async with finalize(r.iter_lines()) as gen: 44 | async for chunk in gen: 45 | body.append(chunk) 46 | assert r.connection.closed 47 | 48 | 49 | @run_with_curio 50 | async def test_response_content(httpbin): 51 | r = await get(httpbin + f'/bytes/{80*1024}') 52 | assert r.connection.closed 53 | assert len(r.content) == 80 * 1024 54 | 55 | 56 | @run_with_curio 57 | async def test_decode_unicode(httpbin): 58 | r = await get(httpbin + f'/encoding/utf8', stream=True) 59 | body = [] 60 | async with finalize(r.iter_content(decode_unicode=True)) as gen: 61 | async for chunk in gen: 62 | body.append(chunk) 63 | assert r.connection.closed 64 | body = ''.join(body).encode('utf-8') 65 | assert len(body) == int(r.headers['content-length']) 66 | 67 | 68 | @run_with_curio 69 | async def test_response_close(httpbin): 70 | r = await get(httpbin + f'/bytes/{80*1024}', stream=True) 71 | assert not r.connection.closed 72 | async with r: 73 | pass 74 | assert r.connection.closed 75 | -------------------------------------------------------------------------------- /tests/test_upload.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from curequests import post 3 | from curio.file import aopen 4 | from requests.exceptions import UnrewindableBodyError 5 | from utils import run_with_curio 6 | 7 | TEST_DATA = 'test data\n' 8 | 9 | 10 | @run_with_curio 11 | async def test_upload_file(httpbin_both): 12 | files = {'file': open('tests/upload.txt', 'rb')} 13 | r = await post(httpbin_both + '/post', files=files) 14 | assert r.ok 15 | assert r.json()['files']['file'] == TEST_DATA 16 | 17 | 18 | @pytest.mark.skip('TODO: curio.aopen has some issues') 19 | @run_with_curio 20 | async def test_upload_asyncfile(httpbin_both): 21 | files = {'file': aopen('tests/upload.txt', 'rb')} 22 | r = await post(httpbin_both + '/post', files=files) 23 | assert r.ok 24 | assert r.json()['files']['file'] == TEST_DATA 25 | 26 | 27 | @run_with_curio 28 | async def test_upload_headers(httpbin_both): 29 | f = 
('upload.txt', open('tests/upload.txt', 'rb'), 'text/plain') 30 | files = {'file': f} 31 | r = await post(httpbin_both + '/post', files=files) 32 | assert r.ok 33 | assert r.json()['files']['file'] == TEST_DATA 34 | 35 | 36 | @run_with_curio 37 | async def test_upload_string(httpbin_both): 38 | f = ('upload.txt', TEST_DATA) 39 | files = {'file': f} 40 | r = await post(httpbin_both + '/post', files=files) 41 | assert r.ok 42 | assert r.json()['files']['file'] == TEST_DATA 43 | 44 | 45 | @pytest.mark.skip('the server not support chunked request') 46 | @run_with_curio 47 | async def test_chunked_request(httpbin_both): 48 | def gen(): 49 | yield b'hi' 50 | yield b'there' 51 | r = await post(httpbin_both + '/post', data=gen()) 52 | assert r.ok 53 | 54 | 55 | @run_with_curio 56 | async def test_stream_upload(httpbin_both): 57 | with open('tests/upload.txt', 'rb') as f: 58 | r = await post(httpbin_both + '/post', data=f) 59 | assert r.ok 60 | assert r.json()['data'] == TEST_DATA 61 | 62 | 63 | @run_with_curio 64 | async def test_redirect_upload_file(): 65 | # FIXME: Maybe pytest-httpbin's bug, will cause Broken Pipe when 66 | # send the request to local httpbin. httpbin.org and gunicorn is OK. 67 | httpbin_both = 'http://httpbin.org' 68 | url = httpbin_both + '/redirect-to' 69 | files = {'file': open('tests/upload.txt', 'rb')} 70 | r = await post(url, files=files, params={'url': '/post', 'status_code': 307}) 71 | assert r.ok 72 | assert r.history[0].status_code == 307 73 | assert r.json()['files']['file'] == TEST_DATA 74 | 75 | 76 | class UnrewindableFile: 77 | 78 | def __iter__(self): 79 | yield b'hi' 80 | yield b'there' 81 | 82 | def __len__(self): 83 | return 7 84 | 85 | 86 | @run_with_curio 87 | async def test_redirect_upload_unrewindable(): 88 | url = 'http://httpbin.org' + '/redirect-to' 89 | with pytest.raises(UnrewindableBodyError): 90 | await post(url, data=UnrewindableFile(), params={'url': '/post', 'status_code': 307}) 91 | -------------------------------------------------------------------------------- /tests/test_verify_cert.py: -------------------------------------------------------------------------------- 1 | from utils import run_with_curio 2 | 3 | 4 | @run_with_curio 5 | async def test_verify(httpbin_secure): 6 | pass 7 | 8 | 9 | @run_with_curio 10 | async def test_cert(httpbin_secure): 11 | pass 12 | -------------------------------------------------------------------------------- /tests/upload.txt: -------------------------------------------------------------------------------- 1 | test data 2 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import curio 3 | 4 | 5 | def run_with_curio(f): 6 | @functools.wraps(f) 7 | def wrapper(*args, **kwargs): 8 | try: 9 | curio.run(f(*args, **kwargs)) 10 | except curio.TaskError as ex: 11 | raise ex.__cause__ from None 12 | return wrapper 13 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | # Tox (http://tox.testrun.org/) is a tool for running tests 2 | # in multiple virtualenvs. This configuration file will run the 3 | # test suite on all supported python versions. To use it, "pip install tox" 4 | # and then run "tox" from this directory. 
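The `run_with_curio` decorator in `tests/utils.py` above runs each coroutine test on a fresh curio kernel and re-raises the original exception out of `curio.TaskError`, keeping pytest tracebacks readable. A hedged sketch of a new test in the same style; the `/stream/3` endpoint is illustrative:

from curio.meta import finalize
from curequests import get
from utils import run_with_curio


@run_with_curio
async def test_stream_lines(httpbin):
    r = await get(httpbin + '/stream/3', stream=True)
    lines = []
    # close the async generator deterministically, as the response tests do
    async with finalize(r.iter_lines()) as gen:
        async for line in gen:
            lines.append(line)
    assert r.status_code == 200
    assert lines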
5 | 6 | [tox] 7 | envlist = py36,py37 8 | skip_missing_interpreters = true 9 | 10 | [testenv] 11 | commands = inv test 12 | deps = -rrequirements.txt 13 | --------------------------------------------------------------------------------
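Taken together, the pieces above keep the familiar requests surface on top of curio. A hedged end-to-end sketch of a multipart upload, matching the tuple forms accepted by `CuPreparedRequest.prepare_body` in `curequests/models.py`; the URL, field names, and tag value are illustrative:

import curio
from curequests import post


async def main():
    files = {
        # (filename, fileobj, content_type): file objects are streamed in 16 KB chunks
        'report': ('report.txt', open('tests/upload.txt', 'rb'), 'text/plain'),
        # (filename, str): string content is sent inline
        'note': ('note.txt', 'inline text content'),
    }
    r = await post('https://httpbin.org/post', files=files, data={'tag': 'demo'})
    assert r.status_code == 200
    print(sorted(r.json()['files']))


curio.run(main)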