├── .gitignore
├── .pre-commit-config.yaml
├── LICENSE
├── README.md
├── aiohttp_scraper
│   ├── __init__.py
│   ├── exceptions.py
│   ├── proxies.py
│   ├── scripts
│   │   ├── .gitignore
│   │   └── __init__.py
│   ├── session.py
│   └── user_agents.py
├── poetry.lock
└── pyproject.toml

/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | 
6 | # C extensions
7 | *.so
8 | 
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 | 
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 | 
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 | 
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 | 
50 | # Translations
51 | *.mo
52 | *.pot
53 | 
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 | 
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 | 
63 | # Scrapy stuff:
64 | .scrapy
65 | 
66 | # Sphinx documentation
67 | docs/_build/
68 | 
69 | # PyBuilder
70 | target/
71 | 
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 | 
75 | # pyenv
76 | .python-version
77 | 
78 | # celery beat schedule file
79 | celerybeat-schedule
80 | 
81 | # SageMath parsed files
82 | *.sage.py
83 | 
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 | 
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 | 
97 | # Rope project settings
98 | .ropeproject
99 | 
100 | # mkdocs documentation
101 | /site
102 | 
103 | # mypy
104 | .mypy_cache/
105 | 
106 | .idea
107 | 
108 | chromedriver
109 | .DS_Store
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 |   - repo: https://github.com/ambv/black
3 |     rev: stable
4 |     hooks:
5 |       - id: black
6 |         language_version: python3.7
7 |   - repo: https://github.com/pre-commit/pre-commit-hooks
8 |     rev: v1.2.3
9 |     hooks:
10 |       - id: flake8
11 |         args: ["--ignore=E203,W503,E501"]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2019 Johannes Gontrum
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # AIOHTTP Scraper
2 | ### A robust asynchronous web scraping client
3 | 
4 | Because scraping is messy.
5 | 
6 | ---
7 | 
8 | ## Features
9 | 
10 | ### ScraperSession
11 | - Drop-in replacement for aiohttp's `ClientSession`
12 | - Catches exceptions raised by a request
13 | - Retries failed requests a configurable number of times
14 | - Waits between retries with exponential backoff
15 | - Validates the MIME type of the response
16 | - Helper methods for fetching HTML and JSON
17 | - Sends a random User-Agent header
18 | 
19 | ### Proxies
20 | - Manages a pool of rotating proxies
21 | - Least-used ("LeastConn"-style) proxy selection
22 | - Uses a moving time window to throttle requests per domain and proxy
23 | - Deactivates a proxy for a few minutes after it returns a 429 response
24 | - Redis backend for persistence
25 | 
26 | ---
27 | 
28 | ## Installation
29 | Requires Python 3.7 or newer.
30 | ```bash
31 | pip install aiohttp-scraper
32 | ```
33 | 
34 | ## Usage
35 | ```python
36 | from aiohttp_scraper import ScraperSession
37 | 
38 | # Inside a coroutine:
39 | async with ScraperSession() as session:
40 |     resp = await session.get_json(some_url)
41 | ```
42 | 
43 | ```python
44 | from aiohttp_scraper import ScraperSession
45 | from aiohttp_scraper import Proxies
46 | 
47 | proxies = Proxies(
48 |     proxies=[
49 |         "192.0.2.12:1234",
50 |         "192.0.2.12:1237",
51 |         # ...
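52 |         # Plain "host:port" entries are assumed here; Proxy prepends
53 |         # "http://" automatically when no scheme is given.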
54 |     ],
55 |     redis_uri="redis://localhost:6379",
56 |     window_size_in_minutes=5,
57 |     max_requests_per_window=300,
58 |     redis_kwargs={},  # e.g., for setting authentication
59 | )
60 | 
61 | async with ScraperSession(proxies=proxies) as session:
62 |     resp = await session.get_json(some_url)
63 | ```
--------------------------------------------------------------------------------
/aiohttp_scraper/__init__.py:
--------------------------------------------------------------------------------
1 | from aiohttp_scraper.proxies import Proxies  # noqa: F401
2 | from aiohttp_scraper.session import ScraperSession  # noqa: F401
--------------------------------------------------------------------------------
/aiohttp_scraper/exceptions.py:
--------------------------------------------------------------------------------
1 | class Unsuccessful(Exception):
2 |     pass
3 | 
4 | 
5 | class AllRetriesFailed(Exception):
6 |     pass
--------------------------------------------------------------------------------
/aiohttp_scraper/proxies.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import datetime
3 | import json
4 | from random import shuffle
5 | from typing import List, Optional, Tuple
6 | 
7 | import aioredis
8 | from aioredis import Redis
9 | import tldextract
10 | 
11 | 
12 | class Proxy:
13 |     def __init__(
14 |         self,
15 |         url: str,
16 |         window_size_in_minutes: int = 5,
17 |         max_requests_per_window: int = 100,
18 |     ):
19 |         if url.startswith("http"):  # accepts both http:// and https:// URLs
20 |             self._url = url
21 |         else:
22 |             self._url = "http://" + url
23 | 
24 |         self.window_size_in_minutes = window_size_in_minutes
25 |         self.max_requests_per_window = max_requests_per_window
26 | 
27 |         self.requests: List[datetime.datetime] = []
28 |         self.status_codes: List[Tuple[int, datetime.datetime]] = []
29 |         self.wait_until: Optional[datetime.datetime] = None
30 | 
31 |     @property
32 |     def _clean_url(self) -> str:
33 |         return self._url.replace("https://", "").replace("http://", "").replace(":", "_")
34 | 
35 |     @staticmethod
36 |     def _decode_status(status: str) -> Tuple[int, datetime.datetime]:
37 |         obj = json.loads(status)
38 |         return int(obj["status"]), datetime.datetime.fromisoformat(obj["created_at"])
39 | 
40 |     @staticmethod
41 |     def _encode_status(status: Tuple[int, datetime.datetime]) -> str:
42 |         return json.dumps({"status": status[0], "created_at": status[1].isoformat()})
43 | 
44 |     async def get_number_of_free_slots(self, domain: str, redis: Redis) -> int:
45 |         recent_requests, status_code = await asyncio.gather(
46 |             redis.keys(f"scrape_proxy:{domain}:{self._clean_url}:requests:*"),
47 |             redis.lindex(
48 |                 key=f"scrape_proxy:{domain}:{self._clean_url}:status_codes", index=0
49 |             ),
50 |         )
51 | 
52 |         num_free_slots = self.max_requests_per_window - len(recent_requests)
53 | 
54 |         if status_code:
55 |             last_status, last_timestamp = self._decode_status(status_code)
56 |             window_end = datetime.datetime.utcnow() - datetime.timedelta(
57 |                 minutes=self.window_size_in_minutes
58 |             )
59 | 
60 |             if last_status == 429 and last_timestamp > window_end:
61 |                 num_free_slots = -1
62 | 
63 |         return num_free_slots
64 | 
65 |     async def get_url(self, domain: str, redis: Redis) -> str:
66 |         now = datetime.datetime.utcnow().isoformat().replace(":", "-")
67 |         await redis.set(
68 |             key=f"scrape_proxy:{domain}:{self._clean_url}:requests:{now}",
69 |             value=datetime.datetime.utcnow().isoformat(),
70 |         )
71 |         await redis.pexpire(
72 |             key=f"scrape_proxy:{domain}:{self._clean_url}:requests:{now}",
73 |             timeout=self.window_size_in_minutes *
60 * 1000, 74 | ) 75 | 76 | return self._url 77 | 78 | async def register_status_code(self, status_code: int, domain: str, redis: Redis): 79 | await redis.lpush( 80 | key=f"scrape_proxy:{domain}:{self._clean_url}:status_codes", 81 | value=self._encode_status((status_code, datetime.datetime.utcnow())), 82 | ) 83 | 84 | 85 | class Proxies: 86 | def __init__( 87 | self, 88 | proxies: List[str], 89 | redis_uri: str, 90 | window_size_in_minutes: int = 5, 91 | max_requests_per_window: int = 100, 92 | redis_kwargs: Optional[dict] = None, 93 | ): 94 | self._proxy_urls = proxies 95 | self._redis_client = None 96 | 97 | self._proxies = [ 98 | Proxy(url, window_size_in_minutes, max_requests_per_window) 99 | for url in self._proxy_urls 100 | ] 101 | 102 | self.redis_uri = redis_uri 103 | self.redis_kwargs = redis_kwargs 104 | 105 | self.window_size_in_minutes = window_size_in_minutes 106 | self.max_requests_per_window = max_requests_per_window 107 | 108 | async def setup(self): 109 | self._redis_client = await aioredis.create_redis_pool( 110 | address=self.redis_uri, **(self.redis_kwargs or {}) 111 | ) 112 | await self.cleanup() 113 | 114 | async def select_proxy(self, url: str) -> str: 115 | if not self._redis_client: 116 | await self.setup() 117 | await self.cleanup() 118 | 119 | domain = tldextract.extract(url).domain 120 | 121 | while True: 122 | free_slots = await asyncio.gather( 123 | *[ 124 | proxy.get_number_of_free_slots(domain, self._redis_client) 125 | for proxy in self._proxies 126 | ] 127 | ) 128 | 129 | slots_and_proxies = list(zip(free_slots, self._proxies)) 130 | shuffle(slots_and_proxies) 131 | 132 | proxies = sorted(slots_and_proxies, key=lambda d: d[0], reverse=True) 133 | 134 | if proxies[0][0] > 0: 135 | return await proxies[0][1].get_url(domain, self._redis_client) 136 | else: 137 | # No proxy available right now. Wait. 
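138 |                 # Sleep briefly, then loop: every proxy's free slots are
139 |                 # re-checked on the next pass until one has capacity again.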
140 |                 await asyncio.sleep(5)
141 | 
142 |     async def register_status_code(self, url: str, status_code: int, proxy_url: str):
143 |         domain = tldextract.extract(url).domain
144 | 
145 |         for proxy in self._proxies:
146 |             if proxy._url == proxy_url:
147 |                 await proxy.register_status_code(
148 |                     status_code, domain, self._redis_client
149 |                 )
150 |                 break
151 | 
152 |     async def cleanup(self):
153 |         keys = await self._redis_client.keys("scrape_proxy:*requests*")
154 |         timestamps = [
155 |             datetime.datetime.strptime(
156 |                 k.decode().split(":")[-1], "%Y-%m-%dT%H-%M-%S.%f"
157 |             )
158 |             for k in keys
159 |         ]
160 | 
161 |         window_end = datetime.datetime.utcnow() - datetime.timedelta(
162 |             minutes=self.window_size_in_minutes
163 |         )
164 | 
165 |         invalid_timestamps = [
166 |             key for key, ts in zip(keys, timestamps) if ts < window_end
167 |         ]
168 | 
169 |         if invalid_timestamps:
170 |             await self._redis_client.delete(*invalid_timestamps)
--------------------------------------------------------------------------------
/aiohttp_scraper/scripts/.gitignore:
--------------------------------------------------------------------------------
1 | *.py
--------------------------------------------------------------------------------
/aiohttp_scraper/scripts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jgontrum/aiohttp-scraper/d89f5d99ef045cda91972fa516caa00d589dfd7f/aiohttp_scraper/scripts/__init__.py
--------------------------------------------------------------------------------
/aiohttp_scraper/session.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import math
3 | import random
4 | from typing import List, Optional
5 | 
6 | from aiohttp import ClientSession, ClientResponse
7 | from aiohttp.hdrs import METH_GET
8 | 
9 | from aiohttp_scraper.exceptions import Unsuccessful, AllRetriesFailed
10 | from aiohttp_scraper.proxies import Proxies
11 | from aiohttp_scraper.user_agents import USER_AGENTS
12 | 
13 | 
14 | class ScraperSession(ClientSession):
15 |     def __init__(
16 |         self,
17 |         *args,
18 |         proxies: Optional[Proxies] = None,
19 |         user_agents: Optional[List[str]] = None,
20 |         use_random_user_agent: bool = True,
21 |         **kwargs,
22 |     ):
23 |         super().__init__(*args, **kwargs)
24 |         self.proxies = proxies
25 |         self.use_random_user_agent = use_random_user_agent or bool(user_agents)
26 |         self.user_agents = user_agents or USER_AGENTS
27 | 
28 |     async def get_json(self, url: str, **kwargs) -> dict:
29 |         return await (
30 |             await self._request(
31 |                 METH_GET,
32 |                 url,
33 |                 expect_json=True,
34 |                 expected_mime_type="application/json",
35 |                 **kwargs,
36 |             )
37 |         ).json()
38 | 
39 |     async def get_html(self, url: str, **kwargs) -> str:
40 |         return await (
41 |             await self._request(METH_GET, url, expected_mime_type="text/html", **kwargs)
42 |         ).text()
43 | 
44 |     async def _request(
45 |         self,
46 |         *args,
47 |         retries: int = 5,
48 |         exponential_backoff: bool = True,
49 |         max_backoff_delay: float = 300.0,
50 |         start_backoff_delay: float = 15.0,
51 |         expect_json: bool = False,
52 |         expected_mime_type: Optional[str] = None,
53 |         **kwargs,
54 |     ) -> ClientResponse:
55 |         num_tries = 0
56 |         stats = []
57 | 
58 |         while retries > 0:
59 |             if self.use_random_user_agent:
60 |                 kwargs.setdefault("headers", {})["user-agent"] = random.choice(
61 |                     self.user_agents
62 |                 )
63 | 
64 |             if self.proxies:
65 |                 kwargs["proxy"] = await self.proxies.select_proxy(url=args[1])
66 | 
67 |             try:
68 |                 response = await 
super()._request(*args, **kwargs)
69 | 
70 |                 if self.proxies:
71 |                     await self.proxies.register_status_code(
72 |                         url=args[1],
73 |                         status_code=response.status,
74 |                         proxy_url=kwargs["proxy"],
75 |                     )
76 | 
77 |                 if not 200 <= response.status < 300:
78 |                     raise Unsuccessful(f"Status code is {response.status}")
79 | 
80 |                 if expected_mime_type:
81 |                     content_type = response.headers.get("content-type", "")
82 |                     success = expected_mime_type.lower() in content_type.lower()
83 | 
84 |                     if not success:
85 |                         raise Unsuccessful(
86 |                             f"MIME type does not match. (Expected '{expected_mime_type}', got '{content_type}')."
87 |                         )
88 | 
89 |                 if expect_json:
90 |                     try:
91 |                         await response.json()
92 |                     except Exception as e:
93 |                         raise Unsuccessful(f"Cannot parse JSON: {e}")
94 | 
95 |                 if not expect_json:
96 |                     if not response or not (await response.text()):
97 |                         raise Unsuccessful("Empty response.")
98 | 
99 |                 return response
100 | 
101 |             except Exception as e:
102 |                 stats.append(e)
103 | 
104 |             retries -= 1
105 | 
106 |             if retries > 0:
107 |                 if not num_tries or not exponential_backoff:
108 |                     delay = start_backoff_delay
109 |                 else:
110 |                     delay = min(start_backoff_delay * 2 ** num_tries, max_backoff_delay)
111 | 
112 |                 # Add up to ±20% jitter so concurrent retries don't synchronize.
113 |                 rnd = math.floor(delay * 0.2)
114 |                 delay += random.randint(-rnd, rnd)
115 | 
116 |                 await asyncio.sleep(delay)
117 | 
118 |             num_tries += 1
119 | 
120 |         raise AllRetriesFailed(f"Errors: [{', '.join([str(e) for e in stats])}]")
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | [[package]]
2 | category = "main"
3 | description = "Simple DNS resolver for asyncio"
4 | name = "aiodns"
5 | optional = false
6 | python-versions = "*"
7 | version = "2.0.0"
8 | 
9 | [package.dependencies]
10 | pycares = ">=3.0.0"
11 | 
12 | [[package]]
13 | category = "main"
14 | description = "Async http client/server framework (asyncio)"
15 | name = "aiohttp"
16 | optional = false
17 | python-versions = ">=3.5.3"
18 | version = "3.6.1"
19 | 
20 | [package.dependencies]
21 | async-timeout = ">=3.0,<4.0"
22 | attrs = ">=17.3.0"
23 | chardet = ">=2.0,<4.0"
24 | multidict = ">=4.5,<5.0"
25 | yarl = ">=1.0,<2.0"
26 | 
27 | [package.extras]
28 | speedups = ["aiodns", "brotlipy", "cchardet"]
29 | 
30 | [[package]]
31 | category = "main"
32 | description = "asyncio (PEP 3156) Redis support"
33 | name = "aioredis"
34 | optional = false
35 | python-versions = "*"
36 | version = "1.2.0"
37 | 
38 | [package.dependencies]
39 | async-timeout = "*"
40 | hiredis = "*"
41 | 
42 | [[package]]
43 | category = "dev"
44 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
45 | name = "appdirs"
46 | optional = false
47 | python-versions = "*"
48 | version = "1.4.3"
49 | 
50 | [[package]]
51 | category = "dev"
52 | description = "A few extensions to pyyaml."
53 | name = "aspy.yaml" 54 | optional = false 55 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 56 | version = "1.3.0" 57 | 58 | [package.dependencies] 59 | pyyaml = "*" 60 | 61 | [[package]] 62 | category = "main" 63 | description = "Timeout context manager for asyncio programs" 64 | name = "async-timeout" 65 | optional = false 66 | python-versions = ">=3.5.3" 67 | version = "3.0.1" 68 | 69 | [[package]] 70 | category = "main" 71 | description = "Classes Without Boilerplate" 72 | name = "attrs" 73 | optional = false 74 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 75 | version = "19.1.0" 76 | 77 | [package.extras] 78 | dev = ["coverage", "hypothesis", "pympler", "pytest", "six", "zope.interface", "sphinx", "pre-commit"] 79 | docs = ["sphinx", "zope.interface"] 80 | tests = ["coverage", "hypothesis", "pympler", "pytest", "six", "zope.interface"] 81 | 82 | [[package]] 83 | category = "dev" 84 | description = "The uncompromising code formatter." 85 | name = "black" 86 | optional = false 87 | python-versions = ">=3.6" 88 | version = "18.9b0" 89 | 90 | [package.dependencies] 91 | appdirs = "*" 92 | attrs = ">=17.4.0" 93 | click = ">=6.5" 94 | toml = ">=0.9.4" 95 | 96 | [package.extras] 97 | d = ["aiohttp (>=3.3.2)"] 98 | 99 | [[package]] 100 | category = "main" 101 | description = "Python package for providing Mozilla's CA Bundle." 102 | name = "certifi" 103 | optional = false 104 | python-versions = "*" 105 | version = "2019.9.11" 106 | 107 | [[package]] 108 | category = "main" 109 | description = "Foreign Function Interface for Python calling C code." 110 | name = "cffi" 111 | optional = false 112 | python-versions = "*" 113 | version = "1.12.3" 114 | 115 | [package.dependencies] 116 | pycparser = "*" 117 | 118 | [[package]] 119 | category = "dev" 120 | description = "Validate configuration and produce human readable error messages." 121 | name = "cfgv" 122 | optional = false 123 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 124 | version = "2.0.1" 125 | 126 | [package.dependencies] 127 | six = "*" 128 | 129 | [[package]] 130 | category = "main" 131 | description = "Universal encoding detector for Python 2 and 3" 132 | name = "chardet" 133 | optional = false 134 | python-versions = "*" 135 | version = "3.0.4" 136 | 137 | [[package]] 138 | category = "dev" 139 | description = "Composable command line interface toolkit" 140 | name = "click" 141 | optional = false 142 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 143 | version = "7.0" 144 | 145 | [[package]] 146 | category = "dev" 147 | description = "Discover and load entry points from installed packages." 
148 | name = "entrypoints" 149 | optional = false 150 | python-versions = ">=2.7" 151 | version = "0.3" 152 | 153 | [[package]] 154 | category = "dev" 155 | description = "the modular source code checker: pep8, pyflakes and co" 156 | name = "flake8" 157 | optional = false 158 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 159 | version = "3.7.8" 160 | 161 | [package.dependencies] 162 | entrypoints = ">=0.3.0,<0.4.0" 163 | mccabe = ">=0.6.0,<0.7.0" 164 | pycodestyle = ">=2.5.0,<2.6.0" 165 | pyflakes = ">=2.1.0,<2.2.0" 166 | 167 | [[package]] 168 | category = "main" 169 | description = "Python wrapper for hiredis" 170 | name = "hiredis" 171 | optional = false 172 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 173 | version = "1.0.0" 174 | 175 | [[package]] 176 | category = "dev" 177 | description = "File identification library for Python" 178 | name = "identify" 179 | optional = false 180 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 181 | version = "1.4.7" 182 | 183 | [package.extras] 184 | license = ["editdistance"] 185 | 186 | [[package]] 187 | category = "main" 188 | description = "Internationalized Domain Names in Applications (IDNA)" 189 | name = "idna" 190 | optional = false 191 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 192 | version = "2.8" 193 | 194 | [[package]] 195 | category = "dev" 196 | description = "Read metadata from Python packages" 197 | name = "importlib-metadata" 198 | optional = false 199 | python-versions = ">=2.7,!=3.0,!=3.1,!=3.2,!=3.3" 200 | version = "0.23" 201 | 202 | [package.dependencies] 203 | zipp = ">=0.5" 204 | 205 | [package.extras] 206 | docs = ["sphinx", "rst.linker"] 207 | testing = ["packaging", "importlib-resources"] 208 | 209 | [[package]] 210 | category = "dev" 211 | description = "McCabe checker, plugin for flake8" 212 | name = "mccabe" 213 | optional = false 214 | python-versions = "*" 215 | version = "0.6.1" 216 | 217 | [[package]] 218 | category = "dev" 219 | description = "More routines for operating on iterables, beyond itertools" 220 | name = "more-itertools" 221 | optional = false 222 | python-versions = ">=3.4" 223 | version = "7.2.0" 224 | 225 | [[package]] 226 | category = "main" 227 | description = "multidict implementation" 228 | name = "multidict" 229 | optional = false 230 | python-versions = ">=3.4.1" 231 | version = "4.5.2" 232 | 233 | [[package]] 234 | category = "dev" 235 | description = "Node.js virtual environment builder" 236 | name = "nodeenv" 237 | optional = false 238 | python-versions = "*" 239 | version = "1.3.3" 240 | 241 | [[package]] 242 | category = "dev" 243 | description = "A framework for managing and maintaining multi-language pre-commit hooks." 
244 | name = "pre-commit" 245 | optional = false 246 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 247 | version = "1.18.3" 248 | 249 | [package.dependencies] 250 | "aspy.yaml" = "*" 251 | cfgv = ">=2.0.0" 252 | identify = ">=1.0.0" 253 | importlib-metadata = "*" 254 | nodeenv = ">=0.11.1" 255 | pyyaml = "*" 256 | six = "*" 257 | toml = "*" 258 | virtualenv = ">=15.2" 259 | 260 | [[package]] 261 | category = "main" 262 | description = "Python interface for c-ares" 263 | name = "pycares" 264 | optional = false 265 | python-versions = "*" 266 | version = "3.0.0" 267 | 268 | [package.dependencies] 269 | cffi = ">=1.5.0" 270 | 271 | [package.extras] 272 | idna = ["idna (>=2.1)"] 273 | 274 | [[package]] 275 | category = "dev" 276 | description = "Python style guide checker" 277 | name = "pycodestyle" 278 | optional = false 279 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 280 | version = "2.5.0" 281 | 282 | [[package]] 283 | category = "main" 284 | description = "C parser in Python" 285 | name = "pycparser" 286 | optional = false 287 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 288 | version = "2.19" 289 | 290 | [[package]] 291 | category = "dev" 292 | description = "passive checker of Python programs" 293 | name = "pyflakes" 294 | optional = false 295 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 296 | version = "2.1.1" 297 | 298 | [[package]] 299 | category = "dev" 300 | description = "YAML parser and emitter for Python" 301 | name = "pyyaml" 302 | optional = false 303 | python-versions = "*" 304 | version = "5.1.2" 305 | 306 | [[package]] 307 | category = "main" 308 | description = "Python HTTP for Humans." 309 | name = "requests" 310 | optional = false 311 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 312 | version = "2.22.0" 313 | 314 | [package.dependencies] 315 | certifi = ">=2017.4.17" 316 | chardet = ">=3.0.2,<3.1.0" 317 | idna = ">=2.5,<2.9" 318 | urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" 319 | 320 | [package.extras] 321 | security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)"] 322 | socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] 323 | 324 | [[package]] 325 | category = "main" 326 | description = "File transport adapter for Requests" 327 | name = "requests-file" 328 | optional = false 329 | python-versions = "*" 330 | version = "1.4.3" 331 | 332 | [package.dependencies] 333 | requests = ">=1.0.0" 334 | six = "*" 335 | 336 | [[package]] 337 | category = "main" 338 | description = "Python 2 and 3 compatibility utilities" 339 | name = "six" 340 | optional = false 341 | python-versions = ">=2.6, !=3.0.*, !=3.1.*" 342 | version = "1.12.0" 343 | 344 | [[package]] 345 | category = "main" 346 | description = "Accurately separate the TLD from the registered domain and subdomains of a URL, using the Public Suffix List. By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." 
347 | name = "tldextract" 348 | optional = false 349 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 350 | version = "2.2.1" 351 | 352 | [package.dependencies] 353 | idna = "*" 354 | requests = ">=2.1.0" 355 | requests-file = ">=1.4" 356 | setuptools = "*" 357 | 358 | [[package]] 359 | category = "dev" 360 | description = "Python Library for Tom's Obvious, Minimal Language" 361 | name = "toml" 362 | optional = false 363 | python-versions = "*" 364 | version = "0.10.0" 365 | 366 | [[package]] 367 | category = "main" 368 | description = "HTTP library with thread-safe connection pooling, file post, and more." 369 | name = "urllib3" 370 | optional = false 371 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" 372 | version = "1.25.5" 373 | 374 | [package.extras] 375 | brotli = ["brotlipy (>=0.6.0)"] 376 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 377 | socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] 378 | 379 | [[package]] 380 | category = "dev" 381 | description = "Virtual Python Environment builder" 382 | name = "virtualenv" 383 | optional = false 384 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 385 | version = "16.7.5" 386 | 387 | [package.extras] 388 | docs = ["sphinx (>=1.8.0,<2)", "towncrier (>=18.5.0)", "sphinx-rtd-theme (>=0.4.2,<1)"] 389 | testing = ["pytest (>=4.0.0,<5)", "coverage (>=4.5.0,<5)", "pytest-timeout (>=1.3.0,<2)", "six (>=1.10.0,<2)", "pytest-xdist", "pytest-localserver", "pypiserver", "mock", "xonsh"] 390 | 391 | [[package]] 392 | category = "main" 393 | description = "Yet another URL library" 394 | name = "yarl" 395 | optional = false 396 | python-versions = ">=3.5.3" 397 | version = "1.3.0" 398 | 399 | [package.dependencies] 400 | idna = ">=2.0" 401 | multidict = ">=4.0" 402 | 403 | [[package]] 404 | category = "dev" 405 | description = "Backport of pathlib-compatible object wrapper for zip files" 406 | name = "zipp" 407 | optional = false 408 | python-versions = ">=2.7" 409 | version = "0.6.0" 410 | 411 | [package.dependencies] 412 | more-itertools = "*" 413 | 414 | [package.extras] 415 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] 416 | testing = ["pathlib2", "contextlib2", "unittest2"] 417 | 418 | [metadata] 419 | content-hash = "ae647dfcb32741c0c277104b41820c799784904bc8b0edd72c2e245f040c9644" 420 | python-versions = "^3.7" 421 | 422 | [metadata.hashes] 423 | aiodns = ["815fdef4607474295d68da46978a54481dd1e7be153c7d60f9e72773cd38d77d", "aaa5ac584f40fe778013df0aa6544bf157799bd3f608364b451840ed2c8688de"] 424 | aiohttp = ["022c400e30848b1994236e31fb38db1dc4b551efe049f737cbac690ab2cdf5c4", "10f9316ef068536dec0b9f09531fa1cb6bfa8394f278022cb96e789c77811ad2", "2599b93fd5ba1120b3bd1366d67a7e26bd45b3d5d5548069e00b2fbef7f20ab0", "2a1c71e7fb8c50e60fb4c9bab8bd5cf7c07f91a6b27dc2556d7354cd2ebb3689", "6a19d34cc01414d94dd5a4466f8f397293fcb8929df8eeb8989119cc5ef928bb", "7aab39c2a61a5c6b15bb7e561218ef64770ca1fbf4cc1878c96e630e2b7cc3cc", "8959e28bc1b87542b0ee4a8302128f633bee296252f261bf03e118c4dff725f0", "89820f7c488f4e9b1f74371da33403181e11e006663ddf074317aacd690838a6", "ab761cf0f0b0b90887e276b4a7918f11e323f2228bbb30814bbd538c122028bf", "cc648ecaca79e37c6e26f370e802e7ae640a069913f661f66c0421084bef219a", "d6f26e80cd55ac88e1f0397fc8d547933225a5dc1add040e27788c2a028c64c6", "e7d6ae4a36bfe6d7f93c6f42a0bfa1659f7d011006cb6e8207c85ef5acdb2986", "fc55b1fec0e4cc1134ffb09ea3970783ee2906dc5dfd7cd16917913f2cfed65b"] 425 | aioredis = 
["84d62be729beb87118cf126c20b0e3f52d7a42bb7373dc5bcdd874f26f1f251a", "aee16aa5cb3f636cf8fa0e2b62d2f6abc90366e19b5c30e94a5471d834a55975"] 426 | appdirs = ["9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92", "d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e"] 427 | "aspy.yaml" = ["463372c043f70160a9ec950c3f1e4c3a82db5fca01d334b6bc89c7164d744bdc", "e7c742382eff2caed61f87a39d13f99109088e5e93f04d76eb8d4b28aa143f45"] 428 | async-timeout = ["0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f", "4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"] 429 | attrs = ["69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", "f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"] 430 | black = ["817243426042db1d36617910df579a54f1afd659adb96fc5032fcf4b36209739", "e030a9a28f542debc08acceb273f228ac422798e5215ba2a791a6ddeaaca22a5"] 431 | certifi = ["e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50", "fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef"] 432 | cffi = ["041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774", "046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d", "066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90", "066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b", "2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63", "300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45", "34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25", "46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3", "4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b", "4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647", "4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016", "50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4", "55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb", "5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753", "59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7", "73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9", "a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f", "a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8", "a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f", "a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc", "ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42", "b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3", "d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909", "d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45", "dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d", "e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512", "e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff", "ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201"] 433 | cfgv = ["edb387943b665bf9c434f717bf630fa78aecd53d5900d2e05da6ad6048553144", "fbd93c9ab0a523bf7daec408f3be2ed99a980e20b2d19b50fc184ca6b820d289"] 434 | chardet = ["84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", "fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"] 435 | click = ["2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"] 436 | entrypoints = ["589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", 
"c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"] 437 | flake8 = ["19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548", "8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696"] 438 | hiredis = ["0124911115f2cb7deb4f8e221e109a53d3d718174b238a2c5e2162175a3929a5", "0656658d0448c2c82c4890ae933c2c2e51196101d3d06fc19cc92e062410c2fd", "09d284619f7142ddd7a4ffa94c12a0445e834737f4ce8739a737f2b1ca0f6142", "12299b7026e5dc22ed0ff603375c1bf583cf59adbb0e4d062df434e9140d72dd", "12fc6210f8dc3e9c8ce4b95e8f5db404b838dbdeb25bca41e33497de6d89334f", "197febe5e63c77f4ad19b36e15ed33152064dc606c8b7413c7a0ca3fd04672cc", "20e48289fbffb59a5ac7cc677fc02c2726c1da22488e5f7636b9feb9afde199f", "26bed296b92b88db02afe214aa1fefad7f9e8ba88a5a7c0e355b55c4b168d212", "321b19d2a21fd576111032fe7694d317de2c11b265ef775f2e3f22734a6b94c8", "32d5f2c461250f5fc7ccef647682651b1d9f69443f16c213d7fa5e183222b233", "36bfcc86715d109a5ef6edefd52b893de97d555cb5cb0e9cab83eb9665942ccc", "438ddfd1484e98110959dc4648c0ba22c3307c9c0ae7e2a856755067f9ce9cef", "66f17c1633b2fb967bf4165f7b3d369a1bdfe3537d3646cf9a7c208506c96c49", "94ab0fa3ac93ab36a5400c474439881d182b43fd38a2766d984470c57931ae88", "955f12da861f2608c181049f623bbb52851769e10639c4919cc586395b89813f", "b1fd831f96ce0f715e9356574f5184b840b59eb8901fc5f9124fedbe84ad2a59", "b3813c641494fca2eda66c32a2117816472a5a39b12f59f7887c6d17bdb8c77e", "bbc3ee8663024c82a1226a0d56ad882f42a2fd8c2999bf52d27bdd25f1320f4b", "bd12c2774b574f5b209196e25b03b5d62c7919bf69046bc7b955ebe84e0ec1fe", "c54d2b3d7a2206df35f3c1140ac20ca6faf7819ff92ea5be8bf4d1cbdb433216", "c7b0bcaf2353a2ad387dd8b5e1b5f55991adc3a7713ac3345a4ef0de58276690", "c9319a1503efb3b5a4ec13b2f8fae2c23610a645e999cb8954d330f0610b0f6d", "cbe5c0273224babe2ec77058643312d07aa5e8fed08901b3f7bccaa744c5728e", "cc884ea50185009d794b31314a144110efc76b71beb0a5827a8bff970ae6d248", "d1e2e751327781ad81df5a5a29d7c7b19ee0ebfbeddf037fd8df19ec1c06e18b", "d2ef58cece6cae4b354411df498350d836f10b814c8a890df0d8079aff30c518", "e97c953f08729900a5e740f1760305434d62db9f281ac351108d6c4b5bf51795", "fcdf2e10f56113e1cb4326dbca7bf7edbfdbd246cd6d7ec088688e5439129e2c"] 439 | identify = ["4f1fe9a59df4e80fcb0213086fcf502bc1765a01ea4fe8be48da3b65afd2a017", "d8919589bd2a5f99c66302fec0ef9027b12ae150b0b0213999ad3f695fc7296e"] 440 | idna = ["c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", "ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"] 441 | importlib-metadata = ["aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", "d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af"] 442 | mccabe = ["ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", "dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"] 443 | more-itertools = ["409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832", "92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"] 444 | multidict = ["024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f", "041e9442b11409be5e4fc8b6a97e4bcead758ab1e11768d1e69160bdde18acc3", "045b4dd0e5f6121e6f314d81759abd2c257db4634260abcfe0d3f7083c4908ef", "047c0a04e382ef8bd74b0de01407e8d8632d7d1b4db6f2561106af812a68741b", "068167c2d7bbeebd359665ac4fff756be5ffac9cda02375b5c5a7c4777038e73", "148ff60e0fffa2f5fad2eb25aae7bef23d8f3b8bdaf947a65cdbe84a978092bc", "1d1c77013a259971a72ddaa83b9f42c80a93ff12df6a4723be99d858fa30bee3", "1d48bc124a6b7a55006d97917f695effa9725d05abe8ee78fd60d6588b8344cd", "31dfa2fc323097f8ad7acd41aa38d7c614dd1960ac6681745b6da124093dc351", 
"34f82db7f80c49f38b032c5abb605c458bac997a6c3142e0d6c130be6fb2b941", "3d5dd8e5998fb4ace04789d1d008e2bb532de501218519d70bb672c4c5a2fc5d", "4a6ae52bd3ee41ee0f3acf4c60ceb3f44e0e3bc52ab7da1c2b2aa6703363a3d1", "4b02a3b2a2f01d0490dd39321c74273fed0568568ea0e7ea23e02bd1fb10a10b", "4b843f8e1dd6a3195679d9838eb4670222e8b8d01bc36c9894d6c3538316fa0a", "5de53a28f40ef3c4fd57aeab6b590c2c663de87a5af76136ced519923d3efbb3", "61b2b33ede821b94fa99ce0b09c9ece049c7067a33b279f343adfe35108a4ea7", "6a3a9b0f45fd75dc05d8e93dc21b18fc1670135ec9544d1ad4acbcf6b86781d0", "76ad8e4c69dadbb31bad17c16baee61c0d1a4a73bed2590b741b2e1a46d3edd0", "7ba19b777dc00194d1b473180d4ca89a054dd18de27d0ee2e42a103ec9b7d014", "7c1b7eab7a49aa96f3db1f716f0113a8a2e93c7375dd3d5d21c4941f1405c9c5", "7fc0eee3046041387cbace9314926aa48b681202f8897f8bff3809967a049036", "8ccd1c5fff1aa1427100ce188557fc31f1e0a383ad8ec42c559aabd4ff08802d", "8e08dd76de80539d613654915a2f5196dbccc67448df291e69a88712ea21e24a", "c18498c50c59263841862ea0501da9f2b3659c00db54abfbf823a80787fde8ce", "c49db89d602c24928e68c0d510f4fcf8989d77defd01c973d6cbe27e684833b1", "ce20044d0317649ddbb4e54dab3c1bcc7483c78c27d3f58ab3d0c7e6bc60d26a", "d1071414dd06ca2eafa90c85a079169bfeb0e5f57fd0b45d44c092546fcd6fd9", "d3be11ac43ab1a3e979dac80843b42226d5d3cccd3986f2e03152720a4297cd7", "db603a1c235d110c860d5f39988ebc8218ee028f07a7cbc056ba6424372ca31b"] 445 | nodeenv = ["ad8259494cf1c9034539f6cced78a1da4840a4b157e23640bc4a0c0546b0cb7a"] 446 | pre-commit = ["1d3c0587bda7c4e537a46c27f2c84aa006acc18facf9970bf947df596ce91f3f", "fa78ff96e8e9ac94c748388597693f18b041a181c94a4f039ad20f45287ba44a"] 447 | pycares = ["2ca080db265ea238dc45f997f94effb62b979a617569889e265c26a839ed6305", "6f79c6afb6ce603009db2042fddc2e348ad093ece9784cbe2daa809499871a23", "70918d06eb0603016d37092a5f2c0228509eb4e6c5a3faacb4184f6ab7be7650", "755187d28d24a9ea63aa2b4c0638be31d65fbf7f0ce16d41261b9f8cb55a1b99", "7baa4b1f2146eb8423ff8303ebde3a20fb444a60db761fba0430d104fe35ddbf", "90b27d4df86395f465a171386bc341098d6d47b65944df46518814ae298f6cc6", "9e090dd6b2afa65cb51c133883b2bf2240fd0f717b130b0048714b33fb0f47ce", "a11b7d63c3718775f6e805d6464cb10943780395ab042c7e5a0a7a9f612735dd", "b253f5dcaa0ac7076b79388a3ac80dd8f3bd979108f813baade40d3a9b8bf0bd", "c7f4f65e44ba35e35ad3febc844270665bba21cfb0fb7d749434e705b556e087", "cdb342e6a254f035bd976d95807a2184038fc088d957a5104dcaab8be602c093", "cf08e164f8bfb83b9fe633feb56f2754fae6baefcea663593794fa0518f8f98c", "df9bc694cf03673878ea8ce674082c5acd134991d64d6c306d4bd61c0c1df98f"] 448 | pycodestyle = ["95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56", "e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"] 449 | pycparser = ["a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"] 450 | pyflakes = ["17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", "d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"] 451 | pyyaml = ["0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", "01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4", "5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8", "5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696", "7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34", "7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9", "87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73", "9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299", "a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b", 
"b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae", "b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681", "bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", "f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8"] 452 | requests = ["11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", "9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"] 453 | requests-file = ["75c175eed739270aec3c5279ffd74e6527dada275c5c0d76b5817e9c86bb7dea", "8f04aa6201bacda0567e7ac7f677f1499b0fc76b22140c54bc06edf1ba92e2fa"] 454 | six = ["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"] 455 | tldextract = ["2c1c5d9d454f79734b4f3da0d603856dd9f820753410a3e9abf0a0c9fde33e97", "b72bef6013de67c7fa181250bc2c2e089a994d259c09ca95a9771f2f97e29ed1"] 456 | toml = ["229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", "235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e", "f1db651f9657708513243e61e6cc67d101a39bad662eaa9b5546f789338e07a3"] 457 | urllib3 = ["2f3eadfea5d92bc7899e75b5968410b749a054b492d5a6379c1344a1481bc2cb", "9c6c593cb28f52075016307fc26b0a0f8e82bc7d1ff19aaaa959b91710a56c47"] 458 | virtualenv = ["680af46846662bb38c5504b78bad9ed9e4f3ba2d54f54ba42494fdf94337fe30", "f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2"] 459 | yarl = ["024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9", "2f3010703295fbe1aec51023740871e64bb9664c789cba5a6bdf404e93f7568f", "3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb", "3e2724eb9af5dc41648e5bb304fcf4891adc33258c6e14e2a7414ea32541e320", "5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842", "73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0", "7ab825726f2940c16d92aaec7d204cfc34ac26c0040da727cf8ba87255a33829", "b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310", "c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4", "c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8", "e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1"] 460 | zipp = ["3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", "f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"] 461 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "aiohttp-scraper" 3 | version = "0.1.4" 4 | description = "An asyncronous HTTP client built for web scraping." 5 | authors = ["Johannes Gontrum "] 6 | license = "MIT" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.7" 10 | aiohttp = "^3.6.1" 11 | aiodns = "^2.0.0" 12 | aioredis = "^1.2.0" 13 | tldextract = "^2.2.1" 14 | 15 | [tool.poetry.dev-dependencies] 16 | black = {version = "^18.3-alpha.4",allows-prereleases = true} 17 | pre-commit = "^1.17" 18 | flake8 = "^3.7" 19 | 20 | [build-system] 21 | requires = ["poetry>=0.12"] 22 | build-backend = "poetry.masonry.api" 23 | --------------------------------------------------------------------------------