├── .gitignore ├── upgrade_tree.sh ├── .pylintrc ├── itchiodl ├── __init__.py ├── bundle_tool │ └── __main__.py ├── login.py ├── bundle.py ├── utils.py ├── downloader │ └── __main__.py ├── library.py └── game.py ├── .github └── workflows │ ├── poetry-publish.yml │ └── lint.yml ├── pyproject.toml ├── LICENSE ├── Readme.md └── poetry.lock /.gitignore: -------------------------------------------------------------------------------- 1 | venv 2 | __pycache__ 3 | 4 | dist -------------------------------------------------------------------------------- /upgrade_tree.sh: -------------------------------------------------------------------------------- 1 | #! /bin/sh 2 | 3 | find "$1" -maxdepth 3 -mindepth 3 -type f -print0 | while IFS= read -r -d '' f; do 4 | md5sum "$f" | cut -d ' ' -f 1 >> "$f.md5"; 5 | done -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | jobs=4 3 | extension-pkg-allow-list=lxml 4 | 5 | [MESSAGES CONTROL] 6 | disable=invalid-name,unspecified-encoding,missing-module-docstring,too-many-instance-attributes,missing-timeout -------------------------------------------------------------------------------- /itchiodl/__init__.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=consider-using-from-import 2 | import itchiodl.utils as utils 3 | from .login import LoginWeb, LoginAPI 4 | from .bundle import Bundle 5 | from .library import Library 6 | from .game import Game 7 | -------------------------------------------------------------------------------- /.github/workflows/poetry-publish.yml: -------------------------------------------------------------------------------- 1 | name: Python package 2 | on: 3 | push: 4 | tags: 5 | - "v*.*.*" 6 | jobs: 7 | build: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v2 11 | - name: Build and publish to pypi 12 | uses: JRubics/poetry-publish@v1.10 13 | with: 14 | pypi_token: ${{ secrets.PYPI_TOKEN }} -------------------------------------------------------------------------------- /itchiodl/bundle_tool/__main__.py: -------------------------------------------------------------------------------- 1 | from getpass import getpass 2 | import itchiodl 3 | 4 | 5 | def main(): 6 | """CLI tool to add all games in a bundle to your library.""" 7 | 8 | user = input("Username: ") 9 | password = getpass("Password: ") 10 | 11 | l = itchiodl.LoginWeb(user, password) 12 | 13 | url = input("Bundle URL: ") 14 | b = itchiodl.Bundle(l, url) 15 | b.load_games() 16 | 17 | 18 | if __name__ == "__main__": 19 | main() 20 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Linting 2 | on: 3 | - push 4 | - pull_request 5 | jobs: 6 | black: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v2 10 | - uses: psf/black@stable 11 | pylint: 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - uses: actions/checkout@v2 16 | - name: Set up Python 3.10 17 | uses: actions/setup-python@v3 18 | with: 19 | python-version: '3.10' 20 | - name: Install dependencies 21 | run: | 22 | pip install poetry 23 | poetry install 24 | - name: Analysing the code with pylint 25 | run: poetry run pylint --rcfile=.pylintrc itchiodl/ itchiodl/downloader/ itchiodl/bundle_tool/ 26 | 
-------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "itchiodl" 3 | version = "2.3.0" 4 | description = "Python Scripts for downloading / archiving your itchio library" 5 | authors = ["Peter Taylor "] 6 | license = "MIT" 7 | readme = "Readme.md" 8 | 9 | [tool.poetry.dependencies] 10 | python = "^3.8" 11 | requests = "^2.28.1" 12 | beautifulsoup4 = "^4.10.0" 13 | clint = "^0.5.1" 14 | 15 | [tool.poetry.dev-dependencies] 16 | black = "^22.3.0" 17 | pylint = "^2.13.8" 18 | 19 | [build-system] 20 | requires = ["poetry-core>=1.0.0"] 21 | build-backend = "poetry.core.masonry.api" 22 | 23 | [tool.poetry.plugins."console_scripts"] 24 | "itch-download" = "itchiodl.downloader.__main__:main" 25 | "itch-load-bundle" = "itchiodl.bundle_tool.__main__:main" 26 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022-2023 Peter Taylor 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /itchiodl/login.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup as soup 2 | import requests 3 | 4 | 5 | warning = ( 6 | "Will print the response text (Please be careful as " 7 | + "this may contain personal data or allow others to login to your account):" 8 | ) 9 | 10 | 11 | def LoginWeb(user, password): 12 | """Login to itch.io using web scraping""" 13 | session = requests.Session() 14 | 15 | # GET the page first so we have a valid CSRF token value 16 | login1 = session.get("https://itch.io/login") 17 | s = soup(login1.text, "html.parser") 18 | csrf_token = s.find("input", {"name": "csrf_token"})["value"] 19 | 20 | # Now POST the login 21 | r = session.post( 22 | "https://itch.io/login", 23 | {"username": user, "password": password, "csrf_token": csrf_token}, 24 | ) 25 | 26 | if r.status_code != 200: 27 | raise RuntimeError(f"Login failed with status code {r.status_code}") 28 | 29 | return session 30 | 31 | 32 | def LoginAPI(user, password): 33 | """Login to itch.io using API""" 34 | r = requests.post( 35 | "https://api.itch.io/login", 36 | {"username": user, "password": password, "source": "desktop"}, 37 | ) 38 | if r.status_code != 200: 39 | print(f"Error: {r.status_code} is not 200") 40 | print(warning) 41 | print(r.text) 42 | raise RuntimeError 43 | t = r.json() 44 | 45 | if not t["success"]: 46 | print("Error: success key is not true") 47 | print(warning) 48 | print(r.text) 49 | raise RuntimeError 50 | 51 | return t["key"]["key"] 52 | -------------------------------------------------------------------------------- /itchiodl/bundle.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup as soup 2 | 3 | 4 | class Bundle: 5 | """Bundle class, represents a bundle on itch.io""" 6 | 7 | def __init__(self, login, url): 8 | self.login = login 9 | self.url = url 10 | 11 | def load_games(self): 12 | """Load all games in the bundle via web scraping""" 13 | i = 1 14 | 15 | r = self.login.get(self.url) 16 | s = soup(r.text, "html.parser") 17 | pages = int(s.select("span.pager_label a")[-1].text) 18 | while i <= pages: 19 | if self.load_game(i): 20 | i += 1 21 | print(f"Processing Page {i} of {pages}") 22 | 23 | def load_game(self, i): 24 | """Load 1 game.
This will refresh the game afterwards, as the csrf token 25 | will update""" 26 | r = self.login.get(f"{self.url}?page={i}") 27 | s = soup(r.text, "html.parser") 28 | for g in s.select("div.game_row"): 29 | name = g.select("h2 a")[0].text 30 | if f := g.find("form"): 31 | print(f"Processing {name}") 32 | 33 | game_id = f.find("input", {"name": "game_id"})["value"] 34 | csrf_token = f.find("input", {"name": "csrf_token"})["value"] 35 | 36 | data = {"action": "claim", "game_id": game_id, "csrf_token": csrf_token} 37 | 38 | r = self.login.post(f"{self.url}?page={i}", data=data) 39 | return False 40 | # else: 41 | # print(f"Skipping {name} - Already in Library") 42 | return True 43 | -------------------------------------------------------------------------------- /itchiodl/utils.py: -------------------------------------------------------------------------------- 1 | import re 2 | import sys 3 | import hashlib 4 | import requests 5 | 6 | 7 | class NoDownloadError(Exception): 8 | """No download found exception""" 9 | 10 | 11 | def download(url, path, name, file): 12 | """Downloads a file from a url and saves it to a path.""" 13 | 14 | desc = f"{name} - {file}" 15 | print(f"Downloading {desc}") 16 | rsp = requests.get(url, stream=True) 17 | 18 | if ( 19 | rsp.headers.get("content-length") is None 20 | or rsp.headers.get("Content-Disposition") is None 21 | ): 22 | raise NoDownloadError("Http response is not a download, skipping") 23 | 24 | cd = rsp.headers.get("Content-Disposition") 25 | 26 | filename_re = re.search(r'filename="(.+)"', cd) 27 | if filename_re is None: 28 | filename = file 29 | else: 30 | filename = filename_re.group(1) 31 | 32 | with open(f"{path}/{filename}", "wb") as f: 33 | for chunk in rsp.iter_content(10240): 34 | f.write(chunk) 35 | 36 | print(f"Downloaded {filename}") 37 | return f"{path}/{filename}", True 38 | 39 | 40 | def clean_path(path): 41 | """Cleans a path on windows""" 42 | if sys.platform in ["win32", "cygwin", "msys"]: 43 | path_clean = re.sub(r"[<>:|?*\"\/\\]", "-", path) 44 | # This checks for strings that end in ... or similar, 45 | # weird corner case that affects fewer than 0.1% of titles 46 | path_clean = re.sub(r"(.)[.]\1+$", "-", path_clean) 47 | return path_clean 48 | return path 49 | 50 | 51 | def md5sum(path): 52 | """Returns the md5sum of a file (accepts a str or pathlib.Path)""" 53 | md5 = hashlib.md5() 54 | with open(path, "rb") as f: 55 | for chunk in iter(lambda: f.read(4096), b""): 56 | md5.update(chunk) 57 | return md5.hexdigest() 58 | -------------------------------------------------------------------------------- /itchiodl/downloader/__main__.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from getpass import getpass 3 | import re 4 | 5 | import itchiodl 6 | 7 | 8 | def main(): 9 | """CLI tool to download all games in your library.""" 10 | 11 | parser = argparse.ArgumentParser( 12 | prog="itch-download", description="Download / archive your itch.io library."
13 | ) 14 | 15 | parser.add_argument( 16 | "-k", "--api-key", help="Use API key instead of username/password" 17 | ) 18 | 19 | parser.add_argument( 20 | "-p", 21 | "--platform", 22 | help=( 23 | "Platform to download for (default: all), will accept values like 'windows', 'linux', " 24 | "'osx' and 'android'" 25 | ), 26 | ) 27 | 28 | parser.add_argument( 29 | "--human-folders", 30 | action="store_true", 31 | help=( 32 | "Download Folders are named based on the full text version of the title instead of " 33 | "the trimmed URL title" 34 | ), 35 | ) 36 | 37 | parser.add_argument( 38 | "-j", 39 | "--jobs", 40 | type=int, 41 | default=4, 42 | help="Number of concurrent downloads, defaults to 4", 43 | ) 44 | 45 | parser.add_argument( 46 | "--download-publisher", 47 | type=str, 48 | help="Download all games from a specific publisher", 49 | ) 50 | 51 | parser.add_argument( 52 | "--download-game", 53 | type=str, 54 | help="Download a specific game, should be in the format 'https://publisher.itch.io/game'", 55 | ) 56 | 57 | args = parser.parse_args() 58 | 59 | l = "" 60 | 61 | if not args.api_key: 62 | user = input("Username: ") 63 | password = getpass("Password: ") 64 | l = itchiodl.LoginAPI(user, password) 65 | else: 66 | l = args.api_key 67 | 68 | lib = itchiodl.Library(l, args.jobs) 69 | 70 | if args.download_publisher: 71 | lib.load_games(args.download_publisher) 72 | elif args.download_game: 73 | matches = re.match(r"https://(.+)\.itch\.io/(.+)", args.download_game) 74 | lib.load_game(matches.group(1), matches.group(2)) 75 | else: 76 | lib.load_owned_games() 77 | 78 | lib.download_library(args.platform) 79 | 80 | 81 | if __name__ == "__main__": 82 | main() 83 | -------------------------------------------------------------------------------- /Readme.md: -------------------------------------------------------------------------------- 1 | ## IMPORTANT NOTICE: up until 2022-03-09 the package was called itchio; it is now called itchiodl (to avoid PyPI conflicts) 2 | 3 | # Itchio Downloader Tool 4 | ## Install 5 | ```bash 6 | pip install itchiodl 7 | ``` 8 | ## Download All Games in your library 9 | **Please Note:** Having too many jobs may cause rate-limiting; in some testing, 8 jobs worked fine but 10 started giving errors. 10 | 11 | ```bash 12 | # via python 13 | python -m itchiodl.downloader 14 | 15 | # via setup-tools entry point 16 | itch-download 17 | ``` 18 | 19 | This uses the same API the itchio app uses to download the files. If you have 2FA enabled, generate an API key [here](https://itch.io/user/settings/api-keys) and run the following instead: 20 | 21 | ```bash 22 | # via python (with 4 concurrent downloads) 23 | python -m itchiodl.downloader --api-key=KEYHERE --jobs=4 24 | 25 | # via setup-tools entry point 26 | itch-download -k KEYHERE 27 | 28 | # download with multiple threads 29 | itch-download -k KEYHERE -j 4 30 | 31 | # only download osx or cross platform downloads 32 | itch-download -p osx 33 | 34 | # folder structure uses display names for users/publishers and game titles 35 | itch-download --human-folders 36 | 37 | ``` 38 | 39 | ## Add All Games in a bundle to your library 40 | 41 | ```bash 42 | # via python 43 | python -m itchiodl.bundle_tool 44 | 45 | # via setup-tools entry point 46 | itch-load-bundle 47 | ``` 48 | 49 | This is a bit of a bodge, but it works. It essentially goes through and clicks the "Download" link on every item on the bundle's page, which adds it to your itchio library. It does not download any files.
You will need the download page's URL (this will be in the bundle's email, and possibly your purchase history). It will not work with 2FA, and I'm unlikely to be able to fix it without making it far more complicated. 50 | 51 | 52 | ## Errors 53 | If a download fails, it will be reported in ```errors.txt``` in the same directory as your downloads. 54 | 55 | An example could look something like this: 56 | ```Cannot download game/asset: 57 | Publisher Name: 58 | Output File: // 59 | Request URL: 60 | Request Response Code: 404 61 | Error Reason: Not Found 62 | This game/asset has been skipped please download manually 63 | --------------------------------------------------------- 64 | ``` 65 | 66 | This is not a perfect solution, but it does prevent the whole process from crashing. 67 | -------------------------------------------------------------------------------- /itchiodl/library.py: -------------------------------------------------------------------------------- 1 | from concurrent.futures import ThreadPoolExecutor 2 | import functools 3 | import threading 4 | import requests 5 | from bs4 import BeautifulSoup 6 | 7 | from itchiodl.game import Game 8 | from itchiodl.utils import NoDownloadError 9 | 10 | 11 | class Library: 12 | """Representation of a user's game library""" 13 | 14 | def __init__(self, login, jobs=4): 15 | self.login = login 16 | self.games = [] 17 | self.jobs = jobs 18 | 19 | def load_game_page(self, page): 20 | """Load a page of games via the API""" 21 | print("Loading page", page) 22 | r = requests.get( 23 | f"https://api.itch.io/profile/owned-keys?page={page}", 24 | headers={"Authorization": self.login}, 25 | ) 26 | j = r.json() 27 | 28 | for s in j["owned_keys"]: 29 | self.games.append(Game(s)) 30 | 31 | return len(j["owned_keys"]) 32 | 33 | def load_owned_games(self): 34 | """Load all games in the library via the API""" 35 | page = 1 36 | while True: 37 | n = self.load_game_page(page) 38 | if n == 0: 39 | break 40 | page += 1 41 | 42 | def load_game(self, publisher, title): 43 | """Load a game by publisher and title""" 44 | rsp = requests.get( 45 | f"https://{publisher}.itch.io/{title}/data.json", 46 | headers={"Authorization": self.login}, 47 | ) 48 | j = rsp.json() 49 | game_id = j["id"] 50 | gsp = requests.get( 51 | f"https://api.itch.io/games/{game_id}/uploads", 52 | headers={"Authorization": self.login}, 53 | ) 54 | k = gsp.json() 55 | if k != {"uploads": {}}: 56 | self.games.append(Game(k)) 57 | return 58 | print(f"{title} is a purchased game.") 59 | i = 1 60 | while self.games == []: 61 | j = self.load_game_page(i) 62 | 63 | self.games = [ 64 | x 65 | for x in self.games 66 | if x.link == f"https://{publisher}.itch.io/{title}" 67 | ] 68 | 69 | if j == 0: 70 | break 71 | i += 1 72 | 73 | if self.games == []: 74 | print(f"Cannot find {title} in owned keys, you may not own it.") 75 | 76 | def load_games(self, publisher): 77 | """Load all games by publisher""" 78 | rsp = requests.get(f"https://{publisher}.itch.io") 79 | soup = BeautifulSoup(rsp.text, "html.parser") 80 | for link in soup.select("a.game_link"): 81 | game_id = link.get("data-label").split(":")[1] 82 | gsp = requests.get( 83 | f"https://api.itch.io/games/{game_id}", 84 | headers={"Authorization": self.login}, 85 | ) 86 | k = gsp.json() 87 | self.games.append(Game(k)) 88 | 89 | def download_library(self, platform=None): 90 | """Download all games in the library""" 91 | with ThreadPoolExecutor(max_workers=self.jobs) as executor: 92 | i = [0, 0] 93 | l = len(self.games) 94 | lock = threading.RLock() 95 | 
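# NOTE: i is a two-element list used as a mutable counter shared by the download threads: i[0] counts successful downloads (incremented under the lock), i[1] counts games skipped because of errors.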
96 | def dl(i, g): 97 | try: 98 | g.download(self.login, platform) 99 | with lock: 100 | i[0] += 1 101 | print(f"Downloaded {g.name} ({i[0]} of {l})") 102 | except NoDownloadError as e: 103 | print(e) 104 | i[1] += 1 105 | 106 | r = executor.map(functools.partial(dl, i), self.games) 107 | for _ in r: 108 | pass 109 | print(f"Downloaded {i[0]} Games, {i[1]} Errors") 110 | -------------------------------------------------------------------------------- /itchiodl/game.py: -------------------------------------------------------------------------------- 1 | import re 2 | import json 3 | import urllib 4 | import datetime 5 | from pathlib import Path 6 | from sys import argv 7 | import requests 8 | 9 | from itchiodl import utils 10 | 11 | 12 | class Game: 13 | """Representation of a game download""" 14 | 15 | def __init__(self, data): 16 | self.args = argv[1:] 17 | if "--human-folders" in self.args: 18 | self.humanFolders = True 19 | else: 20 | self.humanFolders = False 21 | 22 | self.data = data["game"] 23 | self.name = self.data["title"] 24 | self.publisher = self.data["user"]["username"] 25 | self.link = self.data["url"] 26 | if "game_id" in data: 27 | self.id = data["id"] 28 | self.game_id = data["game_id"] 29 | else: 30 | self.id = False 31 | self.game_id = self.data["id"] 32 | 33 | matches = re.match(r"https://(.+)\.itch\.io/(.+)", self.link) 34 | self.game_slug = matches.group(2) 35 | if self.humanFolders: 36 | self.game_slug = utils.clean_path(self.data["title"]) 37 | self.publisher_slug = self.data.get("user").get("display_name") 38 | # This Branch covers the case that the user has 39 | # not set a display name, and defaults to their username 40 | if not self.publisher_slug: 41 | self.publisher_slug = self.data.get("user").get("username") 42 | else: 43 | self.publisher_slug = matches.group(1) 44 | 45 | self.files = [] 46 | self.downloads = [] 47 | self.dir = ( 48 | Path(".") 49 | / utils.clean_path(self.publisher_slug) 50 | / utils.clean_path(self.game_slug) 51 | ) 52 | 53 | def load_downloads(self, token): 54 | """Load all downloads for this game""" 55 | self.downloads = [] 56 | if self.id: 57 | r = requests.get( 58 | f"https://api.itch.io/games/{self.game_id}/uploads?download_key_id={self.id}", 59 | headers={"Authorization": token}, 60 | ) 61 | else: 62 | r = requests.get( 63 | f"https://api.itch.io/games/{self.game_id}/uploads", 64 | headers={"Authorization": token}, 65 | ) 66 | j = r.json() 67 | for d in j["uploads"]: 68 | self.downloads.append(d) 69 | 70 | def download(self, token, platform): 71 | """Download a singular file""" 72 | print("Downloading", self.name) 73 | 74 | # if out_folder.with_suffix(".json").exists(): 75 | # print(f"Skipping Game {self.name}") 76 | # return 77 | 78 | self.load_downloads(token) 79 | 80 | self.dir.mkdir(parents=True, exist_ok=True) 81 | 82 | for d in self.downloads: 83 | if ( 84 | platform is not None 85 | and d["traits"] 86 | and f"p_{platform}" not in d["traits"] 87 | ): 88 | print(f"Skipping {self.name} for platform {d['traits']}") 89 | continue 90 | self.do_download(d, token) 91 | 92 | with self.dir.with_suffix(".json").open("w") as f: 93 | json.dump( 94 | { 95 | "name": self.name, 96 | "publisher": self.publisher, 97 | "link": self.link, 98 | "itch_id": self.id, 99 | "game_id": self.game_id, 100 | "itch_data": self.data, 101 | }, 102 | f, 103 | indent=2, 104 | ) 105 | 106 | def do_download(self, d, token): 107 | """Download a single file, checking for existing files""" 108 | print(f"Downloading {d['filename']}") 109 | 110 | filename = 
d["filename"] or d["display_name"] or d["id"] 111 | 112 | out_file = self.dir / filename 113 | 114 | if out_file.exists(): 115 | print(f"File Already Exists! {filename}") 116 | md5_file = out_file.with_suffix(".md5") 117 | if md5_file.exists(): 118 | with md5_file.open("r") as f: 119 | md5 = f.read().strip() 120 | if md5 == d["md5_hash"]: 121 | print(f"Skipping {self.name} - {filename}") 122 | return 123 | print(f"MD5 Mismatch! {filename}") 124 | else: 125 | md5 = utils.md5sum(str(out_file)) 126 | if md5 == d["md5_hash"]: 127 | print(f"Skipping {self.name} - {filename}") 128 | 129 | # Create checksum file 130 | with md5_file.open("w") as f: 131 | f.write(d["md5_hash"]) 132 | return 133 | # Old Download or corrupted file? 134 | corrupted = False 135 | if corrupted: 136 | out_file.remove() 137 | return 138 | 139 | old_dir = self.dir / "old" 140 | old_dir.mkdir(exist_ok=True) 141 | 142 | print(f"Moving {filename} to old/") 143 | timestamp = datetime.datetime.now().strftime("%Y-%m-%d") 144 | out_file.rename(old_dir / f"{timestamp}-{filename}") 145 | 146 | # Get UUID 147 | r = requests.post( 148 | f"https://api.itch.io/games/{self.game_id}/download-sessions", 149 | headers={"Authorization": token}, 150 | ) 151 | j = r.json() 152 | 153 | # Download 154 | if self.id: 155 | url = ( 156 | f"https://api.itch.io/uploads/{d['id']}/" 157 | + f"download?api_key={token}&download_key_id={self.id}&uuid={j['uuid']}" 158 | ) 159 | else: 160 | url = ( 161 | f"https://api.itch.io/uploads/{d['id']}/" 162 | + f"download?api_key={token}&uuid={j['uuid']}" 163 | ) 164 | # response_code = urllib.request.urlopen(url).getcode() 165 | try: 166 | utils.download(url, self.dir, self.name, filename) 167 | except utils.NoDownloadError: 168 | print("Http response is not a download, skipping") 169 | 170 | with open("errors.txt", "a") as f: 171 | f.write( 172 | f""" Cannot download game/asset: {self.game_slug} 173 | Publisher Name: {self.publisher_slug} 174 | Path: {out_file} 175 | File: {filename} 176 | Request URL: {url} 177 | This request failed due to a missing response header 178 | This game/asset has been skipped please download manually 179 | ---------------------------------------------------------\n """ 180 | ) 181 | 182 | return 183 | except urllib.error.HTTPError as e: 184 | print("This one has broken due to an HTTP error!!") 185 | 186 | with open("errors.txt", "a") as f: 187 | f.write( 188 | f""" Cannot download game/asset: {self.game_slug} 189 | Publisher Name: {self.publisher_slug} 190 | Path: {out_file} 191 | File: {filename} 192 | Request URL: {url} 193 | Request Response Code: {e.code} 194 | Error Reason: {e.reason} 195 | This game/asset has been skipped please download manually 196 | ---------------------------------------------------------\n """ 197 | ) 198 | 199 | return 200 | 201 | # Verify 202 | if utils.md5sum(out_file) != d["md5_hash"]: 203 | print(f"Failed to verify {filename}") 204 | return 205 | 206 | # Create checksum file 207 | with out_file.with_suffix(".md5").open("w") as f: 208 | f.write(d["md5_hash"]) 209 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "args" 3 | version = "0.1.0" 4 | description = "Command Arguments for Humans." 
5 | category = "main" 6 | optional = false 7 | python-versions = "*" 8 | 9 | [[package]] 10 | name = "astroid" 11 | version = "2.12.13" 12 | description = "An abstract syntax tree for Python with inference support." 13 | category = "dev" 14 | optional = false 15 | python-versions = ">=3.7.2" 16 | 17 | [package.dependencies] 18 | lazy-object-proxy = ">=1.4.0" 19 | typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} 20 | wrapt = [ 21 | {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, 22 | {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, 23 | ] 24 | 25 | [[package]] 26 | name = "beautifulsoup4" 27 | version = "4.11.1" 28 | description = "Screen-scraping library" 29 | category = "main" 30 | optional = false 31 | python-versions = ">=3.6.0" 32 | 33 | [package.dependencies] 34 | soupsieve = ">1.2" 35 | 36 | [package.extras] 37 | html5lib = ["html5lib"] 38 | lxml = ["lxml"] 39 | 40 | [[package]] 41 | name = "black" 42 | version = "22.10.0" 43 | description = "The uncompromising code formatter." 44 | category = "dev" 45 | optional = false 46 | python-versions = ">=3.7" 47 | 48 | [package.dependencies] 49 | click = ">=8.0.0" 50 | mypy-extensions = ">=0.4.3" 51 | pathspec = ">=0.9.0" 52 | platformdirs = ">=2" 53 | tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} 54 | typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} 55 | 56 | [package.extras] 57 | colorama = ["colorama (>=0.4.3)"] 58 | d = ["aiohttp (>=3.7.4)"] 59 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 60 | uvloop = ["uvloop (>=0.15.2)"] 61 | 62 | [[package]] 63 | name = "certifi" 64 | version = "2022.9.24" 65 | description = "Python package for providing Mozilla's CA Bundle." 66 | category = "main" 67 | optional = false 68 | python-versions = ">=3.6" 69 | 70 | [[package]] 71 | name = "charset-normalizer" 72 | version = "2.1.1" 73 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 74 | category = "main" 75 | optional = false 76 | python-versions = ">=3.6.0" 77 | 78 | [package.extras] 79 | unicode_backport = ["unicodedata2"] 80 | 81 | [[package]] 82 | name = "click" 83 | version = "8.1.3" 84 | description = "Composable command line interface toolkit" 85 | category = "dev" 86 | optional = false 87 | python-versions = ">=3.7" 88 | 89 | [package.dependencies] 90 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 91 | 92 | [[package]] 93 | name = "clint" 94 | version = "0.5.1" 95 | description = "Python Command Line Interface Tools" 96 | category = "main" 97 | optional = false 98 | python-versions = "*" 99 | 100 | [package.dependencies] 101 | args = "*" 102 | 103 | [[package]] 104 | name = "colorama" 105 | version = "0.4.6" 106 | description = "Cross-platform colored terminal text." 
107 | category = "dev" 108 | optional = false 109 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 110 | 111 | [[package]] 112 | name = "dill" 113 | version = "0.3.6" 114 | description = "serialize all of python" 115 | category = "dev" 116 | optional = false 117 | python-versions = ">=3.7" 118 | 119 | [package.extras] 120 | graph = ["objgraph (>=1.7.2)"] 121 | 122 | [[package]] 123 | name = "idna" 124 | version = "3.4" 125 | description = "Internationalized Domain Names in Applications (IDNA)" 126 | category = "main" 127 | optional = false 128 | python-versions = ">=3.5" 129 | 130 | [[package]] 131 | name = "isort" 132 | version = "5.10.1" 133 | description = "A Python utility / library to sort Python imports." 134 | category = "dev" 135 | optional = false 136 | python-versions = ">=3.6.1,<4.0" 137 | 138 | [package.extras] 139 | pipfile_deprecated_finder = ["pipreqs", "requirementslib"] 140 | requirements_deprecated_finder = ["pipreqs", "pip-api"] 141 | colors = ["colorama (>=0.4.3,<0.5.0)"] 142 | plugins = ["setuptools"] 143 | 144 | [[package]] 145 | name = "lazy-object-proxy" 146 | version = "1.8.0" 147 | description = "A fast and thorough lazy object proxy." 148 | category = "dev" 149 | optional = false 150 | python-versions = ">=3.7" 151 | 152 | [[package]] 153 | name = "mccabe" 154 | version = "0.7.0" 155 | description = "McCabe checker, plugin for flake8" 156 | category = "dev" 157 | optional = false 158 | python-versions = ">=3.6" 159 | 160 | [[package]] 161 | name = "mypy-extensions" 162 | version = "0.4.3" 163 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 164 | category = "dev" 165 | optional = false 166 | python-versions = "*" 167 | 168 | [[package]] 169 | name = "pathspec" 170 | version = "0.10.2" 171 | description = "Utility library for gitignore style pattern matching of file paths." 172 | category = "dev" 173 | optional = false 174 | python-versions = ">=3.7" 175 | 176 | [[package]] 177 | name = "platformdirs" 178 | version = "2.5.4" 179 | description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 180 | category = "dev" 181 | optional = false 182 | python-versions = ">=3.7" 183 | 184 | [package.extras] 185 | docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.19.4)", "sphinx (>=5.3)"] 186 | test = ["appdirs (==1.4.4)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2)"] 187 | 188 | [[package]] 189 | name = "pylint" 190 | version = "2.15.6" 191 | description = "python code static checker" 192 | category = "dev" 193 | optional = false 194 | python-versions = ">=3.7.2" 195 | 196 | [package.dependencies] 197 | astroid = ">=2.12.12,<=2.14.0-dev0" 198 | colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} 199 | dill = ">=0.2" 200 | isort = ">=4.2.5,<6" 201 | mccabe = ">=0.6,<0.8" 202 | platformdirs = ">=2.2.0" 203 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 204 | tomlkit = ">=0.10.1" 205 | typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} 206 | 207 | [package.extras] 208 | spelling = ["pyenchant (>=3.2,<4.0)"] 209 | testutils = ["gitpython (>3)"] 210 | 211 | [[package]] 212 | name = "requests" 213 | version = "2.28.1" 214 | description = "Python HTTP for Humans." 
215 | category = "main" 216 | optional = false 217 | python-versions = ">=3.7, <4" 218 | 219 | [package.dependencies] 220 | certifi = ">=2017.4.17" 221 | charset-normalizer = ">=2,<3" 222 | idna = ">=2.5,<4" 223 | urllib3 = ">=1.21.1,<1.27" 224 | 225 | [package.extras] 226 | socks = ["PySocks (>=1.5.6,!=1.5.7)"] 227 | use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] 228 | 229 | [[package]] 230 | name = "soupsieve" 231 | version = "2.3.2.post1" 232 | description = "A modern CSS selector implementation for Beautiful Soup." 233 | category = "main" 234 | optional = false 235 | python-versions = ">=3.6" 236 | 237 | [[package]] 238 | name = "tomli" 239 | version = "2.0.1" 240 | description = "A lil' TOML parser" 241 | category = "dev" 242 | optional = false 243 | python-versions = ">=3.7" 244 | 245 | [[package]] 246 | name = "tomlkit" 247 | version = "0.11.6" 248 | description = "Style preserving TOML library" 249 | category = "dev" 250 | optional = false 251 | python-versions = ">=3.6" 252 | 253 | [[package]] 254 | name = "typing-extensions" 255 | version = "4.4.0" 256 | description = "Backported and Experimental Type Hints for Python 3.7+" 257 | category = "dev" 258 | optional = false 259 | python-versions = ">=3.7" 260 | 261 | [[package]] 262 | name = "urllib3" 263 | version = "1.26.13" 264 | description = "HTTP library with thread-safe connection pooling, file post, and more." 265 | category = "main" 266 | optional = false 267 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 268 | 269 | [package.extras] 270 | brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] 271 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] 272 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 273 | 274 | [[package]] 275 | name = "wrapt" 276 | version = "1.14.1" 277 | description = "Module for decorators, wrappers and monkey patching." 
278 | category = "dev" 279 | optional = false 280 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 281 | 282 | [metadata] 283 | lock-version = "1.1" 284 | python-versions = "^3.8" 285 | content-hash = "a8f409d740dc1dad488355b7d54c8278401c6996596a11617ed3049e3d54beec" 286 | 287 | [metadata.files] 288 | args = [ 289 | {file = "args-0.1.0.tar.gz", hash = "sha256:a785b8d837625e9b61c39108532d95b85274acd679693b71ebb5156848fcf814"}, 290 | ] 291 | astroid = [ 292 | {file = "astroid-2.12.13-py3-none-any.whl", hash = "sha256:10e0ad5f7b79c435179d0d0f0df69998c4eef4597534aae44910db060baeb907"}, 293 | {file = "astroid-2.12.13.tar.gz", hash = "sha256:1493fe8bd3dfd73dc35bd53c9d5b6e49ead98497c47b2307662556a5692d29d7"}, 294 | ] 295 | beautifulsoup4 = [ 296 | {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, 297 | {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, 298 | ] 299 | black = [] 300 | certifi = [] 301 | charset-normalizer = [] 302 | click = [] 303 | clint = [ 304 | {file = "clint-0.5.1.tar.gz", hash = "sha256:05224c32b1075563d0b16d0015faaf9da43aa214e4a2140e51f08789e7a4c5aa"}, 305 | ] 306 | colorama = [] 307 | dill = [ 308 | {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, 309 | {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, 310 | ] 311 | idna = [] 312 | isort = [ 313 | {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, 314 | {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, 315 | ] 316 | lazy-object-proxy = [ 317 | {file = "lazy-object-proxy-1.8.0.tar.gz", hash = "sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156"}, 318 | {file = "lazy_object_proxy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe"}, 319 | {file = "lazy_object_proxy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25"}, 320 | {file = "lazy_object_proxy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b"}, 321 | {file = "lazy_object_proxy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4e2d9f764f1befd8bdc97673261b8bb888764dfdbd7a4d8f55e4fbcabb8c3fb7"}, 322 | {file = "lazy_object_proxy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:e20bfa6db17a39c706d24f82df8352488d2943a3b7ce7d4c22579cb89ca8896e"}, 323 | {file = "lazy_object_proxy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:14010b49a2f56ec4943b6cf925f597b534ee2fe1f0738c84b3bce0c1a11ff10d"}, 324 | {file = "lazy_object_proxy-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6850e4aeca6d0df35bb06e05c8b934ff7c533734eb51d0ceb2d63696f1e6030c"}, 325 | {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:5b51d6f3bfeb289dfd4e95de2ecd464cd51982fe6f00e2be1d0bf94864d58acd"}, 326 | {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6f593f26c470a379cf7f5bc6db6b5f1722353e7bf937b8d0d0b3fba911998858"}, 327 | {file = "lazy_object_proxy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c1c7c0433154bb7c54185714c6929acc0ba04ee1b167314a779b9025517eada"}, 328 | {file = 
"lazy_object_proxy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:d176f392dbbdaacccf15919c77f526edf11a34aece58b55ab58539807b85436f"}, 329 | {file = "lazy_object_proxy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:afcaa24e48bb23b3be31e329deb3f1858f1f1df86aea3d70cb5c8578bfe5261c"}, 330 | {file = "lazy_object_proxy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:71d9ae8a82203511a6f60ca5a1b9f8ad201cac0fc75038b2dc5fa519589c9288"}, 331 | {file = "lazy_object_proxy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:8f6ce2118a90efa7f62dd38c7dbfffd42f468b180287b748626293bf12ed468f"}, 332 | {file = "lazy_object_proxy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0"}, 333 | {file = "lazy_object_proxy-1.8.0-pp37-pypy37_pp73-any.whl", hash = "sha256:ae032743794fba4d171b5b67310d69176287b5bf82a21f588282406a79498891"}, 334 | {file = "lazy_object_proxy-1.8.0-pp38-pypy38_pp73-any.whl", hash = "sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec"}, 335 | {file = "lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"}, 336 | ] 337 | mccabe = [] 338 | mypy-extensions = [] 339 | pathspec = [ 340 | {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, 341 | {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, 342 | ] 343 | platformdirs = [ 344 | {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, 345 | {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, 346 | ] 347 | pylint = [ 348 | {file = "pylint-2.15.6-py3-none-any.whl", hash = "sha256:15060cc22ed6830a4049cf40bc24977744df2e554d38da1b2657591de5bcd052"}, 349 | {file = "pylint-2.15.6.tar.gz", hash = "sha256:25b13ddcf5af7d112cf96935e21806c1da60e676f952efb650130f2a4483421c"}, 350 | ] 351 | requests = [] 352 | soupsieve = [ 353 | {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, 354 | {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, 355 | ] 356 | tomli = [] 357 | tomlkit = [ 358 | {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"}, 359 | {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"}, 360 | ] 361 | typing-extensions = [] 362 | urllib3 = [ 363 | {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, 364 | {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, 365 | ] 366 | wrapt = [ 367 | {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, 368 | {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, 369 | {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, 370 | {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = 
"sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, 371 | {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, 372 | {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, 373 | {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, 374 | {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, 375 | {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, 376 | {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, 377 | {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, 378 | {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, 379 | {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, 380 | {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, 381 | {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, 382 | {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, 383 | {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, 384 | {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, 385 | {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, 386 | {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, 387 | {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, 388 | {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, 389 | {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, 390 | {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, 391 | {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, 392 | {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, 393 | {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, 394 | {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, 395 | {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, 396 | {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, 397 | {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, 398 | {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, 399 | {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, 400 | {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, 401 | {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, 402 | {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, 403 | {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, 404 | {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, 405 | {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, 406 | {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, 407 | {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, 408 | {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, 409 | {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, 410 | {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, 411 | {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, 412 | {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, 413 | {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, 414 | {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, 415 | {file = 
"wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, 416 | {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, 417 | {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, 418 | {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, 419 | {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, 420 | {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, 421 | {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, 422 | {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, 423 | {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, 424 | {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, 425 | {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, 426 | {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, 427 | {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, 428 | {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, 429 | {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, 430 | {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, 431 | ] 432 | --------------------------------------------------------------------------------