├── anime_cli ├── __init__.py ├── anime.py ├── search │ ├── gogoanime.py │ └── __init__.py ├── proxy_server.py └── __main__.py ├── .gitignore ├── .github └── dependabot.yml ├── pyproject.toml ├── LICENSE ├── README.md └── poetry.lock /anime_cli/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | dist/ -------------------------------------------------------------------------------- /anime_cli/anime.py: -------------------------------------------------------------------------------- 1 | class Anime: 2 | def __init__(self, title, id) -> None: 3 | self.title = title 4 | self.id = id 5 | 6 | def __str__(self) -> str: 7 | return f"{self.title}" 8 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "daily" 12 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "anime-cli" 3 | version = "0.3.4" 4 | description = "A python based anime cli, search, download watch animes. (Works on Windows too)" 5 | authors = ["Chirag Singla "] 6 | license = "MIT" 7 | readme = "README.md" 8 | homepage = "https://github.com/chirag-droid/anime-cli" 9 | repository = "https://github.com/chirag-droid/anime-cli" 10 | classifiers = [ 11 | "Development Status :: 4 - Beta", 12 | "Environment :: Console", 13 | "License :: Public Domain", 14 | "Natural Language :: English", 15 | "Programming Language :: Python :: 3.9", 16 | "Topic :: Games/Entertainment" 17 | ] 18 | 19 | [tool.poetry.scripts] 20 | anime-cli = 'anime_cli.__main__:main' 21 | 22 | [tool.poetry.dependencies] 23 | python = "^3.9" 24 | inquirerpy = "^0.3.0" 25 | bs4 = "^0.0.1" 26 | requests = "^2.26.0" 27 | html5lib = "^1.1" 28 | 29 | [tool.poetry.dev-dependencies] 30 | 31 | [build-system] 32 | requires = ["poetry-core>=1.0.0"] 33 | build-backend = "poetry.core.masonry.api" 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Chirag Singla 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | This project is now no longer maintained. Use this now [chirag-droid/animeflix](https://github.com/chirag-droid/animeflix). If someone does want to use this project let me know. 2 | 3 | ## Anime-cli 4 | 5 | A CLI for streaming, downloading anime shows. 6 | The shows data is indexed through [GogoAnime](https://gogoanime.pe). 7 | 8 | Please install [mpv video-player](https://mpv.io/installation/) for better experience and no ads. 9 | 10 | Version 0.3.1+ works on android/Smart TV's see [usage instructions](#usage-android) below 11 | 12 | https://user-images.githubusercontent.com/81347541/137595104-0c0418e9-71b8-4c45-b507-78892cca961c.mp4 13 | 14 | ### Usage 15 | It's recommended to stream episodes using a video player (no ads) 16 | Almost all video players all supported which can stream a m3u8 url. To achive this a proxy server is used. 17 | 18 | You can install anime-cli from pip using 19 | ``` 20 | pip install anime-cli 21 | ``` 22 | Then run using `python -m anime_cli` or simply `anime-cli` 23 | 24 | If you want to help develop `anime-cli`. It is recommended that you clone the repo using and then install the dependencies 25 | ``` 26 | git clone https://github.com/chirag-droid/anime-cli.git 27 | poetry install 28 | ``` 29 | and then to run, `poetry run anime-cli` 30 | 31 | ### Usage Android 32 | - Download `Termux` from Fdroid 33 | - Download `mpv-player` from playstore 34 | 35 | In termux install python using `pkg install python` 36 | Follow the same steps as above for downloading `anime-cli` 37 | 38 | When asked to enter the video-player change it to `xdg-open` which will automatically open `mpv-player`. 39 | 40 | ### Motivation 41 | 42 | I recently found out about [ani-cli](https://github.com/pystardust/ani-cli), but it was not cross-platform because it was written in shell, so I decided to recreate that same thing in Python, hoping to make it cross-platform and possibly also have pretty UI. 43 | 44 | ### TODO 45 | - [x] Stream on browser 46 | - [ ] Make streaming on browsers ad free 47 | - [x] Stream to video player like MPV 48 | - [ ] Ability to download the shows as mp4 49 | - [ ] Support for multiple mirrors 50 | -------------------------------------------------------------------------------- /anime_cli/search/gogoanime.py: -------------------------------------------------------------------------------- 1 | import re 2 | from typing import List 3 | 4 | import requests 5 | 6 | from anime_cli.anime import Anime 7 | from anime_cli.search import SearchApi 8 | 9 | 10 | class GogoAnime(SearchApi): 11 | def __init__(self, mirror: str): 12 | super().__init__(mirror) 13 | self.url = f"https://gogoanime.{mirror}" 14 | 15 | @staticmethod 16 | def get_headers() -> dict[str, str]: 17 | return {"Referer": "https://gogoplay1.com/"} 18 | 19 | def search_anime(self, keyword: str) -> List[Anime]: 20 | # Get and parse the html from the site 21 | soup = self.get_soup(f"search.html?keyword={keyword}") 22 | 23 | # Find all the p tags which have the name class 24 | animes = soup.findAll("p", {"class": "name"}) 25 | return [ 26 | Anime(anime.a["title"], anime.a["href"].split("/")[2]) for anime in animes 27 | ] 28 | 29 | def get_episodes_count(self, anime: Anime) -> int: 30 | soup = self.get_soup(f"category/{anime.id}") 31 | 32 | # Find all the ul tag which have an id of episode_page 33 | episode_page = soup.find("ul", {"id": "episode_page"}) 34 | # From the ul tag find all the elements having li tag and then get ep_end 35 | # from the last li tag which is the total number of episodes 36 | episode_count = int(episode_page.find_all("li")[-1].a["ep_end"]) 37 | return episode_count 38 | 39 | def get_embed_video(self, anime: Anime, episode: int) -> str: 40 | soup = self.get_soup(f"{anime.id}-episode-{episode}") 41 | 42 | # In the html search for a `a` tag 43 | # having the rel: 100 and href: # properties 44 | link = soup.find("a", {"href": "#", "rel": "100"}) 45 | return f'https:{link["data-video"]}' 46 | 47 | def get_video_url(self, embed_url: str) -> str: 48 | """ 49 | Get video url returns the direct link to video by parsing 50 | the page where the video is embedded 51 | """ 52 | # Get the page where the video is embedded 53 | r = requests.get(embed_url, headers=self.request_headers) 54 | 55 | # Search for the link to the video and return it 56 | link = re.search(r"\s*sources.*", r.text).group() 57 | link = re.search(r"https:.*(m3u8)|(mp4)", link).group() 58 | return link 59 | -------------------------------------------------------------------------------- /anime_cli/proxy_server.py: -------------------------------------------------------------------------------- 1 | """ 2 | Sets up a proxy server, which automatically adds required header 3 | to each request. It makes it possible to run videos in players 4 | where passing in http-header is not supported 5 | """ 6 | from http.server import HTTPServer, SimpleHTTPRequestHandler 7 | 8 | import requests 9 | 10 | 11 | def proxyServer(headers, serverAddress) -> HTTPServer: 12 | """returns the http server 13 | 14 | Args: 15 | headers: The headers to send along with the request 16 | serverAddress: The server address to use for the server 17 | 18 | Returns: 19 | HTTPServer object 20 | """ 21 | 22 | class ProxyHTTPRequestHandler(SimpleHTTPRequestHandler): 23 | """A proxy http request handler 24 | The request to the server is handled through it 25 | 26 | Example: 27 | On a request to /http://example.com, the server will 28 | first get the content of http://example.com by sending 29 | required headers with the request, then 30 | send the same content to the user 31 | """ 32 | 33 | protocol_version = "HTTP/1.0" 34 | 35 | def do_GET(self, body=True): 36 | """The function which handles the get request to the server""" 37 | # Get the url 38 | hostname = self.path[1:] 39 | # Get the content from the url, along with the headers 40 | resp = requests.get(hostname, headers=headers) 41 | 42 | # Send respose status to be same as response status from above request 43 | self.send_response(resp.status_code) 44 | 45 | exclude_headers = [ 46 | "content-encoding", 47 | "content-length", 48 | "transfer-encoding", 49 | "connection", 50 | ] 51 | for k in resp.headers.keys(): 52 | if k.lower() in exclude_headers: 53 | continue 54 | self.send_header(k, resp.headers.get(k)) 55 | self.end_headers() 56 | 57 | # Send the content from the request to the user as it is 58 | self.wfile.write(resp.content) 59 | return 60 | 61 | # Disable logging for requests 62 | def log_request(self, code=..., size=...) -> None: 63 | pass 64 | 65 | return HTTPServer(serverAddress, ProxyHTTPRequestHandler) 66 | -------------------------------------------------------------------------------- /anime_cli/search/__init__.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta, abstractmethod, abstractstaticmethod 2 | from typing import List 3 | 4 | import requests 5 | from bs4 import BeautifulSoup 6 | 7 | from anime_cli.anime import Anime 8 | 9 | 10 | class SearchApi(metaclass=ABCMeta): 11 | def __init__(self, mirror: str): 12 | self.url = f"https://example.{mirror}" 13 | self.request_headers = { 14 | "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36 Edg/95.0.1020.44" 15 | } 16 | 17 | def get_soup(self, location: str) -> BeautifulSoup: 18 | """Gets soup of a page 19 | The get_soup function takes in the location of 20 | the page and the gets the html from it 21 | and parses the html to `BeautifulSoup` 22 | 23 | Args: 24 | location: the location of the page 25 | 26 | Returns: 27 | `BeautifulSoup` object by parsing the html 28 | 29 | """ 30 | r = requests.get(f"{self.url}/{location}", headers=self.request_headers) 31 | return BeautifulSoup(r.content, features="html5lib") 32 | 33 | @abstractstaticmethod 34 | def get_headers() -> dict[str, str]: 35 | """Headers to set while quering anything from the site 36 | 37 | Returns: 38 | The header to set while quering anything. 39 | Some links require some additional headers 40 | to be passed to work properly 41 | """ 42 | pass 43 | 44 | @abstractmethod 45 | def search_anime(self, keyword: str) -> List[Anime]: 46 | """Search anime searches for animes by looking at the keyword 47 | 48 | Args: 49 | keyword: The keyword to search for when searching animes 50 | 51 | Returns: 52 | A list of `Anime` that matched the keyword 53 | """ 54 | pass 55 | 56 | @abstractmethod 57 | def get_episodes_count(self, anime: Anime) -> int: 58 | """Get the total number of episodes in an anime 59 | 60 | Args: 61 | anime: The anime for which you want to get episodes for 62 | 63 | Returns: 64 | The total number of episodes in the anime 65 | """ 66 | pass 67 | 68 | @abstractmethod 69 | def get_embed_video(self, anime: Anime, episode: int) -> str: 70 | """Get the link to the page where the episode video is embedded 71 | 72 | Args: 73 | anime: The anime you want to get episodes for 74 | episode: The episode number of the anime 75 | 76 | Returns: 77 | The link to the page where the episode for the anime 78 | is embedded 79 | """ 80 | pass 81 | 82 | @abstractmethod 83 | def get_video_url(self, embed_url: str) -> str: 84 | """Get the direct url to the video 85 | 86 | Args: 87 | embed_url: The link to the page where the video is embedded 88 | 89 | Returns: 90 | The direct link to the video 91 | """ 92 | pass 93 | -------------------------------------------------------------------------------- /anime_cli/__main__.py: -------------------------------------------------------------------------------- 1 | import subprocess as sp 2 | import sys 3 | import threading 4 | import webbrowser 5 | from typing import List 6 | 7 | from InquirerPy import inquirer 8 | 9 | from anime_cli.anime import Anime 10 | from anime_cli.proxy_server import proxyServer 11 | from anime_cli.search import SearchApi 12 | from anime_cli.search.gogoanime import GogoAnime 13 | 14 | 15 | def run_server(searchApi: SearchApi, serverAddress): 16 | """Run server function creates a server for the searchApi and runs it 17 | 18 | Args: 19 | searchApi: The api to create the proxy server for 20 | serverAddress: The server address to bind the server to 21 | """ 22 | server = proxyServer(searchApi.get_headers(), serverAddress) 23 | server.serve_forever() 24 | server.server_close() 25 | 26 | 27 | def anime_prompt(searchApi: SearchApi) -> Anime: 28 | """prompts the user for the keyword and then the anime 29 | The function first prompts the user for the keyword to enter 30 | Then runs the search on the keyword using the search api, 31 | prompts the user again to enter the anime from the search results 32 | 33 | Args: 34 | searchApi: The search api to use to search for animes 35 | 36 | Returns: 37 | The Anime the user selected 38 | """ 39 | # Prompt the user for anime keyword 40 | keyword: str = inquirer.text( 41 | message="What anime would you like to watch?" 42 | ).execute() 43 | 44 | # Search for the animes using the keyword 45 | animes = searchApi.search_anime(keyword) 46 | 47 | # Prompt the user to choose from one of the animes 48 | return inquirer.select( 49 | message=f"Found {len(animes)} results for {keyword}", choices=animes 50 | ).execute() 51 | 52 | 53 | def episode_prompt(searchApi: SearchApi, anime: Anime) -> str: 54 | """prompts the user for the episode number to watch 55 | 56 | Args: 57 | searchApi: the search api to use 58 | anime: The anime whose episodes we want the user to enter 59 | 60 | Returns: 61 | returns the link to the episode page 62 | """ 63 | # Get the total episodes count for the anime 64 | episodes = searchApi.get_episodes_count(anime) 65 | 66 | # prompt the user to choose from 1 to total number of episodes 67 | # validate function makes sure the user enter from 1 to total number of episodes 68 | # filter function changes the user entered number to the link 69 | return inquirer.text( 70 | message=f"Choose from 1-{episodes} episodes:", 71 | filter=lambda episode: searchApi.get_embed_video(anime, int(episode)), 72 | validate=lambda episode: 1 <= int(episode) <= episodes, 73 | ).execute() 74 | 75 | 76 | def action_prompt(actions: List[str]) -> int: 77 | """Prompts the user for the action to execute 78 | 79 | Args: 80 | actions: The list containing the available actions 81 | 82 | Returns: 83 | The index of the action user chose from the actions 84 | """ 85 | return inquirer.select( 86 | message="What would you like to do for me?", 87 | choices=actions, 88 | filter=lambda action: actions.index(action), 89 | ).execute() 90 | 91 | 92 | def video_player_prompt() -> str: 93 | """Prompt the user for the video player to use 94 | 95 | Returns: 96 | The video player for streaming 97 | """ 98 | # TODO: validate whether the video player exists 99 | return inquirer.text( 100 | message="Which video player would you like to use to stream?", default="mpv" 101 | ).execute() 102 | 103 | 104 | def main(): 105 | # TODO: Ability to select which search api, mirror to use 106 | searchApi = GogoAnime(mirror="pe") 107 | 108 | # Prompt the user for anime 109 | anime = anime_prompt(searchApi) 110 | # Prompt the user for episode 111 | embed_url = episode_prompt(searchApi, anime) 112 | 113 | # A list of actions the user can perform 114 | actions = [ 115 | "Stream on browser (Not recommended)", 116 | "Stream on a video player (Recommended)", 117 | ] 118 | # Prompt the user for the action to perform 119 | action = action_prompt(actions) 120 | video_player = video_player_prompt() 121 | 122 | # Directly stream the embedded url maycontain ad 123 | if action == 0: 124 | webbrowser.open(embed_url) 125 | return 126 | 127 | # If user doesn't want to directly stream 128 | # Get the direct link to the video 129 | video_url = searchApi.get_video_url(embed_url) 130 | 131 | # Start the proxy server 132 | serverAddress = ("localhost", 8081) 133 | print(f"Starting proxy server on {serverAddress}") 134 | server = threading.Thread( 135 | target=run_server, 136 | args=( 137 | searchApi, 138 | serverAddress, 139 | ), 140 | daemon=True, 141 | ) 142 | server.start() 143 | 144 | # Change the video url to use the proxy 145 | video_url = f"http://{serverAddress[0]}:{serverAddress[1]}/{video_url}" 146 | 147 | if action == 1: 148 | # Stream to the video player 149 | print("It may take some time to open the video player. Be Patient :)") 150 | sp.Popen([video_player, video_url]) 151 | 152 | while True: 153 | try: 154 | choice = inquirer.select( 155 | message="Note: Exitting will stop the proxy server too", 156 | choices=["exit"], 157 | ).execute() 158 | if choice == "exit": 159 | print("Bye!") 160 | sys.exit() 161 | except KeyboardInterrupt: 162 | print("Bye!") 163 | sys.exit() 164 | 165 | 166 | if __name__ == "__main__": 167 | main() 168 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "beautifulsoup4" 3 | version = "4.10.0" 4 | description = "Screen-scraping library" 5 | category = "main" 6 | optional = false 7 | python-versions = ">3.0.0" 8 | 9 | [package.dependencies] 10 | soupsieve = ">1.2" 11 | 12 | [package.extras] 13 | html5lib = ["html5lib"] 14 | lxml = ["lxml"] 15 | 16 | [[package]] 17 | name = "bs4" 18 | version = "0.0.1" 19 | description = "Dummy package for Beautiful Soup" 20 | category = "main" 21 | optional = false 22 | python-versions = "*" 23 | 24 | [package.dependencies] 25 | beautifulsoup4 = "*" 26 | 27 | [[package]] 28 | name = "certifi" 29 | version = "2021.10.8" 30 | description = "Python package for providing Mozilla's CA Bundle." 31 | category = "main" 32 | optional = false 33 | python-versions = "*" 34 | 35 | [[package]] 36 | name = "charset-normalizer" 37 | version = "2.0.7" 38 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 39 | category = "main" 40 | optional = false 41 | python-versions = ">=3.5.0" 42 | 43 | [package.extras] 44 | unicode_backport = ["unicodedata2"] 45 | 46 | [[package]] 47 | name = "html5lib" 48 | version = "1.1" 49 | description = "HTML parser based on the WHATWG HTML specification" 50 | category = "main" 51 | optional = false 52 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 53 | 54 | [package.dependencies] 55 | six = ">=1.9" 56 | webencodings = "*" 57 | 58 | [package.extras] 59 | all = ["genshi", "chardet (>=2.2)", "lxml"] 60 | chardet = ["chardet (>=2.2)"] 61 | genshi = ["genshi"] 62 | lxml = ["lxml"] 63 | 64 | [[package]] 65 | name = "idna" 66 | version = "3.3" 67 | description = "Internationalized Domain Names in Applications (IDNA)" 68 | category = "main" 69 | optional = false 70 | python-versions = ">=3.5" 71 | 72 | [[package]] 73 | name = "inquirerpy" 74 | version = "0.3.3" 75 | description = "Python port of Inquirer.js (A collection of common interactive command-line user interfaces)" 76 | category = "main" 77 | optional = false 78 | python-versions = ">=3.7,<4.0" 79 | 80 | [package.dependencies] 81 | pfzy = ">=0.3.1,<0.4.0" 82 | prompt-toolkit = ">=3.0.1,<4.0.0" 83 | 84 | [package.extras] 85 | docs = ["Sphinx (>=4.1.2,<5.0.0)", "furo (>=2021.8.17-beta.43,<2022.0.0)", "sphinx-copybutton (>=0.4.0,<0.5.0)", "sphinx-autobuild (>=2021.3.14,<2022.0.0)", "myst-parser (>=0.15.1,<0.16.0)"] 86 | 87 | [[package]] 88 | name = "pfzy" 89 | version = "0.3.3" 90 | description = "Python port of the fzy fuzzy string matching algorithm" 91 | category = "main" 92 | optional = false 93 | python-versions = ">=3.7,<4.0" 94 | 95 | [package.extras] 96 | docs = ["Sphinx (>=4.1.2,<5.0.0)", "furo (>=2021.8.17-beta.43,<2022.0.0)", "myst-parser (>=0.15.1,<0.16.0)", "sphinx-autobuild (>=2021.3.14,<2022.0.0)", "sphinx-copybutton (>=0.4.0,<0.5.0)"] 97 | 98 | [[package]] 99 | name = "prompt-toolkit" 100 | version = "3.0.20" 101 | description = "Library for building powerful interactive command lines in Python" 102 | category = "main" 103 | optional = false 104 | python-versions = ">=3.6.2" 105 | 106 | [package.dependencies] 107 | wcwidth = "*" 108 | 109 | [[package]] 110 | name = "requests" 111 | version = "2.27.1" 112 | description = "Python HTTP for Humans." 113 | category = "main" 114 | optional = false 115 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 116 | 117 | [package.dependencies] 118 | certifi = ">=2017.4.17" 119 | charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} 120 | idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} 121 | urllib3 = ">=1.21.1,<1.27" 122 | 123 | [package.extras] 124 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 125 | use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] 126 | 127 | [[package]] 128 | name = "six" 129 | version = "1.16.0" 130 | description = "Python 2 and 3 compatibility utilities" 131 | category = "main" 132 | optional = false 133 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 134 | 135 | [[package]] 136 | name = "soupsieve" 137 | version = "2.2.1" 138 | description = "A modern CSS selector implementation for Beautiful Soup." 139 | category = "main" 140 | optional = false 141 | python-versions = ">=3.6" 142 | 143 | [[package]] 144 | name = "urllib3" 145 | version = "1.26.7" 146 | description = "HTTP library with thread-safe connection pooling, file post, and more." 147 | category = "main" 148 | optional = false 149 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 150 | 151 | [package.extras] 152 | brotli = ["brotlipy (>=0.6.0)"] 153 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 154 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 155 | 156 | [[package]] 157 | name = "wcwidth" 158 | version = "0.2.5" 159 | description = "Measures the displayed width of unicode strings in a terminal" 160 | category = "main" 161 | optional = false 162 | python-versions = "*" 163 | 164 | [[package]] 165 | name = "webencodings" 166 | version = "0.5.1" 167 | description = "Character encoding aliases for legacy web content" 168 | category = "main" 169 | optional = false 170 | python-versions = "*" 171 | 172 | [metadata] 173 | lock-version = "1.1" 174 | python-versions = "^3.9" 175 | content-hash = "fbc68b30ef6b25f3393fad016fecfcbb5180d00b33a4cb2f21882d40e5f98c57" 176 | 177 | [metadata.files] 178 | beautifulsoup4 = [ 179 | {file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"}, 180 | {file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"}, 181 | ] 182 | bs4 = [ 183 | {file = "bs4-0.0.1.tar.gz", hash = "sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a"}, 184 | ] 185 | certifi = [ 186 | {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, 187 | {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, 188 | ] 189 | charset-normalizer = [ 190 | {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"}, 191 | {file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"}, 192 | ] 193 | html5lib = [ 194 | {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, 195 | {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, 196 | ] 197 | idna = [ 198 | {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, 199 | {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, 200 | ] 201 | inquirerpy = [ 202 | {file = "InquirerPy-0.3.3-py3-none-any.whl", hash = "sha256:773ba1f1c82852e5289e9fb3956ee06ef113ec949e3614d277ce24fe7945021c"}, 203 | {file = "InquirerPy-0.3.3.tar.gz", hash = "sha256:29a1ace830d98730e0a2fc01b4484256491c182cdde93ad66ff602b1b510aaeb"}, 204 | ] 205 | pfzy = [ 206 | {file = "pfzy-0.3.3-py3-none-any.whl", hash = "sha256:33682ed1929c5ff85c0c57e3b3cfa5c355063138be3560a1797378cf08fb05a2"}, 207 | {file = "pfzy-0.3.3.tar.gz", hash = "sha256:3efd83e49d854830369173b44e64a2cd9adcb9d5382698a792474c7d18e2fe32"}, 208 | ] 209 | prompt-toolkit = [ 210 | {file = "prompt_toolkit-3.0.20-py3-none-any.whl", hash = "sha256:6076e46efae19b1e0ca1ec003ed37a933dc94b4d20f486235d436e64771dcd5c"}, 211 | {file = "prompt_toolkit-3.0.20.tar.gz", hash = "sha256:eb71d5a6b72ce6db177af4a7d4d7085b99756bf656d98ffcc4fecd36850eea6c"}, 212 | ] 213 | requests = [ 214 | {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, 215 | {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, 216 | ] 217 | six = [ 218 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 219 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 220 | ] 221 | soupsieve = [ 222 | {file = "soupsieve-2.2.1-py3-none-any.whl", hash = "sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b"}, 223 | {file = "soupsieve-2.2.1.tar.gz", hash = "sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc"}, 224 | ] 225 | urllib3 = [ 226 | {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, 227 | {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, 228 | ] 229 | wcwidth = [ 230 | {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, 231 | {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, 232 | ] 233 | webencodings = [ 234 | {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, 235 | {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, 236 | ] 237 | --------------------------------------------------------------------------------