├── runtime.txt ├── Procfile ├── requirements.txt ├── anibot ├── __main__.py ├── utils │ ├── db.py │ ├── google_trans_new.py │ ├── helper.py │ └── data_parser.py ├── plugins │ ├── jikan.py │ ├── animequotes.py │ ├── animefillerslist.py │ ├── watch.py │ ├── tracemoepy.py │ ├── livechartme.py │ └── bot.py └── __init__.py ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ └── codeql-analysis.yml ├── LICENSE ├── app.json ├── .gitignore └── README.md /runtime.txt: -------------------------------------------------------------------------------- 1 | python-3.10.1 -------------------------------------------------------------------------------- /Procfile: -------------------------------------------------------------------------------- 1 | worker: python3 -m anibot -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pyrogram 2 | tgcrypto 3 | requests 4 | aiofiles>=0.6.0 5 | aiohttp[speedups]>=3.7.3 6 | tracemoepy 7 | lottie 8 | bs4 9 | motor 10 | dnspython 11 | natsort 12 | apscheduler 13 | lxml 14 | html5lib -------------------------------------------------------------------------------- /anibot/__main__.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from pyrogram import idle 3 | from . import anibot, has_user, session 4 | from .utils.db import _close_db 5 | 6 | user = None 7 | if has_user: 8 | from . 
import user 9 | 10 | async def main(): 11 | await anibot.start() 12 | if user is not None: 13 | await user.start() 14 | await idle() 15 | await anibot.stop() 16 | if user is not None: 17 | await user.stop() 18 | _close_db() 19 | await session.close() 20 | 21 | 22 | asyncio.get_event_loop().run_until_complete(main()) -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Additional context** 27 | Add any other context about the problem here. 28 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 
18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /anibot/utils/db.py: -------------------------------------------------------------------------------- 1 | # below code is taken from USERGE-X repo 2 | # all credits to the respective author (dunno who wrote it will find later 3 | # n update) 4 | 5 | 6 | __all__ = ['get_collection'] 7 | 8 | import asyncio 9 | from motor.motor_asyncio import AsyncIOMotorClient 10 | from motor.core import AgnosticClient, AgnosticDatabase, AgnosticCollection 11 | from .. import DB_URL 12 | 13 | print("Connecting to Database ...") 14 | 15 | _MGCLIENT: AgnosticClient = AsyncIOMotorClient(DB_URL) 16 | _RUN = asyncio.get_event_loop().run_until_complete 17 | 18 | if "anibot" in _RUN(_MGCLIENT.list_database_names()): 19 | print("anibot Database Found :) => Now Logging to it...") 20 | else: 21 | print("anibot Database Not Found :( => Creating New Database...") 22 | 23 | _DATABASE: AgnosticDatabase = _MGCLIENT["anibot"] 24 | 25 | 26 | def get_collection(name: str) -> AgnosticCollection: 27 | """ Create or Get Collection from your database """ 28 | return _DATABASE[name] 29 | 30 | 31 | def _close_db() -> None: 32 | _MGCLIENT.close() -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Lucky Jain 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following 
conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /anibot/plugins/jikan.py: -------------------------------------------------------------------------------- 1 | # uses jikanpy (Jikan API) 2 | from pyrogram import filters, Client 3 | from pyrogram.types import Message, CallbackQuery 4 | from .. import BOT_NAME, TRIGGERS as trg, anibot 5 | from ..utils.data_parser import get_scheduled 6 | from ..utils.helper import ( 7 | control_user, 8 | get_btns, 9 | check_user, 10 | get_user_from_channel as gcc 11 | ) 12 | from ..utils.db import get_collection 13 | 14 | DC = get_collection('DISABLED_CMDS') 15 | 16 | 17 | @anibot.on_message( 18 | filters.command(["schedule", f"schedule{BOT_NAME}"], prefixes=trg) 19 | ) 20 | @control_user 21 | async def get_schuled(client: Client, message: Message, mdata: dict): 22 | """Get List of Scheduled Anime""" 23 | gid = mdata['chat']['id'] 24 | find_gc = await DC.find_one({'_id': gid}) 25 | if find_gc is not None and 'schedule' in find_gc['cmd_list'].split(): 26 | return 27 | x = await client.send_message( 28 | gid, "Fetching Scheduled Animes" 29 | ) 30 | try: 31 | user = mdata['from_user']['id'] 32 | except KeyError: 33 | user = mdata['sender_chat']['id'] 34 | msg = await get_scheduled() 35 | buttons = get_btns("SCHEDULED", result=[msg[1]], user=user) 36 | await 
x.edit_text(msg[0], reply_markup=buttons) 37 | 38 | 39 | @anibot.on_callback_query(filters.regex(pattern=r"sched_(.*)")) 40 | @check_user 41 | async def ns_(client: anibot, cq: CallbackQuery, cdata: dict): 42 | kek, day, user = cdata['data'].split("_") 43 | msg = await get_scheduled(int(day)) 44 | buttons = get_btns("SCHEDULED", result=[int(day)], user=user) 45 | await cq.edit_message_text(msg[0], reply_markup=buttons) 46 | 47 | 48 | @anibot.on_edited_message( 49 | filters.command(["schedule", f"schedule{BOT_NAME}"], prefixes=trg) 50 | ) 51 | async def get_schuled_edit(client: Client, message: Message): 52 | await get_schuled(client, message) -------------------------------------------------------------------------------- /anibot/plugins/animequotes.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from pyrogram import filters 3 | from pyrogram.types import ( 4 | Message, 5 | CallbackQuery, 6 | InlineKeyboardMarkup as IKM, 7 | InlineKeyboardButton as IKB 8 | ) 9 | from .. 
import BOT_NAME, TRIGGERS as trg, anibot 10 | from ..utils.helper import control_user, check_user 11 | from ..utils.db import get_collection 12 | 13 | DC = get_collection('DISABLED_CMDS') 14 | 15 | @anibot.on_message( 16 | filters.command( 17 | ["quote", f"quote{BOT_NAME}"], 18 | prefixes=trg 19 | ) 20 | ) 21 | @control_user 22 | async def quote(_, message: Message, mdata: dict): 23 | gid = mdata['chat']['id'] 24 | try: 25 | user = mdata['from_user']['id'] 26 | except KeyError: 27 | user = mdata['sender_chat']['id'] 28 | find_gc = await DC.find_one({'_id': gid}) 29 | if find_gc is not None and 'quote' in find_gc['cmd_list'].split(): 30 | return 31 | q = requests.get("https://animechan.vercel.app/api/random").json() 32 | btn = IKM([[IKB("Refresh", callback_data=f"quoteref_{user}")]]) 33 | await message.reply_text( 34 | '`'+q['quote']+'`\n\n— **'+q['character'] 35 | +'** (From __'+q['anime']+'__)', 36 | reply_markup=btn 37 | ) 38 | 39 | 40 | @anibot.on_callback_query(filters.regex(pattern=r"quoteref_(.*)")) 41 | @check_user 42 | async def quote_btn(client: anibot, cq: CallbackQuery, cdata: dict): 43 | kek, user = cdata['data'].split("_") 44 | await cq.answer() 45 | q = requests.get("https://animechan.vercel.app/api/random").json() 46 | btn = IKM([[IKB("Refresh", callback_data=f"quoteref_{user}")]]) 47 | await cq.edit_message_text( 48 | '`'+q['quote']+'`\n\n— **'+q['character'] 49 | +'** (From __'+q['anime']+'__)', 50 | reply_markup=btn 51 | ) 52 | 53 | 54 | @anibot.on_message( 55 | filters.command( 56 | ["quote", f"quote{BOT_NAME}"], 57 | prefixes=trg 58 | ) 59 | ) 60 | async def quote_edit(_, message: Message): 61 | await quote(_, message) 62 | -------------------------------------------------------------------------------- /app.json: -------------------------------------------------------------------------------- 1 | { 2 | "name":"Ani-Info", 3 | "description":"telegram anime info provider bot", 4 | "keywords":[ 5 | "Pyrogram", 6 | "Telegram", 7 | "Bot", 8 | "Anime" 
9 | ], 10 | "repository":"https://github.com/lostb053/anibot", 11 | "website":"https://github.com/lostb053/anibot", 12 | "success_url":"https://t.me/lostb053", 13 | "env":{ 14 | "API_ID":{ 15 | "description":"Get this value from https://my.telegram.org" 16 | }, 17 | "API_HASH":{ 18 | "description":"Get this value from https://my.telegram.org" 19 | }, 20 | "DATABASE_URL":{ 21 | "description":"Mongodb url from https://cloud.mongodb.com/, guide: https://del.dog/mongodb_guide" 22 | }, 23 | "LOG_CHANNEL_ID":{ 24 | "description":"[ Private Telegram Log Channel ID ], Note: Also add your Bot to LOG CHANNEL !" 25 | }, 26 | "BOT_TOKEN":{ 27 | "description":"Get this from https://t.me/botfather and enable Inline Mode" 28 | }, 29 | "BOT_NAME":{ 30 | "description":"Your bot name with @, like @hanabi_robot, Note: enter exact bot name with exact letters that are in capital" 31 | }, 32 | "OWNER_ID":{ 33 | "description":"Your user_id e.g 123456789, for multiple ids just add a space between them" 34 | }, 35 | "ANILIST_CLIENT": { 36 | "description":"Get from https://anilist.co/settings/developer" 37 | }, 38 | "ANILIST_SECRET": { 39 | "description":"Get from https://anilist.co/settings/developer" 40 | }, 41 | "ANILIST_REDIRECT_URL": { 42 | "description":"If you don't wish to change auth method, just leave it that way", 43 | "required":false 44 | }, 45 | "TRIGGERS":{ 46 | "description":"Custom triggers, for multiple, just put a space between like '/ ?'. / and ! are defaults", 47 | "required":false 48 | }, 49 | "PREFERRED_LANGUAGE":{ 50 | "description":"Sets custom description language, let's say you want all descriptions in Malay. 
Check and add appropriate language code from http://telegra.ph/Supported-Languages-01-05-2", 51 | "required":false 52 | } 53 | }, 54 | "buildpacks":[ 55 | { 56 | "url":"https://github.com/jonathanong/heroku-buildpack-ffmpeg-latest.git" 57 | }, 58 | { 59 | "url":"heroku/python" 60 | } 61 | ], 62 | "formation":{ 63 | "worker":{ 64 | "quantity":1, 65 | "size":"free" 66 | } 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ main ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ main ] 20 | schedule: 21 | - cron: '39 13 * * 6' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'python' ] 36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] 37 | # Learn more: 38 | # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed 39 | 40 | steps: 41 | - name: Checkout repository 42 | uses: actions/checkout@v2 43 | 44 | # Initializes the CodeQL tools for scanning. 
45 | - name: Initialize CodeQL 46 | uses: github/codeql-action/init@v1 47 | with: 48 | languages: ${{ matrix.language }} 49 | # If you wish to specify custom queries, you can do so here or in a config file. 50 | # By default, queries listed here will override any specified in a config file. 51 | # Prefix the list here with "+" to use these queries and those in the config file. 52 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 53 | 54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 55 | # If this step fails, then you should remove it and run the build manually (see below) 56 | - name: Autobuild 57 | uses: github/codeql-action/autobuild@v1 58 | 59 | # ℹ️ Command-line programs to run using the OS shell. 60 | # 📚 https://git.io/JvXDl 61 | 62 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 63 | # and modify them (or add more) to build your code if your project 64 | # uses a compiled language 65 | 66 | #- run: | 67 | # make bootstrap 68 | # make release 69 | 70 | - name: Perform CodeQL Analysis 71 | uses: github/codeql-action/analyze@v1 72 | -------------------------------------------------------------------------------- /anibot/plugins/animefillerslist.py: -------------------------------------------------------------------------------- 1 | from pyrogram import filters 2 | from pyrogram.types import ( 3 | InlineKeyboardButton, 4 | InlineKeyboardMarkup, 5 | CallbackQuery, 6 | Message 7 | ) 8 | from ..utils.data_parser import search_filler, parse_filler 9 | from ..utils.helper import ( 10 | check_user, 11 | control_user, 12 | rand_key, 13 | get_user_from_channel as gcc 14 | ) 15 | from ..utils.db import get_collection 16 | from .. 
import BOT_NAME, TRIGGERS as trg, anibot 17 | 18 | FILLERS = {} 19 | DC = get_collection('DISABLED_CMDS') 20 | 21 | 22 | @anibot.on_message( 23 | filters.command(['fillers', f"fillers{BOT_NAME}"], prefixes=trg) 24 | ) 25 | @control_user 26 | async def fillers_cmd(client: anibot, message: Message, mdata: dict): 27 | find_gc = await DC.find_one({'_id': mdata['chat']['id']}) 28 | try: 29 | user = mdata['from_user']['id'] 30 | except KeyError: 31 | user = mdata['sender_chat']['id'] 32 | if find_gc is not None and 'watch' in find_gc['cmd_list'].split(): 33 | return 34 | qry = mdata['text'].split(" ", 1) 35 | if len(qry)==1: 36 | return await message.reply_text( 37 | """Give some anime name to search fillers for 38 | example: /fillers Detective Conan""" 39 | ) 40 | k = search_filler(qry[1]) 41 | if k == {}: 42 | await message.reply_text("No fillers found for the given anime...") 43 | return 44 | button = [] 45 | list_ = list(k.keys()) 46 | if len(list_)==1: 47 | result = parse_filler(k.get(list_[0])) 48 | msg = "" 49 | msg += f"Fillers for anime `{list_[0]}`\n\nManga Canon episodes:\n" 50 | msg += str(result.get("total_ep")) 51 | msg += "\n\nMixed/Canon fillers:\n" 52 | msg += str(result.get("mixed_ep")) 53 | msg += "\n\nFillers:\n" 54 | msg += str(result.get("filler_ep")) 55 | if result.get("ac_ep") is not None: 56 | msg += "\n\nAnime Canon episodes:\n" 57 | msg += str(result.get("ac_ep")) 58 | await message.reply_text(msg) 59 | return 60 | for i in list_: 61 | fl_js = rand_key() 62 | FILLERS[fl_js] = [k.get(i), i] 63 | button.append( 64 | [InlineKeyboardButton(i, callback_data=f"fill_{fl_js}_{user}")] 65 | ) 66 | await message.reply_text( 67 | "Pick anime you want to see fillers list for:", 68 | reply_markup=InlineKeyboardMarkup(button) 69 | ) 70 | 71 | 72 | @anibot.on_callback_query(filters.regex(pattern=r"fill_(.*)")) 73 | @check_user 74 | async def filler_btn(client: anibot, cq: CallbackQuery, cdata: dict): 75 | kek, req, user = cdata['data'].split("_") 76 | result 
= parse_filler((FILLERS.get(req))[0]) 77 | msg = "" 78 | msg += f"**Fillers for anime** `{(FILLERS.get(req))[1]}`" 79 | msg += "\n\n**Manga Canon episodes:**\n" 80 | msg += str(result.get("total_ep")) 81 | msg += "\n\n**Mixed/Canon fillers:**\n" 82 | msg += str(result.get("mixed_ep")) 83 | msg += "\n\n**Fillers:**\n" 84 | msg += str(result.get("filler_ep")) 85 | if result.get("ac_ep") is not None: 86 | msg += "\n\n**Anime Canon episodes:**\n" 87 | msg += str(result.get("ac_ep")) 88 | await cq.edit_message_text(msg) 89 | 90 | 91 | @anibot.on_message( 92 | filters.command(['fillers', f"fillers{BOT_NAME}"], prefixes=trg) 93 | ) 94 | async def fillers_cmd(client: anibot, message: Message): 95 | await fillers_cmd(client, message) -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
160 | #.idea/ 161 | 162 | *.session* 163 | .vscode/ -------------------------------------------------------------------------------- /anibot/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pyrogram import Client 3 | from aiohttp import ClientSession 4 | 5 | TRIGGERS = os.environ.get("TRIGGERS", "/ !").split() 6 | API_HASH = os.environ.get("API_HASH") 7 | BOT_TOKEN = os.environ.get("BOT_TOKEN") 8 | BOT_NAME = os.environ.get("BOT_NAME") 9 | DB_URL = os.environ.get("DATABASE_URL") 10 | ANILIST_CLIENT = os.environ.get("ANILIST_CLIENT") 11 | ANILIST_SECRET = os.environ.get("ANILIST_SECRET") 12 | ANILIST_REDIRECT_URL = os.environ.get("ANILIST_REDIRECT_URL", "https://anilist.co/api/v2/oauth/pin") 13 | API_ID = int(os.environ.get("API_ID")) 14 | LOG_CHANNEL_ID = int(os.environ.get("LOG_CHANNEL_ID")) 15 | OWNER = list(filter(lambda x: x, map(int, os.environ.get("OWNER_ID", "1005170481 804248372 1993696756").split()))) ## sudos can be included 16 | 17 | DOWN_PATH = "anibot/downloads/" 18 | HELP_DICT = dict() 19 | 20 | session = ClientSession() 21 | plugins = dict(root="anibot/plugins") 22 | anibot = Client("anibot", bot_token=BOT_TOKEN, api_id=API_ID, api_hash=API_HASH, plugins=plugins) 23 | 24 | has_user: bool = False 25 | if os.environ.get('USER_SESSION'): 26 | has_user: bool = True 27 | user = Client(os.environ.get('USER_SESSION'), api_id=API_ID, api_hash=API_HASH) 28 | 29 | HELP_DICT['Group'] = ''' 30 | Group based commands: 31 | 32 | /settings - Toggle stuff like whether to allow 18+ stuff in group or whether to notify about aired animes, etc and change UI 33 | 34 | /disable - Disable use of a cmd in the group (Disable multiple cmds by adding space between them) 35 | `/disable anime anilist me user` 36 | 37 | /enable - Enable use of a cmd in the group (Enable multiple cmds by adding space between them) 38 | `/enable anime anilist me user` 39 | 40 | /disabled - List out disabled cmds 41 | ''' 42 | 43 | 
HELP_DICT["Additional"] = """Use /reverse cmd to get reverse search via tracemoepy API 44 | __Note: This works best on uncropped anime pic, 45 | when used on cropped media, you may get result but it might not be too reliable__ 46 | 47 | Use /schedule cmd to get scheduled animes based on weekdays 48 | 49 | Use /watch cmd to get watch order of searched anime 50 | 51 | Use /fillers cmd to get a list of fillers for an anime 52 | 53 | Use /quote cmd to get a random quote 54 | """ 55 | 56 | HELP_DICT["Anilist"] = """ 57 | Below is the list of basic anilist cmds for info on anime, character, manga, etc. 58 | 59 | /anime - Use this cmd to get info on specific anime using keywords (anime name) or Anilist ID 60 | (Can lookup info on sequels and prequels) 61 | 62 | /anilist - Use this cmd to choose between multiple animes with similar names related to searched query 63 | (Doesn't includes buttons for prequel and sequel) 64 | 65 | /character - Use this cmd to get info on character 66 | 67 | /manga - Use this cmd to get info on manga 68 | 69 | /airing - Use this cmd to get info on airing status of anime 70 | 71 | /top - Use this cmd to lookup top animes of a genre/tag or from all animes 72 | (To get a list of available tags or genres send /gettags or /getgenres 73 | '/gettags nsfw' for nsfw tags) 74 | 75 | /user - Use this cmd to get info on an anilist user 76 | 77 | /browse - Use this cmd to get updates about latest animes 78 | """ 79 | 80 | HELP_DICT["Oauth"] = """ 81 | This includes advanced anilist features 82 | 83 | Use /auth or !auth cmd to get details on how to authorize your Anilist account with bot 84 | Authorising yourself unlocks advanced features of bot like: 85 | - adding anime/character/manga to favourites 86 | - viewing your anilist data related to anime/manga in your searches which includes score, status, and favourites 87 | - unlock /flex, /me, /activity and /favourites commands 88 | - adding/updating anilist entry like completed or plan to watch/read 89 | - 
deleting anilist entry 90 | 91 | Use /flex or !flex cmd to get your anilist stats 92 | 93 | Use /logout or !logout cmd to disconnect your Anilist account 94 | 95 | Use /me or !me cmd to get your anilist recent activity 96 | Can also use /activity or !activity 97 | 98 | Use /favourites or !favourites cmd to get your anilist favourites 99 | """ 100 | -------------------------------------------------------------------------------- /anibot/plugins/watch.py: -------------------------------------------------------------------------------- 1 | # credits to @NotThatMF on telegram for chiaki fast api 2 | # well i also borrowed the base code from him 3 | 4 | from pyrogram import filters, Client 5 | from pyrogram.types import ( 6 | CallbackQuery, 7 | InlineKeyboardButton, 8 | InlineKeyboardMarkup, 9 | Message 10 | ) 11 | from .. import BOT_NAME, TRIGGERS as trg, anibot 12 | from ..utils.data_parser import get_wo, get_wols 13 | from ..utils.helper import ( 14 | check_user, 15 | control_user, 16 | get_user_from_channel as gcc 17 | ) 18 | from ..utils.db import get_collection 19 | 20 | DC = get_collection('DISABLED_CMDS') 21 | 22 | 23 | @anibot.on_message( 24 | filters.command(["watch", f"watch{BOT_NAME}"], prefixes=trg) 25 | ) 26 | @control_user 27 | async def get_watch_order(client: Client, message: Message, mdata: dict): 28 | """Get List of Scheduled Anime""" 29 | gid = mdata['chat']['id'] 30 | find_gc = await DC.find_one({'_id': gid}) 31 | if find_gc is not None and 'watch' in find_gc['cmd_list'].split(): 32 | return 33 | x = message.text.split(" ", 1) 34 | if len(x)==1: 35 | await message.reply_text("Nothing given to search for!!!") 36 | return 37 | try: 38 | user = mdata['from_user']['id'] 39 | except KeyError: 40 | user = mdata['sender_chat']['id'] 41 | data = get_wols(x[1]) 42 | msg = f"Found related animes for the query {x[1]}" 43 | buttons = [] 44 | if data == []: 45 | await client.send_message(gid, 'No results found!!!') 46 | return 47 | for i in data: 48 | 
buttons.append( 49 | [ 50 | InlineKeyboardButton( 51 | str(i[1]), 52 | callback_data=f"watch_{i[0]}_{x[1]}_0_{user}" 53 | ) 54 | ] 55 | ) 56 | await client.send_message( 57 | gid, msg, reply_markup=InlineKeyboardMarkup(buttons) 58 | ) 59 | 60 | 61 | @anibot.on_callback_query(filters.regex(pattern=r"watch_(.*)")) 62 | @check_user 63 | async def watch_(client: anibot, cq: CallbackQuery, cdata: dict): 64 | kek, id_, qry, req, user = cdata['data'].split("_") 65 | msg, total = get_wo(int(id_), int(req)) 66 | totalpg, lol = divmod(total, 50) 67 | button = [] 68 | if lol!=0: 69 | totalpg + 1 70 | if total>50: 71 | if int(req)==0: 72 | button.append( 73 | [ 74 | InlineKeyboardButton( 75 | text="Next", 76 | callback_data=f"{kek}_{id_}_{qry}_{int(req)+1}_{user}" 77 | ) 78 | ] 79 | ) 80 | elif int(req)==totalpg: 81 | button.append( 82 | [ 83 | InlineKeyboardButton( 84 | text="Prev", 85 | callback_data=f"{kek}_{id_}_{qry}_{int(req)-1}_{user}" 86 | ) 87 | ] 88 | ) 89 | else: 90 | button.append( 91 | [ 92 | InlineKeyboardButton( 93 | text="Prev", 94 | callback_data=f"{kek}_{id_}_{qry}_{int(req)-1}_{user}" 95 | ), 96 | InlineKeyboardButton( 97 | text="Next", 98 | callback_data=f"{kek}_{id_}_{qry}_{int(req)+1}_{user}" 99 | ) 100 | ] 101 | ) 102 | button.append([ 103 | InlineKeyboardButton("Back", callback_data=f"wol_{qry}_{user}") 104 | ]) 105 | await cq.edit_message_text(msg, reply_markup=InlineKeyboardMarkup(button)) 106 | 107 | 108 | @anibot.on_callback_query(filters.regex(pattern=r"wol_(.*)")) 109 | @check_user 110 | async def wls(client: anibot, cq: CallbackQuery, cdata: dict): 111 | kek, qry, user = cdata['data'].split("_") 112 | data = get_wols(qry) 113 | msg = f"Found related animes for the query {qry}" 114 | buttons = [] 115 | for i in data: 116 | buttons.append( 117 | [ 118 | InlineKeyboardButton( 119 | str(i[1]), 120 | callback_data=f"watch_{i[0]}_{qry}_0_{user}" 121 | ) 122 | ] 123 | ) 124 | await cq.edit_message_text(msg, reply_markup=InlineKeyboardMarkup(buttons)) 
125 | 126 | 127 | @anibot.on_edited_message( 128 | filters.command(["watch", f"watch{BOT_NAME}"], prefixes=trg) 129 | ) 130 | async def get_watch_order_edit(client: Client, message: Message): 131 | await get_watch_order(client, message) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Telegram Bot Repo Capable of fetching the following Info via Anilist API inspired from [AniFluid](https://t.me/anifluidbot) and [Nepgear](https://t.me/nepgearbot) 2 | * Anime 3 | * Airing 4 | * Manga 5 | * Character 6 | * Studio 7 | * Scheduled 8 | * Top animes 9 | * Favourites 10 | * Anilist Activity 11 | * Update Anilist entry using bot 12 | * Popular, trending and upcoming animes for a season 13 | * Random anime quotes 14 | * Anime fillers from [animefillerslist](https://www.animefillerlist.com) 15 | * Anime Airing notifications from [LiveChart](https://livechart.me) 16 | * Anime Headlines from [LiveChart](https://livechart.me) 17 | * Anime Headlines from [MyAnimeList](https://myanimelist.net) 18 | * Anime release notifications for [Crunchyroll](https://crunchyroll.com) 19 | * Anime release notifications for [Subsplease](https://subsplease.org) 20 | * Anime Reverse Search Powered by [tracemoepy](https://github.com/dragsama/tracemoepy) 21 | * Watch Order from [Chiaki](https://chiaki.site/) using [web api](https://chiaki.vercel.app) 22 | * Supports custom UI to be set for all results shown by /anime and /anilist in a group 23 |

The bot can also be added to groups, where an SFW lock can be enabled to prevent members from looking up hentai and other 18+ rated content
Command disabling is also supported
With the new update you can now change the UI for anime/anilist/manga results in your group

24 | 25 | [![CodeFactor](https://www.codefactor.io/repository/github/lostb053/anibot/badge)](https://www.codefactor.io/repository/github/lostb053/anibot) 26 | 27 | ## Requirements 28 | * Python 3.10.1 29 | * Telegram [API Keys](https://my.telegram.org/apps) 30 | * Bot Token from [BotFather](https://t.me/botfather) 31 | * MongoDB [Database URL](https://cloud.mongodb.com/) 32 | * Anilist [Client Keys](https://anilist.co/settings/developer) 33 | * For smooth authentication process deploy [this](https://github.com/lostb053/anilist_oauth_webserver) webserver (well a noob code server hope this helps) 34 | 35 | 36 | ## Available Cmds 37 | ``` 38 | /help - Get interactive and detailed help on bot cmds 39 | /ping - Ping the bot to check if it's online 40 | /start - To start bot in group (will be logged) or pm (user if not OWNER will be logged) 41 | /anime - Fetches info on single anime (includes buttons to look up for prequels and sequels) 42 | /anilist - Fetches info on multiple possible animes related to query 43 | /character - Fetches info on multiple possible characters related to query 44 | /manga - Fetches info on multiple possible mangas related to query 45 | /airing - Fetches info on airing data for anime 46 | /studio - Fetches info on multiple possible studios related to query 47 | /flex - Fetches anilist info of an authorised user 48 | /user - Fetches anilist info as per query 49 | /schedule - Fetches scheduled animes 50 | /auth - Fetches info on how to authorize anilist account 51 | /browse - get popular, trending or upcoming animes 52 | /quote - get random quotes 53 | /logout - removes authorization 54 | /settings - To toggle nsfw lock and airing notifications and other settings in groups 55 | /top - to retrieve top animes for a genre or tag 56 | /reverse - Reverse search powered by tracemoepy 57 | /watch - Fetches watch order for anime series 58 | /feedback - contact bot owner or main support grp at @hanabi_support 59 | /me or /activity - Get Anilist recent 
activity 60 | /fillers - To get list of anime fillers 61 | /disable - To disable a command in group 62 | /enable - To enable a command in group 63 | /disabled - To list disabled commands in a group 64 | /favourites - Get Anilist favourites 65 | /gettags - Get list of available Tags 66 | /getgenres - Get list of available Genres 67 | /connect - Helps connect Public channel, to use bot as Channel in group 68 | ``` 69 | 70 | 71 | ## Owner/Sudo Cmds 72 | ``` 73 | /eval - Runs python code (code must start right after cmd like "/eval print('UwU')") 74 | /term - Runs the code in terminal 75 | /stats - Gibs data on bot such as no. of grps/users and ping 76 | /dbcleanup - Cleans useless entries in database 77 | ``` 78 | 79 | 80 | ## How to host 81 |

82 | 83 | 84 | ## Credits 85 | * AniList Api ([GitHub](https://github.com/AniList/ApiV2-GraphQL-Docs)) 86 | * jikanpy ([GitHub](https://github.com/abhinavk99/jikanpy)) 87 | * [@NotThatMF](https://t.me/notthatmf) for [chiaki fast api](https://chiaki.vercel.app/) and for creating base for this bot to work 88 | * [@DragSama](https://t.me/dragsama) on telegram for [tracemoepy](https://github.com/dragsama/tracemoepy) & [AniFluid-Base](https://github.com/DragSama/AniFluid-Base) 89 | * [@DeletedUser420](https://t.me/deleteduser420) on telegram for [USERGE-X](https://github.com/code-rgb/USERGE-X) & [Userge-Plugins](https://github.com/code-rgb/Userge-Plugins) 90 | * [Phyco-Ninja](https://github.com/Phyco-Ninja) as author of anilist plugin in Userge-Plugins repo 91 | * [@blank_x](https://t.me/blank_x) on tg for [sukuinote](https://gitlab.com/blank-x/sukuinote) 92 | 93 | 94 | For improvements PR or contact [@LostB053](https://t.me/lostb053) or [@hanabi_support](https://t.me/hanabi_support)
95 | You can ask for support too, but don't expect too much (since I am still learning myself)
96 |
97 |

Note: I dropped the SauceNAO feature because I couldn't present its results in a good-looking manner
98 | I would be grateful if anybody could help me parse the results and organize them like @reverseSearchBot

Something nearby but good looking would suffice too 99 | -------------------------------------------------------------------------------- /anibot/plugins/tracemoepy.py: -------------------------------------------------------------------------------- 1 | # The following code is exact (almost i mean) copy of 2 | # reverse search taken from @DeletedUser420's Userge-Plugins repo 3 | # originally authored by 4 | # Phyco-Ninja (https://github.com/Phyco-Ninja) (@PhycoNinja13b) 5 | # but is in current state after DeletedUser420's edits 6 | # which made this code shorter and more efficient 7 | 8 | import random 9 | import asyncio 10 | import tracemoepy 11 | from traceback import format_exc as err 12 | from tracemoepy.errors import ServerError 13 | from aiohttp import ClientSession 14 | from pyrogram import filters 15 | from pyrogram.types import ( 16 | InlineKeyboardButton, 17 | InlineKeyboardMarkup, 18 | CallbackQuery, 19 | InputMediaPhoto, 20 | InputMediaVideo, 21 | Message 22 | ) 23 | from .. 
from ..utils.helper import (
    check_user,
    clog,
    control_user,
    media_to_image,
    rand_key,
    get_user_from_channel as gcc
)
from ..utils.data_parser import check_if_adult
from ..utils.db import get_collection
from .anilist import no_pic

SFW_GRPS = get_collection("SFW_GROUPS")
DC = get_collection('DISABLED_CMDS')

# In-memory cache of full trace.moe responses, keyed by a random token that
# is embedded in the pagination callback data (see tracemoe_btn).
TRACE_MOE = {}

@anibot.on_message(
    filters.command(["reverse", f"reverse{BOT_NAME}"], prefixes=trg)
)
@control_user
async def trace_bek(client: anibot, message: Message, mdata: dict):
    """ Reverse Search Anime Clips/Photos """
    gid = mdata['chat']['id']
    try:
        user = mdata['from_user']['id']
    except KeyError:
        # Message was sent on behalf of a channel; use the channel id.
        user = mdata['sender_chat']['id']
    find_gc = await DC.find_one({'_id': gid})
    if find_gc is not None and 'reverse' in find_gc['cmd_list'].split():
        return
    x = await message.reply_text("Reverse searching the given media")
    replied = message.reply_to_message
    if not replied:
        await x.edit_text("Reply to some media !")
        await asyncio.sleep(5)
        await x.delete()
        return
    dls_loc = await media_to_image(client, message, x, replied)
    if dls_loc:
        # BUG FIX: the original wrapped this in ``async with session:``,
        # which CLOSED the shared aiohttp session on exit — every later
        # lookup then hit the RuntimeError fallback below.  Use the shared
        # session without the context manager; its lifetime is managed in
        # __main__ (``await session.close()`` on shutdown).
        tracemoe = tracemoepy.AsyncTrace(session=session)
        try:
            search = await tracemoe.search(dls_loc, upload_file=True)
        except ServerError:
            await x.edit_text('ServerError, retrying')
            try:
                search = await tracemoe.search(dls_loc, upload_file=True)
            except ServerError:
                await x.edit_text('Couldnt parse results!!!')
                return
        except RuntimeError:
            # Shared session was closed elsewhere; retry on a fresh one.
            cs = ClientSession()
            tracemoe = tracemoepy.AsyncTrace(session=cs)
            search = await tracemoe.search(dls_loc, upload_file=True)
        except Exception:
            e = err()
            await x.edit_text(
                e.split("\n").pop(-2)
                +"\n\nTrying again in 2-3 minutes might just fix this"
            )
            await clog("ANIBOT", e, "TRACEMOE", replied=replied)
            return
        result = search["result"][0]
        caption_ = (
            f"**Title**: {result['anilist']['title']['english']}"
            +f" (`{result['anilist']['title']['native']}`)\n"
            +f"**Anilist ID:** `{result['anilist']['id']}`\n"
            +f"**Similarity**: `{(str(result['similarity']*100))[:5]}`\n"
            +f"**Episode**: `{result['episode']}`"
        )
        preview = result['video']
        dls_js = rand_key()
        TRACE_MOE[dls_js] = search
        button = []
        nsfw = False
        # Hide the preview when the match is adult-rated and the group has
        # the SFW lock enabled.
        if await check_if_adult(
            int(result['anilist']['id'])
        )=="True" and (
            await SFW_GRPS.find_one({"id": gid})
        ):
            msg = no_pic[random.randint(0, 4)]
            caption="The results seems to be 18+ and not allowed in this group"
            nsfw = True
        else:
            msg = preview
            caption=caption_
            button.append([
                InlineKeyboardButton(
                    "More Info",
                    url=f"https://anilist.co/anime/{result['anilist']['id']}")
            ])
        button.append([
            InlineKeyboardButton(
                "Next", callback_data=f"tracech_1_{dls_js}_{user}"
            )
        ])
        try:
            await (
                message.reply_video if nsfw is False else message.reply_photo
            )(
                msg, caption=caption, reply_markup=InlineKeyboardMarkup(button)
            )
        except Exception:
            e = err()
            await x.edit_text(
                e.split("\n").pop(-2)
                +"\n\nTrying again in 2-3 minutes might just fix this"
            )
            await clog("ANIBOT", e, "TRACEMOE", replied=replied)
            return
    else:
        await message.reply_text("Couldn't parse results!!!")
    await x.delete()


@anibot.on_callback_query(filters.regex(pattern=r"tracech_(.*)"))
@check_user
async def tracemoe_btn(client: anibot, cq: CallbackQuery, cdata: dict):
    """Paginate through the cached trace.moe candidates for a query.

    Callback data layout: ``tracech_<page>_<cache token>_<user id>``.
    """
    kek, page, dls_loc, user = cdata['data'].split("_")
    try:
        TRACE_MOE[dls_loc]
    except KeyError:
        # Cache is process-local; token vanishes on restart.
        return await cq.answer(
            "Query Expired!!!\nCreate new one", show_alert=True
        )
    search = TRACE_MOE[dls_loc]
    result = search["result"][int(page)]
    caption = (
        f"**Title**: {result['anilist']['title']['english']}"
        +f" (`{result['anilist']['title']['native']}`)\n"
        +f"**Anilist ID:** `{result['anilist']['id']}`\n"
        +f"**Similarity**: `{(str(result['similarity']*100))[:5]}`\n"
        +f"**Episode**: `{result['episode']}`"
    )
    preview = result['video']
    button = []
    if await check_if_adult(
        int(result['anilist']['id'])
    )=="True" and (
        await SFW_GRPS.find_one({"id": cq.message.chat.id})
    ):
        msg = InputMediaPhoto(
            no_pic[random.randint(0, 4)],
            caption="The results seems to be 18+ and not allowed in this group"
        )
    else:
        msg = InputMediaVideo(preview, caption=caption)
        button.append([
            InlineKeyboardButton(
                "More Info",
                url=f"https://anilist.co/anime/{result['anilist']['id']}"
            )
        ])
    # First page: Next only; last page: Back only; otherwise both.
    if int(page)==0:
        button.append([
            InlineKeyboardButton(
                "Next", callback_data=f"tracech_{int(page)+1}_{dls_loc}_{user}"
            )
        ])
    elif int(page)==(len(search['result'])-1):
        button.append([
            InlineKeyboardButton(
                "Back", callback_data=f"tracech_{int(page)-1}_{dls_loc}_{user}"
            )
        ])
    else:
        button.append([
            InlineKeyboardButton(
                "Back",
                callback_data=f"tracech_{int(page)-1}_{dls_loc}_{user}"
            ),
            InlineKeyboardButton(
                "Next",
                callback_data=f"tracech_{int(page)+1}_{dls_loc}_{user}"
            )
        ])
    await cq.edit_message_media(msg, reply_markup=InlineKeyboardMarkup(button))


@anibot.on_edited_message(
    filters.command(["reverse", f"reverse{BOT_NAME}"], prefixes=trg)
)
async def trace_bek_edit(client: anibot, message: Message):
    """Re-run /reverse when the command message is edited.

    BUG FIX: the original registered this with ``on_message``, duplicating
    ``trace_bek``'s own registration and never firing on edits.  The
    sibling watch plugin uses ``on_edited_message`` for its edit wrapper,
    which is clearly the intent here.
    """
    await trace_bek(client, message)
/anibot/utils/google_trans_new.py: -------------------------------------------------------------------------------- 1 | # coding:utf-8 2 | # author LuShan 3 | # version : 1.1.9 4 | import json, requests, random, re 5 | from urllib.parse import quote 6 | import urllib3 7 | import logging 8 | 9 | LANGUAGES = { 10 | 'af': 'afrikaans', 11 | 'sq': 'albanian', 12 | 'am': 'amharic', 13 | 'ar': 'arabic', 14 | 'hy': 'armenian', 15 | 'az': 'azerbaijani', 16 | 'eu': 'basque', 17 | 'be': 'belarusian', 18 | 'bn': 'bengali', 19 | 'bs': 'bosnian', 20 | 'bg': 'bulgarian', 21 | 'ca': 'catalan', 22 | 'ceb': 'cebuano', 23 | 'ny': 'chichewa', 24 | 'zh-cn': 'chinese (simplified)', 25 | 'zh-tw': 'chinese (traditional)', 26 | 'co': 'corsican', 27 | 'hr': 'croatian', 28 | 'cs': 'czech', 29 | 'da': 'danish', 30 | 'nl': 'dutch', 31 | 'en': 'english', 32 | 'eo': 'esperanto', 33 | 'et': 'estonian', 34 | 'tl': 'filipino', 35 | 'fi': 'finnish', 36 | 'fr': 'french', 37 | 'fy': 'frisian', 38 | 'gl': 'galician', 39 | 'ka': 'georgian', 40 | 'de': 'german', 41 | 'el': 'greek', 42 | 'gu': 'gujarati', 43 | 'ht': 'haitian creole', 44 | 'ha': 'hausa', 45 | 'haw': 'hawaiian', 46 | 'iw': 'hebrew', 47 | 'he': 'hebrew', 48 | 'hi': 'hindi', 49 | 'hmn': 'hmong', 50 | 'hu': 'hungarian', 51 | 'is': 'icelandic', 52 | 'ig': 'igbo', 53 | 'id': 'indonesian', 54 | 'ga': 'irish', 55 | 'it': 'italian', 56 | 'ja': 'japanese', 57 | 'jw': 'javanese', 58 | 'kn': 'kannada', 59 | 'kk': 'kazakh', 60 | 'km': 'khmer', 61 | 'ko': 'korean', 62 | 'ku': 'kurdish (kurmanji)', 63 | 'ky': 'kyrgyz', 64 | 'lo': 'lao', 65 | 'la': 'latin', 66 | 'lv': 'latvian', 67 | 'lt': 'lithuanian', 68 | 'lb': 'luxembourgish', 69 | 'mk': 'macedonian', 70 | 'mg': 'malagasy', 71 | 'ms': 'malay', 72 | 'ml': 'malayalam', 73 | 'mt': 'maltese', 74 | 'mi': 'maori', 75 | 'mr': 'marathi', 76 | 'mn': 'mongolian', 77 | 'my': 'myanmar (burmese)', 78 | 'ne': 'nepali', 79 | 'no': 'norwegian', 80 | 'or': 'odia', 81 | 'ps': 'pashto', 82 | 'fa': 'persian', 83 | 'pl': 
'polish', 84 | 'pt': 'portuguese', 85 | 'pa': 'punjabi', 86 | 'ro': 'romanian', 87 | 'ru': 'russian', 88 | 'sm': 'samoan', 89 | 'gd': 'scots gaelic', 90 | 'sr': 'serbian', 91 | 'st': 'sesotho', 92 | 'sn': 'shona', 93 | 'sd': 'sindhi', 94 | 'si': 'sinhala', 95 | 'sk': 'slovak', 96 | 'sl': 'slovenian', 97 | 'so': 'somali', 98 | 'es': 'spanish', 99 | 'su': 'sundanese', 100 | 'sw': 'swahili', 101 | 'sv': 'swedish', 102 | 'tg': 'tajik', 103 | 'ta': 'tamil', 104 | 'tt': 'tatar', 105 | 'te': 'telugu', 106 | 'th': 'thai', 107 | 'tr': 'turkish', 108 | 'tk': 'turkmen', 109 | 'uk': 'ukrainian', 110 | 'ur': 'urdu', 111 | 'ug': 'uyghur', 112 | 'uz': 'uzbek', 113 | 'vi': 'vietnamese', 114 | 'cy': 'welsh', 115 | 'xh': 'xhosa', 116 | 'yi': 'yiddish', 117 | 'yo': 'yoruba', 118 | 'zu': 'zulu', 119 | } 120 | 121 | DEFAULT_SERVICE_URLS = ('translate.google.ac','translate.google.ad','translate.google.ae', 122 | 'translate.google.al','translate.google.am','translate.google.as', 123 | 'translate.google.at','translate.google.az','translate.google.ba', 124 | 'translate.google.be','translate.google.bf','translate.google.bg', 125 | 'translate.google.bi','translate.google.bj','translate.google.bs', 126 | 'translate.google.bt','translate.google.by','translate.google.ca', 127 | 'translate.google.cat','translate.google.cc','translate.google.cd', 128 | 'translate.google.cf','translate.google.cg','translate.google.ch', 129 | 'translate.google.ci','translate.google.cl','translate.google.cm', 130 | 'translate.google.cn','translate.google.co.ao','translate.google.co.bw', 131 | 'translate.google.co.ck','translate.google.co.cr','translate.google.co.id', 132 | 'translate.google.co.il','translate.google.co.in','translate.google.co.jp', 133 | 'translate.google.co.ke','translate.google.co.kr','translate.google.co.ls', 134 | 'translate.google.co.ma','translate.google.co.mz','translate.google.co.nz', 135 | 'translate.google.co.th','translate.google.co.tz','translate.google.co.ug', 136 | 
'translate.google.co.uk','translate.google.co.uz','translate.google.co.ve', 137 | 'translate.google.co.vi','translate.google.co.za','translate.google.co.zm', 138 | 'translate.google.co.zw','translate.google.co','translate.google.com.af', 139 | 'translate.google.com.ag','translate.google.com.ai','translate.google.com.ar', 140 | 'translate.google.com.au','translate.google.com.bd','translate.google.com.bh', 141 | 'translate.google.com.bn','translate.google.com.bo','translate.google.com.br', 142 | 'translate.google.com.bz','translate.google.com.co','translate.google.com.cu', 143 | 'translate.google.com.cy','translate.google.com.do','translate.google.com.ec', 144 | 'translate.google.com.eg','translate.google.com.et','translate.google.com.fj', 145 | 'translate.google.com.gh','translate.google.com.gi','translate.google.com.gt', 146 | 'translate.google.com.hk','translate.google.com.jm','translate.google.com.kh', 147 | 'translate.google.com.kw','translate.google.com.lb','translate.google.com.lc', 148 | 'translate.google.com.ly','translate.google.com.mm','translate.google.com.mt', 149 | 'translate.google.com.mx','translate.google.com.my','translate.google.com.na', 150 | 'translate.google.com.ng','translate.google.com.ni','translate.google.com.np', 151 | 'translate.google.com.om','translate.google.com.pa','translate.google.com.pe', 152 | 'translate.google.com.pg','translate.google.com.ph','translate.google.com.pk', 153 | 'translate.google.com.pr','translate.google.com.py','translate.google.com.qa', 154 | 'translate.google.com.sa','translate.google.com.sb','translate.google.com.sg', 155 | 'translate.google.com.sl','translate.google.com.sv','translate.google.com.tj', 156 | 'translate.google.com.tr','translate.google.com.tw','translate.google.com.ua', 157 | 'translate.google.com.uy','translate.google.com.vc','translate.google.com.vn', 158 | 'translate.google.com','translate.google.cv','translate.google.cx', 159 | 'translate.google.cz','translate.google.de','translate.google.dj', 
160 | 'translate.google.dk','translate.google.dm','translate.google.dz', 161 | 'translate.google.ee','translate.google.es','translate.google.eu', 162 | 'translate.google.fi','translate.google.fm','translate.google.fr', 163 | 'translate.google.ga','translate.google.ge','translate.google.gf', 164 | 'translate.google.gg','translate.google.gl','translate.google.gm', 165 | 'translate.google.gp','translate.google.gr','translate.google.gy', 166 | 'translate.google.hn','translate.google.hr','translate.google.ht', 167 | 'translate.google.hu','translate.google.ie','translate.google.im', 168 | 'translate.google.io','translate.google.iq','translate.google.is', 169 | 'translate.google.it','translate.google.je','translate.google.jo', 170 | 'translate.google.kg','translate.google.ki','translate.google.kz', 171 | 'translate.google.la','translate.google.li','translate.google.lk', 172 | 'translate.google.lt','translate.google.lu','translate.google.lv', 173 | 'translate.google.md','translate.google.me','translate.google.mg', 174 | 'translate.google.mk','translate.google.ml','translate.google.mn', 175 | 'translate.google.ms','translate.google.mu','translate.google.mv', 176 | 'translate.google.mw','translate.google.ne','translate.google.nf', 177 | 'translate.google.nl','translate.google.no','translate.google.nr', 178 | 'translate.google.nu','translate.google.pl','translate.google.pn', 179 | 'translate.google.ps','translate.google.pt','translate.google.ro', 180 | 'translate.google.rs','translate.google.ru','translate.google.rw', 181 | 'translate.google.sc','translate.google.se','translate.google.sh', 182 | 'translate.google.si','translate.google.sk','translate.google.sm', 183 | 'translate.google.sn','translate.google.so','translate.google.sr', 184 | 'translate.google.st','translate.google.td','translate.google.tg', 185 | 'translate.google.tk','translate.google.tl','translate.google.tm', 186 | 'translate.google.tn','translate.google.to','translate.google.tt', 187 | 
'translate.google.us','translate.google.vg','translate.google.vu','translate.google.ws')
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Bare TLD suffixes ("ac", "com", "co.uk", ...) extracted from the mirror
# list above; used to validate the url_suffix constructor argument.
URLS_SUFFIX = [re.search('translate.google.(.*)', url.strip()).group(1) for url in DEFAULT_SERVICE_URLS]
URL_SUFFIX_DEFAULT = 'cn'


class google_new_transError(Exception):
    """Exception that uses context to present a meaningful error message."""

    def __init__(self, msg=None, **kwargs):
        # tts: the google_translator instance; response: the failed HTTP
        # response (both optional, used only to infer a message).
        self.tts = kwargs.pop('tts', None)
        self.rsp = kwargs.pop('response', None)
        if msg:
            self.msg = msg
        elif self.tts is not None:
            self.msg = self.infer_msg(self.tts, self.rsp)
        else:
            self.msg = None
        super(google_new_transError, self).__init__(self.msg)

    def infer_msg(self, tts, rsp=None):
        """Build a human-readable cause string from the HTTP response, or a
        timeout message when no response was received at all."""
        cause = "Unknown"

        if rsp is None:
            premise = "Failed to connect"

            return "{}. Probable cause: {}".format(premise, "timeout")
        # if tts.tld != 'com':
        #     host = _translate_url(tld=tts.tld)
        #     cause = "Host '{}' is not reachable".format(host)

        else:
            status = rsp.status_code
            reason = rsp.reason

            premise = "{:d} ({}) from TTS API".format(status, reason)

            if status == 403:
                cause = "Bad token or upstream API changes"
            elif status == 200 and not tts.lang_check:
                cause = "No audio stream in response. Unsupported language '%s'" % self.tts.lang
            elif status >= 500:
                cause = "Uptream API error. Try again later."

        return "{}. Probable cause: {}".format(premise, cause)


class google_translator:
    '''
    Minimal client for Google Translate's unofficial ``batchexecute`` RPC
    endpoint (the "MkEWBc" method).

    You can use 108 languages as target and source; see LANGUAGES for the
    full list.  Target language codes look like 'en', 'zh', 'th', ...

    :param url_suffix: TLD suffix selecting which translate.google.*
        mirror to call.  The value should be one of the suffixes listed in
        `DEFAULT_SERVICE_URLS`; any other value falls back to
        `URL_SUFFIX_DEFAULT` ('cn').
    :type url_suffix: UTF-8 :class:`str`
    :param timeout: Timeout used for every request.
    :type timeout: number or a double of numbers
    :param proxies: Proxies used for every request.
    :type proxies: class : dict; like: {'http': 'http:171.112.169.47:19934/', 'https': 'https:171.112.169.47:19934/'}
    '''

    def __init__(self, url_suffix="cn", timeout=5, proxies=None):
        self.proxies = proxies
        if url_suffix not in URLS_SUFFIX:
            self.url_suffix = URL_SUFFIX_DEFAULT
        else:
            self.url_suffix = url_suffix
        url_base = "https://translate.google.{}".format(self.url_suffix)
        self.url = url_base + "/_/TranslateWebserverUi/data/batchexecute"
        self.timeout = timeout

    def _package_rpc(self, text, lang_src='auto', lang_tgt='auto'):
        """Encode *text* into the double-JSON ``f.req=...`` form body the
        batchexecute endpoint expects."""
        GOOGLE_TTS_RPC = ["MkEWBc"]
        parameter = [[text.strip(), lang_src, lang_tgt, True], [1]]
        escaped_parameter = json.dumps(parameter, separators=(',', ':'))
        rpc = [[[random.choice(GOOGLE_TTS_RPC), escaped_parameter, None, "generic"]]]
        espaced_rpc = json.dumps(rpc, separators=(',', ':'))
        # text_urldecode = quote(text.strip())
        freq_initial = "f.req={}&".format(quote(espaced_rpc))
        freq = freq_initial
        return freq

    def translate(self, text, lang_tgt='auto', lang_src='auto', pronounce=False):
        """Translate *text* from lang_src to lang_tgt.

        Returns the translated string (a warning string for inputs of 5000+
        characters, "" for empty input); with ``pronounce=True`` returns
        ``[translated, pronounce_src, pronounce_tgt]``.
        Raises google_new_transError on HTTP/connection failure.
        """
        # Unknown language codes silently fall back to 'auto'.
        try:
            lang = LANGUAGES[lang_src]
        except Exception:
            lang_src = 'auto'
        try:
            lang = LANGUAGES[lang_tgt]
        except Exception:
            # NOTE(review): this resets lang_src rather than lang_tgt —
            # looks like an upstream typo; confirm against google_trans_new.
            lang_src = 'auto'
        text = str(text)
        if len(text) >= 5000:
            return "Warning: Can only detect less than 5000 characters"
        if len(text) == 0:
            return ""
        headers = {
            "Referer": "http://translate.google.{}/".format(self.url_suffix),
            "User-Agent":
                "Mozilla/5.0 (Windows NT 10.0; WOW64) "
                "AppleWebKit/537.36 (KHTML, like Gecko) "
                "Chrome/47.0.2526.106 Safari/537.36",
            "Content-Type": "application/x-www-form-urlencoded;charset=utf-8"
        }
        freq = self._package_rpc(text, lang_src, lang_tgt)
        response = requests.Request(method='POST',
                                    url=self.url,
                                    data=freq,
                                    headers=headers,
                                    )
        try:
            if self.proxies is None or type(self.proxies) != dict:
                self.proxies = {}
            with requests.Session() as s:
                s.proxies = self.proxies
                r = s.send(request=response.prepare(),
                           verify=False,
                           timeout=self.timeout)
                # The response is a chunked stream; only the line carrying
                # the "MkEWBc" RPC id holds the payload we want.
                for line in r.iter_lines(chunk_size=1024):
                    decoded_line = line.decode('utf-8')
                    if "MkEWBc" in decoded_line:
                        try:
                            # Payload is JSON nested inside JSON: the outer
                            # envelope's [0][2] element is itself a JSON doc.
                            response = (decoded_line)
                            response = json.loads(response)
                            response = list(response)
                            response = json.loads(response[0][2])
                            response_ = list(response)
                            response = response_[1][0]
                            if len(response) == 1:
                                if len(response[0]) > 5:
                                    sentences = response[0][5]
                                else: ## only url
                                    sentences = response[0][0]
                                if pronounce is False:
                                    return sentences
                                elif pronounce == True:
                                    return [sentences,None,None]
                                # NOTE(review): both pronounce branches above
                                # return, so the block below appears
                                # unreachable in this copy — verify against
                                # upstream google_trans_new.
                                translate_text = ""
                                for sentence in sentences:
                                    sentence = sentence[0]
                                    translate_text += sentence.strip() + ' '
                                translate_text = translate_text
                                if pronounce is False:
                                    return translate_text
                                elif pronounce == True:
                                    pronounce_src = (response_[0][0])
                                    pronounce_tgt = (response_[1][0][0][1])
                                    return [translate_text, pronounce_src, pronounce_tgt]
                            elif len(response) == 2:
                                sentences = []
                                for i in response:
                                    sentences.append(i[0])
                                if pronounce is False:
                                    return sentences
                                elif pronounce == True:
                                    pronounce_src = (response_[0][0])
                                    pronounce_tgt = (response_[1][0][0][1])
                                    return [sentences, pronounce_src, pronounce_tgt]
                        except Exception as e:
                            raise e
                r.raise_for_status()
        except requests.exceptions.ConnectTimeout as e:
            raise e
        except requests.exceptions.HTTPError as e:
            # Request successful, bad response
            raise google_new_transError(tts=self, response=r)
        except requests.exceptions.RequestException as e:
            # Request failed
            raise google_new_transError(tts=self)

    def detect(self, text):
        """Detect the language of *text*.

        Returns ``[code, language name]`` (e.g. ``['en', 'english']``),
        "" for empty input, or None (via log.debug) for 5000+ characters.
        Raises google_new_transError on HTTP/connection failure.
        """
        text = str(text)
        if len(text) >= 5000:
            return log.debug("Warning: Can only detect less than 5000 characters")
        if len(text) == 0:
            return ""
        headers = {
            "Referer": "http://translate.google.{}/".format(self.url_suffix),
            "User-Agent":
                "Mozilla/5.0 (Windows NT 10.0; WOW64) "
                "AppleWebKit/537.36 (KHTML, like Gecko) "
                "Chrome/47.0.2526.106 Safari/537.36",
            "Content-Type": "application/x-www-form-urlencoded;charset=utf-8"
        }
        freq = self._package_rpc(text)
        response = requests.Request(method='POST',
                                    url=self.url,
                                    data=freq,
                                    headers=headers)
        try:
            if self.proxies is None or type(self.proxies) != dict:
                self.proxies = {}
            with requests.Session() as s:
                s.proxies = self.proxies
                r = s.send(request=response.prepare(),
                           verify=False,
                           timeout=self.timeout)

                for line in r.iter_lines(chunk_size=1024):
                    decoded_line = line.decode('utf-8')
                    if "MkEWBc" in decoded_line:
                        # regex_str = r"\[\[\"wrb.fr\",\"MkEWBc\",\"\[\[(.*).*?,\[\[\["
                        try:
                            # data_got = re.search(regex_str,decoded_line).group(1)
                            response = (decoded_line)
                            response = json.loads(response)
                            response = list(response)
                            response = json.loads(response[0][2])
                            response = list(response)
                            detect_lang = response[0][2]
                        except Exception:
                            raise Exception
                        # data_got = data_got.split('\\\"]')[0]
                        return [detect_lang, LANGUAGES[detect_lang.lower()]]
                r.raise_for_status()
        except requests.exceptions.HTTPError as e:
            # Request successful, bad response
            log.debug(str(e))
            raise google_new_transError(tts=self, response=r)
        except requests.exceptions.RequestException as e:
            # Request failed
            log.debug(str(e))
            raise google_new_transError(tts=self)
-------------------------------------------------------------------------------- /anibot/plugins/livechartme.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import re 3 | import asyncio 4 | import time 5 | from traceback import format_exc as err 6 | from bs4 import BeautifulSoup as bs 7 | from collections import defaultdict 8 | from datetime import datetime as dt 9 | from apscheduler.schedulers.asyncio import AsyncIOScheduler 10 | from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup 11 | from pyrogram.errors import WebpageCurlFailed, WebpageMediaEmpty, ChatAdminRequired 12 | from .. import anibot 13 | from ..utils.db import get_collection 14 | from ..utils.helper import clog 15 | 16 | failed_pic = "https://telegra.ph/file/09733b49f3a9d5b147d21.png" 17 | 18 | url_a = "https://www.livechart.me/feeds/episodes" 19 | url_b = 'https://feeds.feedburner.com/crunchyroll/rss/anime?format=xml' 20 | url_c = 'https://subsplease.org/rss/?t' 21 | url_d = 'https://www.livechart.me/feeds/headlines' 22 | url_e = 'https://myanimelist.net/rss/news.xml' 23 | 24 | A = get_collection('AIRING_TITLE') 25 | B = get_collection('CRUNCHY_TITLE') 26 | C = get_collection('SUBSPLEASE_TITLE') 27 | D = get_collection('HEADLINES_TITLE') 28 | E = get_collection('MAL_HEADLINES_TITLE') 29 | AR_GRPS = get_collection('AIRING_GROUPS') 30 | CR_GRPS = get_collection('CRUNCHY_GROUPS') 31 | SP_GRPS = get_collection('SUBSPLEASE_GROUPS') 32 | HD_GRPS = get_collection('HEADLINES_GROUPS') 33 | MAL_HD_GRPS = get_collection('MAL_HEADLINES_GROUPS') 34 | 35 | admin_error_msg = "Please give bot Pin Message and Delete Message permissions to pin new headlines!!!\nOr you can disable Pin and Unpin options in /settings command to stop seeing this message" 36 | 37 | async def livechart_parser(): 38 | print('Parsing data from rss') 39 | da = bs(requests.get(url_a).text, features="xml") 40 | db = bs(requests.get(url_b).text, features="xml") 41 | dc 
= bs(requests.get(url_c).text, features='xml') 42 | dd = bs(requests.get(url_d).text, features='xml') 43 | de = bs(requests.get(url_e).text, features='xml') 44 | if (await A.find_one()) is None: 45 | await A.insert_one( 46 | { 47 | '_id': str(da.find('item').find('title')), 48 | 'guid': str(da.find('item').find('guid')) 49 | } 50 | ) 51 | return 52 | if (await B.find_one()) is None: 53 | await B.insert_one( 54 | { 55 | '_id': str(db.find('item').find('title')), 56 | 'guid': str(db.find('item').find('guid')) 57 | } 58 | ) 59 | return 60 | if (await C.find_one()) is None: 61 | await C.insert_one({'_id': str(dc.find('item').find('title'))}) 62 | return 63 | if (await D.find_one()) is None: 64 | await D.insert_one( 65 | { 66 | '_id': str(dd.find('item').find('title')), 67 | 'guid': str(dd.find('item').find('guid')) 68 | } 69 | ) 70 | return 71 | if (await E.find_one()) is None: 72 | await E.insert_one( 73 | { 74 | '_id': str(de.find('item').find('title')), 75 | 'guid': str(de.find('item').find('guid')) 76 | } 77 | ) 78 | return 79 | msgslc = [] 80 | msgscr = [] 81 | msgssp = [] 82 | msgslch = [] 83 | msgsmh = [] 84 | lc = [] 85 | cr = [] 86 | sp = [] 87 | hd = [] 88 | mhd = [] 89 | lc_pin_data = [] 90 | mal_pin_data = [] 91 | 92 | #### LiveChart.me / airing #### 93 | try: 94 | clc = defaultdict(list) 95 | for i in da.findAll("item"): 96 | if (await A.find_one())['_id'] == str(i.find('title')): 97 | break 98 | lc.append( 99 | [ 100 | str(i.find('title')).split(' #'), 101 | re.sub(r'<.*?>(.*)<.*?>', r'\1', str(i.find('guid'))) 102 | ] 103 | ) 104 | if (await A.find_one())['guid'] == str(i.find('guid')): 105 | break 106 | for i in lc: 107 | if len(i[0])==2: 108 | clc[i[0][0]].append([i[0][1], i[1]]) 109 | else: 110 | text = f'{i[0][0]} just aired' 111 | msgslc.append([text, i[1]]) 112 | for i in list(clc.keys()): 113 | if len(clc[i])>1: 114 | aep = [clc[i][len(clc[i])-1][0], clc[i][0][0]] 115 | text = f'\nEpisode {min(aep)} - {max(aep)} of {i} just aired' 116 | else: 117 
| text = f'\nEpisode {clc[i][0][0]} of {i} just aired' 118 | msgslc.append([text, clc[i][0][1]]) 119 | except Exception: 120 | e = err() 121 | await clog("ANIBOT", "```"+e+"```", "RSS") 122 | ############################### 123 | 124 | 125 | #### CrunchyRoll.com #### 126 | try: 127 | clc = defaultdict(list) 128 | fk = [] 129 | for i in db.findAll('item'): 130 | if (await B.find_one())['_id'] == str(i.find('title')): 131 | break 132 | if not "Dub" in str(i.find('title')): 133 | cr.append( 134 | [ 135 | str(i.find('title')).split(' - '), 136 | re.sub(r'<.*?>(.*)<.*?>', r'\1', str(i.find('guid'))) 137 | ] 138 | ) 139 | if (await B.find_one())['guid'] == str(i.find('guid')): 140 | break 141 | for i in cr: 142 | if len(i[0])==3: 143 | clc[i[0][0]].append([i[0][1], i[0][2], i[1]]) 144 | elif len(i[0])==2: 145 | if 'Episode' in i[0][1]: 146 | clc[i[0][0]].append([i[0][1], i[1]]) 147 | else: 148 | msgscr.append([ 149 | f"""**New anime released on Crunchyroll** 150 | **Title:** {i[0][0]}""", 151 | i[1] 152 | ]) 153 | else: 154 | fk.append(i) 155 | for i in list(clc.keys()): 156 | hmm = [] 157 | for ii in clc[i]: 158 | try: 159 | hmm.append(int((ii[0].split())[1])) 160 | except ValueError: 161 | fk.append(clc[i]) 162 | try: 163 | aep = [min(hmm), max(hmm)] 164 | epnum = f"{aep[0]} - {aep[1]}" if aep[1]!=aep[0] else aep[0] 165 | msgscr.append([ 166 | f"""**New anime released on Crunchyroll** 167 | 168 | **Title:** {i} 169 | **Episode:** {epnum} 170 | {'**EP Title:** '+ii[1] if len(ii)==3 else ''}""", 171 | ii[1] if len(ii)!=3 else ii[2] 172 | ]) 173 | except Exception as e: 174 | fk.append(i) 175 | if len(fk)==0: 176 | for i in fk: 177 | await clog( 178 | "ANIBOT", 179 | "Missed crunchyroll update\nCheck out code", 180 | "MISSED_UPDATE", 181 | send_as_file=str(i) 182 | ) 183 | except Exception: 184 | e = err() 185 | await clog("ANIBOT", "```"+e+"```", "RSS") 186 | ######################### 187 | 188 | 189 | ##### Subsplease.org ##### 190 | try: 191 | ls = defaultdict(list) 
192 | for i in dc.findAll('item'): 193 | if (await C.find_one())['_id'] in str(i.find('title')): 194 | break 195 | text = re.sub( 196 | r'.*\[.+?\] (.+) (\(.+p\)) \[.+?\].*', 197 | r'\1__________\2', 198 | str(i.find('title')) 199 | ) 200 | link = re.sub( 201 | r'.*<.+?>(.+)<.+?>.*', 202 | r'\1', 203 | str(i.find('link')) 204 | ) 205 | sp.append([text, link]) 206 | for i in sp: 207 | hmm = i[0].split('__________') 208 | ls[hmm[0]].append([hmm[1].replace(')', '').replace('(', ''), i[1]]) 209 | updated = False 210 | for i in ls.keys(): 211 | if len(ls[i])==3: 212 | if not updated: 213 | await C.drop() 214 | await C.insert_one({'_id': i}) 215 | updated = True 216 | listlinks = "" 217 | for ii in ls[i]: 218 | listlinks += '\n__'+ii[0]+'__: [Link]('+ii[1]+')' 219 | msgssp.append( 220 | [ 221 | '**New anime uploaded on Subsplease**\n\n' 222 | +i 223 | +listlinks, 224 | 'https://nyaa.si/?q=' 225 | +re.sub( 226 | r' ', 227 | '%20', 228 | re.sub(r'(\().*?(\))', r'', i).strip() 229 | ) 230 | ] 231 | ) 232 | except Exception: 233 | e = err() 234 | await clog("ANIBOT", "```"+e+"```", "RSS") 235 | ########################## 236 | 237 | 238 | #### LiveChart.me / headlines #### 239 | try: 240 | for i in dd.findAll("item"): 241 | update = "" 242 | if (await D.find_one())['_id'] == str(i.find('title')): 243 | break 244 | elif (await D.find_one())['guid'] == str(i.find('guid')): 245 | update = "**[UPDATED]** " 246 | title = str(i.find('title')) 247 | guid = str(i.find('guid')) 248 | url = str(i.find('link')) 249 | enclosure = i.find('enclosure') 250 | if not None in [title, guid, url, enclosure]: 251 | hd.append([ 252 | update+re.sub(r'<.*?>(.*)<.*?>', r'\1', title), 253 | re.sub(r'<.*?>(.*)<.*?>', r'\1', guid), 254 | re.sub(r'<.*?>(.*)<.*?>', r'\1', url), 255 | str(i.find('enclosure').get('url')).split('?')[0] 256 | ]) 257 | else: 258 | await clog( 259 | "ANIBOT", 260 | "Missed headline\nCheck out code", 261 | "MISSED_UPDATE", 262 | send_as_file=str(i) 263 | ) 264 | if (await 
D.find_one())['guid'] == str(i.find('guid')): 265 | break 266 | for i in hd: 267 | msgslch.append([i[3], i[0], i[1], i[2]]) 268 | except Exception: 269 | e = err() 270 | await clog("ANIBOT", "```"+e+"```", "RSS") 271 | ################################## 272 | 273 | 274 | #### MyAnimeList / headlines #### 275 | try: 276 | for i in de.findAll("item"): 277 | update = "" 278 | if (await E.find_one())['_id'] == str(i.find('title')): 279 | break 280 | elif (await E.find_one())['guid'] == str(i.find('guid')): 281 | update = "**[UPDATED]** " 282 | title = str(i.find('title')) 283 | guid = str(i.find('guid')) 284 | description = str(i.find('description')) 285 | thumbnail = str(i.find('media:thumbnail')) 286 | if not None in [title, guid, description, thumbnail]: 287 | mhd.append([ 288 | re.sub(r'<.*?>(.*)<.*?>', r'\1', title), 289 | re.sub(r'<.*?>(.*)<.*?>', r'\1', description), 290 | re.sub(r'<.*?>(.*)<.*?>', r'\1', guid), 291 | re.sub(r'<.*?>(.*)<.*?>', r'\1', thumbnail) 292 | ]) 293 | else: 294 | await clog( 295 | "ANIBOT", 296 | f"Missed MAL headline\nCheck out code", 297 | "MISSED_UPDATE", 298 | send_as_file=str(i) 299 | ) 300 | if (await E.find_one())['guid'] == str(i.find('guid')): 301 | break 302 | for i in mhd: 303 | msgsmh.append([i[3], f"**{i[0]}**\n\n{i[1]}", i[2]]) 304 | except Exception: 305 | e = err() 306 | await clog("ANIBOT", "```"+e+"```", "RSS") 307 | ################################# 308 | 309 | 310 | print('Notifying Livachart.me airings!!!') 311 | if await AR_GRPS.find_one() is not None: 312 | for i in msgslc: 313 | async for id_ in AR_GRPS.find(): 314 | btn = InlineKeyboardMarkup([[ 315 | InlineKeyboardButton("More Info", url=i[1]) 316 | ]]) 317 | try: 318 | await anibot.send_message( 319 | id_['_id'], i[0], reply_markup=btn 320 | ) 321 | await asyncio.sleep(1.5) 322 | except Exception: 323 | e = err() 324 | await clog("ANIBOT", f"Group: {id_['_id']}\n\n```{e}```", "AIRING") 325 | if len(msgslc)!=0: 326 | await A.drop() 327 | await A.insert_one( 328 
| { 329 | '_id': str(da.find('item').find('title')), 330 | 'guid': str(da.find('item').find('guid')) 331 | } 332 | ) 333 | await asyncio.sleep(10) 334 | 335 | 336 | print('Notifying Crunchyroll releases!!!') 337 | if await CR_GRPS.find_one() is not None: 338 | for i in msgscr: 339 | async for id_ in CR_GRPS.find(): 340 | btn = InlineKeyboardMarkup([[ 341 | InlineKeyboardButton("More Info", url=i[1]) 342 | ]]) 343 | try: 344 | await anibot.send_message( 345 | id_['_id'], i[0], reply_markup=btn 346 | ) 347 | await asyncio.sleep(1.5) 348 | except Exception: 349 | e = err() 350 | await clog("ANIBOT", f"Group: {id_['_id']}\n\n```{e}```", "CRUNCHYROLL") 351 | if len(msgscr)!=0: 352 | await B.drop() 353 | await B.insert_one( 354 | { 355 | '_id': str(db.find('item').find('title')), 356 | 'guid': str(db.find('item').find('guid')) 357 | } 358 | ) 359 | await asyncio.sleep(10) 360 | 361 | 362 | print('Notifying Subsplease releases!!!') 363 | if await SP_GRPS.find_one() is not None: 364 | for i in msgssp: 365 | async for id_ in SP_GRPS.find(): 366 | btn = InlineKeyboardMarkup([[ 367 | InlineKeyboardButton("Download", url=i[1]) 368 | ]]) 369 | try: 370 | await anibot.send_message( 371 | id_['_id'], i[0], reply_markup=btn 372 | ) 373 | await asyncio.sleep(1.5) 374 | except Exception: 375 | e = err() 376 | await clog("ANIBOT", f"Group: {id_['_id']}\n\n```{e}```", "SUBSPLEASE") 377 | await asyncio.sleep(10) 378 | 379 | 380 | list_keys = ["_id", "pin", "unpin", "next_unpin", "last"] 381 | print('Notifying LiveChart.me Headlines!!!') 382 | if await HD_GRPS.find_one() is not None: 383 | for i in msgslch: 384 | async for id_ in HD_GRPS.find(): 385 | var_dict = {} 386 | btn = InlineKeyboardMarkup([[ 387 | InlineKeyboardButton("More Info", url=i[2]), 388 | InlineKeyboardButton("Source", url=i[3]), 389 | ]]) 390 | try: 391 | try: 392 | x = await anibot.send_photo( 393 | id_['_id'], 394 | i[0], 395 | caption=i[1]+'\n\n#LiveChart', 396 | reply_markup=btn 397 | ) 398 | except 
(WebpageMediaEmpty, WebpageCurlFailed): 399 | x = await anibot.send_photo( 400 | id_['_id'], 401 | failed_pic, 402 | caption=i[1]+'\n\n#LiveChart', 403 | reply_markup=btn 404 | ) 405 | await clog("ANIBOT", i[0], "HEADLINES LINK") 406 | for var in list_keys: 407 | try: 408 | var_dict[var] = id_[var] 409 | except KeyError: 410 | var_dict[var] = None 411 | var_dict["current"] = x.id 412 | lc_pin_data.append(var_dict) 413 | await asyncio.sleep(1.5) 414 | except Exception: 415 | e = err() 416 | await clog("ANIBOT", f"Group: {id_['_id']}\n\n```{e}```", "HEADLINES") 417 | if len(msgslch)!=0: 418 | await D.drop() 419 | await D.insert_one( 420 | { 421 | '_id': str(dd.find('item').find('title')), 422 | 'guid': str(dd.find('item').find('guid')) 423 | } 424 | ) 425 | 426 | 427 | print('Notifying MyAnimeList.net Headlines!!!') 428 | if await MAL_HD_GRPS.find_one() is not None: 429 | for i in msgsmh: 430 | async for id_ in MAL_HD_GRPS.find(): 431 | var_dict = {} 432 | btn = InlineKeyboardMarkup([[ 433 | InlineKeyboardButton("More Info", url=i[2]), 434 | ]]) 435 | try: 436 | try: 437 | x = await anibot.send_photo( 438 | id_['_id'], 439 | i[0], 440 | caption=i[1]+'\n\n#MyAnimeList', 441 | reply_markup=btn 442 | ) 443 | except (WebpageMediaEmpty, WebpageCurlFailed): 444 | x = await anibot.send_photo( 445 | id_['_id'], 446 | failed_pic, 447 | caption=i[1]+'\n\n#MyAnimeList', 448 | reply_markup=btn 449 | ) 450 | await clog("ANIBOT", i[0], "HEADLINES LINK") 451 | for var in list_keys: 452 | try: 453 | var_dict[var] = id_[var] 454 | except KeyError: 455 | var_dict[var] = None 456 | var_dict["current"] = x.id 457 | mal_pin_data.append(var_dict) 458 | await asyncio.sleep(1.5) 459 | except Exception: 460 | e = err() 461 | await clog("ANIBOT", f"Group: {id_['_id']}\n\n```{e}```", "HEADLINES") 462 | if len(msgsmh)!=0: 463 | await E.drop() 464 | await E.insert_one( 465 | { 466 | '_id': str(de.find('item').find('title')), 467 | 'guid': str(de.find('item').find('guid')) 468 | } 469 | ) 470 | 
471 | 472 | print("Handling Pins and Unpins!!!") 473 | lc_final_dict = [] 474 | lc_listed = [] 475 | for i in lc_pin_data: 476 | if i["_id"] in lc_listed: 477 | lc_final_dict[lc_listed.index(i["_id"])]["current"].append(i["current"]) 478 | else: 479 | lc_listed.append(i['_id']) 480 | lc_final_dict.append({"_id": i["_id"], "pin": i["pin"], "current": [i["current"]]}) 481 | mal_final_dict = [] 482 | mal_listed = [] 483 | for i in mal_pin_data: 484 | if i["_id"] in mal_listed: 485 | mal_final_dict[mal_listed.index(i["_id"])]["current"].append(i["current"]) 486 | else: 487 | mal_listed.append(i['_id']) 488 | mal_final_dict.append({"_id": i["_id"], "pin": i["pin"], "current": [i["current"]]}) 489 | for i in lc_final_dict: 490 | if i['pin'] not in ["OFF", None]: 491 | if i['unpin'] == 0: 492 | i['current'] = [i['current'].pop()] 493 | for mid in i['current']: 494 | try: 495 | pin_msg = await anibot.pin_chat_message(i['_id'], mid) 496 | await pin_msg.delete() 497 | except ChatAdminRequired: 498 | await anibot.send_message(i['_id'], admin_error_msg) 499 | except: 500 | e = err() 501 | await clog("ANIBOT", f"Group: {i['_id']}\n\n```{e}```", "UN_PIN") 502 | await asyncio.sleep(0.7) 503 | unpin_now = False 504 | if i['unpin']: 505 | if i['next_unpin']: 506 | if time.time() > i['next_unpin']: 507 | unpin_now = True 508 | else: 509 | unpin_now = True 510 | if unpin_now: 511 | await HD_GRPS.find_one_and_update({"_id": i['_id']}, {"$set": {"last": i['current']}}) 512 | for mid in i['last']: 513 | try: 514 | await anibot.unpin_chat_message(i['_id'], mid) 515 | except ChatAdminRequired: 516 | await anibot.send_message(i['_id'], admin_error_msg) 517 | except: 518 | e = err() 519 | await clog("ANIBOT", f"Group: {i['_id']}\n\n```{e}```", "UN_PIN") 520 | elif (len(lc_final_dict) != 0) and (i['unpin'] not in [None, 0]): 521 | tbud = await HD_GRPS.find_one({"_id": i['_id']}) 522 | await HD_GRPS.find_one_and_update(tbud, {"$set": {"last": i['current']+tbud['last']}}) 523 | for i in 
mal_final_dict: 524 | if i['pin'] not in ["OFF", None]: 525 | if i['unpin'] == 0: 526 | i['current'] = [i['current'].pop()] 527 | for mid in i['current']: 528 | try: 529 | pin_msg = await anibot.pin_chat_message(i['_id'], mid) 530 | await pin_msg.delete() 531 | except ChatAdminRequired: 532 | await anibot.send_message(i['_id'], admin_error_msg) 533 | except: 534 | e = err() 535 | await clog("ANIBOT", f"Group: {i['_id']}\n\n```{e}```", "UN_PIN") 536 | await asyncio.sleep(0.7) 537 | unpin_now = False 538 | if i['unpin']: 539 | if i['next_unpin']: 540 | if time.time() > i['next_unpin']: 541 | unpin_now = True 542 | else: 543 | unpin_now = True 544 | if unpin_now: 545 | await MAL_HD_GRPS.find_one_and_update({"_id": i['_id']}, {"$set": {"last": i['current']}}) 546 | for mid in i['last']: 547 | try: 548 | await anibot.unpin_chat_message(i['_id'], mid) 549 | except ChatAdminRequired: 550 | await anibot.send_message(i['_id'], admin_error_msg) 551 | except: 552 | e = err() 553 | await clog("ANIBOT", f"Group: {i['_id']}\n\n```{e}```", "UN_PIN") 554 | elif (len(lc_final_dict) != 0) and (i['unpin'] not in [None, 0]): 555 | tbud = await HD_GRPS.find_one({"_id": i['_id']}) 556 | await MAL_HD_GRPS.find_one_and_update(tbud, {"$set": {"last": i['current']+tbud['last']}}) 557 | 558 | 559 | scheduler = AsyncIOScheduler() 560 | scheduler.add_job(livechart_parser, "interval", minutes=5) 561 | scheduler.start() -------------------------------------------------------------------------------- /anibot/utils/helper.py: -------------------------------------------------------------------------------- 1 | import json 2 | import requests 3 | import asyncio 4 | import os 5 | import shlex 6 | from traceback import format_exc as err 7 | from time import time 8 | from datetime import datetime 9 | from os.path import basename 10 | from typing import Tuple, Optional 11 | from uuid import uuid4 12 | from pyrogram.enums import ChatType 13 | from pyrogram.errors import FloodWait, MessageNotModified 14 | 
from pyrogram.types import (
    InlineKeyboardButton,
    CallbackQuery,
    Message,
    InlineKeyboardMarkup
)
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from .. import OWNER, DOWN_PATH, anibot, LOG_CHANNEL_ID, has_user
from ..utils.db import get_collection

if has_user:
    from .. import user

# Mongo collections shared by the decorators and helpers below.
AUTH_USERS = get_collection("AUTH_USERS")
IGNORE = get_collection("IGNORED_USERS")
PIC_DB = get_collection("PIC_DB")
GROUPS = get_collection("GROUPS")
CC = get_collection('CONNECTED_CHANNELS')
USER_JSON = {}  # user_id -> timestamp of the user's last interaction (flood control)
USER_WC = {}    # user_id -> consecutive "too fast" message counter

###### credits to @deleteduser420 on tg, code from USERGE-X ######


def rand_key():
    """Return a short (8-char) random key, used for temp file names."""
    return str(uuid4())[:8]


def control_user(func):
    """Decorator for message handlers.

    Registers new groups, drops messages from blacklisted users, applies a
    simple flood throttle (warn at 3 rapid messages, blacklist at 5), then
    calls ``func(client, message, parsed_dict)`` with errors logged to the
    log channel.
    """
    async def wrapper(_, message: Message):
        msg = json.loads(str(message))
        gid = msg['chat']['id']
        gidtype = msg['chat']['type']
        if gidtype in [ChatType.SUPERGROUP, ChatType.GROUP] and not (
            await GROUPS.find_one({"_id": gid})
        ):
            # First message seen from this group: record it and log.
            try:
                gidtitle = msg['chat']['username']
            except KeyError:
                gidtitle = msg['chat']['title']
            await GROUPS.insert_one({"_id": gid, "grp": gidtitle})
            await clog(
                "ANIBOT",
                f"Bot added to a new group\n\n{gidtitle}\nID: `{gid}`",
                "NEW_GROUP"
            )
        try:
            user = msg['from_user']['id']
        except KeyError:
            # Anonymous admins / channels: fall back to the chat id.
            user = msg['chat']['id']
        if await IGNORE.find_one({'_id': user}):
            return
        nut = time()
        if user not in OWNER:
            try:
                out = USER_JSON[user]
                if nut - out < 1.2:
                    USER_WC[user] += 1
                    if USER_WC[user] == 3:
                        await message.reply_text(
                            "Stop spamming bot!!!"
                            "\nElse you will be blacklisted",
                        )
                        await clog('ANIBOT', f'UserID: {user}', 'SPAM')
                    if USER_WC[user] == 5:
                        await IGNORE.insert_one({'_id': user})
                        await message.reply_text(
                            "You have been exempted from using this bot "
                            "now due to spamming 5 times consecutively!!!"
                            "\nTo remove restriction plead to "
                            "@hanabi_support"
                        )
                        await clog('ANIBOT', f'UserID: {user}', 'BAN')
                        return
                    await asyncio.sleep(USER_WC[user])
                else:
                    USER_WC[user] = 0
            except KeyError:
                pass
            USER_JSON[user] = nut
        try:
            await func(_, message, msg)
        except FloodWait as e:
            await asyncio.sleep(e.x + 5)
        except MessageNotModified:
            pass
        except Exception:
            e = err()
            reply_msg = None
            if func.__name__ == "trace_bek":
                reply_msg = message.reply_to_message
            try:
                await clog(
                    'ANIBOT',
                    'Message:\n'+msg['text']+'\n\n'+"```"+e+"```", 'COMMAND',
                    msg=message,
                    replied=reply_msg
                )
            except Exception:
                await clog('ANIBOT', e, 'FAILURE', msg=message)
    return wrapper


def check_user(func):
    """Decorator for callback-query handlers.

    Only the query owner (uid embedded as the last ``_`` field of the
    callback data), the bot owners, or — for channel-made queries — the
    user connected via /connect may press the buttons.  Applies a light
    flood throttle and logs handler errors.
    """
    async def wrapper(_, c_q: CallbackQuery):
        cq = json.loads(str(c_q))
        user = cq['from_user']['id']
        if await IGNORE.find_one({'_id': user}):
            return
        cqowner_is_ch = False
        cqowner = cq['data'].split("_").pop()
        if "-100" in cqowner:
            # Query was issued by a channel; validate against the
            # connected-channel mapping instead of a plain uid match.
            cqowner_is_ch = True
            ccdata = await CC.find_one({"_id": cqowner})
            if ccdata and ccdata['usr'] == user:
                user_valid = True
            else:
                user_valid = False
        if user in OWNER or user == int(cqowner):
            if user not in OWNER:
                nt = time()
                try:
                    ot = USER_JSON[user]
                    if nt - ot < 1.4:
                        await c_q.answer(
                            "Stop spamming bot!!!\n"
                            "Else you will be blacklisted",
                            show_alert=True
                        )
                        await clog('ANIBOT', f'UserID: {user}', 'SPAM')
                except KeyError:
                    pass
                USER_JSON[user] = nt
            try:
                await func(_, c_q, cq)
            except FloodWait as e:
                await asyncio.sleep(e.x + 5)
            except MessageNotModified:
                pass
            except Exception:
                e = err()
                reply_msg = None
                if func.__name__ == "tracemoe_btn":
                    reply_msg = c_q.message.reply_to_message
                try:
                    await clog(
                        'ANIBOT',
                        'Callback:\n'+cq['data']+'\n\n'+"```"+e+"```",
                        'CALLBACK',
                        cq=c_q,
                        replied=reply_msg
                    )
                except Exception:
                    await clog('ANIBOT', e, 'FAILURE', cq=c_q)
        else:
            if cqowner_is_ch:
                if user_valid:
                    try:
                        await func(_, c_q, cq)
                    except FloodWait as e:
                        await asyncio.sleep(e.x + 5)
                    except MessageNotModified:
                        pass
                    except Exception:
                        e = err()
                        reply_msg = None
                        if func.__name__ == "tracemoe_btn":
                            reply_msg = c_q.message.reply_to_message
                        try:
                            await clog(
                                'ANIBOT',
                                'Callback:\n'+cq['data']+'\n\n'+"```"+e+"```",
                                'CALLBACK_ANON',
                                cq=c_q,
                                replied=reply_msg
                            )
                        except Exception:
                            await clog('ANIBOT', e, 'FAILURE', cq=c_q)
                else:
                    await c_q.answer(
                        "No one can click buttons on queries made by "
                        "channels unless connected with /connect!!!",
                        show_alert=True,
                    )
            else:
                await c_q.answer(
                    "Not your query!!!",
                    show_alert=True,
                )
    return wrapper


async def media_to_image(
    client: anibot, message: Message, x: Message, replied: Message
):
    """Download the replied media and normalize it to a still image.

    Supports photos, stickers (.webp and animated .tgs), GIFs and videos.
    Returns the local image path, or None-equivalent early exit after
    editing *x* with an error notice.
    """
    if not (
        replied.photo
        or replied.sticker
        or replied.animation
        or replied.video
    ):
        await x.edit_text("Media Type Is Invalid !")
        await asyncio.sleep(5)
        await x.delete()
        return
    media = (
        replied.photo
        or replied.sticker
        or replied.animation
        or replied.video
    )
    if not os.path.isdir(DOWN_PATH):
        os.makedirs(DOWN_PATH)
    dls = await client.download_media(
        media,
        file_name=DOWN_PATH + rand_key(),
    )
    dls_loc = os.path.join(DOWN_PATH, os.path.basename(dls))
    if replied.sticker and replied.sticker.file_name.endswith(".tgs"):
        # Animated sticker: render frame 0 to PNG via lottie.
        png_file = os.path.join(DOWN_PATH, f"{rand_key()}.png")
        cmd = (
            f"lottie_convert.py --frame 0 -if lottie "
            + f"-of png {dls_loc} {png_file}"
        )
        stdout, stderr = (await runcmd(cmd))[:2]
        os.remove(dls_loc)
        if not os.path.lexists(png_file):
            await x.edit_text(
                "This sticker is Gey, Task Failed Successfully ≧ω≦"
            )
            await asyncio.sleep(5)
            await x.delete()
            raise Exception(stdout + stderr)
        dls_loc = png_file
    elif replied.sticker and replied.sticker.file_name.endswith(".webp"):
        stkr_file = os.path.join(DOWN_PATH, f"{rand_key()}.png")
        os.rename(dls_loc, stkr_file)
        if not os.path.lexists(stkr_file):
            await x.edit_text("```Sticker not found...```")
            await asyncio.sleep(5)
            await x.delete()
            return
        dls_loc = stkr_file
    elif replied.animation or replied.video:
        # Grab the first frame with ffmpeg.
        await x.edit_text("`Converting Media To Image ...`")
        jpg_file = os.path.join(DOWN_PATH, f"{rand_key()}.jpg")
        await take_screen_shot(dls_loc, 0, jpg_file)
        os.remove(dls_loc)
        if not os.path.lexists(jpg_file):
            await x.edit_text(
                "This Gif is Gey (。ì _ í。), Task Failed Successfully !"
            )
            await asyncio.sleep(5)
            await x.delete()
            return
        dls_loc = jpg_file
    return dls_loc


async def runcmd(cmd: str) -> Tuple[str, str, int, int]:
    """ run command in terminal """
    args = shlex.split(cmd)
    process = await asyncio.create_subprocess_exec(
        *args,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE
    )
    stdout, stderr = await process.communicate()
    return (
        stdout.decode("utf-8", "replace").strip(),
        stderr.decode("utf-8", "replace").strip(),
        process.returncode,
        process.pid,
    )


async def take_screen_shot(
    video_file: str, duration: int, path: str = ""
) -> Optional[str]:
    """ take a screenshot """
    # NOTE(review): print() does not %-interpolate; this emits the raw
    # format string plus args, which matches the original behavior.
    print(
        "[[[Extracting a frame from %s ||| Video duration => %s]]]",
        video_file,
        duration,
    )
    thumb_image_path = path or os.path.join(
        DOWN_PATH, f"{basename(video_file)}.jpg"
    )
    command = (
        f"ffmpeg -ss {duration} "
        + f'-i "{video_file}" -vframes 1 "{thumb_image_path}"'
    )
    err = (await runcmd(command))[1]
    if err:
        print(err)
    return thumb_image_path if os.path.exists(thumb_image_path) else None


##################################################################

async def get_user_from_channel(cid):
    """Return the user id connected to channel *cid*, or None."""
    try:
        k = (await CC.find_one({"_id": str(cid)}))['usr']
        return k
    except TypeError:
        # find_one returned None -> subscript raised TypeError.
        return None


async def return_json_senpai(
    query: str,
    vars_: dict,
    auth: bool = False,
    user: int = None
):
    """POST a GraphQL query to AniList and return the decoded JSON.

    When *auth* is True the stored OAuth token of *user* is attached.
    """
    url = "https://graphql.anilist.co"
    headers = None
    if auth:
        headers = {
            'Authorization': (
                'Bearer '
                + str((await AUTH_USERS.find_one({"id": int(user)}))['token'])
            ),
            'Content-Type': 'application/json',
            'Accept': 'application/json',
        }
    return requests.post(
        url,
        json={"query": query, "variables": vars_},
        headers=headers
    ).json()


def cflag(country):
    """Return the flag emoji for an AniList country-of-origin code."""
    if country == "JP":
        return "\U0001F1EF\U0001F1F5"
    if country == "CN":
        return "\U0001F1E8\U0001F1F3"
    if country == "KR":
        return "\U0001F1F0\U0001F1F7"
    if country == "TW":
        return "\U0001F1F9\U0001F1FC"


def pos_no(no):
    """Return the English ordinal suffix ('st'/'nd'/'rd'/'th') for *no*."""
    ep_ = list(str(no))
    x = ep_.pop()
    # 11th, 12th, 13th, 111th ... all take 'th'.
    if ep_ != [] and ep_.pop() == '1':
        return 'th'
    th = (
        "st" if x == "1"
        else "nd" if x == "2"
        else "rd" if x == "3"
        else "th"
    )
    return th


def make_it_rw(time_stamp):
    """Converting Time Stamp (milliseconds) to Readable Format"""
    seconds, milliseconds = divmod(int(time_stamp), 1000)
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    tmp = (
        ((str(days) + " Days, ") if days else "")
        + ((str(hours) + " Hours, ") if hours else "")
        + ((str(minutes) + " Minutes, ") if minutes else "")
        + ((str(seconds) + " Seconds, ") if seconds else "")
        + ((str(milliseconds) + " ms, ") if milliseconds else "")
    )
    return tmp[:-2]


async def clog(
    name: str,
    text: str,
    tag: str,
    msg: Message = None,
    cq: CallbackQuery = None,
    replied: Message = None,
    file: str = None,
    send_as_file: str = None
):
    """Send a tagged log entry to the log channel.

    Optionally attaches the raw message/callback dump, any replied media,
    an existing file path, or arbitrary text shipped as a document.
    """
    log = f"#{name.upper()} #{tag.upper()}\n\n{text}"
    data = ""
    if msg:
        data += str(msg)
        data += "\n\n\n\n"
    if cq:
        data += str(cq)
        data += "\n\n\n\n"
    await anibot.send_message(chat_id=LOG_CHANNEL_ID, text=log)
    if msg or cq:
        # FIX: mode "w" instead of "x" — a stale file left behind by an
        # earlier failed run would make "x" raise FileExistsError.
        with open("query_data.txt", "w") as output:
            output.write(data)
        await anibot.send_document(LOG_CHANNEL_ID, "query_data.txt")
        os.remove("query_data.txt")
    if replied:
        media = (
            replied.photo
            or replied.sticker
            or replied.animation
            or replied.video
        )
        media_path = await anibot.download_media(media)
        await anibot.send_document(LOG_CHANNEL_ID, media_path)
    if file:
        await anibot.send_document(LOG_CHANNEL_ID, file)
    if send_as_file:
        # FIX: original called text_file.write() with no argument, which
        # raised TypeError and lost the payload; write what was passed in.
        with open("dataInQuestion.txt", "w") as text_file:
            text_file.write(send_as_file)
        await anibot.send_document(LOG_CHANNEL_ID, "dataInQuestion.txt")
        os.remove("dataInQuestion.txt")


def get_btns(
    media,
    user: int,
    result: list,
    lsqry: str = None,
    lspage: int = None,
    auth: bool = False,
    sfw: str = "False"
):
    """Build the inline keyboard for a query result.

    *media* selects the layout (ANIME / CHARACTER / SCHEDULED / MANGA /
    AIRING); *result* carries ids and pagination flags produced by the
    data parser; *lsqry*/*lspage* thread the search state through the
    callback data; *auth* adds favourite/list buttons; *sfw* suppresses
    most buttons in SFW groups.
    """
    buttons = []
    qry = f"_{lsqry}" if lsqry is not None else ""
    pg = f"_{lspage}" if lspage is not None else ""
    if media == "ANIME" and sfw == "False":
        buttons.append([
            InlineKeyboardButton(
                text="Characters",
                callback_data=(
                    f"char_{result[2][0]}_ANI"
                    + f"{qry}{pg}_{str(auth)}_1_{user}"
                )
            ),
            InlineKeyboardButton(
                text="Description",
                callback_data=(
                    f"desc_{result[2][0]}_ANI"
                    + f"{qry}{pg}_{str(auth)}_{user}"
                )
            ),
            InlineKeyboardButton(
                text="List Series",
                callback_data=(
                    f"ls_{result[2][0]}_ANI"
                    + f"{qry}{pg}_{str(auth)}_{user}"
                )
            ),
        ])
    if media == "CHARACTER":
        buttons.append([
            InlineKeyboardButton(
                "Description",
                callback_data=(
                    f"desc_{result[2][0]}_CHAR"
                    + f"{qry}{pg}_{str(auth)}_{user}"
                )
            )
        ])
        buttons.append([
            InlineKeyboardButton(
                "List Series",
                callback_data=f"lsc_{result[2][0]}{qry}{pg}_{str(auth)}_{user}"
            )
        ])
    if media == "SCHEDULED":
        # result[0] is the weekday index; clamp prev/next at the edges.
        if result[0] != 0 and result[0] != 6:
            buttons.append([
                InlineKeyboardButton(
                    str(day_(result[0]-1)),
                    callback_data=f"sched_{result[0]-1}_{user}"
                ),
                InlineKeyboardButton(
                    str(day_(result[0]+1)),
                    callback_data=f"sched_{result[0]+1}_{user}"
                )
            ])
        if result[0] == 0:
            buttons.append([
                InlineKeyboardButton(
                    str(day_(result[0]+1)),
                    callback_data=f"sched_{result[0]+1}_{user}"
                )
            ])
        if result[0] == 6:
            buttons.append([
                InlineKeyboardButton(
                    str(day_(result[0]-1)),
                    callback_data=f"sched_{result[0]-1}_{user}"
                )
            ])
    if media == "MANGA" and sfw == "False":
        buttons.append([
            InlineKeyboardButton("More Info", url=result[1][2])
        ])
    if media == "AIRING" and sfw == "False":
        buttons.append([
            InlineKeyboardButton("More Info", url=result[1][0])
        ])
    if auth is True and media != "SCHEDULED" and sfw == "False":
        auth_btns = get_auth_btns(
            media, user, result[2], lspage=lspage, lsqry=lsqry
        )
        buttons.append(auth_btns)
    if len(result) > 3:
        # result[3]/result[4] are prequel/sequel ids ("None" if absent).
        if result[3] == "None":
            if result[4] != "None":
                buttons.append([
                    InlineKeyboardButton(
                        text="Sequel",
                        callback_data=f"btn_{result[4]}_{str(auth)}_{user}"
                    )
                ])
        else:
            if result[4] != "None":
                buttons.append([
                    InlineKeyboardButton(
                        text="Prequel",
                        callback_data=f"btn_{result[3]}_{str(auth)}_{user}"
                    ),
                    InlineKeyboardButton(
                        text="Sequel",
                        callback_data=f"btn_{result[4]}_{str(auth)}_{user}"
                    ),
                ])
            else:
                buttons.append([
                    InlineKeyboardButton(
                        text="Prequel",
                        callback_data=f"btn_{result[3]}_{str(auth)}_{user}"
                    )
                ])
    if (lsqry is not None) and (len(result) != 1):
        # result[1][1] is the has-next-page flag from the API.
        if lspage == 1:
            if result[1][1] is True:
                buttons.append([
                    InlineKeyboardButton(
                        text="Next",
                        callback_data=(
                            f"page_{media}{qry}_{int(lspage)+1}_{str(auth)}_{user}"
                        )
                    )
                ])
            else:
                pass
        elif lspage != 1:
            if result[1][1] is False:
                buttons.append([
                    InlineKeyboardButton(
                        text="Prev",
                        callback_data=(
                            f"page_{media}{qry}_{int(lspage)-1}_{str(auth)}_{user}"
                        )
                    )
                ])
            else:
                buttons.append([
                    InlineKeyboardButton(
                        text="Prev",
                        callback_data=(
                            f"page_{media}{qry}_{int(lspage)-1}_{str(auth)}_{user}"
                        )
                    ),
                    InlineKeyboardButton(
                        text="Next",
                        callback_data=(
                            f"page_{media}{qry}_{int(lspage)+1}_{str(auth)}_{user}"
                        )
                    )
                ])
    return InlineKeyboardMarkup(buttons)


def get_auth_btns(media, user, data, lsqry: str = None, lspage: int = None):
    """Return the favourite / list-management button row for *media*."""
    btn = []
    qry = f"_{lsqry}" if lsqry is not None else ""
    pg = f"_{lspage}" if lspage is not None else ""
    if media == "CHARACTER":
        btn.append(
            InlineKeyboardButton(
                text=(
                    "Add to Favs" if data[1] is not True
                    else "Remove from Favs"
                ),
                callback_data=f"fav_{media}_{data[0]}{qry}{pg}_{user}"
            )
        )
    else:
        btn.append(
            InlineKeyboardButton(
                text=(
                    "Add to Favs" if data[3] is not True
                    else "Remove from Favs"
                ),
                callback_data=f"fav_{media}_{data[0]}{qry}{pg}_{user}"
            )
        )
        btn.append(InlineKeyboardButton(
            text="Add to List" if data[1] is False else "Update in List",
            callback_data=(
                f"lsadd_{media}_{data[0]}{qry}{pg}_{user}" if data[1] is False
                else f"lsupdt_{media}_{data[0]}_{data[2]}{qry}{pg}_{user}"
            )
        ))
    return btn


def day_(x: int):
    """Map a 0-based weekday index (0=Monday) to its English name."""
    if x == 0: return "Monday"
    if x == 1: return "Tuesday"
    if x == 2: return "Wednesday"
    if x == 3: return "Thursday"
    if x == 4: return "Friday"
    if x == 5: return "Saturday"
    if x == 6: return "Sunday"

641 | def season_(future: bool = False): 642 | k = datetime.now() 643 | m = k.month 644 | if future: 645 | m = m+3 646 | y = k.year 647 | if m > 12: 648 | y = y+1 649 | if m in [1, 2, 3] or m > 12: 650 | return 'WINTER', y 651 | if m in [4, 5, 6]: 652 | return 'SPRING', y 653 | if m in [7, 8, 9]: 654 | return 'SUMMER', y 655 | if m in [10, 11, 12]: 656 | return 'FALL', y 657 | 658 | 659 | #### Update Pics cache using @webpagebot #### 660 | m = datetime.now().month 661 | y = datetime.now().year 662 | ts = datetime(y, m, 1, 0, 0, 0, 0).timestamp() 663 | PIC_LS = [] 664 | async def update_pics_cache(): 665 | if not has_user: 666 | return 667 | k = await PIC_DB.find_one({'_id': 'month'}) 668 | if k is None: 669 | await PIC_DB.insert_one({'_id': 'month', 'm': m}) 670 | elif m != k['m']: 671 | await PIC_DB.drop() 672 | await PIC_DB.insert_one({'_id': 'month', 'm': m}) 673 | for link in PIC_LS: 674 | if (await PIC_DB.find_one({'_id': link})) is None: 675 | await PIC_DB.insert_one({'_id': link}) 676 | try: 677 | me = await user.send_photo("me", link+f"?a={ts}") 678 | msg = await user.send_photo("me", link) 679 | except ConnectionError: 680 | await asyncio.sleep(5) 681 | me = await user.send_photo("me", link+f"?a={ts}") 682 | msg = await user.send_photo("me", link) 683 | await asyncio.sleep(7) 684 | dls1 = await user.download_media( 685 | msg.photo, 686 | file_name=DOWN_PATH + link.split("/").pop()+'(1).png', 687 | ) 688 | dls2 = await user.download_media( 689 | me.photo, 690 | file_name=DOWN_PATH + link.split("/").pop()+'(2).png', 691 | ) 692 | await asyncio.sleep(10) 693 | with open(dls1, 'rb') as p1: 694 | b1 = p1.read() 695 | with open(dls2, 'rb') as p2: 696 | b2 = p2.read() 697 | await user.delete_messages("me", [me.id, msg.id]) 698 | if b1!=b2: 699 | try: 700 | await user.send_message("webpagebot", link) 701 | except ConnectionError: 702 | await asyncio.sleep(5) 703 | await user.send_message("webpagebot", link) 704 | else: 705 | continue 706 | 707 | 708 | async def 
remove_useless_elements(): 709 | for i in PIC_LS: 710 | if (await PIC_DB.find_one({'_id': i[0]})) is not None: 711 | PIC_LS.remove(i) 712 | else: 713 | continue 714 | 715 | 716 | j1 = AsyncIOScheduler() 717 | j1.add_job(update_pics_cache, "interval", minutes=60) 718 | j1.start() 719 | 720 | 721 | j2 = AsyncIOScheduler() 722 | j2.add_job(remove_useless_elements, "interval", minutes=3) 723 | j2.start() 724 | -------------------------------------------------------------------------------- /anibot/plugins/bot.py: -------------------------------------------------------------------------------- 1 | import io 2 | import sys 3 | import traceback 4 | import os 5 | import re 6 | import subprocess 7 | import asyncio 8 | import requests 9 | import tracemoepy 10 | from bson.objectid import ObjectId 11 | from bs4 import BeautifulSoup as bs 12 | from datetime import datetime 13 | from natsort import natsorted 14 | from pyrogram import filters, enums, Client 15 | from pyrogram.enums import ChatMemberStatus 16 | from pyrogram.types import ( 17 | Message, 18 | InlineKeyboardButton, 19 | InlineKeyboardMarkup, 20 | CallbackQuery 21 | ) 22 | from pyrogram.errors import ( 23 | ChannelInvalid as ci, 24 | ChannelPrivate as cp, 25 | PeerIdInvalid as pi, 26 | FloodWait as fw 27 | ) 28 | from .. 
import ( 29 | BOT_NAME, 30 | TRIGGERS as trg, 31 | OWNER, 32 | HELP_DICT, 33 | anibot, 34 | DOWN_PATH, 35 | LOG_CHANNEL_ID 36 | ) 37 | from ..utils.db import get_collection 38 | from ..utils.helper import ( 39 | AUTH_USERS, 40 | clog, 41 | check_user, 42 | control_user, 43 | get_btns, 44 | rand_key, 45 | return_json_senpai, 46 | runcmd, 47 | take_screen_shot, 48 | media_to_image, 49 | make_it_rw, 50 | IGNORE, 51 | USER_JSON, 52 | USER_WC 53 | ) 54 | from ..utils.data_parser import ( 55 | get_all_genres, 56 | get_all_tags, 57 | get_top_animes, 58 | get_user_activity, 59 | get_user_favourites, 60 | toggle_favourites, 61 | parse_filler, 62 | get_anime, 63 | get_airing, 64 | get_anilist, 65 | get_character, 66 | get_additional_info, 67 | get_manga, 68 | browse_, 69 | get_wo, 70 | get_wols, 71 | get_featured_in_lists, 72 | update_anilist, 73 | get_user, 74 | get_recommendations, 75 | get_scheduled, 76 | search_filler, 77 | ANIME_DB, 78 | AIR_QUERY, 79 | MANGA_DB, 80 | CHAR_DB, 81 | ANIME_QUERY, 82 | ACTIVITY_QUERY, 83 | ALLTOP_QUERY, 84 | ANILIST_MUTATION, 85 | ANILIST_MUTATION_DEL, 86 | ANILIST_MUTATION_UP, 87 | ANIME_MUTATION, 88 | BROWSE_QUERY, 89 | ANIME_TEMPLATE, 90 | CHA_INFO_QUERY, 91 | CHAR_MUTATION, 92 | CHARACTER_QUERY, 93 | DES_INFO_QUERY, 94 | DESC_INFO_QUERY, 95 | FAV_ANI_QUERY, 96 | GET_TAGS, 97 | FAV_CHAR_QUERY, 98 | FAV_MANGA_QUERY, 99 | GET_GENRES, 100 | ISADULT, 101 | LS_INFO_QUERY, 102 | MANGA_MUTATION, 103 | MANGA_QUERY, 104 | PAGE_QUERY, 105 | TOP_QUERY, 106 | REL_INFO_QUERY, 107 | TOPT_QUERY, 108 | USER_QRY, 109 | VIEWER_QRY, 110 | RECOMMENDTIONS_QUERY, 111 | ) 112 | from .anilist import auth_link_cmd, code_cmd, logout_cmd 113 | 114 | USERS = get_collection("USERS") 115 | GROUPS = get_collection("GROUPS") 116 | SFW_GROUPS = get_collection("SFW_GROUPS") 117 | DC = get_collection('DISABLED_CMDS') 118 | AG = get_collection('AIRING_GROUPS') 119 | CR_GRPS = get_collection('CRUNCHY_GROUPS') 120 | HD_GRPS = get_collection('HEADLINES_GROUPS') 121 | 
MAL_HD_GRPS = get_collection('MAL_HEADLINES_GROUPS')
SP_GRPS = get_collection('SUBSPLEASE_GROUPS')
CC = get_collection('CONNECTED_CHANNELS')
# Short aliases for the pyrogram member-status enum values used below.
CHAT_OWNER = ChatMemberStatus.OWNER
MEMBER = ChatMemberStatus.MEMBER
ADMINISTRATOR = ChatMemberStatus.ADMINISTRATOR

# Commands that group admins may enable/disable via /disable and /enable.
CMD = [
    'anime',
    'anilist',
    'character',
    'manga',
    'airing',
    'help',
    'schedule',
    'fillers',
    'top',
    'reverse',
    'watch',
    'start',
    'ping',
    'flex',
    'me',
    'activity',
    'user',
    'favourites',
    'gettags',
    'quote',
    'getgenres',
    'connect',
    'browse',
    'studio'
]


@anibot.on_message(
    ~filters.private & filters.command(
        ['disable', f'disable{BOT_NAME}', 'enable', f'enable{BOT_NAME}'],
        prefixes=trg
    )
)
@control_user
async def en_dis__able_cmd(client: Client, message: Message, mdata: dict):
    """Enable or disable bot commands in a group.

    Only group admins/owner, sudo users, or the group itself (anonymous
    admin) may toggle commands.  The disabled-command list is kept as a
    space-separated string in the DISABLED_CMDS collection.
    """
    cmd = mdata['text'].split(" ", 1)
    gid = mdata['chat']['id']
    try:
        user = mdata['from_user']['id']
    except KeyError:
        # Message sent on behalf of a chat (anonymous admin / channel).
        user = mdata['sender_chat']['id']
    if user in OWNER or (
        await anibot.get_chat_member(gid, user)
    ).status in [ADMINISTRATOR, CHAT_OWNER] or user == gid:
        if len(cmd) == 1:
            x = await message.reply_text(
                'No command specified to be disabled!!!'
            )
            await asyncio.sleep(5)
            await x.delete()
            return
        # cmd[0] is "enable"/"disable" (plus optional bot suffix).
        enable = 'enable' in cmd[0]
        if set(cmd[1].split()).issubset(CMD):
            find_gc = await DC.find_one({'_id': gid})
            if find_gc is None:
                if enable:
                    x = await message.reply_text('Command already enabled!!!')
                    await asyncio.sleep(5)
                    await x.delete()
                    return
                await DC.insert_one({'_id': gid, 'cmd_list': cmd[1]})
                x = await message.reply_text("Command disabled!!!")
                await asyncio.sleep(5)
                await x.delete()
                return
            else:
                ocls: str = find_gc['cmd_list']
                if set(cmd[1].split()).issubset(ocls.split()):
                    if enable:
                        if len(ocls.split()) == 1:
                            # Last disabled command re-enabled: drop the doc.
                            await DC.delete_one({'_id': gid})
                            x = await message.reply_text("Command enabled!!!")
                            await asyncio.sleep(5)
                            await x.delete()
                            return
                        ncls = ocls.split()
                        for i in cmd[1].split():
                            ncls.remove(i)
                        ncls = " ".join(ncls)
                    else:
                        x = await message.reply_text(
                            'Command already disabled!!!'
                        )
                        await asyncio.sleep(5)
                        await x.delete()
                        return
                else:
                    if enable:
                        x = await message.reply_text(
                            'Command already enabled!!!'
                        )
                        await asyncio.sleep(5)
                        await x.delete()
                        return
                    else:
                        # Merge old + new, de-duplicating while keeping
                        # order.  split()/join normalises any run of
                        # whitespace (the old replace(' ', ' ') dance).
                        lsncls = []
                        for i in (ocls + ' ' + cmd[1]).split():
                            if i not in lsncls:
                                lsncls.append(i)
                        ncls = " ".join(lsncls)
                await DC.update_one({'_id': gid}, {'$set': {'cmd_list': ncls}})
                x = await message.reply_text(
                    f"Command {'dis' if enable is False else 'en'}abled!!!"
                )
                await asyncio.sleep(5)
                await x.delete()
                return
        else:
            # At least one name is not a recognised toggleable command.
            await message.reply_text("Hee, is that a command?!")


@anibot.on_message(
    ~filters.private & filters.command(
        ['disabled', f'disabled{BOT_NAME}'],
        prefixes=trg
    )
)
@control_user
async def list_disabled(client: Client, message: Message, mdata: dict):
    """List the commands currently disabled in this group."""
    find_gc = await DC.find_one({'_id': mdata['chat']['id']})
    if find_gc is None:
        await message.reply_text("No commands disabled in this group!!!")
    else:
        lscmd = find_gc['cmd_list'].replace(" ", "\n")
        await message.reply_text(
            f"""List of commands disabled in **{mdata['chat']['title']}**

{lscmd}"""
        )


@anibot.on_message(
    filters.user(OWNER) & filters.command(
        ['dbcleanup', f'dbcleanup{BOT_NAME}'], prefixes=trg
    )
)
@control_user
async def db_cleanup(client: Client, message: Message, mdata: dict):
    """Owner-only: purge groups/users the bot can no longer reach.

    Walks GROUPS and AUTH_USERS, probing each id via the API; entries that
    raise ChannelInvalid/ChannelPrivate/PeerIdInvalid are deleted from all
    related collections.  Removed documents are echoed back (as a file if
    too long for one message).
    """
    count = 0
    entries = ""
    st = datetime.now()
    x = await message.reply_text("Starting database cleanup in 5 seconds")
    et = datetime.now()
    pt = (et - st).microseconds / 1000
    await asyncio.sleep(5)
    await x.edit_text("Checking 1st collection!!!")
    async for i in GROUPS.find():
        await asyncio.sleep(2)  # stay well under flood limits
        try:
            await client.get_chat(i['_id'])
        except (cp, ci, pi):
            count += 1
            entries += str(await GROUPS.find_one(i)) + '\n\n'
            await GROUPS.find_one_and_delete(i)
            await SFW_GROUPS.find_one_and_delete({'id': i['_id']})
            await DC.find_one_and_delete({'_id': i['_id']})
            await AG.find_one_and_delete({'_id': i['_id']})
            await HD_GRPS.find_one_and_delete({'_id': i['_id']})
            await SP_GRPS.find_one_and_delete({'_id': i['_id']})
            await CR_GRPS.find_one_and_delete({'_id': i['_id']})
        except fw as e:
            # Fixed: sleep for the wait time carried by the caught
            # FloodWait *instance* (pyrogram v2 exposes it as `.value`);
            # the old code read `fw.x` off the exception class, which is
            # not the server-mandated wait.
            await asyncio.sleep(e.value + 5)
    await asyncio.sleep(5)
    await x.edit_text("Checking 2nd collection!!!")
    async for i in AUTH_USERS.find():
        if i['id'] == 'pending':
            count += 1
            entries += str(await AUTH_USERS.find_one({'_id': i['_id']})) + '\n\n'
            await AUTH_USERS.find_one_and_delete({'_id': i['_id']})
    async for i in AUTH_USERS.find():
        await asyncio.sleep(2)
        try:
            await client.get_users(i['id'])
        except pi:
            count += 1
            entries += str(await AUTH_USERS.find_one({'id': i['id']})) + '\n\n'
            await AUTH_USERS.find_one_and_delete({'id': i['id']})
        except fw as e:
            await asyncio.sleep(e.value + 5)
    await asyncio.sleep(5)

    nosgrps = await GROUPS.estimated_document_count()
    nossgrps = await SFW_GROUPS.estimated_document_count()
    nosauus = await AUTH_USERS.estimated_document_count()
    if count == 0:
        msg = f"""Database seems to be accurate, no changes to be made!!!

**Groups:** `{nosgrps}`
**SFW Groups:** `{nossgrps}`
**Authorised Users:** `{nosauus}`
**Ping:** `{pt}`
"""
    else:
        msg = f"""{count} entries removed from database!!!

**New Data:**
__Groups:__ `{nosgrps}`
__SFW Groups:__ `{nossgrps}`
__Authorised Users:__ `{nosauus}`

**Ping:** `{pt}`
"""
    if len(entries) > 4095:
        with open('entries.txt', "w+") as file:
            file.write(entries)
        return await x.reply_document('entries.txt')
    if entries:
        # Fixed: only echo the removed documents when there are any --
        # reply_text("") on a clean database would raise.
        await x.reply_text(entries)
    await x.edit_text(msg)


@anibot.on_message(
    filters.command(['start', f'start{BOT_NAME}'], prefixes=trg)
)
@control_user
async def start_(client: Client, message: Message, mdata: dict):
    """Handle /start in PM and groups, including deep-link payloads.

    PM deep links: "help", "auth", "logout", "des_*" (extra info),
    "anime_*", "anirec_*" and "code_*" (AniList OAuth).  New users and
    groups are recorded and logged.
    """
    gid = mdata['chat']['id']
    try:
        user = mdata['from_user']['id']
    except KeyError:
        user = 0  # anonymous sender; placeholder id
    find_gc = await DC.find_one({'_id': gid})
    if find_gc is not None and 'start' in find_gc['cmd_list'].split():
        return
    bot = await client.get_me()
    if gid == user:
        if not (user in OWNER) and not (await USERS.find_one({"id": user})):
            try:
                usertitle = mdata['from_user']['username']
            except KeyError:
                usertitle = mdata['from_user']['first_name']
            await USERS.insert_one({"id": user, "user": usertitle})
            await clog(
                "ANIBOT",
                f"""New User started bot

{usertitle}
ID: `{user}`""",
                "NEW_USER"
            )
        if len(mdata['text'].split()) != 1:
            deep_cmd = mdata['text'].split()[1]
            if deep_cmd == "help":
                await help_(client, message)
                return
            if deep_cmd == "auth":
                await auth_link_cmd(client, message)
                return
            if deep_cmd == "logout":
                await logout_cmd(client, message)
                return
            deep_cmd_list = deep_cmd.split("_")
            if deep_cmd_list[0] == "des":
                try:
                    req = deep_cmd_list[3]
                except IndexError:
                    req = "desc"
                pic, result = await get_additional_info(
                    deep_cmd_list[2],
                    deep_cmd_list[1],
                    req
                )
                await client.send_photo(user, pic)
                try:
                    await client.send_message(
                        user,
                        result.replace("~!", "").replace("!~", "")
                    )
                except (TypeError, AttributeError):
                    # result may be None when AniList has no description
                    await client.send_message(
                        user,
                        "No description available!!!"
                    )
                return
            if deep_cmd_list[0] == "anime":
                auth = False
                if (await AUTH_USERS.find_one({"id": user})):
                    auth = True
                result = await get_anime(
                    {"id": int(deep_cmd_list[1])},
                    user=user,
                    auth=auth
                )
                pic, msg = result[0], result[1]
                buttons = get_btns(
                    "ANIME",
                    result=result,
                    user=user,
                    auth=auth
                )
                await client.send_photo(
                    user,
                    pic,
                    caption=msg,
                    reply_markup=buttons
                )
                return
            if deep_cmd_list[0] == "anirec":
                result = await get_recommendations(deep_cmd_list[1])
                await client.send_message(
                    user, result, disable_web_page_preview=True
                )
                return
            if deep_cmd.split("_", 1)[0] == "code":
                if not os.environ.get('ANILIST_REDIRECT_URL'):
                    return
                qry = deep_cmd.split("_", 1)[1]
                k = await AUTH_USERS.find_one({'_id': ObjectId(qry)})
                await code_cmd(k['code'], message)
                return
        await client.send_message(
            gid,
            text=(
                f"Kon'nichiwa!!!\n"
                + f"I'm {bot.first_name} bot and I can help you get info on "
                + f"Animes, Mangas, Characters, Airings, Schedules, Watch "
                + f"Orders of Animes, etc."
                + f"\n\nFor more info send /help in here."
                + f"If you wish to use me in a group start me by "
                + f"/start{BOT_NAME} command after adding me in the group.")
        )
    else:
        if not await (GROUPS.find_one({"_id": gid})):
            try:
                gidtitle = mdata['chat']['username']
            except KeyError:
                gidtitle = mdata['chat']['title']
            await GROUPS.insert_one({"_id": gid, "grp": gidtitle})
            await clog(
                "ANIBOT",
                f"Bot added to a new group\n\n{gidtitle}\nID: `{gid}`",
                "NEW_GROUP"
            )
        await client.send_message(gid, text="Bot seems online!!!")


@anibot.on_message(
    filters.command(['help', f'help{BOT_NAME}'], prefixes=trg)
)
@control_user
async def help_(client: Client, message: Message, mdata: dict):
    """Send the interactive help menu (button grid in PM, deep-link button
    in groups; owners also get the sudo-command list)."""
    gid = mdata['chat']['id']
    find_gc = await DC.find_one({'_id': gid})
    if find_gc is not None and 'help' in find_gc['cmd_list'].split():
        return
    bot_us = (await client.get_me()).username
    try:
        id_ = mdata['from_user']['id']
    except KeyError:
        await client.send_message(
            gid,
            text="Click below button for bot help",
            reply_markup=InlineKeyboardMarkup([[InlineKeyboardButton("Help", url=f"https://t.me/{bot_us}/?start=help")]])
        )
        return
    buttons = help_btns(id_)
    text = '''This is a small guide on how to use me

**Basic Commands:**
Use /ping or !ping cmd to check if bot is online
Use /start or !start cmd to start bot in group or pm
Use /help or !help cmd to get interactive help on available bot cmds
Use /feedback cmd to contact bot owner'''
    if id_ in OWNER:
        await client.send_message(gid, text=text, reply_markup=buttons)
        await client.send_message(
            gid,
            text="""Owners / Sudos can also use

- __/term__ `to run a cmd in terminal`
- __/eval__ `to run a python code like `__/eval print('UwU')__` `
- __/stats__ `to get stats on bot like no. of users, grps and authorised users`
- __/dbcleanup__ `to remove obsolete/useless entries in database`

Apart from above shown cmds"""
        )
    else:
        if gid == id_:
            await client.send_message(gid, text=text, reply_markup=buttons)
        else:
            await client.send_message(
                gid,
                text="Click below button for bot help",
                reply_markup=InlineKeyboardMarkup(
                    [[
                        InlineKeyboardButton(
                            "Help",
                            url=f"https://t.me/{bot_us}/?start=help"
                        )
                    ]]
                )
            )


@anibot.on_message(
    filters.command(
        [
            'connect',
            f'connect{BOT_NAME}',
            'disconnect',
            f'disconnect{BOT_NAME}'
        ],
        prefixes=trg
    )
)
@control_user
async def connect_(client: Client, message: Message, mdata: dict):
    """Connect or disconnect a channel.

    In PM the user supplies a -100… channel id; in a channel the chat
    owner may connect/disconnect the channel itself.
    """
    gid = mdata['chat']['id']
    find_gc = await DC.find_one({'_id': gid})
    if find_gc is not None and 'connect' in find_gc['cmd_list'].split():
        return
    bot_us = (await client.get_me()).username
    try:
        id_ = mdata['from_user']['id']
    except KeyError:
        await client.send_message(
            gid,
            text="Go to bot pm to connect channel",
            reply_markup=InlineKeyboardMarkup(
                [[
                    InlineKeyboardButton(
                        "Bot PM", url=f"https://t.me/{bot_us}"
                    )
                ]]
            )
        )
        return
    if gid == id_:
        data = (mdata['text'].split())
        try:
            channel = data[1]
        except IndexError:
            # narrowed from a bare `except:` -- only a missing second
            # argument can fail here
            return await client.send_message(
                gid,
                text=(
                    "Please provide the channel id you wish to connect!!!"
                    + "\nExample: /connect -100xxxxxxxxx"
                )
            )
        if not "-100" in channel:
            return await client.send_message(
                gid, text="Please enter the full channel ID!!!"
            )
        if data[0] == 'connect':
            if (await CC.find_one({"_id": str(channel)})):
                await client.send_message(
                    gid,
                    text=(
                        "Channel already connected"
                        + "\nIf someone else has access to it who doesn't own "
                        + "the channel, contact @hanabi_support"
                    )
                )
                return
            await CC.insert_one({"_id": str(channel), "usr": id_})
            await client.send_message(
                gid, text="Successfully connected the channel"
            )
        else:
            k = await CC.find_one({"_id": str(channel)})
            if k and k['usr'] == id_:
                await CC.find_one_and_delete({"_id": str(channel)})
                await client.send_message(
                    gid, text="Successfully disconnected the channel"
                )
            else:
                await client.send_message(gid, text="Channel not connected")
    else:
        k = (await client.get_chat_member(gid, id_)).status
        if k == CHAT_OWNER:
            # Fixed: "disconnect" contains the substring "connect", so the
            # old `'connect' in mdata['text']` test treated /disconnect as
            # a connect request.
            if 'disconnect' not in mdata['text']:
                await CC.insert_one({"_id": str(message.chat.id), "usr": id_})
                await client.send_message(
                    gid, text="Successfully connected the channel"
                )
            else:
                await CC.find_one_and_delete({"_id": str(message.chat.id)})
                await client.send_message(
                    gid, text="Successfully disconnected the channel"
                )
            return
        await client.send_message(
            gid,
            text="Click below button for bot help",
            reply_markup=InlineKeyboardMarkup(
                [[
                    InlineKeyboardButton(
                        "Bot PM", url=f"https://t.me/{bot_us}"
                    )
                ]]
            )
        )


@anibot.on_callback_query(filters.regex(pattern=r"help_(.*)"))
@check_user
async def help_dicc_parser(client: Client, cq: CallbackQuery, cdata: dict):
    """Show the help text for one topic, with a Back button to the list."""
    await cq.answer()
    kek, qry, user = cdata['data'].split("_")
    text = HELP_DICT[qry]
    btn = InlineKeyboardMarkup(
        [[InlineKeyboardButton("Back", callback_data=f"hlplist_{user}")]]
    )
    await cq.edit_message_text(text=text, reply_markup=btn)
635 | @anibot.on_callback_query(filters.regex(pattern=r"hlplist_(.*)")) 636 | @check_user 637 | async def help_list_parser(client: Client, cq: CallbackQuery, cdata: dict): 638 | await cq.answer() 639 | user = cdata['data'].split("_")[1] 640 | buttons = help_btns(user) 641 | text='''This is a small guide on how to use me 642 | 643 | **Basic Commands:** 644 | Use /ping or !ping cmd to check if bot is online 645 | Use /start or !start cmd to start bot in group or pm 646 | Use /help or !help cmd to get interactive help on available bot cmds 647 | Use /feedback cmd to contact bot owner''' 648 | await cq.edit_message_text(text=text, reply_markup=buttons) 649 | 650 | 651 | def help_btns(user): 652 | but_rc = [] 653 | buttons = [] 654 | hd_ = list(natsorted(HELP_DICT.keys())) 655 | for i in hd_: 656 | but_rc.append( 657 | InlineKeyboardButton(i, callback_data=f"help_{i}_{user}") 658 | ) 659 | if len(but_rc)==2: 660 | buttons.append(but_rc) 661 | but_rc = [] 662 | if len(but_rc)!=0: 663 | buttons.append(but_rc) 664 | return InlineKeyboardMarkup(buttons) 665 | 666 | 667 | @anibot.on_message( 668 | filters.user(OWNER) & filters.command( 669 | ['stats', f'stats{BOT_NAME}'], 670 | prefixes=trg 671 | ) 672 | ) 673 | @control_user 674 | async def stats_(client: Client, message: Message, mdata: dict): 675 | st = datetime.now() 676 | x = await message.reply_text("Collecting Stats!!!") 677 | et = datetime.now() 678 | pt = (et-st).microseconds / 1000 679 | nosus = await USERS.estimated_document_count() 680 | nosauus = await AUTH_USERS.estimated_document_count() 681 | nosgrps = await GROUPS.estimated_document_count() 682 | nossgrps = await SFW_GROUPS.estimated_document_count() 683 | noshdgrps = await HD_GRPS.estimated_document_count() 684 | nosmhdgrps = await MAL_HD_GRPS.estimated_document_count() 685 | s = await SP_GRPS.estimated_document_count() 686 | a = await AG.estimated_document_count() 687 | c = await CR_GRPS.estimated_document_count() 688 | kk = 
requests.get("https://api.github.com/repos/lostb053/anibot").json() 689 | await x.edit_text(f""" 690 | Stats:- 691 | 692 | **Users:** {nosus} 693 | **Authorised Users:** {nosauus} 694 | **Groups:** {nosgrps} 695 | **Airing Groups:** {a} 696 | **Crunchyroll Groups:** {c} 697 | **Subsplease Groups:** {s} 698 | **LC Headline Groups:** {noshdgrps} 699 | **MAL Headline Groups:** {nosmhdgrps} 700 | **SFW Groups:** {nossgrps} 701 | **Stargazers:** {kk.get("stargazers_count")} 702 | **Forks:** {kk.get("forks")} 703 | **Ping:** `{pt} ms` 704 | """ 705 | ) 706 | 707 | 708 | @anibot.on_message(filters.command(['ping', f'ping{BOT_NAME}'], prefixes=trg)) 709 | @control_user 710 | async def pong_(client: Client, message: Message, mdata: dict): 711 | find_gc = await DC.find_one({'_id': mdata['chat']['id']}) 712 | if find_gc is not None and 'ping' in find_gc['cmd_list'].split(): 713 | return 714 | st = datetime.now() 715 | x = await message.reply_text("Ping...") 716 | et = datetime.now() 717 | pt = (et-st).microseconds / 1000 718 | await x.edit_text(f"__Pong!!!__\n`{pt} ms`") 719 | 720 | 721 | @anibot.on_message( 722 | filters.private & filters.command( 723 | ['feedback', f'feedback{BOT_NAME}'], prefixes=trg 724 | ) 725 | ) 726 | @control_user 727 | async def feed_(client: Client, message: Message, mdata: dict): 728 | owner = (await client.get_users(OWNER[0])).username 729 | await client.send_message( 730 | mdata['chat']['id'], 731 | f"For issues or queries please contact " 732 | +f"@{owner} or join @hanabi_support" 733 | ) 734 | 735 | ###### credits to @NotThatMF on tg since he gave me the code for it ###### 736 | 737 | 738 | @anibot.on_message( 739 | filters.command( 740 | ['eval', f'eval{BOT_NAME}'], prefixes=trg 741 | ) & filters.user(OWNER) 742 | ) 743 | @control_user 744 | async def eval_(client: Client, message: Message, mdata: dict): 745 | status_message = await message.reply_text("Processing ...") 746 | cmd = message.text.split(" ", maxsplit=1)[1] 747 | reply_to_ = 
message 748 | if message.reply_to_message: 749 | reply_to_ = message.reply_to_message 750 | old_stderr = sys.stderr 751 | old_stdout = sys.stdout 752 | redirected_output = sys.stdout = io.StringIO() 753 | redirected_error = sys.stderr = io.StringIO() 754 | stdout, stderr, exc = None, None, None 755 | try: 756 | await aexec(cmd, client, message) 757 | except Exception: 758 | exc = traceback.format_exc() 759 | stdout = redirected_output.getvalue() 760 | stderr = redirected_error.getvalue() 761 | sys.stdout = old_stdout 762 | sys.stderr = old_stderr 763 | evaluation = "" 764 | if exc: 765 | evaluation = exc 766 | elif stderr: 767 | evaluation = stderr 768 | elif stdout: 769 | evaluation = stdout 770 | else: 771 | evaluation = "Success" 772 | final_output = "EVAL: " 773 | final_output += f"{cmd}\n\n" 774 | final_output += "OUTPUT:\n" 775 | final_output += f"{evaluation.strip()} \n" 776 | if len(final_output) > 4096: 777 | with io.BytesIO(str.encode(final_output)) as out_file: 778 | out_file.name = "eval.txt" 779 | await reply_to_.reply_document( 780 | document=out_file, 781 | caption=cmd[:1000], 782 | disable_notification=True 783 | ) 784 | else: 785 | await reply_to_.reply_text(final_output) 786 | await status_message.delete() 787 | 788 | 789 | async def aexec(code, client, message): 790 | exec( 791 | "async def __aexec(client, message): " 792 | + "".join(f"\n {l_}" for l_ in code.split("\n")) 793 | ) 794 | return await locals()["__aexec"](client, message) 795 | 796 | 797 | @anibot.on_message( 798 | filters.user(OWNER) & filters.command( 799 | ["term", f"term{BOT_NAME}"], prefixes=trg 800 | ) 801 | ) 802 | @control_user 803 | async def terminal(client: Client, message: Message, mdata: dict): 804 | if len(message.text.split()) == 1: 805 | await message.reply_text("Usage: `/term echo owo`") 806 | return 807 | args = message.text.split(None, 1) 808 | teks = args[1] 809 | if "\n" in teks: 810 | code = teks.split("\n") 811 | output = "" 812 | for x in code: 813 | shell = 
re.split(""" (?=(?:[^'"]|'[^']*'|"[^"]*")*$)""", x) 814 | try: 815 | process = subprocess.Popen( 816 | shell, stdout=subprocess.PIPE, stderr=subprocess.PIPE 817 | ) 818 | except Exception as err: 819 | print(err) 820 | await message.reply_text( 821 | """ 822 | **Error:** 823 | ```{}``` 824 | """.format( 825 | err 826 | ), 827 | parse_mode=enums.ParseMode.MARKDOWN, 828 | ) 829 | output += "**{}**\n".format(code) 830 | output += process.stdout.read()[:-1].decode("utf-8") 831 | output += "\n" 832 | else: 833 | shell = re.split(""" (?=(?:[^'"]|'[^']*'|"[^"]*")*$)""", teks) 834 | for a in range(len(shell)): 835 | shell[a] = shell[a].replace('"', "") 836 | try: 837 | process = subprocess.Popen( 838 | shell, stdout=subprocess.PIPE, stderr=subprocess.PIPE 839 | ) 840 | except Exception as err: 841 | exc_type, exc_obj, exc_tb = sys.exc_info() 842 | errors = traceback.format_exception( 843 | etype=exc_type, value=exc_obj, tb=exc_tb 844 | ) 845 | await message.reply_text( 846 | """**Error:**\n```{}```""".format("".join(errors)), 847 | parse_mode=enums.ParseMode.MARKDOWN 848 | ) 849 | return 850 | output = process.stdout.read()[:-1].decode("utf-8") 851 | if str(output) == "\n": 852 | output = None 853 | if output: 854 | if len(output) > 4096: 855 | filename = "output.txt" 856 | with open(filename, "w+") as file: 857 | file.write(output) 858 | await client.send_document( 859 | message.chat.id, 860 | filename, 861 | reply_to_message_id=message.id, 862 | caption="`Output file`", 863 | ) 864 | os.remove(filename) 865 | return 866 | await message.reply_text( 867 | f"**Output:**\n```{output}```", 868 | parse_mode=enums.ParseMode.MARKDOWN 869 | ) 870 | else: 871 | await message.reply_text("**Output:**\n`No Output`") 872 | 873 | 874 | ########################################################################## 875 | 876 | @anibot.on_edited_message( 877 | ~filters.private & filters.command( 878 | ['disable', f'disable{BOT_NAME}', 'enable', f'enable{BOT_NAME}'], 879 | prefixes=trg 880 | ) 
881 | ) 882 | @control_user 883 | async def en_dis__able_cmd_edit(client: Client, message: Message, mdata: dict): 884 | await en_dis__able_cmd(client, message) 885 | 886 | 887 | @anibot.on_edited_message( 888 | ~filters.private & filters.command( 889 | ['disabled', f'disabled{BOT_NAME}'], 890 | prefixes=trg 891 | ) 892 | ) 893 | @control_user 894 | async def list_disabled_edit(client: Client, message: Message, mdata: dict): 895 | await list_disabled(client, message) 896 | 897 | @anibot.on_edited_message( 898 | filters.user(OWNER) & filters.command( 899 | ['dbcleanup', f'dbcleanup{BOT_NAME}'], prefixes=trg 900 | ) 901 | ) 902 | @control_user 903 | async def db_cleanup_edit(client: Client, message: Message, mdata: dict): 904 | await db_cleanup(client, message) 905 | 906 | @anibot.on_edited_message( 907 | filters.command(['start', f'start{BOT_NAME}'], prefixes=trg) 908 | ) 909 | @control_user 910 | async def start_edit(client: Client, message: Message, mdata: dict): 911 | await start_(client, message) 912 | 913 | @anibot.on_edited_message( 914 | filters.command(['help', f'help{BOT_NAME}'], prefixes=trg) 915 | ) 916 | @control_user 917 | async def help_edit(client: Client, message: Message, mdata: dict): 918 | await help_(client, message) 919 | 920 | @anibot.on_edited_message( 921 | filters.command( 922 | [ 923 | 'connect', 924 | f'connect{BOT_NAME}', 925 | 'disconnect', 926 | f'disconnect{BOT_NAME}' 927 | ], 928 | prefixes=trg 929 | ) 930 | ) 931 | @control_user 932 | async def connect_edit(client: Client, message: Message, mdata: dict): 933 | await connect_(client, message) 934 | 935 | @anibot.on_edited_message( 936 | filters.user(OWNER) & filters.command( 937 | ['stats', f'stats{BOT_NAME}'], prefixes=trg 938 | ) 939 | ) 940 | @control_user 941 | async def stats_edit(client: Client, message: Message, mdata: dict): 942 | await stats_(client, message) 943 | 944 | @anibot.on_edited_message( 945 | filters.command(['ping', f'ping{BOT_NAME}'], prefixes=trg) 946 | ) 947 | 
@control_user 948 | async def pong_edit(client: Client, message: Message, mdata: dict): 949 | await pong_(client, message) 950 | 951 | @anibot.on_edited_message( 952 | filters.private & filters.command( 953 | ['feedback', f'feedback{BOT_NAME}'], prefixes=trg 954 | ) 955 | ) 956 | @control_user 957 | async def feed_edit(client: Client, message: Message, mdata: dict): 958 | await feed_(client, message) 959 | 960 | @anibot.on_edited_message( 961 | filters.command( 962 | ['eval', f'eval{BOT_NAME}'], prefixes=trg 963 | ) & filters.user(OWNER) 964 | ) 965 | @control_user 966 | async def eval_edit(client: Client, message: Message, mdata: dict): 967 | await eval_(client, message) 968 | 969 | @anibot.on_edited_message( 970 | filters.user(OWNER) & filters.command( 971 | ["term", f"term{BOT_NAME}"], prefixes=trg 972 | ) 973 | ) 974 | @control_user 975 | async def terminal_edit(client: Client, message: Message, mdata: dict): 976 | await terminal(client, message) -------------------------------------------------------------------------------- /anibot/utils/data_parser.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import time 3 | import os 4 | from bs4 import BeautifulSoup 5 | from .db import get_collection 6 | from .google_trans_new import google_translator 7 | from .helper import ( 8 | cflag, 9 | make_it_rw, 10 | pos_no, 11 | return_json_senpai, 12 | day_, 13 | season_ 14 | ) 15 | from .. 
import BOT_NAME
from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup
from datetime import datetime

tr = google_translator()

# Per-category in-memory caches mapping a short query key to the raw search
# text; they are filled by the command handlers elsewhere in this file.
ANIME_DB, MANGA_DB, CHAR_DB, STUDIO_DB, AIRING_DB = {}, {}, {}, {}, {}
GUI = get_collection('GROUP_UI')


async def uidata(id_):
    """Return the (bullet, label_case) UI preferences stored for chat ``id_``.

    Falls back to the defaults ("➤ " bullet, upper-case labels) when the chat
    has no stored preference document.
    """
    data = await GUI.find_one({'_id': str(id_)})
    if data is None:
        return ["➤ ", "UPPER"]
    # A stored ``bl`` of None means "no bullet at all".
    bullet = "" if data['bl'] is None else str(data['bl']) + " "
    return bullet, data['cs']


# Canonical caption labels.  The upper-case variant is derived from this list
# instead of being a second hand-maintained copy (previously the two lists had
# to be kept in sync manually).
_UI_LABELS = [
    "Source",
    "Type",
    "Score",
    "Duration",
    "User Data",
    "Adult Rated",
    "Status",
    "Genres",
    "Tags",
    "Sequel",
    "Prequel",
    "Next Airing",
    "Description",
    "Volumes",
    "Chapters",
]


async def get_ui_text(case):
    """Return the 15 caption field labels, upper-cased when ``case == "UPPER"``."""
    if case == "UPPER":
        return [label.upper() for label in _UI_LABELS]
    return list(_UI_LABELS)



#### Anilist part ####

# NOTE(review): the HTML anchor markup that presumably wrapped the
# trailer/synopsis/site/recommendation lines appears to have been stripped
# from this dump — confirm the template against upstream before editing the
# link lines.
ANIME_TEMPLATE = """{name}

**ID | MAL ID:** `{idm}` | `{idmal}`
{bl}**{psrc}:** `{source}`
{bl}**{ptype}:** `{formats}`{avscd}{dura}{user_data}
{status_air}{gnrs_}{tags_}

🎬 {trailer_link}
📖 Synopsis
📖 Official Site
Recommendations

{additional}"""


# GraphQL Queries.
91 | ANIME_QUERY = """ 92 | query ($id: Int, $idMal:Int, $search: String) { 93 | Media (id: $id, idMal: $idMal, search: $search, type: ANIME) { 94 | id 95 | idMal 96 | title { 97 | romaji 98 | english 99 | native 100 | } 101 | format 102 | status 103 | episodes 104 | duration 105 | countryOfOrigin 106 | source (version: 2) 107 | trailer { 108 | id 109 | site 110 | } 111 | genres 112 | tags { 113 | name 114 | } 115 | averageScore 116 | relations { 117 | edges { 118 | node { 119 | title { 120 | romaji 121 | english 122 | } 123 | id 124 | type 125 | } 126 | relationType 127 | } 128 | } 129 | nextAiringEpisode { 130 | timeUntilAiring 131 | episode 132 | } 133 | isAdult 134 | isFavourite 135 | mediaListEntry { 136 | status 137 | score 138 | id 139 | } 140 | siteUrl 141 | } 142 | } 143 | """ 144 | 145 | ISADULT = """ 146 | query ($id: Int) { 147 | Media (id: $id) { 148 | isAdult 149 | } 150 | } 151 | """ 152 | 153 | BROWSE_QUERY = """ 154 | query ($s: MediaSeason, $y: Int, $sort: [MediaSort]) { 155 | Page { 156 | media (season: $s, seasonYear: $y, sort: $sort) { 157 | title { 158 | romaji 159 | } 160 | format 161 | } 162 | } 163 | } 164 | """ 165 | 166 | FAV_ANI_QUERY = """ 167 | query ($id: Int, $page: Int) { 168 | User (id: $id) { 169 | favourites { 170 | anime (page: $page, perPage: 10) { 171 | pageInfo { 172 | lastPage 173 | hasNextPage 174 | } 175 | edges { 176 | node { 177 | title { 178 | romaji 179 | } 180 | siteUrl 181 | } 182 | } 183 | } 184 | } 185 | } 186 | } 187 | """ 188 | 189 | FAV_MANGA_QUERY = """ 190 | query ($id: Int, $page: Int) { 191 | User (id: $id) { 192 | favourites { 193 | manga (page: $page, perPage: 10) { 194 | pageInfo { 195 | lastPage 196 | hasNextPage 197 | } 198 | edges { 199 | node { 200 | title { 201 | romaji 202 | } 203 | siteUrl 204 | } 205 | } 206 | } 207 | } 208 | } 209 | } 210 | """ 211 | 212 | FAV_CHAR_QUERY = """ 213 | query ($id: Int, $page: Int) { 214 | User (id: $id) { 215 | favourites { 216 | characters (page: $page, perPage: 
10) { 217 | pageInfo { 218 | lastPage 219 | hasNextPage 220 | } 221 | edges { 222 | node { 223 | name { 224 | full 225 | } 226 | siteUrl 227 | } 228 | } 229 | } 230 | } 231 | } 232 | } 233 | """ 234 | 235 | VIEWER_QRY = """ 236 | query { 237 | Viewer { 238 | id 239 | name 240 | siteUrl 241 | statistics { 242 | anime { 243 | count 244 | minutesWatched 245 | episodesWatched 246 | meanScore 247 | } 248 | manga { 249 | count 250 | chaptersRead 251 | volumesRead 252 | meanScore 253 | } 254 | } 255 | } 256 | } 257 | """ 258 | 259 | USER_QRY = """ 260 | query ($search: String) { 261 | User (name: $search) { 262 | id 263 | name 264 | siteUrl 265 | statistics { 266 | anime { 267 | count 268 | minutesWatched 269 | episodesWatched 270 | meanScore 271 | } 272 | manga { 273 | count 274 | chaptersRead 275 | volumesRead 276 | meanScore 277 | } 278 | } 279 | } 280 | } 281 | """ 282 | 283 | ANIME_MUTATION = """ 284 | mutation ($id: Int) { 285 | ToggleFavourite (animeId: $id) { 286 | anime { 287 | pageInfo { 288 | total 289 | } 290 | } 291 | } 292 | } 293 | """ 294 | 295 | MANGA_MUTATION = """ 296 | mutation ($id: Int) { 297 | ToggleFavourite (mangaId: $id) { 298 | manga { 299 | pageInfo { 300 | total 301 | } 302 | } 303 | } 304 | } 305 | """ 306 | 307 | STUDIO_MUTATION = """ 308 | mutation ($id: Int) { 309 | ToggleFavourite (studioId: $id) { 310 | studios { 311 | pageInfo { 312 | total 313 | } 314 | } 315 | } 316 | } 317 | """ 318 | 319 | CHAR_MUTATION = """ 320 | mutation ($id: Int) { 321 | ToggleFavourite (characterId: $id) { 322 | characters { 323 | pageInfo { 324 | total 325 | } 326 | } 327 | } 328 | } 329 | """ 330 | 331 | ANILIST_MUTATION = """ 332 | mutation ($id: Int, $status: MediaListStatus) { 333 | SaveMediaListEntry (mediaId: $id, status: $status) { 334 | media { 335 | title { 336 | romaji 337 | } 338 | } 339 | } 340 | } 341 | """ 342 | 343 | ANILIST_MUTATION_UP = """ 344 | mutation ($id: [Int], $status: MediaListStatus) { 345 | UpdateMediaListEntries (ids: $id, status: 
$status) { 346 | media { 347 | title { 348 | romaji 349 | } 350 | } 351 | } 352 | } 353 | """ 354 | 355 | ANILIST_MUTATION_DEL = """ 356 | mutation ($id: Int) { 357 | DeleteMediaListEntry (id: $id) { 358 | deleted 359 | } 360 | } 361 | """ 362 | 363 | AIR_QUERY = """ 364 | query ($search: String, $page: Int) { 365 | Page (perPage: 1, page: $page) { 366 | pageInfo { 367 | total 368 | hasNextPage 369 | } 370 | media (search: $search, type: ANIME) { 371 | id 372 | title { 373 | romaji 374 | english 375 | } 376 | status 377 | countryOfOrigin 378 | nextAiringEpisode { 379 | timeUntilAiring 380 | episode 381 | } 382 | siteUrl 383 | isFavourite 384 | isAdult 385 | mediaListEntry { 386 | status 387 | id 388 | } 389 | } 390 | } 391 | } 392 | """ 393 | 394 | DES_INFO_QUERY = """ 395 | query ($id: Int) { 396 | Media (id: $id) { 397 | id 398 | description (asHtml: false) 399 | } 400 | } 401 | """ 402 | 403 | CHA_INFO_QUERY = """ 404 | query ($id: Int, $page: Int) { 405 | Media (id: $id, type: ANIME) { 406 | id 407 | characters (page: $page, perPage: 25, sort: ROLE) { 408 | pageInfo { 409 | hasNextPage 410 | lastPage 411 | total 412 | } 413 | edges { 414 | node { 415 | name { 416 | full 417 | } 418 | } 419 | role 420 | } 421 | } 422 | } 423 | } 424 | """ 425 | 426 | REL_INFO_QUERY = """ 427 | query ($id: Int) { 428 | Media (id: $id, type: ANIME) { 429 | id 430 | relations { 431 | edges { 432 | node { 433 | title { 434 | romaji 435 | } 436 | type 437 | } 438 | relationType 439 | } 440 | } 441 | } 442 | } 443 | """ 444 | 445 | PAGE_QUERY = """ 446 | query ($search: String, $page: Int) { 447 | Page (perPage: 1, page: $page) { 448 | pageInfo { 449 | total 450 | hasNextPage 451 | } 452 | media (search: $search, type: ANIME) { 453 | id 454 | idMal 455 | title { 456 | romaji 457 | english 458 | native 459 | } 460 | format 461 | status 462 | episodes 463 | duration 464 | countryOfOrigin 465 | source (version: 2) 466 | trailer { 467 | id 468 | site 469 | } 470 | genres 471 | tags { 472 
| name 473 | } 474 | averageScore 475 | relations { 476 | edges { 477 | node { 478 | title { 479 | romaji 480 | english 481 | } 482 | type 483 | } 484 | relationType 485 | } 486 | } 487 | nextAiringEpisode { 488 | timeUntilAiring 489 | episode 490 | } 491 | isAdult 492 | isFavourite 493 | mediaListEntry { 494 | status 495 | score 496 | id 497 | } 498 | siteUrl 499 | } 500 | } 501 | } 502 | """ 503 | 504 | CHARACTER_QUERY = """ 505 | query ($id: Int, $search: String, $page: Int) { 506 | Page (perPage: 1, page: $page) { 507 | pageInfo { 508 | total 509 | hasNextPage 510 | } 511 | characters (id: $id, search: $search) { 512 | id 513 | name { 514 | full 515 | native 516 | } 517 | image { 518 | large 519 | } 520 | media (type: ANIME) { 521 | edges { 522 | node { 523 | title { 524 | romaji 525 | } 526 | type 527 | } 528 | voiceActors (language: JAPANESE) { 529 | name { 530 | full 531 | } 532 | siteUrl 533 | } 534 | } 535 | } 536 | isFavourite 537 | siteUrl 538 | } 539 | } 540 | } 541 | """ 542 | 543 | MANGA_QUERY = """ 544 | query ($search: String, $page: Int) { 545 | Page (perPage: 1, page: $page) { 546 | pageInfo { 547 | total 548 | hasNextPage 549 | } 550 | media (search: $search, type: MANGA) { 551 | id 552 | title { 553 | romaji 554 | english 555 | native 556 | } 557 | format 558 | countryOfOrigin 559 | source (version: 2) 560 | status 561 | description(asHtml: true) 562 | chapters 563 | isFavourite 564 | mediaListEntry { 565 | status 566 | score 567 | id 568 | } 569 | volumes 570 | averageScore 571 | siteUrl 572 | isAdult 573 | } 574 | } 575 | } 576 | """ 577 | 578 | 579 | DESC_INFO_QUERY = """ 580 | query ($id: Int) { 581 | Character (id: $id) { 582 | image { 583 | large 584 | } 585 | description(asHtml: false) 586 | } 587 | } 588 | """ 589 | 590 | LS_INFO_QUERY = """ 591 | query ($id: Int) { 592 | Character (id: $id) { 593 | image { 594 | large 595 | } 596 | media (page: 1, perPage: 25) { 597 | nodes { 598 | title { 599 | romaji 600 | english 601 | } 602 | type 
603 | } 604 | } 605 | } 606 | } 607 | """ 608 | 609 | ACTIVITY_QUERY = """ 610 | query ($id: Int) { 611 | Page (perPage: 12) { 612 | activities (userId: $id, type: MEDIA_LIST, sort: ID_DESC) { 613 | ...kek 614 | } 615 | } 616 | } 617 | fragment kek on ListActivity { 618 | type 619 | media { 620 | title { 621 | romaji 622 | } 623 | siteUrl 624 | } 625 | progress 626 | status 627 | } 628 | """ 629 | 630 | TOP_QUERY = """ 631 | query ($gnr: String, $page: Int) { 632 | Page (perPage: 15, page: $page) { 633 | pageInfo { 634 | lastPage 635 | total 636 | hasNextPage 637 | } 638 | media (genre: $gnr, sort: SCORE_DESC, type: ANIME) { 639 | title { 640 | romaji 641 | } 642 | } 643 | } 644 | } 645 | """ 646 | 647 | TOPT_QUERY = """ 648 | query ($gnr: String, $page: Int) { 649 | Page (perPage: 15, page: $page) { 650 | pageInfo { 651 | lastPage 652 | total 653 | hasNextPage 654 | } 655 | media (tag: $gnr, sort: SCORE_DESC, type: ANIME) { 656 | title { 657 | romaji 658 | } 659 | } 660 | } 661 | } 662 | """ 663 | 664 | ALLTOP_QUERY = """ 665 | query ($page: Int) { 666 | Page (perPage: 15, page: $page) { 667 | pageInfo { 668 | lastPage 669 | total 670 | hasNextPage 671 | } 672 | media (sort: SCORE_DESC, type: ANIME) { 673 | title { 674 | romaji 675 | } 676 | } 677 | } 678 | } 679 | """ 680 | 681 | GET_GENRES = """ 682 | query { 683 | GenreCollection 684 | } 685 | """ 686 | 687 | GET_TAGS = """ 688 | query{ 689 | MediaTagCollection { 690 | name 691 | isAdult 692 | } 693 | } 694 | """ 695 | 696 | RECOMMENDTIONS_QUERY = ''' 697 | query ($id: Int) { 698 | Media (id: $id) { 699 | recommendations (perPage: 25) { 700 | edges { 701 | node { 702 | mediaRecommendation { 703 | title { 704 | romaji 705 | } 706 | id 707 | siteUrl 708 | } 709 | } 710 | } 711 | } 712 | } 713 | } 714 | ''' 715 | 716 | STUDIO_QUERY = ''' 717 | query ($search: String, $page: Int) { 718 | Page (page: $page, perPage: 1) { 719 | pageInfo { 720 | total 721 | hasNextPage 722 | } 723 | studios (search: $search) { 724 | 
id 725 | name 726 | siteUrl 727 | isFavourite 728 | } 729 | } 730 | } 731 | ''' 732 | 733 | STUDIO_ANI_QUERY = ''' 734 | query ($id: Int, $page: Int) { 735 | Studio (id: $id) { 736 | name 737 | media (page: $page) { 738 | pageInfo { 739 | total 740 | lastPage 741 | hasNextPage 742 | } 743 | edges { 744 | node { 745 | title { 746 | romaji 747 | } 748 | seasonYear 749 | } 750 | } 751 | } 752 | } 753 | } 754 | ''' 755 | 756 | 757 | async def get_studios(qry, page, user, duser = None, auth: bool = False): 758 | page = int(page) 759 | vars_ = {'search': STUDIO_DB[qry], 'page': int(page)} 760 | result = await return_json_senpai(STUDIO_QUERY, vars_, auth, user) 761 | if result["data"]['Page']['studios']==[]: 762 | return ["Not Found"] 763 | data = result["data"]['Page']['studios'][0] 764 | isFav = data['isFavourite'] 765 | msg = ( 766 | f"**{data['name']}**{', ♥️' if isFav is True else ''}" 767 | +f"\n\n**ID:** {data['id']}\n[Website]({data['siteUrl']})" 768 | ) 769 | if not duser: 770 | duser = user 771 | btns = [] 772 | btns.append([ 773 | InlineKeyboardButton( 774 | "List Animes", 775 | callback_data=f"stuani_1_{data['id']}_{page}_{qry}_{auth}_{duser}" 776 | ) 777 | ]) 778 | if auth: 779 | btns.append([ 780 | InlineKeyboardButton( 781 | "Remove from Favs" if isFav else "Add To Favs", 782 | callback_data=f"fav_STUDIO_{data['id']}_{qry}_{page}_{duser}" 783 | ) 784 | ]) 785 | pi = result["data"]['Page']['pageInfo']['hasNextPage'] 786 | if pi is False: 787 | if int(page)==1: 788 | return msg, btns 789 | else: 790 | btns.append([ 791 | InlineKeyboardButton( 792 | "Prev", 793 | callback_data=f"pgstudio_{page-1}_{qry}_{auth}_{duser}" 794 | ) 795 | ]) 796 | else: 797 | if int(page)==1: 798 | btns.append([ 799 | InlineKeyboardButton( 800 | "Next", callback_data=f"pgstudio_2_{qry}_{auth}_{duser}" 801 | ) 802 | ]) 803 | else: 804 | btns.append( 805 | [ 806 | InlineKeyboardButton( 807 | "Prev", callback_data=f"pgstudio_{page-1}_{qry}_{auth}_{duser}" 808 | ), 809 | 
InlineKeyboardButton( 810 | "Next", callback_data=f"pgstudio_{page+1}_{qry}_{auth}_{duser}" 811 | ) 812 | ] 813 | ) 814 | return msg, InlineKeyboardMarkup(btns) 815 | 816 | 817 | async def get_studio_animes(id_, page, qry, rp, user, duser = None, auth: bool = False): 818 | vars_ = {'id': id_, 'page': int(page)} 819 | result = await return_json_senpai(STUDIO_ANI_QUERY, vars_, auth, user) 820 | data = result['data']['Studio']['media']['edges'] 821 | if data==[]: 822 | return ["No results found"] 823 | msg = f"List of animes by {result['data']['Studio']['name']} studio\n" 824 | for i in data: 825 | msg += ( 826 | f"\n⚬ `{i['node']['title']['romaji']}`" 827 | +f" __({i['node']['seasonYear']})__" 828 | ) 829 | btns = [] 830 | if not duser: 831 | duser = user 832 | pi = result["data"]['Studio']['media']['pageInfo'] 833 | if pi['hasNextPage'] is False: 834 | if int(page)==1: 835 | btns.append([ 836 | InlineKeyboardButton( 837 | "Back", callback_data=f"pgstudio_{rp}_{qry}_{auth}_{duser}" 838 | ) 839 | ]) 840 | return msg, btns 841 | else: 842 | btns.append([ 843 | InlineKeyboardButton( 844 | "Prev", 845 | callback_data=f"stuani_{int(page)-1}_{id_}_{rp}_{qry}_{auth}_{duser}" 846 | ) 847 | ]) 848 | else: 849 | if int(page)==1: 850 | btns.append([ 851 | InlineKeyboardButton( 852 | "Next", callback_data=f"stuani_2_{id_}_{rp}_{qry}_{auth}_{duser}" 853 | ) 854 | ]) 855 | else: 856 | btns.append([ 857 | InlineKeyboardButton( 858 | "Prev", 859 | callback_data=f"stuani_{int(page)-1}_{id_}_{rp}_{qry}_{auth}_{duser}" 860 | ), 861 | InlineKeyboardButton( 862 | "Next", 863 | callback_data=f"stuani_{int(page)+1}_{id_}_{rp}_{qry}_{auth}_{duser}" 864 | ) 865 | ]) 866 | btns.append([ 867 | InlineKeyboardButton( 868 | "Back", callback_data=f"pgstudio_{rp}_{qry}_{auth}_{duser}" 869 | ) 870 | ]) 871 | return msg, InlineKeyboardMarkup(btns) 872 | 873 | 874 | async def get_all_tags(text: str = None): 875 | vars_ = {} 876 | result = await return_json_senpai(GET_TAGS, vars_, auth=False, 
async def get_all_genres():
    """Return a formatted list of all AniList genres."""
    vars_ = {}
    result = await return_json_senpai(GET_GENRES, vars_, auth=False)
    msg = "**Genres List:**\n\n"
    for i in result['data']['GenreCollection']:
        msg += f"`{i}`\n"
    return msg


async def get_user_activity(id_, user, duser = None):
    """Render the latest 12 media-list activities of AniList user ``id_``.

    Returns ``[banner_url, text, InlineKeyboardMarkup]`` for the caller.
    """
    vars_ = {"id": id_}
    result = await return_json_senpai(
        ACTIVITY_QUERY, vars_, auth=True, user=user
    )
    data = result["data"]["Page"]["activities"]
    msg = ""
    for i in data:
        try:
            name = (
                f"[{i['media']['title']['romaji']}]"
                + f"({i['media']['siteUrl']})"
            )
            if i['status'] in ["watched episode", "read chapter"]:
                msg += (
                    f"⚬ {str(i['status']).capitalize()} "
                    + f"{i['progress']} of {name}\n"
                )
            else:
                progress = i['progress']
                of = "of"
                if i['status'] == "dropped":
                    of = "at"
                # BUGFIX: a space was missing between the status word and the
                # progress value ("Completed3 of ..."); keep exactly one
                # space whether or not a progress value is present.
                progress_part = (
                    f"{progress} {of} " if progress is not None else ""
                )
                msg += (
                    f"⚬ {str(i['status']).capitalize()} "
                    + progress_part
                    + f"{name}\n"
                )
        except KeyError:
            # Activities that are not about a media entry lack these keys.
            pass
    if duser is None:
        duser = user
    btn = [[InlineKeyboardButton("Back", callback_data=f"getusrbc_{duser}")]]
    return [
        f"https://img.anili.st/user/{id_}?a={time.time()}",
        msg,
        InlineKeyboardMarkup(btn)
    ]


async def get_recommendations(id_):
    """Return a text list of AniList recommendations for anime ``id_``."""
    vars_ = {'id': int(id_)}
    result = await return_json_senpai(RECOMMENDTIONS_QUERY, vars_)
    data = result['data']['Media']['recommendations']['edges']
    rc_ls = []
    for i in data:
        ii = i['node']['mediaRecommendation']
        rc_ls.append([ii['title']['romaji'], ii['id'], ii['siteUrl']])
    if rc_ls == []:
        return "No Recommendations available related to given anime!!!"
    outstr = "Recommended animes:\n\n"
    for i in rc_ls:
        outstr += (
            f"**{i[0]}**\n ➥[Synopsis]"
            + f"(https://t.me/{BOT_NAME.replace('@', '')}?start=anime_{i[1]})"
            + f"\n ➥[Official Site]({i[2]})\n\n"
        )
    return outstr
async def get_top_animes(gnr: str, page, user):
    """Caption + paging keyboard for the top-rated animes of a genre or tag."""
    pg = int(page)
    query, vars_ = TOP_QUERY, {"gnr": gnr.lower(), "page": pg}
    msg = f"Top animes for genre `{gnr.capitalize()}`:\n\n"
    if gnr == "None":
        query, vars_ = ALLTOP_QUERY, {"page": pg}
        msg = "Top animes:\n\n"
    nsfw = False
    result = await return_json_senpai(query, vars_, auth=False, user=user)
    if not result['data']['Page']['media']:
        # Nothing under that genre name — retry treating it as a tag.
        query = TOPT_QUERY
        msg = f"Top animes for tag `{gnr.capitalize()}`:\n\n"
        result = await return_json_senpai(query, vars_, auth=False, user=user)
        if not result['data']['Page']['media']:
            return ["No results Found"]
        nsls = await get_all_tags('nsfw')
        nsfw = gnr.lower() in nsls.lower()
    data = result["data"]["Page"]
    msg += "".join(
        f"⚬ `{media['title']['romaji']}`\n" for media in data['media']
    )
    msg += f"\nTotal available animes: `{data['pageInfo']['total']}`"
    last = int(data['pageInfo']['lastPage'])
    prev_btn = InlineKeyboardButton(
        "Prev", callback_data=f"topanimu_{gnr}_{pg-1}_{user}"
    )
    next_btn = InlineKeyboardButton(
        "Next", callback_data=f"topanimu_{gnr}_{pg+1}_{user}"
    )
    if pg == 1:
        rows = [[next_btn]] if last != 1 else []
    elif pg == last:
        rows = [[prev_btn]]
    else:
        rows = [[prev_btn, next_btn]]
    return [msg, nsfw], InlineKeyboardMarkup(rows) if rows else ""


async def get_user_favourites(id_, user, req, page, sighs, duser = None):
    """One page of a user's favourite animes/characters/manga with paging."""
    pg = int(page)
    vars_ = {"id": int(id_), "page": pg}
    if req == "ANIME":
        qry, key, msg = FAV_ANI_QUERY, "anime", "Favourite Animes:\n\n"
    elif req == "CHAR":
        qry, key, msg = FAV_CHAR_QUERY, "characters", "Favourite Characters:\n\n"
    else:
        qry, key, msg = FAV_MANGA_QUERY, "manga", "Favourite Manga:\n\n"
    result = await return_json_senpai(qry, vars_, auth=True, user=int(user))
    data = result["data"]["User"]["favourites"][key]
    for edge in data["edges"]:
        node = edge['node']
        label = (
            node['name']['full'] if req == 'CHAR'
            else node['title']['romaji']
        )
        msg += f"⚬ [{label}]({node['siteUrl']})\n"
    if duser is None:
        duser = user
    last = int(data['pageInfo']['lastPage'])
    prev_btn = InlineKeyboardButton(
        "Prev",
        callback_data=f"myfavqry_{req}_{id_}_{str(pg-1)}_{sighs}_{duser}"
    )
    next_btn = InlineKeyboardButton(
        "Next",
        callback_data=f"myfavqry_{req}_{id_}_{str(pg+1)}_{sighs}_{duser}"
    )
    if pg == 1:
        rows = [[next_btn]] if last != 1 else []
    elif pg == last:
        rows = [[prev_btn]]
    else:
        rows = [[prev_btn, next_btn]]
    rows.append([
        InlineKeyboardButton(
            "Back", callback_data=f"myfavs_{id_}_{sighs}_{user}"
        )
    ])
    return [
        f"https://img.anili.st/user/{id_}?a=({time.time()})",
        msg,
        InlineKeyboardMarkup(rows)
    ]
async def get_featured_in_lists(
    idm,
    req,
    auth: bool = False,
    user: int = None,
    page: int = 0
):
    """List the animes (req == "ANI") or mangas a character featured in.

    Returns ``([page_text, total_matches] or False, character_image_url)``;
    ``page`` selects a 15-item slice of the matches.
    """
    vars_ = {"id": int(idm)}
    result = await return_json_senpai(
        LS_INFO_QUERY, vars_, auth=auth, user=user
    )
    data = result["data"]["Character"]["media"]["nodes"]
    wanted = "ANIME" if req == "ANI" else "MANGA"
    out = "ANIMES:\n\n" if req == "ANI" else "MANGAS:\n\n"
    out_ = []
    for ani in data:
        k = ani["title"]["english"] or ani["title"]["romaji"]
        if ani["type"] == wanted:
            out_.append(f"• __{k}__\n")
    total = len(out_)
    # BUGFIX: the old code popped 15*page items in a loop, which raised
    # IndexError whenever the requested page ran past the end of the list;
    # slicing is equivalent for valid pages and safe for out-of-range ones.
    out_ = "".join(out_[15 * page:15 * page + 15])
    return (
        [out + out_, total] if len(out_) != 0 else False
    ), result["data"]["Character"]["image"]["large"]


async def get_additional_info(
    idm,
    ctgry,
    req = None,
    auth: bool = False,
    user: int = None,
    page: int = 0
):
    """Fetch extra info for a media item or character.

    For ``ctgry == "ANI"`` the ``req`` value selects description ("desc"),
    character list ("char") or relations (anything else); for any other
    category the character description is fetched.
    """
    vars_ = {"id": int(idm)}
    if req == 'char':
        vars_['page'] = page
    result = await return_json_senpai(
        (
            (
                DES_INFO_QUERY
                if req == "desc"
                else CHA_INFO_QUERY
                if req == "char"
                else REL_INFO_QUERY
            )
            if ctgry == "ANI"
            else DESC_INFO_QUERY
        ),
        vars_,
    )
    data = (
        result["data"]["Media"] if ctgry == "ANI"
        else result["data"]["Character"]
    )
    pic = f"https://img.anili.st/media/{idm}"
    if req == "desc":
        synopsis = data.get("description")
        if os.environ.get("PREFERRED_LANGUAGE"):
            # NOTE(review): this translation call looks synchronous and will
            # block the event loop while it runs — confirm acceptable.
            synopsis = tr.translate(
                synopsis, lang_tgt=os.environ.get("PREFERRED_LANGUAGE")
            )
        return (pic if ctgry == "ANI" else data["image"]["large"]), synopsis
    elif req == "char":
        charlist = []
        for char in data["characters"]['edges']:
            charlist.append(
                f"• `{char['node']['name']['full']}` ({char['role']})"
            )
        chrctrs = ("\n").join(charlist)
        charls = f"{chrctrs}" if len(charlist) != 0 else ""
        return pic, charls, data["characters"]['pageInfo']
    else:
        prqlsql = data.get("relations").get("edges")
        ps = ""
        for i in prqlsql:
            ps += (
                f'• {i["node"]["title"]["romaji"]} '
                + f'({i["node"]["type"]}) `{i["relationType"]}`\n'
            )
        return pic, ps
async def get_anime(
    vars_,
    auth: bool = False,
    user: int = None,
    cid: int = None
):
    """Fetch one anime by the AniList variables in ``vars_`` and render it.

    Returns ``(banner_url, caption, [id, in_list, list_id, is_fav, adult],
    prequel_id, sequel_id)`` or a one-element error list.

    NOTE: ``ANIME_TEMPLATE.format(**locals())`` below couples the caption to
    the exact local variable names in this function — do not rename locals.
    """
    result = await return_json_senpai(
        ANIME_QUERY, vars_, auth=auth, user=user
    )

    error = result.get("errors")
    if error:
        error_sts = error[0].get("message")
        return [f"[{error_sts}]"]

    data = result["data"]["Media"]

    # Data of all fields in returned json
    # pylint: disable=possibly-unused-variable
    idm = data.get("id")
    idmal = data.get("idMal")
    romaji = data["title"]["romaji"]
    english = data["title"]["english"]
    native = data["title"]["native"]
    formats = data.get("format")
    status = data.get("status")
    episodes = data.get("episodes")
    duration = data.get("duration")
    country = data.get("countryOfOrigin")
    c_flag = cflag(country)
    source = data.get("source")
    prqlsql = data.get("relations").get("edges")
    adult = data.get("isAdult")
    url = data.get("siteUrl")
    trailer_link = "N/A"
    gnrs = ", ".join(data['genres'])
    score = data['averageScore']
    bl, cs = await uidata(cid)
    text = await get_ui_text(cs)
    psrc, ptype = text[0], text[1]
    avscd = (
        f"\n{bl}**{text[2]}:** `{score}%` 🌟" if score is not None
        else ""
    )
    tags = []
    for i in data['tags']:
        tags.append(i["name"])
    # Only the first five tags are shown to keep the caption short.
    tags_ = (
        f"\n{bl}**{text[8]}:** `{', '.join(tags[:5])}`" if tags != []
        else ""
    )
    bot = BOT_NAME.replace("@", "")
    gnrs_ = ""
    if len(gnrs) != 0:
        gnrs_ = f"\n{bl}**{text[7]}:** `{gnrs}`"
    isfav = data.get("isFavourite")
    fav = ", in Favourites" if isfav is True else ""
    user_data = ""
    in_ls = False
    in_ls_id = ""
    if auth is True:
        in_list = data.get("mediaListEntry")
        if in_list is not None:
            in_ls = True
            in_ls_id = in_list['id']
            in_ls_stts = in_list['status']
            in_ls_score = (
                f" and scored {in_list['score']}" if in_list['score'] != 0
                else ""
            )
            user_data = (
                f"\n{bl}**{text[4]}:** `{in_ls_stts}{fav}{in_ls_score}`"
            )
    if data["title"]["english"] is not None:
        name = f"""[{c_flag}]**{romaji}**
__{english}__
{native}"""
    else:
        name = f"""[{c_flag}]**{romaji}**
{native}"""
    prql, prql_id, sql, sql_id = "", "None", "", "None"
    for i in prqlsql:
        if i["relationType"] == "PREQUEL" and i["node"]["type"] == "ANIME":
            pname = (
                i["node"]["title"]["english"]
                if i["node"]["title"]["english"] is not None
                else i["node"]["title"]["romaji"]
            )
            prql += f"**{text[10]}:** `{pname}`\n"
            prql_id = i["node"]["id"]
            break
    for i in prqlsql:
        if i["relationType"] == "SEQUEL" and i["node"]["type"] == "ANIME":
            sname = (
                i["node"]["title"]["english"]
                if i["node"]["title"]["english"] is not None
                else i["node"]["title"]["romaji"]
            )
            sql += f"**{text[9]}:** `{sname}`\n"
            sql_id = i["node"]["id"]
            break
    additional = f"{prql}{sql}"
    surl = f"https://t.me/{bot}/?start=des_ANI_{idm}_desc"
    dura = (
        f"\n{bl}**{text[3]}:** `{duration} min/ep`"
        if duration is not None
        else ""
    )
    air_on = None
    if data["nextAiringEpisode"]:
        nextAir = data["nextAiringEpisode"]["timeUntilAiring"]
        air_on = make_it_rw(nextAir * 1000)
        eps = data["nextAiringEpisode"]["episode"]
        th = pos_no(str(eps))
        air_on += f" | {eps}{th} eps"
    if air_on is None:
        eps_ = f"` | `{episodes} eps" if episodes is not None else ""
        status_air = f"{bl}**{text[6]}:** `{status}{eps_}`"
    else:
        status_air = (
            f"{bl}**{text[6]}:** `{status}`\n{bl}**{text[11]}:** `{air_on}`"
        )
    if data["trailer"] and data["trailer"]["site"] == "youtube":
        # NOTE(review): the link markup around this label appears stripped in
        # the dump; confirm against upstream.
        trailer_link = (
            f"Trailer"
        )
    title_img = f"https://img.anili.st/media/{idm}"
    try:
        finals_ = ANIME_TEMPLATE.format(**locals())
    except KeyError as kys:
        return [f"{kys}"]
    return title_img, finals_, [
        idm, in_ls, in_ls_id, isfav, str(adult)
    ], prql_id, sql_id


async def get_anilist(
    qdb, page, auth: bool = False, user: int = None, cid: int = None
):
    """Render one page of an anime search (``ANIME_DB[qdb]`` is the query).

    Returns ``(banner_url, [caption, has_next_page], [id, in_list, list_id,
    is_fav, adult])`` or a one-element error list.

    NOTE: as in ``get_anime``, the caption is built with ``locals()`` — do
    not rename locals.
    """
    vars_ = {"search": ANIME_DB[qdb], "page": page}
    result = await return_json_senpai(PAGE_QUERY, vars_, auth=auth, user=user)

    if len(result['data']['Page']['media']) == 0:
        return [f"No results Found"]

    data = result["data"]["Page"]["media"][0]
    # Data of all fields in returned json
    # pylint: disable=possibly-unused-variable
    idm = data.get("id")
    bot = BOT_NAME.replace("@", "")
    idmal = data.get("idMal")
    romaji = data["title"]["romaji"]
    english = data["title"]["english"]
    native = data["title"]["native"]
    formats = data.get("format")
    status = data.get("status")
    episodes = data.get("episodes")
    duration = data.get("duration")
    country = data.get("countryOfOrigin")
    c_flag = cflag(country)
    source = data.get("source")
    prqlsql = data.get("relations").get("edges")
    adult = data.get("isAdult")
    trailer_link = "N/A"
    isfav = data.get("isFavourite")
    gnrs = ", ".join(data['genres'])
    gnrs_ = ""
    bl, cs = await uidata(cid)
    text = await get_ui_text(cs)
    psrc, ptype = text[0], text[1]
    if len(gnrs) != 0:
        gnrs_ = f"\n{bl}**{text[7]}:** `{gnrs}`"
    fav = ", in Favourites" if isfav is True else ""
    score = data['averageScore']
    avscd = (
        f"\n{bl}**{text[2]}:** `{score}%` 🌟" if score is not None else ""
    )
    tags = []
    for i in data['tags']:
        tags.append(i["name"])
    tags_ = (
        f"\n{bl}**{text[8]}:** `{', '.join(tags[:5])}`" if tags != [] else ""
    )
    in_ls = False
    in_ls_id = ""
    user_data = ""
    if auth is True:
        in_list = data.get("mediaListEntry")
        if in_list is not None:
            in_ls = True
            in_ls_id = in_list['id']
            in_ls_stts = in_list['status']
            in_ls_score = (
                f" and scored {in_list['score']}" if in_list['score'] != 0
                else ""
            )
            user_data = (
                f"\n{bl}**{text[4]}:** `{in_ls_stts}{fav}{in_ls_score}`"
            )
    if data["title"]["english"] is not None:
        name = f"[{c_flag}]**{english}** (`{native}`)"
    else:
        name = f"[{c_flag}]**{romaji}** (`{native}`)"
    prql, sql = "", ""
    for i in prqlsql:
        if i["relationType"] == "PREQUEL" and i["node"]["type"] == "ANIME":
            pname = (
                i["node"]["title"]["english"]
                if i["node"]["title"]["english"] is not None
                else i["node"]["title"]["romaji"]
            )
            prql += f"**{text[10]}:** `{pname}`\n"
            break
    for i in prqlsql:
        if i["relationType"] == "SEQUEL" and i["node"]["type"] == "ANIME":
            sname = (
                i["node"]["title"]["english"]
                if i["node"]["title"]["english"] is not None
                else i["node"]["title"]["romaji"]
            )
            sql += f"**{text[9]}:** `{sname}`\n"
            break
    additional = f"{prql}{sql}"
    # BUGFIX: removed the no-op statement ``additional.replace("-", "")``
    # whose return value was discarded — str.replace does not mutate in
    # place, so the line had no effect.
    dura = (
        f"\n{bl}**{text[3]}:** `{duration} min/ep`"
        if duration is not None
        else ""
    )
    air_on = None
    if data["nextAiringEpisode"]:
        nextAir = data["nextAiringEpisode"]["timeUntilAiring"]
        air_on = make_it_rw(nextAir * 1000)
        eps = data["nextAiringEpisode"]["episode"]
        th = pos_no(str(eps))
        air_on += f" | {eps}{th} eps"
    if air_on is None:
        eps_ = f"` | `{episodes} eps" if episodes is not None else ""
        status_air = f"{bl}**{text[6]}:** `{status}{eps_}`"
    else:
        status_air = (
            f"{bl}**{text[6]}:** `{status}`\n{bl}**{text[11]}:** `{air_on}`"
        )
    if data["trailer"] and data["trailer"]["site"] == "youtube":
        # NOTE(review): link markup appears stripped in the dump; confirm.
        trailer_link = (
            f"Trailer"
        )
    url = data.get("siteUrl")
    title_img = f"https://img.anili.st/media/{idm}"
    surl = f"https://t.me/{bot}/?start=des_ANI_{idm}_desc"
    hasNextPage = result["data"]["Page"]["pageInfo"]["hasNextPage"]
    try:
        finals_ = ANIME_TEMPLATE.format(**locals())
    except KeyError as kys:
        return [f"{kys}"]
    return title_img, [
        finals_, hasNextPage
    ], [
        idm, in_ls, in_ls_id, isfav, str(adult)
    ]
i["node"]["title"]["english"] is not None
                else i["node"]["title"]["romaji"]
            )
            # Prepend the prequel line using the UI label text[10]
            prql += f"**{text[10]}:** `{pname}`\n"
            break
    # Find the first SEQUEL anime relation, preferring the English title
    for i in prqlsql:
        if i["relationType"] == "SEQUEL" and i["node"]["type"]=="ANIME":
            sname = (
                i["node"]["title"]["english"]
                if i["node"]["title"]["english"] is not None
                else i["node"]["title"]["romaji"]
            )
            sql += f"**{text[9]}:** `{sname}`\n"
            break
    additional = f"{prql}{sql}"
    # NOTE(review): str.replace returns a new string, so this call is a
    # no-op as written. Presumably `additional = additional.replace("-", "")`
    # was intended — confirm before changing, as fixing it alters output.
    additional.replace("-", "")
    dura = (
        f"\n{bl}**{text[3]}:** `{duration} min/ep`"
        if duration is not None
        else ""
    )
    # Next-airing info: human-readable countdown plus episode ordinal
    air_on = None
    if data["nextAiringEpisode"]:
        nextAir = data["nextAiringEpisode"]["timeUntilAiring"]
        air_on = make_it_rw(nextAir*1000)
        eps = data["nextAiringEpisode"]["episode"]
        th = pos_no(str(eps))
        air_on += f" | {eps}{th} eps"
    if air_on is None:
        eps_ = f"` | `{episodes} eps" if episodes is not None else ""
        status_air = f"{bl}**{text[6]}:** `{status}{eps_}`"
    else:
        status_air = (
            f"{bl}**{text[6]}:** `{status}`\n{bl}**{text[11]}:** `{air_on}`"
        )
    # NOTE(review): this literal looks like it lost its link markup in an
    # earlier edit — a hyperlink built from data["trailer"]["id"] was
    # probably intended; as written trailer_link is the plain text "Trailer".
    if data["trailer"] and data["trailer"]["site"] == "youtube":
        trailer_link = (
            f"Trailer"
        )
    url = data.get("siteUrl")
    title_img = f"https://img.anili.st/media/{idm}"
    surl = f"https://t.me/{bot}/?start=des_ANI_{idm}_desc"
    hasNextPage = result["data"]["Page"]["pageInfo"]["hasNextPage"]
    # ANIME_TEMPLATE pulls its fields straight out of locals(); a missing
    # placeholder surfaces as a single-element error list for the caller.
    try:
        finals_ = ANIME_TEMPLATE.format(**locals())
    except KeyError as kys:
        return [f"{kys}"]
    return title_img, [
        finals_, hasNextPage
    ], [
        idm, in_ls, in_ls_id, isfav, str(adult)
    ]


async def get_character(query, page, auth: bool = False, user: int = None):
    """Fetch one character result (page `page`) from AniList.

    `query` keys into CHAR_DB for the stored search string. Returns
    (image_url, [caption_text, has_next_page], [character_id, is_favourite]),
    or a single-element list with an error message when nothing matched.
    """
    var = {"search": CHAR_DB[query], "page": int(page)}
    result = await return_json_senpai(
        CHARACTER_QUERY, var, auth=auth, user=user
    )
    if len(result['data']['Page']['characters'])==0:
        return [f"No results Found"]
    data = result["data"]["Page"]["characters"][0]
    # Character Data
    id_ = data["id"]
    name = data["name"]["full"]
    native = data["name"]["native"]
    img = data["image"]["large"]
    # NOTE(review): site_url is bound but not interpolated below — the
    # "Visit Website" text likely lost its link markup in an earlier edit.
    site_url = data["siteUrl"]
    isfav = data.get("isFavourite")
    # Collect unique voice actors across all media appearances,
    # preserving first-seen order.
    va = []
    for i in data['media']['edges']:
        for ii in i['voiceActors']:
            if f"[{ii['name']['full']}]({ii['siteUrl']})" not in va:
                va.append(f"[{ii['name']['full']}]({ii['siteUrl']})")
    # Pop the last actor so the list reads "a, b and c".
    lva = None
    if len(va)>1:
        lva = va.pop()
    sva = (
        f"\n**Voice Actors:** {', '.join(va)}"
        +f"{' and '+lva if lva is not None else ''}\n" if va!= []
        else ""
    )
    cap_text = f"""
__{native}__
(`{name}`)
**ID:** {id_}
{sva}
Visit Website"""
    hasNextPage = result["data"]["Page"]["pageInfo"]["hasNextPage"]
    return img, [cap_text, hasNextPage], [id_, isfav]


async def browse_(qry: str):
    """List up to 20 TV/MOVIE/ONA titles for the current season.

    `qry` is 'popular', 'trending' or 'upcoming'; 'upcoming' shifts to the
    next season and 'trending' switches the sort order.
    """
    s, y = season_()
    sort = "POPULARITY_DESC"
    if qry == 'upcoming':
        s, y = season_(True)
    if qry == 'trending':
        sort = "TRENDING_DESC"
    vars_ = {"s": s, "y": y, "sort": sort}
    result = await return_json_senpai(BROWSE_QUERY, vars_)
    data = result["data"]["Page"]["media"]
    ls = []
    for i in data:
        if i['format'] in ['TV', 'MOVIE', 'ONA']:
            ls.append('• `' + i['title']['romaji'] + '`')
    out = f'{qry.capitalize()} animes in {s} {y}:\n\n'
    return out + "\n".join(ls[:20])


async def get_manga(
    qdb, page, auth: bool = False, user: int = None, cid: int = None
):
    """Fetch one manga result (page `page`) from AniList.

    `qdb` keys into MANGA_DB for the stored search string; `cid` selects
    per-chat UI settings. Returns (cover_url, [caption, has_next_page, url],
    [media_id, in_list, list_entry_id, is_favourite, is_adult_str]), or a
    single-element list with an error message when nothing matched.
    """
    vars_ = {"search": MANGA_DB[qdb], "asHtml": True, "page": page}
    result = await return_json_senpai(
        MANGA_QUERY, vars_, auth=auth, user=user
    )
    if len(result['data']['Page']['media'])==0:
        return [f"No results Found"]
    data = result["data"]["Page"]["media"][0]

    # Data of all fields in returned json
    # pylint: disable=possibly-unused-variable
    idm = data.get("id")
    romaji = data["title"]["romaji"]
    english = data["title"]["english"]
    native = data["title"]["native"]
    status = data.get("status")
    # NOTE(review): this assumes description is never null in the API
    # response; a missing synopsis would raise TypeError here — confirm.
    synopsis = data.get("description")
    description = synopsis[:500]
    description_s = ""
    # Long synopses are truncated with a deep-link to the full text.
    if len(synopsis) > 500:
        description += f"..."
        description_s = (
            f"[Click for more info](https://t.me/{BOT_NAME.replace('@', '')}"
            +f"/?start=des_ANI_{idm}_desc)"
        )
    volumes = data.get("volumes")
    chapters = data.get("chapters")
    score = data.get("averageScore")
    url = data.get("siteUrl")
    format_ = data.get("format")
    country = data.get("countryOfOrigin")
    source = data.get("source")
    c_flag = cflag(country)
    isfav = data.get("isFavourite")
    adult = data.get("isAdult")
    fav = ", in Favourites" if isfav is True else ""
    in_ls = False
    in_ls_id = ""
    # Per-chat UI: bullet char and culture/language selector for labels.
    bl, cs = await uidata(cid)
    text = await get_ui_text(cs)
    # When authorised, show the user's own list status/score for this manga.
    user_data = ""
    if auth is True:
        in_list = data.get("mediaListEntry")
        if in_list is not None:
            in_ls = True
            in_ls_id = in_list['id']
            in_ls_stts = in_list['status']
            in_ls_score = (
                f" and scored {in_list['score']}" if in_list['score']!=0
                else ""
            )
            user_data = (
                f"{bl}**{text[4]}:** `{in_ls_stts}{fav}{in_ls_score}`\n"
            )
    name = f"""[{c_flag}]**{romaji}**
__{english}__
{native}"""
    if english is None:
        name = f"""[{c_flag}]**{romaji}**
{native}"""
    finals_ = f"{name}\n\n"
    finals_ += f"{bl}**ID:** `{idm}`\n"
    finals_ += f"{bl}**{text[6]}:** `{status}`\n"
finals_ += f"{bl}**{text[13]}:** `{volumes}`\n" 1570 | finals_ += f"{bl}**{text[14]}:** `{chapters}`\n" 1571 | finals_ += f"{bl}**{text[2]}:** `{score}`\n" 1572 | finals_ += f"{bl}**{text[1]}:** `{format_}`\n" 1573 | finals_ += f"{bl}**{text[0]}:** `{source}`\n" 1574 | finals_ += user_data 1575 | if os.environ.get("PREFERRED_LANGUAGE"): 1576 | description = tr.translate( 1577 | description, lang_tgt=os.environ.get("PREFERRED_LANGUAGE") 1578 | ) 1579 | findesc = '' if description == '' else f'`{description}`' 1580 | finals_ += f"\n**{text[12]}**: {findesc}\n\n{description_s}" 1581 | pic = f"https://img.anili.st/media/{idm}" 1582 | return pic, [ 1583 | finals_, result["data"]["Page"]["pageInfo"]["hasNextPage"], url 1584 | ], [ 1585 | idm, in_ls, in_ls_id, isfav, str(adult) 1586 | ] 1587 | 1588 | 1589 | async def get_airing(qry, ind: int, auth: bool = False, user: int = None): 1590 | vars_ = {"search": AIRING_DB[qry], "page": int(ind)} 1591 | result = await return_json_senpai(AIR_QUERY, vars_, auth=auth, user=user) 1592 | error = result.get("errors") 1593 | if error: 1594 | error_sts = error[0].get("message") 1595 | return [f"{error_sts}"] 1596 | try: 1597 | data = result["data"]["Page"]["media"][0] 1598 | except IndexError: 1599 | return ["No results Found"] 1600 | # Airing Details 1601 | mid = data.get("id") 1602 | romaji = data["title"]["romaji"] 1603 | english = data["title"]["english"] 1604 | status = data.get("status") 1605 | country = data.get("countryOfOrigin") 1606 | c_flag = cflag(country) 1607 | coverImg = f"https://img.anili.st/media/{mid}" 1608 | isfav = data.get("isFavourite") 1609 | adult = data.get("isAdult") 1610 | in_ls = False 1611 | in_ls_id = "" 1612 | user_data = "" 1613 | if auth is True: 1614 | in_list = data.get("mediaListEntry") 1615 | if in_list is not None: 1616 | in_ls = True 1617 | in_ls_id = in_list['id'] 1618 | in_ls_stts = in_list['status'] 1619 | user_data = f"**USER DATA:** `{in_ls_stts}`\n" 1620 | air_on = None 1621 | if 
data["nextAiringEpisode"]: 1622 | nextAir = data["nextAiringEpisode"]["timeUntilAiring"] 1623 | episode = data["nextAiringEpisode"]["episode"] 1624 | th = pos_no(episode) 1625 | air_on = make_it_rw(nextAir*1000) 1626 | title_ = english or romaji 1627 | out = f"[{c_flag}] **{title_}**" 1628 | out += f"\n\n**ID:** `{mid}`" 1629 | out += f"\n**Status:** `{status}`\n" 1630 | out += user_data 1631 | if air_on: 1632 | out += f"Airing Episode `{episode}{th}` in `{air_on}`" 1633 | site = data["siteUrl"] 1634 | return [ 1635 | coverImg, out 1636 | ], [ 1637 | site, result["data"]["Page"]["pageInfo"]["hasNextPage"] 1638 | ], [ 1639 | mid, in_ls, in_ls_id, isfav, str(adult) 1640 | ] 1641 | 1642 | 1643 | async def toggle_favourites(id_: int, media: str, user: int): 1644 | vars_ = {"id": int(id_)} 1645 | query = ( 1646 | ANIME_MUTATION if media=="ANIME" or media=="AIRING" 1647 | else CHAR_MUTATION if media=="CHARACTER" 1648 | else MANGA_MUTATION if media=="MANGA" 1649 | else STUDIO_MUTATION 1650 | ) 1651 | k = await return_json_senpai( 1652 | query=query, vars_=vars_, auth=True, user=int(user) 1653 | ) 1654 | try: 1655 | kek = k['data']['ToggleFavourite'] 1656 | return "ok" 1657 | except KeyError: 1658 | return "failed" 1659 | 1660 | 1661 | async def get_user(vars_, req, user, display_user = None): 1662 | query = USER_QRY if "user" in req else VIEWER_QRY 1663 | k = await return_json_senpai( 1664 | query=query, 1665 | vars_=vars_, 1666 | auth=False if "user" in req else True, 1667 | user=int(user) 1668 | ) 1669 | error = k.get("errors") 1670 | if error: 1671 | error_sts = error[0].get("message") 1672 | return [f"{error_sts}"] 1673 | 1674 | data = k['data']['User' if "user" in req else 'Viewer'] 1675 | anime = data['statistics']['anime'] 1676 | manga = data['statistics']['manga'] 1677 | stats = f""" 1678 | **Anime Stats**: 1679 | 1680 | Total Anime Watched: `{anime['count']}` 1681 | Total Episode Watched: `{anime['episodesWatched']}` 1682 | Total Time Spent: 
`{anime['minutesWatched']}` 1683 | Average Score: `{anime['meanScore']}` 1684 | 1685 | **Manga Stats**: 1686 | 1687 | Total Manga Read: `{manga['count']}` 1688 | Total Chapters Read: `{manga['chaptersRead']}` 1689 | Total Volumes Read: `{manga['volumesRead']}` 1690 | Average Score: `{manga['meanScore']}` 1691 | """ 1692 | btn = [] 1693 | if not "user" in req: 1694 | btn.append([ 1695 | InlineKeyboardButton( 1696 | "Favourites", 1697 | callback_data=f"myfavs_{data['id']}_yes_{display_user}" 1698 | ), 1699 | InlineKeyboardButton( 1700 | "Activity", 1701 | callback_data=f"myacc_{data['id']}_{display_user}" 1702 | ) 1703 | ]) 1704 | btn.append([ 1705 | InlineKeyboardButton( 1706 | "Profile", url=str(data['siteUrl']) 1707 | ) 1708 | ]) 1709 | return [ 1710 | f'https://img.anili.st/user/{data["id"]}?a={time.time()}', 1711 | stats, 1712 | InlineKeyboardMarkup(btn) 1713 | ] 1714 | 1715 | 1716 | async def update_anilist(id_, req, user, eid: int = None, status: str = None): 1717 | vars_ = {"id": int(id_), "status": status} 1718 | if req=="lsus": 1719 | vars_ = {"id": int(eid), "status": status} 1720 | if req=="dlt": 1721 | vars_ = {"id": int(eid)} 1722 | k = await return_json_senpai( 1723 | query=( 1724 | ANILIST_MUTATION if req=="lsas" 1725 | else ANILIST_MUTATION_UP if req=="lsus" 1726 | else ANILIST_MUTATION_DEL 1727 | ), 1728 | vars_=vars_, 1729 | auth=True, 1730 | user=int(user) 1731 | ) 1732 | try: 1733 | ( 1734 | k['data']['SaveMediaListEntry'] if req=="lsas" 1735 | else k['data']['UpdateMediaListEntries'] if req=="lsus" 1736 | else k["data"]['DeleteMediaListEntry'] 1737 | ) 1738 | return "ok" 1739 | except KeyError: 1740 | return "failed" 1741 | 1742 | 1743 | async def check_if_adult(id_): 1744 | vars_ = {"id": int(id_)} 1745 | k = await return_json_senpai(query=ISADULT, vars_=vars_, auth=False) 1746 | if str(k['data']['Media']['isAdult'])=="True": 1747 | return "True" 1748 | else: 1749 | return "False" 1750 | 1751 | #### END #### 1752 | 1753 | #### Jikanpy part #### 
async def get_scheduled(x: int = 9):
    """Return (text, weekday) listing animes scheduled for a weekday.

    `x` is 0-6 for Monday-Sunday; the sentinel 9 (the default) means
    "today". The weekday is resolved once so the text and the returned
    index cannot disagree across a midnight rollover.
    """
    base_url = "https://api.jikan.moe/v4/schedules/"
    weekday = x if x != 9 else datetime.now().weekday()
    day = str(day_(weekday)).lower()
    out = f"Scheduled animes for {day.capitalize()}\n\n"
    # NOTE(review): blocking requests call inside an async function —
    # consider the project's aiohttp session instead.
    data = requests.get(base_url + day).json()
    for entry in data["data"]:
        # Prefer the first entry of the `titles` list; fall back to the
        # legacy flat `title` field when it is empty.
        try:
            title = entry['titles'][0]['title']
        except IndexError:
            title = entry['title']
        out += f"• `{title}`\n"
    return out, weekday

#### END ####

#### chiaki part ####

def get_wols(x: str):
    """Search chiaki.vercel.app and return [[group_id, name], ...] pairs."""
    data = requests.get(
        f"https://chiaki.vercel.app/search2?query={x}"
    ).json()
    return [[data[name], name] for name in data]


def get_wo(x: int, page: int):
    """Return (text, total) for one 50-entry page of a watch order.

    Fixed: the original popped `50*page` items off the front one at a
    time, which raised IndexError whenever the requested page started
    beyond the end of the list; slicing yields identical output for valid
    pages and an empty page otherwise.
    """
    data = requests.get(
        f"https://chiaki.vercel.app/get2?group_id={x}"
    ).json()
    msg = "Watch order for the given query is:\n\n"
    entries = [f"{i['index']}. `{i['name']}`\n" for i in data]
    total = len(entries)
    start = 50 * page
    return msg + "".join(entries[start:start + 50]), total

#### END ####

##### Anime Fillers Part #####

def search_filler(query):
    """Case-insensitively match `query` against the animefillerlist index.

    Returns {show_title: url_slug} preserving the site's listing order.
    """
    html = requests.get("https://www.animefillerlist.com/shows").text
    soup = BeautifulSoup(html, "html.parser")
    # Build the full title -> slug index from every show group first,
    # then filter, mirroring the site's ordering.
    index = {}
    for group in soup.findAll("div", attrs={"class": "Group"}):
        for item in group.findAll("li"):
            index[item.text] = item.a["href"].split("/")[-1]
    needle = query.lower()
    return {
        title: slug for title, slug in index.items()
        if needle in title.lower()
    }


def parse_filler(filler_id):
    """Scrape episode/filler breakdowns for one show from animefillerlist.

    Returns a dict with keys filler_id, total_ep, mixed_ep, filler_ep,
    ac_ep; absent categories stay None. Returns None when the page's
    "Condensed" section has an unexpected number of episode spans.

    The page shows between one and four <span class="Episodes"> lists;
    which categories are present depends on the count (e.g. with two
    spans the second one is fillers, not mixed episodes).
    """
    url = "https://www.animefillerlist.com/shows/" + filler_id
    html = requests.get(url).text
    soup = BeautifulSoup(html, "html.parser")
    div = soup.find("div", attrs={"id": "Condensed"})
    all_ep = div.find_all("span", attrs={"class": "Episodes"})
    # Positional meaning of the spans for each possible span count.
    keys_by_count = {
        1: ("total_ep",),
        2: ("total_ep", "filler_ep"),
        3: ("total_ep", "mixed_ep", "filler_ep"),
        4: ("total_ep", "mixed_ep", "filler_ep", "ac_ep"),
    }
    keys = keys_by_count.get(len(all_ep))
    if keys is None:
        # Same outcome as the original's implicit fall-through.
        return None
    dict_ = {
        "filler_id": filler_id,
        "total_ep": None,
        "mixed_ep": None,
        "filler_ep": None,
        "ac_ep": None,
    }
    for key, span in zip(keys, all_ep):
        dict_[key] = ", ".join(a.text for a in span.findAll("a"))
    return dict_


##### END #####