├── web └── __init__.py ├── bot ├── helper │ ├── __init__.py │ ├── ext_utils │ │ ├── __init__.py │ │ ├── exceptions.py │ │ ├── bulk_links.py │ │ ├── links_utils.py │ │ ├── telegraph_helper.py │ │ └── shortener_utils.py │ ├── listeners │ │ ├── __init__.py │ │ ├── direct_listener.py │ │ ├── jdownloader_listener.py │ │ └── nzb_listener.py │ ├── mirror_leech_utils │ │ ├── __init__.py │ │ ├── download_utils │ │ │ ├── __init__.py │ │ │ ├── mega_download.py │ │ │ ├── gd_download.py │ │ │ ├── direct_downloader.py │ │ │ ├── aria2_download.py │ │ │ ├── direct_link_generator_license.md │ │ │ └── rclone_download.py │ │ ├── gdrive_utils │ │ │ ├── __init__.py │ │ │ ├── delete.py │ │ │ └── count.py │ │ ├── rclone_utils │ │ │ ├── __init__.py │ │ │ └── serve.py │ │ ├── status_utils │ │ │ ├── __init__.py │ │ │ ├── rclone_status.py │ │ │ ├── yt_dlp_status.py │ │ │ ├── queue_status.py │ │ │ ├── uphoster_status.py │ │ │ ├── telegram_status.py │ │ │ ├── direct_status.py │ │ │ ├── mega_status.py │ │ │ ├── yt_status.py │ │ │ ├── gdrive_status.py │ │ │ ├── ffmpeg_status.py │ │ │ ├── sevenz_status.py │ │ │ ├── metadata_status.py │ │ │ ├── qbit_status.py │ │ │ ├── jdownloader_status.py │ │ │ ├── aria2_status.py │ │ │ └── nzb_status.py │ │ ├── uphoster_utils │ │ │ └── multi_upload.py │ │ └── youtube_utils │ │ │ └── youtube_helper.py │ ├── telegram_helper │ │ ├── __init__.py │ │ ├── button_build.py │ │ ├── filters.py │ │ ├── bot_commands.py │ │ └── tg_utils.py │ └── languages │ │ ├── en.py │ │ ├── bn.py │ │ └── __init__.py ├── version.py ├── modules │ ├── gd_delete.py │ ├── shell.py │ ├── gd_count.py │ ├── speedtest.py │ ├── help.py │ ├── __init__.py │ ├── force_start.py │ ├── exec.py │ ├── gd_search.py │ ├── chat_permission.py │ ├── nzb_search.py │ └── mediainfo.py ├── __init__.py ├── core │ ├── jdownloader_booter.py │ ├── tg_client.py │ └── torrent_manager.py └── __main__.py ├── start.sh ├── captain-definition ├── sabnzbdapi ├── __init__.py ├── exception.py ├── bound_methods.py └── requests.py ├── docker-compose.yml ├── gen_scripts ├── requirements-cli.txt ├── gen_pyro_session.py ├── generate_drive_token.py ├── driveid.py └── add_to_team_drive.py ├── .gitignore ├── Dockerfile ├── cron_boot.py ├── requirements.txt ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ └── ruff_format.yml ├── setpkgs.sh ├── myjd ├── const.py └── __init__.py ├── qBittorrent └── config │ └── qBittorrent.conf ├── plugins └── speedtest_plugin.py ├── update.py └── config_sample.py /web/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/ext_utils/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/listeners/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- 
/bot/helper/telegram_helper/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/download_utils/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/gdrive_utils/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/rclone_utils/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /start.sh: -------------------------------------------------------------------------------- 1 | source .venv/bin/activate && python3 update.py && python3 -m bot 2 | -------------------------------------------------------------------------------- /captain-definition: -------------------------------------------------------------------------------- 1 | { 2 | "schemaVersion": 2, 3 | "dockerfilePath": "./Dockerfile" 4 | } 5 | -------------------------------------------------------------------------------- /sabnzbdapi/__init__.py: -------------------------------------------------------------------------------- 1 | from sabnzbdapi.requests import SabnzbdClient 2 | 3 | __all__ = ["SabnzbdClient"] 4 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | app: 3 | build: . 4 | command: bash start.sh 5 | restart: on-failure 6 | network_mode: "host" 7 | -------------------------------------------------------------------------------- /gen_scripts/requirements-cli.txt: -------------------------------------------------------------------------------- 1 | oauth2client 2 | google-api-python-client 3 | progress 4 | progressbar2 5 | httplib2shim 6 | google_auth_oauthlib 7 | pyrotgfork 8 | tgcrypto 9 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/download_utils/mega_download.py: -------------------------------------------------------------------------------- 1 | from ...listeners.mega_listener import MegaAppListener 2 | 3 | 4 | async def add_mega_download(listener, path): 5 | mega_listener = MegaAppListener(listener) 6 | await mega_listener.download(path) 7 | -------------------------------------------------------------------------------- /bot/helper/languages/en.py: -------------------------------------------------------------------------------- 1 | START_MSG = """ 2 | This bot can mirror from links|tgfiles|torrents|nzb|rclone-cloud to any rclone cloud, Google Drive, or to Telegram.
3 | Type /{cmd} to get a list of available commands. 4 | """ 5 | START_BUTTON1 = "Git Repo" 6 | START_BUTTON2 = "Updates" 7 | -------------------------------------------------------------------------------- /bot/helper/languages/bn.py: -------------------------------------------------------------------------------- 1 | START_MSG = """ 2 | এই বট লিংক | টেলিগ্রাম ফাইল | টরেন্ট | NZB | Rclone-ক্লাউড থেকে যেকোনো Rclone ক্লাউড, গুগল ড্রাইভ বা টেলিগ্রামে মিরর করতে পারে। 3 | উপলব্ধ কমান্ডের তালিকা পেতে /{cmd} লিখুন। 4 | """ 5 | START_BUTTON1 = "গিট রিপো" 6 | START_BUTTON2 = "আপডেট" 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .venv/* 2 | config.py 3 | token.pickle 4 | rclone.conf 5 | .netrc 6 | log.txt 7 | cfg.zip 8 | accounts/* 9 | thumbnails/* 10 | rclone/* 11 | tokens/* 12 | cookies/* 13 | mediainfo/* 14 | sabnzbd/* 15 | list_drives.txt 16 | shortener.txt 17 | cookies.txt 18 | downloads/* 19 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mysterysd/wzmlx:v3 2 | 3 | WORKDIR /usr/src/app 4 | 5 | RUN chmod 777 /usr/src/app 6 | RUN uv venv --system-site-packages 7 | 8 | COPY requirements.txt . 9 | RUN uv pip install --no-cache-dir -r requirements.txt 10 | 11 | COPY . . 12 | 13 | CMD ["bash", "start.sh"] 14 | -------------------------------------------------------------------------------- /bot/version.py: -------------------------------------------------------------------------------- 1 | def get_version() -> str: 2 | """ 3 | Returns the version details. Do not interfere with this! 4 | 5 | :return: The version details in the format 'vMAJOR.MINOR.PATCH-STATE' 6 | :rtype: str 7 | """ 8 | MAJOR = "3" 9 | MINOR = "1" 10 | PATCH = "0" 11 | STATE = "x" 12 | return f"v{MAJOR}.{MINOR}.{PATCH}-{STATE}" 13 | 14 | 15 | if __name__ == "__main__": 16 | print(get_version()) 17 | -------------------------------------------------------------------------------- /bot/helper/ext_utils/exceptions.py: -------------------------------------------------------------------------------- 1 | class DirectDownloadLinkException(Exception): 2 | """No method found for extracting a direct download link from the HTTP link""" 3 | 4 | pass 5 | 6 | 7 | class NotSupportedExtractionArchive(Exception): 8 | """The archive format the user is trying to extract is not supported""" 9 | 10 | pass 11 | 12 | 13 | class RssShutdownException(Exception): 14 | """This exception should be raised when shutdown is called to stop the monitor""" 15 | 16 | pass 17 | 18 | 19 | class TgLinkException(Exception): 20 | """No access granted for this chat""" 21 | 22 | pass 23 | -------------------------------------------------------------------------------- /cron_boot.py: -------------------------------------------------------------------------------- 1 | from time import sleep 2 | from requests import get as rget 3 | from os import getenv 4 | from logging import error as logerror 5 | 6 | BASE_URL = getenv("BASE_URL", None) 7 | try: 8 | if len(BASE_URL) == 0: 9 | raise TypeError 10 | BASE_URL = BASE_URL.rstrip("/") 11 | except TypeError: 12 | BASE_URL = None 13 | 14 | PORT = getenv("PORT", None) 15 | if PORT is not None and BASE_URL is not None: 16 | while True: 17 | try: 18 | rget(BASE_URL).status_code 19 | sleep(600) 20 | except Exception as e: 21 | logerror(f"cron_boot.py: {e}") 22 | sleep(2) 23
| continue 24 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | aiofiles 2 | aiohttp 3 | aioshutil 4 | anytree 5 | apscheduler 6 | aioaria2 7 | aioqbt 8 | imdbinfo 9 | cloudscraper 10 | dnspython 11 | fastapi 12 | feedparser 13 | google-api-python-client 14 | google-auth-httplib2 15 | google-auth-oauthlib 16 | gunicorn 17 | httpx 18 | langcodes 19 | language-data 20 | jinja2 21 | lxml 22 | motor 23 | natsort 24 | par2cmdline-turbo 25 | pillow 26 | psutil 27 | pycountry 28 | pymongo 29 | pyrotgfork 30 | python-magic 31 | pytz 32 | qbittorrent-api 33 | requests 34 | speedtest-cli 35 | telegraph 36 | tenacity 37 | tgcrypto 38 | urllib3 39 | uvicorn 40 | uvloop==0.21.0 41 | xattr 42 | yt-dlp[default,curl-cffi] 43 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Used this command with this link or file. 16 | 2. Before, after, or while uploading... 17 | 3. Check logs 18 | 19 | **Expected behavior** 20 | A clear and concise description of what you expected to happen. 21 | 22 | **Screenshots** 23 | If applicable, add screenshots to help explain your problem. 24 | 25 | **Additional context** 26 | Add any other context about the problem here. 27 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here.
21 | -------------------------------------------------------------------------------- /sabnzbdapi/exception.py: -------------------------------------------------------------------------------- 1 | from httpx import RequestError, DecodingError 2 | from json import JSONDecodeError 3 | 4 | 5 | class APIError(Exception): 6 | """Base error for all exceptions from this Client.""" 7 | 8 | 9 | class APIConnectionError(RequestError, APIError): 10 | """Base class for all communications errors including HTTP errors.""" 11 | 12 | 13 | class APIResponseError(APIError, JSONDecodeError): 14 | """Base class for all errors from the API response.""" 15 | 16 | 17 | class LoginFailed(DecodingError, APIConnectionError, JSONDecodeError): 18 | """This can technically be raised with any request since log in may be attempted for 19 | any request and could fail.""" 20 | 21 | 22 | class NotLoggedIn(APIConnectionError): 23 | """Raised when login is not successful.""" 24 | -------------------------------------------------------------------------------- /bot/modules/gd_delete.py: -------------------------------------------------------------------------------- 1 | from .. import LOGGER 2 | from ..helper.ext_utils.bot_utils import sync_to_async, new_task 3 | from ..helper.ext_utils.links_utils import is_gdrive_link 4 | from ..helper.mirror_leech_utils.gdrive_utils.delete import GoogleDriveDelete 5 | from ..helper.telegram_helper.message_utils import auto_delete_message, send_message 6 | 7 | 8 | @new_task 9 | async def delete_file(_, message): 10 | args = message.text.split() 11 | user = message.from_user or message.sender_chat 12 | if len(args) > 1: 13 | link = args[1] 14 | elif reply_to := message.reply_to_message: 15 | link = reply_to.text.split(maxsplit=1)[0].strip() 16 | else: 17 | link = "" 18 | if is_gdrive_link(link): 19 | LOGGER.info(link) 20 | msg = await sync_to_async(GoogleDriveDelete().deletefile, link, user.id) 21 | else: 22 | msg = ( 23 | "Send a Gdrive link along with the command or reply to the link with the command" 24 | ) 25 | reply_message = await send_message(message, msg) 26 | await auto_delete_message(message, reply_message) 27 | -------------------------------------------------------------------------------- /setpkgs.sh: -------------------------------------------------------------------------------- 1 | ARIA2C=$1 2 | SABNZBDPLUS=$2 3 | 4 | tracker_list=$(curl -Ns https://ngosang.github.io/trackerslist/trackers_all_http.txt | awk '$0' | tr '\n' ',') 5 | $ARIA2C --allow-overwrite=true --auto-file-renaming=true --bt-enable-lpd=true --bt-detach-seed-only=true \ 6 | --bt-remove-unselected-file=true --bt-tracker="[$tracker_list]" --bt-max-peers=0 --enable-rpc=true \ 7 | --rpc-max-request-size=1024M --max-connection-per-server=10 --max-concurrent-downloads=1000 --split=10 \ 8 | --seed-ratio=0 --check-integrity=true --continue=true --daemon=true --disk-cache=40M --force-save=true \ 9 | --min-split-size=10M --follow-torrent=mem --check-certificate=false --optimize-concurrent-downloads=true \ 10 | --http-accept-gzip=true --max-file-not-found=0 --max-tries=20 --peer-id-prefix=-qB4520- --reuse-uri=true \ 11 | --content-disposition-default-utf8=true --user-agent=Wget/1.12 --peer-agent=qBittorrent/4.5.2 --quiet=true \ 12 | --summary-interval=0 --max-upload-limit=1K 13 | cpulimit -l 20 -- $SABNZBDPLUS -f sabnzbd/SABnzbd.ini -s :::8070 -b 0 -d -c -l 0 --console 14 | -------------------------------------------------------------------------------- /bot/modules/shell.py:
-------------------------------------------------------------------------------- 1 | from io import BytesIO 2 | 3 | from .. import LOGGER 4 | from ..helper.ext_utils.bot_utils import cmd_exec, new_task 5 | from ..helper.telegram_helper.message_utils import send_message, send_file 6 | 7 | 8 | @new_task 9 | async def run_shell(_, message): 10 | cmd = message.text.split(maxsplit=1) 11 | if len(cmd) == 1: 12 | await send_message(message, "No command to execute was given.") 13 | return 14 | cmd = cmd[1] 15 | stdout, stderr, _ = await cmd_exec(cmd, shell=True) 16 | reply = "" 17 | if len(stdout) != 0: 18 | reply += f"*Stdout*\n{stdout}\n" 19 | LOGGER.info(f"Shell - {cmd} - {stdout}") 20 | if len(stderr) != 0: 21 | reply += f"*Stderr*\n{stderr}" 22 | LOGGER.error(f"Shell - {cmd} - {stderr}") 23 | if len(reply) > 3000: 24 | with BytesIO(str.encode(reply)) as out_file: 25 | out_file.name = "shell_output.txt" 26 | await send_file(message, out_file) 27 | elif len(reply) != 0: 28 | await send_message(message, reply) 29 | else: 30 | await send_message(message, "No Reply") 31 | -------------------------------------------------------------------------------- /sabnzbdapi/bound_methods.py: -------------------------------------------------------------------------------- 1 | class SubFunctions: 2 | async def check_login(self): 3 | res = await self.get_config("servers") 4 | return res["config"] or False 5 | 6 | async def add_server(self, server: dict): 7 | """server = { 8 | "name": "main", 9 | "displayname": "main", 10 | "host": "", 11 | "port": 5126, 12 | "timeout": 60, 13 | "username": "", 14 | "password": "", 15 | "connections": 8, 16 | "ssl": 1, 17 | "ssl_verify": 2, 18 | "ssl_ciphers": "", 19 | "enable": 1, 20 | "required": 0, 21 | "optional": 0, 22 | "retention": 0, 23 | "send_group": 0, 24 | "priority": 0, 25 | }""" 26 | return await self.set_special_config("servers", server) 27 | 28 | async def create_category(self, name: str, dir: str): 29 | return await self.set_special_config("categories", {"name": name, "dir": dir}) 30 | 31 | async def delete_category(self, name: str): 32 | return await self.delete_config("categories", name) 33 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/rclone_status.py: -------------------------------------------------------------------------------- 1 | from ...ext_utils.status_utils import MirrorStatus, EngineStatus 2 | 3 | 4 | class RcloneStatus: 5 | def __init__(self, listener, obj, gid, status): 6 | self._obj = obj 7 | self._gid = gid 8 | self._status = status 9 | self.listener = listener 10 | self.engine = EngineStatus().STATUS_RCLONE 11 | 12 | def gid(self): 13 | return self._gid 14 | 15 | def progress(self): 16 | return self._obj.percentage 17 | 18 | def speed(self): 19 | return self._obj.speed 20 | 21 | def name(self): 22 | return self.listener.name 23 | 24 | def size(self): 25 | return self._obj.size 26 | 27 | def eta(self): 28 | return self._obj.eta 29 | 30 | def status(self): 31 | if self._status == "dl": 32 | return MirrorStatus.STATUS_DOWNLOAD 33 | elif self._status == "up": 34 | return MirrorStatus.STATUS_UPLOAD 35 | else: 36 | return MirrorStatus.STATUS_CLONE 37 | 38 | def processed_bytes(self): 39 | return self._obj.transferred_size 40 | 41 | def task(self): 42 | return self._obj 43 | -------------------------------------------------------------------------------- /.github/workflows/ruff_format.yml: -------------------------------------------------------------------------------- 1 
| name: Format Code via Ruff 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | branch: 7 | description: 'Branch to format' 8 | required: true 9 | default: 'wzv3' 10 | type: string 11 | 12 | jobs: 13 | code-format: 14 | permissions: 15 | contents: write 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - name: Checkout Repo 20 | uses: actions/checkout@v4 21 | with: 22 | ref: ${{ github.event.inputs.branch }} 23 | 24 | - name: Set up Python3 25 | uses: actions/setup-python@v4 26 | 27 | - name: Install ruff Linter 28 | run: pip install ruff 29 | 30 | - name: Run ruff to format code 31 | run: | 32 | ruff check . --exit-zero 33 | ruff format . 34 | git add -u 35 | 36 | - name: Commit and Push Changes 37 | run: | 38 | git config --global user.name "github-actions[bot]" 39 | git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com" 40 | if git diff-index --quiet HEAD --; then 41 | echo "No changes to commit." 42 | else 43 | git commit -m "style: Auto Ruff Formatter" 44 | git push origin HEAD:${{ github.event.inputs.branch }} 45 | fi 46 | -------------------------------------------------------------------------------- /gen_scripts/gen_pyro_session.py: -------------------------------------------------------------------------------- 1 | try: 2 | from pyrogram import Client 3 | except Exception: 4 | print( 5 | "\nInstall Pyrogram and try again: pip3 install -r requirements.txt --break-system-packages" 6 | ) 7 | exit(1) 8 | 9 | print( 10 | "Get your app credentials from https://my.telegram.org/apps and enter them below." 11 | ) 12 | 13 | API_KEY = int(input("Enter TELEGRAM API KEY. Ex: 12345678: ")) 14 | API_HASH = input("Enter TELEGRAM API HASH. Ex: 1a2b3c4d5e6f: ") 15 | PHONE_NO = input( 16 | "Enter your Telegram phone number including country code. Ex: +91xxxxxxxxxx: " 17 | ) 18 | 19 | with Client( 20 | name="WZUser", 21 | in_memory=True, 22 | api_id=API_KEY, 23 | api_hash=API_HASH, 24 | phone_number=PHONE_NO, 25 | app_version="@WZML_X User Session", 26 | device_model="@WZML_X Bot V3", 27 | system_version="@WZML_X Pyro Server", 28 | ) as user: 29 | user.send_message( 30 | "me", 31 | "**PyrogramV2 Session String**:\n\n" 32 | f"||{user.export_session_string()}||\n\n" 33 | "**Do not share this with anyone, or your account can be hacked!**\n\n" 34 | "**Generated by @WZML_X Manual Script.**", 35 | ) 36 | print( 37 | f"User's (@{user.me.username}) Pyrogram Session String has been sent to " 38 | "Saved Messages of your Telegram account!"
39 | ) 40 | -------------------------------------------------------------------------------- /bot/helper/ext_utils/bulk_links.py: -------------------------------------------------------------------------------- 1 | from aiofiles import open as aiopen 2 | from aiofiles.os import remove 3 | 4 | 5 | def filter_links(links_list, bulk_start, bulk_end): 6 | start = bulk_start if bulk_start > 0 else None 7 | end = bulk_end if bulk_end > 0 else None 8 | return links_list[start:end] 9 | 10 | 11 | def get_links_from_message(text): 12 | links_list = text.split("\n") 13 | return [item.strip() for item in links_list if len(item) != 0] 14 | 15 | 16 | async def get_links_from_file(message): 17 | links_list = [] 18 | text_file_dir = await message.download() 19 | async with aiopen(text_file_dir, "r+") as f: 20 | lines = await f.readlines() 21 | links_list.extend(line.strip() for line in lines if len(line) != 0) 22 | await remove(text_file_dir) 23 | return links_list 24 | 25 | 26 | async def extract_bulk_links(message, bulk_start, bulk_end): 27 | bulk_start = int(bulk_start) 28 | bulk_end = int(bulk_end) 29 | links_list = [] 30 | if reply_to := message.reply_to_message: 31 | if (file_ := reply_to.document) and (file_.mime_type == "text/plain"): 32 | links_list = await get_links_from_file(reply_to) 33 | elif text := reply_to.text: 34 | links_list = get_links_from_message(text) 35 | return filter_links(links_list, bulk_start, bulk_end) if links_list else links_list 36 | -------------------------------------------------------------------------------- /bot/helper/languages/__init__.py: -------------------------------------------------------------------------------- 1 | from importlib import import_module 2 | from os import listdir 3 | 4 | from ...core.config_manager import Config 5 | 6 | LOCALES_DIR = "bot/helper/languages" 7 | 8 | 9 | class Language: 10 | _modules = {} 11 | _user_langs = {} 12 | 13 | def __init__(self, lang_code=None, user_id=None): 14 | self.load_translations() 15 | lang_code = lang_code or Config.DEFAULT_LANG 16 | 17 | if user_id: 18 | self._user_langs[user_id] = lang_code 19 | self.lang_code = self._user_langs.get( 20 | user_id, lang_code if lang_code in self._modules else Config.DEFAULT_LANG 21 | ) 22 | 23 | @classmethod 24 | def load_translations(cls): 25 | if cls._modules: 26 | return cls._modules 27 | 28 | cls._modules = {} 29 | for file in listdir(LOCALES_DIR): 30 | if file.endswith(".py") and file != "__init__.py": 31 | lang_code = file.split(".")[0] 32 | cls._modules[lang_code] = import_module( 33 | f"bot.helper.languages.{lang_code}" 34 | ) 35 | return cls._modules 36 | 37 | def __getattr__(self, key): 38 | lang_module = self._modules.get( 39 | self.lang_code, self._modules[Config.DEFAULT_LANG] 40 | ) 41 | return getattr( 42 | lang_module, key, getattr(self._modules[Config.DEFAULT_LANG], key, key) 43 | ) 44 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/yt_dlp_status.py: -------------------------------------------------------------------------------- 1 | from ...ext_utils.status_utils import ( 2 | MirrorStatus, 3 | EngineStatus, 4 | get_readable_file_size, 5 | get_readable_time, 6 | ) 7 | 8 | 9 | class YtDlpStatus: 10 | def __init__(self, listener, obj, gid): 11 | self._obj = obj 12 | self._gid = gid 13 | self.listener = listener 14 | self.engine = EngineStatus().STATUS_YTDLP 15 | 16 | def gid(self): 17 | return self._gid 18 | 19 | def processed_bytes(self): 20 | return 
get_readable_file_size(self._obj.downloaded_bytes) 21 | 22 | def size(self): 23 | return get_readable_file_size(self._obj.size) 24 | 25 | def status(self): 26 | return MirrorStatus.STATUS_DOWNLOAD 27 | 28 | def name(self): 29 | return self.listener.name 30 | 31 | def progress(self): 32 | return f"{round(self._obj.progress, 2)}%" 33 | 34 | def speed(self): 35 | return f"{get_readable_file_size(self._obj.download_speed)}/s" 36 | 37 | def eta(self): 38 | if self._obj.eta != "-": 39 | return get_readable_time(self._obj.eta) 40 | try: 41 | seconds = ( 42 | self._obj.size - self._obj.downloaded_bytes 43 | ) / self._obj.download_speed 44 | return get_readable_time(seconds) 45 | except Exception: 46 | return "-" 47 | 48 | def task(self): 49 | return self._obj 50 | -------------------------------------------------------------------------------- /bot/helper/telegram_helper/button_build.py: -------------------------------------------------------------------------------- 1 | from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup 2 | 3 | 4 | class ButtonMaker: 5 | def __init__(self): 6 | self.buttons = { 7 | "default": [], 8 | "header": [], 9 | "f_body": [], 10 | "l_body": [], 11 | "footer": [], 12 | } 13 | 14 | def url_button(self, key, link, position=None): 15 | self.buttons[position if position in self.buttons else "default"].append( 16 | InlineKeyboardButton(text=key, url=link) 17 | ) 18 | 19 | def data_button(self, key, data, position=None): 20 | self.buttons[position if position in self.buttons else "default"].append( 21 | InlineKeyboardButton(text=key, callback_data=data) 22 | ) 23 | 24 | def build_menu(self, b_cols=1, h_cols=8, fb_cols=2, lb_cols=2, f_cols=8): 25 | def chunk(lst, n): 26 | return [lst[i : i + n] for i in range(0, len(lst), n)] 27 | 28 | menu = chunk(self.buttons["default"], b_cols) 29 | menu = ( 30 | chunk(self.buttons["header"], h_cols) if self.buttons["header"] else [] 31 | ) + menu 32 | for key, cols in (("f_body", fb_cols), ("l_body", lb_cols), ("footer", f_cols)): 33 | if self.buttons[key]: 34 | menu += chunk(self.buttons[key], cols) 35 | return InlineKeyboardMarkup(menu) 36 | 37 | def reset(self): 38 | for key in self.buttons: 39 | self.buttons[key].clear() 40 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/gdrive_utils/delete.py: -------------------------------------------------------------------------------- 1 | from googleapiclient.errors import HttpError 2 | from logging import getLogger 3 | 4 | from ....helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper 5 | 6 | LOGGER = getLogger(__name__) 7 | 8 | 9 | class GoogleDriveDelete(GoogleDriveHelper): 10 | def __init__(self): 11 | super().__init__() 12 | 13 | def deletefile(self, link, user_id): 14 | try: 15 | file_id = self.get_id_from_url(link, user_id) 16 | except (KeyError, IndexError): 17 | return "Google Drive ID could not be found in the provided link" 18 | self.service = self.authorize() 19 | msg = "" 20 | try: 21 | self.service.files().delete( 22 | fileId=file_id, supportsAllDrives=True 23 | ).execute() 24 | msg = "Successfully deleted" 25 | LOGGER.info(f"Delete Result: {msg}") 26 | except HttpError as err: 27 | if "File not found" in str(err) or "insufficientFilePermissions" in str( 28 | err 29 | ): 30 | if not self.alt_auth and self.use_sa: 31 | self.alt_auth = True 32 | self.use_sa = False 33 | LOGGER.error("File not found. 
Trying with token.pickle...") 34 | return self.deletefile(link, user_id) 35 | err = "File not found or insufficientFilePermissions!" 36 | LOGGER.error(f"Delete Result: {err}") 37 | msg = str(err) 38 | return msg 39 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/queue_status.py: -------------------------------------------------------------------------------- 1 | from .... import LOGGER 2 | from ...ext_utils.status_utils import get_readable_file_size, MirrorStatus, EngineStatus 3 | 4 | 5 | class QueueStatus: 6 | def __init__(self, listener, gid, status): 7 | self.listener = listener 8 | self._size = self.listener.size 9 | self._gid = gid 10 | self._status = status 11 | self.engine = EngineStatus().STATUS_QUEUE 12 | 13 | def gid(self): 14 | return self._gid 15 | 16 | def name(self): 17 | return self.listener.name 18 | 19 | def size(self): 20 | return get_readable_file_size(self._size) 21 | 22 | def status(self): 23 | if self._status == "dl": 24 | return MirrorStatus.STATUS_QUEUEDL 25 | return MirrorStatus.STATUS_QUEUEUP 26 | 27 | def processed_bytes(self): 28 | return 0 29 | 30 | def progress(self): 31 | return "0%" 32 | 33 | def speed(self): 34 | return "0B/s" 35 | 36 | def eta(self): 37 | return "-" 38 | 39 | def task(self): 40 | return self 41 | 42 | async def cancel_task(self): 43 | self.listener.is_cancelled = True 44 | LOGGER.info(f"Cancelling Queue{self._status}: {self.listener.name}") 45 | if self._status == "dl": 46 | await self.listener.on_download_error( 47 | "task has been removed from queue/download" 48 | ) 49 | else: 50 | await self.listener.on_upload_error( 51 | "task has been removed from queue/upload" 52 | ) 53 | -------------------------------------------------------------------------------- /myjd/const.py: -------------------------------------------------------------------------------- 1 | """Constants of the MyJDownloader API.""" 2 | 3 | # API Documentation: https://my.jdownloader.org/developers 4 | 5 | # MyJdownloader exception source 6 | EXCEPTION_MYJD = "MYJD" 7 | EXCEPTION_DEVICE = "DEVICE" 8 | 9 | # MyJdownloader exception type 10 | EXCEPTION_API_COMMAND_NOT_FOUND = "API_COMMAND_NOT_FOUND" 11 | EXCEPTION_API_INTERFACE_NOT_FOUND = "API_INTERFACE_NOT_FOUND" 12 | EXCEPTION_AUTH_FAILED = "AUTH_FAILED" 13 | EXCEPTION_BAD_PARAMETERS = "BAD_PARAMETERS" 14 | EXCEPTION_BAD_REQUEST = "BAD_REQUEST" 15 | EXCEPTION_CHALLENGE_FAILED = "CHALLENGE_FAILED" 16 | EXCEPTION_EMAIL_FORBIDDEN = "EMAIL_FORBIDDEN" 17 | EXCEPTION_EMAIL_INVALID = "EMAIL_INVALID" 18 | EXCEPTION_ERROR_EMAIL_NOT_CONFIRMED = "ERROR_EMAIL_NOT_CONFIRMED" 19 | EXCEPTION_FAILED = "FAILED" 20 | EXCEPTION_FILE_NOT_FOUND = "FILE_NOT_FOUND" 21 | EXCEPTION_INTERNAL_SERVER_ERROR = "INTERNAL_SERVER_ERROR" 22 | EXCEPTION_MAINTENANCE = "MAINTENANCE" 23 | EXCEPTION_METHOD_FORBIDDEN = "METHOD_FORBIDDEN" 24 | EXCEPTION_OFFLINE = "OFFLINE" 25 | EXCEPTION_OUTDATED = "OUTDATED" 26 | EXCEPTION_OVERLOAD = "OVERLOAD" 27 | EXCEPTION_SESSION = "SESSION" 28 | EXCEPTION_STORAGE_ALREADY_EXISTS = "STORAGE_ALREADY_EXISTS" 29 | EXCEPTION_STORAGE_INVALID_KEY = "STORAGE_INVALID_KEY" 30 | EXCEPTION_STORAGE_INVALID_STORAGEID = "STORAGE_INVALID_STORAGEID" 31 | EXCEPTION_STORAGE_KEY_NOT_FOUND = "STORAGE_KEY_NOT_FOUND" 32 | EXCEPTION_STORAGE_LIMIT_REACHED = "STORAGE_LIMIT_REACHED" 33 | EXCEPTION_STORAGE_NOT_FOUND = "STORAGE_NOT_FOUND" 34 | EXCEPTION_TOKEN_INVALID = "TOKEN_INVALID" 35 | EXCEPTION_TOO_MANY_REQUESTS = "TOO_MANY_REQUESTS" 36 | EXCEPTION_UNKNOWN = "UNKNOWN"
37 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/uphoster_status.py: -------------------------------------------------------------------------------- 1 | from ....helper.ext_utils.status_utils import ( 2 | MirrorStatus, 3 | EngineStatus, 4 | get_readable_file_size, 5 | get_readable_time, 6 | ) 7 | 8 | 9 | class UphosterStatus: 10 | def __init__(self, listener, obj, gid, status): 11 | self.listener = listener 12 | self._obj = obj 13 | self._size = self.listener.size 14 | self._gid = gid 15 | self._status = status 16 | self.engine = EngineStatus().STATUS_UPHOSTER 17 | 18 | def processed_bytes(self): 19 | return get_readable_file_size(self._obj.processed_bytes) 20 | 21 | def size(self): 22 | return get_readable_file_size(self._size) 23 | 24 | def status(self): 25 | if self._status == "up": 26 | return MirrorStatus.STATUS_UPLOAD 27 | return MirrorStatus.STATUS_DOWNLOAD 28 | 29 | def name(self): 30 | return self.listener.name 31 | 32 | def gid(self) -> str: 33 | return self._gid 34 | 35 | def progress_raw(self): 36 | try: 37 | return self._obj.processed_bytes / self._size * 100 38 | except ZeroDivisionError: 39 | return 0 40 | 41 | def progress(self): 42 | return f"{round(self.progress_raw(), 2)}%" 43 | 44 | def speed(self): 45 | return f"{get_readable_file_size(self._obj.speed)}/s" 46 | 47 | def eta(self): 48 | try: 49 | seconds = (self._size - self._obj.processed_bytes) / self._obj.speed 50 | return get_readable_time(seconds) 51 | except Exception: 52 | return "-" 53 | 54 | def task(self): 55 | return self._obj 56 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/telegram_status.py: -------------------------------------------------------------------------------- 1 | from ...ext_utils.status_utils import ( 2 | MirrorStatus, 3 | EngineStatus, 4 | get_readable_file_size, 5 | get_readable_time, 6 | ) 7 | 8 | 9 | class TelegramStatus: 10 | def __init__(self, listener, obj, gid, status, hyper=False): 11 | self.listener = listener 12 | self._obj = obj 13 | self._size = self.listener.size 14 | self._gid = gid 15 | self._status = status 16 | self.engine = EngineStatus().STATUS_TGRAM + (" (HyperDL)" if hyper else "") 17 | 18 | def processed_bytes(self): 19 | return get_readable_file_size(self._obj.processed_bytes) 20 | 21 | def size(self): 22 | return get_readable_file_size(self._size) 23 | 24 | def status(self): 25 | if self._status == "up": 26 | return MirrorStatus.STATUS_UPLOAD 27 | return MirrorStatus.STATUS_DOWNLOAD 28 | 29 | def name(self): 30 | return self.listener.name 31 | 32 | def progress(self): 33 | try: 34 | progress_raw = self._obj.processed_bytes / self._size * 100 35 | except ZeroDivisionError: 36 | progress_raw = 0 37 | return f"{round(progress_raw, 2)}%" 38 | 39 | def speed(self): 40 | return f"{get_readable_file_size(self._obj.speed)}/s" 41 | 42 | def eta(self): 43 | try: 44 | seconds = (self._size - self._obj.processed_bytes) / self._obj.speed 45 | return get_readable_time(seconds) 46 | except ZeroDivisionError: 47 | return "-" 48 | 49 | def gid(self): 50 | return self._gid 51 | 52 | def task(self): 53 | return self._obj 54 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/direct_status.py: -------------------------------------------------------------------------------- 1 | from ...ext_utils.status_utils import ( 2 | EngineStatus, 3 | MirrorStatus, 4 | 
get_readable_file_size, 5 | get_readable_time, 6 | ) 7 | 8 | 9 | class DirectStatus: 10 | def __init__(self, listener, obj, gid): 11 | self._gid = gid 12 | self._obj = obj 13 | self.listener = listener 14 | self.engine = EngineStatus().STATUS_ARIA2 15 | 16 | def gid(self): 17 | return self._gid 18 | 19 | def progress_raw(self): 20 | try: 21 | return self._obj.processed_bytes / self.listener.size * 100 22 | except Exception: 23 | return 0 24 | 25 | def progress(self): 26 | return f"{round(self.progress_raw(), 2)}%" 27 | 28 | def speed(self): 29 | return f"{get_readable_file_size(self._obj.speed)}/s" 30 | 31 | def name(self): 32 | return self.listener.name 33 | 34 | def size(self): 35 | return get_readable_file_size(self.listener.size) 36 | 37 | def eta(self): 38 | try: 39 | seconds = (self.listener.size - self._obj.processed_bytes) / self._obj.speed 40 | return get_readable_time(seconds) 41 | except Exception: 42 | return "-" 43 | 44 | def status(self): 45 | if ( 46 | self._obj.download_task 47 | and self._obj.download_task.get("status", "") == "waiting" 48 | ): 49 | return MirrorStatus.STATUS_QUEUEDL 50 | return MirrorStatus.STATUS_DOWNLOAD 51 | 52 | def processed_bytes(self): 53 | return get_readable_file_size(self._obj.processed_bytes) 54 | 55 | def task(self): 56 | return self._obj 57 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/mega_status.py: -------------------------------------------------------------------------------- 1 | from ...ext_utils.status_utils import ( 2 | EngineStatus, 3 | get_readable_file_size, 4 | get_readable_time, 5 | ) 6 | 7 | 8 | class MegaDownloadStatus: 9 | def __init__(self, listener, obj, gid, status=""): 10 | self.listener = listener 11 | self._obj = obj 12 | self._gid = gid 13 | self._status = status 14 | self._speed = 0 15 | self._downloaded_bytes = 0 16 | self._size = self.listener.size 17 | self.engine = EngineStatus().STATUS_MEGA 18 | 19 | def name(self): 20 | return self.listener.name 21 | 22 | def progress_raw(self): 23 | try: 24 | return round(self._downloaded_bytes / self._size * 100, 2) 25 | except ZeroDivisionError: 26 | return 0.0 27 | 28 | def progress(self): 29 | return f"{self.progress_raw()}%" 30 | 31 | def status(self): 32 | return self._status 33 | 34 | def processed_bytes(self): 35 | return get_readable_file_size(self._downloaded_bytes) 36 | 37 | def eta(self): 38 | try: 39 | seconds = (self._size - self._downloaded_bytes) / self._speed 40 | return get_readable_time(seconds) 41 | except ZeroDivisionError: 42 | return "-" 43 | 44 | def size(self): 45 | return get_readable_file_size(self._size) 46 | 47 | def speed(self): 48 | return f"{get_readable_file_size(self._speed)}/s" 49 | 50 | def gid(self): 51 | return self._gid 52 | 53 | def task(self): 54 | return self 55 | 56 | async def cancel_task(self): 57 | await self._obj.cancel_task() 58 | await self.listener.on_download_error(f"{self._status} stopped by user!") 59 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/yt_status.py: -------------------------------------------------------------------------------- 1 | from ....helper.ext_utils.status_utils import ( 2 | MirrorStatus, 3 | EngineStatus, 4 | get_readable_file_size, 5 | get_readable_time, 6 | ) 7 | 8 | 9 | class YtStatus: 10 | def __init__(self, listener, obj, gid, status): 11 | self.listener = listener 12 | self._obj = obj 13 | self._size = self.listener.size 14 | self._gid = gid 
15 | self._status = status 16 | self.engine = EngineStatus().STATUS_YT 17 | 18 | def processed_bytes(self): 19 | return get_readable_file_size(self._obj.processed_bytes) 20 | 21 | def size(self): 22 | return get_readable_file_size(self._size) 23 | 24 | def status(self): 25 | if self._status == "up": 26 | return MirrorStatus.STATUS_UPLOAD 27 | elif self._status == "dl": 28 | return MirrorStatus.STATUS_DOWNLOAD 29 | else: 30 | return MirrorStatus.STATUS_YT 31 | 32 | def name(self): 33 | return self.listener.name 34 | 35 | def gid(self) -> str: 36 | return self._gid 37 | 38 | def progress_raw(self): 39 | try: 40 | return self._obj.processed_bytes / self._size * 100 41 | except ZeroDivisionError: 42 | return 0 43 | 44 | def progress(self): 45 | return f"{round(self.progress_raw(), 2)}%" 46 | 47 | def speed(self): 48 | return f"{get_readable_file_size(self._obj.speed)}/s" 49 | 50 | def eta(self): 51 | try: 52 | seconds = (self._size - self._obj.processed_bytes) / self._obj.speed 53 | return get_readable_time(seconds) 54 | except Exception: 55 | return "-" 56 | 57 | def task(self): 58 | return self._obj 59 | -------------------------------------------------------------------------------- /bot/modules/gd_count.py: -------------------------------------------------------------------------------- 1 | from ..helper.ext_utils.bot_utils import sync_to_async, new_task 2 | from ..helper.ext_utils.links_utils import is_gdrive_link 3 | from ..helper.ext_utils.status_utils import get_readable_file_size 4 | from ..helper.mirror_leech_utils.gdrive_utils.count import GoogleDriveCount 5 | from ..helper.telegram_helper.message_utils import delete_message, send_message 6 | 7 | 8 | @new_task 9 | async def count_node(_, message): 10 | args = message.text.split() 11 | user = message.from_user or message.sender_chat 12 | if username := user.username: 13 | tag = f"@{username}" 14 | else: 15 | tag = user.mention if message.from_user else user.title  # sender may be a channel 16 | 17 | link = args[1] if len(args) > 1 else "" 18 | if len(link) == 0 and (reply_to := message.reply_to_message): 19 | link = reply_to.text.split(maxsplit=1)[0].strip() 20 | 21 | if is_gdrive_link(link): 22 | msg = await send_message(message, f"Counting: {link}") 23 | name, mime_type, size, files, folders = await sync_to_async( 24 | GoogleDriveCount().count, link, user.id 25 | ) 26 | if mime_type is None: 27 | await send_message(message, name) 28 | return 29 | await delete_message(msg) 30 | msg = f"Name: {name}" 31 | msg += f"\n\nSize: {get_readable_file_size(size)}" 32 | msg += f"\n\nType: {mime_type}" 33 | if mime_type == "Folder": 34 | msg += f"\nSubFolders: {folders}" 35 | msg += f"\nFiles: {files}" 36 | msg += f"\n\ncc: {tag}" 37 | else: 38 | msg = ( 39 | "Send a Gdrive link along with the command or reply to the link with the command" 40 | ) 41 | 42 | await send_message(message, msg) 43 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/gdrive_status.py: -------------------------------------------------------------------------------- 1 | from ....helper.ext_utils.status_utils import ( 2 | MirrorStatus, 3 | EngineStatus, 4 | get_readable_file_size, 5 | get_readable_time, 6 | ) 7 | 8 | 9 | class GoogleDriveStatus: 10 | def __init__(self, listener, obj, gid, status): 11 | self.listener = listener 12 | self._obj = obj 13 | self._size = self.listener.size 14 | self._gid = gid 15 | self._status = status 16 | self.engine = EngineStatus().STATUS_GDAPI 17 | 18 | def processed_bytes(self): 19 | return
get_readable_file_size(self._obj.processed_bytes) 20 | 21 | def size(self): 22 | return get_readable_file_size(self._size) 23 | 24 | def status(self): 25 | if self._status == "up": 26 | return MirrorStatus.STATUS_UPLOAD 27 | elif self._status == "dl": 28 | return MirrorStatus.STATUS_DOWNLOAD 29 | else: 30 | return MirrorStatus.STATUS_CLONE 31 | 32 | def name(self): 33 | return self.listener.name 34 | 35 | def gid(self) -> str: 36 | return self._gid 37 | 38 | def progress_raw(self): 39 | try: 40 | return self._obj.processed_bytes / self._size * 100 41 | except ZeroDivisionError: 42 | return 0 43 | 44 | def progress(self): 45 | return f"{round(self.progress_raw(), 2)}%" 46 | 47 | def speed(self): 48 | return f"{get_readable_file_size(self._obj.speed)}/s" 49 | 50 | def eta(self): 51 | try: 52 | seconds = (self._size - self._obj.processed_bytes) / self._obj.speed 53 | return get_readable_time(seconds) 54 | except Exception: 55 | return "-" 56 | 57 | def task(self): 58 | return self._obj 59 | -------------------------------------------------------------------------------- /gen_scripts/generate_drive_token.py: -------------------------------------------------------------------------------- 1 | from os.path import exists 2 | import pickle 3 | 4 | from google.auth.transport.requests import Request 5 | from google_auth_oauthlib.flow import InstalledAppFlow 6 | 7 | TOKEN_FILE = "token.pickle" 8 | OAUTH_SCOPE = ["https://www.googleapis.com/auth/drive"] 9 | 10 | 11 | def load_credentials(token_file: str): 12 | if exists(token_file): 13 | try: 14 | with open(token_file, "rb") as f: 15 | creds = pickle.load(f) 16 | return creds 17 | except Exception as e: 18 | print(f"Error loading credentials: {e}") 19 | return None 20 | 21 | 22 | def save_credentials(token_file: str, credentials) -> None: 23 | try: 24 | with open(token_file, "wb") as f: 25 | pickle.dump(credentials, f) 26 | except Exception as e: 27 | print(f"Error saving credentials: {e}") 28 | 29 | 30 | def get_credentials(): 31 | credentials = load_credentials(TOKEN_FILE) 32 | if credentials and credentials.valid: 33 | return credentials 34 | 35 | if credentials and credentials.expired and credentials.refresh_token: 36 | try: 37 | credentials.refresh(Request()) 38 | return credentials 39 | except Exception as e: 40 | print(f"Error refreshing credentials: {e}") 41 | 42 | try: 43 | flow = InstalledAppFlow.from_client_secrets_file( 44 | "credentials.json", OAUTH_SCOPE 45 | ) 46 | credentials = flow.run_local_server(port=0, open_browser=False) 47 | except Exception as e: 48 | print(f"Error during OAuth flow: {e}") 49 | raise 50 | 51 | return credentials 52 | 53 | 54 | def main(): 55 | try: 56 | credentials = get_credentials() 57 | save_credentials(TOKEN_FILE, credentials) 58 | except Exception as e: 59 | print(f"Failed to obtain credentials: {e}") 60 | 61 | 62 | if __name__ == "__main__": 63 | main() 64 | -------------------------------------------------------------------------------- /qBittorrent/config/qBittorrent.conf: -------------------------------------------------------------------------------- 1 | [Application] 2 | MemoryWorkingSetLimit=512 3 | 4 | [BitTorrent] 5 | Session\AddExtensionToIncompleteFiles=false 6 | Session\AddTrackersEnabled=false 7 | Session\AnnounceToAllTrackers=true 8 | Session\AnonymousModeEnabled=false 9 | Session\AsyncIOThreadsCount=16 10 | Session\ConnectionSpeed=-1 11 | Session\DHTEnabled=true 12 | Session\DiskCacheSize=-1 13 | Session\GlobalDLSpeedLimit=0 14 | Session\GlobalMaxRatio=-1 15 | 
Session\GlobalMaxSeedingMinutes=-1 16 | Session\GlobalUPSpeedLimit=0 17 | Session\HashingThreadsCount=1 18 | Session\IgnoreSlowTorrentsForQueueing=true 19 | Session\IncludeOverheadInLimits=false 20 | Session\LSDEnabled=true 21 | Session\MaxActiveCheckingTorrents=3 22 | Session\MaxActiveDownloads=1000 23 | Session\MaxActiveTorrents=1000 24 | Session\MaxActiveUploads=1000 25 | Session\MaxConnections=-1 26 | Session\MaxConnectionsPerTorrent=-1 27 | Session\MaxRatioAction=0 28 | Session\MaxUploads=-1 29 | Session\MaxUploadsPerTorrent=-1 30 | Session\MultiConnectionsPerIp=true 31 | Session\PexEnabled=true 32 | Session\PerformanceWarning=true 33 | Session\Preallocation=true 34 | Session\QueueingSystemEnabled=false 35 | Session\SlowTorrentsDownloadRate=2 36 | Session\SlowTorrentsInactivityTimer=600 37 | Session\SlowTorrentsUploadRate=2 38 | Session\StopTrackerTimeout=5 39 | TrackerEnabled=true 40 | 41 | [LegalNotice] 42 | Accepted=true 43 | 44 | [Meta] 45 | MigrationVersion=8 46 | 47 | [Preferences] 48 | Advanced\DisableRecursiveDownload=false 49 | Advanced\RecheckOnCompletion=true 50 | Advanced\trackerPortForwarding=true 51 | General\PreventFromSuspendWhenDownloading=true 52 | General\PreventFromSuspendWhenSeeding=true 53 | Search\SearchEnabled=true 54 | WebUI\Address=* 55 | WebUI\BanDuration=3600 56 | WebUI\CSRFProtection=false 57 | WebUI\ClickjackingProtection=false 58 | WebUI\Enabled=true 59 | WebUI\HTTPS\Enabled=false 60 | WebUI\HostHeaderValidation=false 61 | WebUI\LocalHostAuth=false 62 | WebUI\MaxAuthenticationFailCount=1000 63 | WebUI\Port=8090 64 | WebUI\SecureCookie=false 65 | WebUI\UseUPnP=false 66 | WebUI\Username=admin 67 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/rclone_utils/serve.py: -------------------------------------------------------------------------------- 1 | from aiofiles import open as aiopen 2 | from aiofiles.os import path as aiopath 3 | from asyncio import create_subprocess_exec 4 | from configparser import RawConfigParser 5 | 6 | from ....core.config_manager import Config, BinConfig 7 | 8 | RcloneServe = [] 9 | 10 | 11 | async def rclone_serve_booter(): 12 | if not Config.RCLONE_SERVE_URL or not await aiopath.exists("rclone.conf"): 13 | if RcloneServe: 14 | try: 15 | RcloneServe[0].kill() 16 | RcloneServe.clear() 17 | except Exception: 18 | pass 19 | return 20 | config = RawConfigParser() 21 | async with aiopen("rclone.conf", "r") as f: 22 | contents = await f.read() 23 | config.read_string(contents) 24 | if not config.has_section("combine"): 25 | upstreams = " ".join(f"{remote}={remote}:" for remote in config.sections()) 26 | config.add_section("combine") 27 | config.set("combine", "type", "combine") 28 | config.set("combine", "upstreams", upstreams) 29 | with open("rclone.conf", "w") as f: 30 | config.write(f, space_around_delimiters=False) 31 | if RcloneServe: 32 | try: 33 | RcloneServe[0].kill() 34 | RcloneServe.clear() 35 | except Exception: 36 | pass 37 | cmd = [ 38 | BinConfig.RCLONE_NAME, 39 | "serve", 40 | "http", 41 | "--config", 42 | "rclone.conf", 43 | "--no-modtime", 44 | "combine:", 45 | "--addr", 46 | f":{Config.RCLONE_SERVE_PORT}", 47 | "--vfs-cache-mode", 48 | "full", 49 | "--vfs-cache-max-age", 50 | "1m0s", 51 | "--buffer-size", 52 | "64M", 53 | "-v", 54 | "--log-file", 55 | "rlog.txt", 56 | ] 57 | if (user := Config.RCLONE_SERVE_USER) and (pswd := Config.RCLONE_SERVE_PASS): 58 | cmd.extend(("--user", user, "--pass", pswd)) 59 | rcs = await create_subprocess_exec(*cmd) 60 | 
RcloneServe.append(rcs) 61 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/ffmpeg_status.py: -------------------------------------------------------------------------------- 1 | from .... import LOGGER 2 | from ...ext_utils.status_utils import ( 3 | get_readable_file_size, 4 | EngineStatus, 5 | MirrorStatus, 6 | get_readable_time, 7 | ) 8 | 9 | 10 | class FFmpegStatus: 11 | def __init__(self, listener, obj, gid, status=""): 12 | self.listener = listener 13 | self._obj = obj 14 | self._gid = gid 15 | self._cstatus = status 16 | self.engine = EngineStatus().STATUS_FFMPEG 17 | 18 | def speed(self): 19 | return f"{get_readable_file_size(self._obj.speed_raw)}/s" 20 | 21 | def processed_bytes(self): 22 | return get_readable_file_size(self._obj.processed_bytes) 23 | 24 | def progress(self): 25 | return f"{round(self._obj.progress_raw, 2)}%" 26 | 27 | def gid(self): 28 | return self._gid 29 | 30 | def name(self): 31 | return self.listener.name 32 | 33 | def size(self): 34 | return get_readable_file_size(self.listener.size) 35 | 36 | def eta(self): 37 | return get_readable_time(self._obj.eta_raw) if self._obj.eta_raw else "-" 38 | 39 | def status(self): 40 | if self._cstatus == "Convert": 41 | return MirrorStatus.STATUS_CONVERT 42 | elif self._cstatus == "Split": 43 | return MirrorStatus.STATUS_SPLIT 44 | elif self._cstatus == "Sample Video": 45 | return MirrorStatus.STATUS_SAMVID 46 | else: 47 | return MirrorStatus.STATUS_FFMPEG 48 | 49 | def task(self): 50 | return self 51 | 52 | async def cancel_task(self): 53 | LOGGER.info(f"Cancelling {self._cstatus}: {self.listener.name}") 54 | self.listener.is_cancelled = True 55 | if ( 56 | self.listener.subproc is not None 57 | and self.listener.subproc.returncode is None 58 | ): 59 | try: 60 | self.listener.subproc.kill() 61 | except Exception: 62 | pass 63 | await self.listener.on_upload_error(f"{self._cstatus} stopped by user!") 64 | -------------------------------------------------------------------------------- /bot/helper/ext_utils/links_utils.py: -------------------------------------------------------------------------------- 1 | from re import match as re_match 2 | from base64 import urlsafe_b64decode, urlsafe_b64encode 3 | 4 | 5 | def is_magnet(url: str): 6 | return bool( 7 | re_match( 8 | r"^magnet:\?.*xt=urn:(btih|btmh):([a-zA-Z0-9]{32,40}|[a-z2-7]{32}).*", url 9 | ) 10 | ) 11 | 12 | 13 | def is_url(url: str): 14 | return bool( 15 | re_match( 16 | r"^(?!\/)(rtmps?:\/\/|mms:\/\/|rtsp:\/\/|https?:\/\/|ftp:\/\/)?([^\/:]+:[^\/@]+@)?(www\.)?(?=[^\/:\s]+\.[^\/:\s]+)([^\/:\s]+\.[^\/:\s]+)(:\d+)?(\/[^#\s]*[\s\S]*)?(\?[^#\s]*)?(#.*)?$", 17 | url, 18 | ) 19 | ) 20 | 21 | 22 | def is_gdrive_link(url: str): 23 | return "drive.google.com" in url or "drive.usercontent.google.com" in url 24 | 25 | 26 | def is_telegram_link(url: str): 27 | return url.startswith(("https://t.me/", "tg://openmessage?user_id=")) 28 | 29 | 30 | def is_mega_link(url: str): 31 | return "mega.nz" in url or "mega.co.nz" in url 32 | 33 | 34 | def get_mega_link_type(url): 35 | return "folder" if "folder" in url or "/#F!" in url else "file" 36 | 37 | 38 | def is_share_link(url: str): 39 | return bool( 40 | re_match( 41 | r"https?:\/\/.+\.gdtot\.\S+|https?:\/\/(filepress|filebee|appdrive|gdflix)\.\S+", 42 | url, 43 | ) 44 | ) 45 | 46 | 47 | def is_rclone_path(path: str): 48 | return bool( 49 | re_match( 50 | r"^(mrcc:)?(?!(magnet:|mtp:|sa:|tp:))(?![- ])[a-zA-Z0-9_\. 
-]+(?<! ):(?!.*\/\/).*$|^rcl$", 51 | path, 52 | ) 53 | ) 54 | -------------------------------------------------------------------------------- /bot/modules/speedtest.py: -------------------------------------------------------------------------------- 1 | from speedtest import Speedtest, ConfigRetrievalError 2 | 3 | from .. import LOGGER 4 | from ..helper.ext_utils.bot_utils import sync_to_async, new_task 5 | from ..helper.ext_utils.status_utils import get_readable_file_size 6 | from ..helper.telegram_helper.message_utils import ( 7 | send_message, 8 | edit_message, 9 | delete_message, 10 | ) 11 | 12 | 13 | @new_task 14 | async def speedtest(_, message): 15 | speed = await send_message(message, "Initiating Speedtest...") 16 | try: 17 | speed_results = await sync_to_async(Speedtest) 18 | await sync_to_async(speed_results.get_best_server) 19 | await sync_to_async(speed_results.download) 20 | await sync_to_async(speed_results.upload) 21 | except ConfigRetrievalError: 22 | await edit_message( 23 | speed, 24 | "ERROR: Can't connect to the server at the moment, try again later!", 25 | ) 26 | return 27 | speed_results.results.share() 28 | result = speed_results.results.dict() 29 | string_speed = f""" 30 | ➲ SPEEDTEST INFO 31 | ┠ Upload: {get_readable_file_size(result['upload'] / 8)}/s 32 | ┠ Download: {get_readable_file_size(result['download'] / 8)}/s 33 | ┠ Ping: {result['ping']} ms 34 | ┠ Time: {result['timestamp']} 35 | ┠ Data Sent: {get_readable_file_size(int(result['bytes_sent']))} 36 | ┖ Data Received: {get_readable_file_size(int(result['bytes_received']))} 37 | 38 | ➲ SPEEDTEST SERVER 39 | ┠ Name: {result['server']['name']} 40 | ┠ Country: {result['server']['country']}, {result['server']['cc']} 41 | ┠ Sponsor: {result['server']['sponsor']} 42 | ┠ Latency: {result['server']['latency']} 43 | ┠ Latitude: {result['server']['lat']} 44 | ┖ Longitude: {result['server']['lon']} 45 | """ 46 | try: 47 | await send_message(message, string_speed, photo=result["share"]) 48 | await delete_message(speed) 49 | except Exception as e: 50 | LOGGER.error(str(e)) 51 | await edit_message(speed, string_speed) 52 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/download_utils/gd_download.py: -------------------------------------------------------------------------------- 1 | from secrets import token_hex 2 | 3 | from .... import task_dict, task_dict_lock, LOGGER 4 | from ...ext_utils.bot_utils import sync_to_async 5 | from ...ext_utils.task_manager import ( 6 | check_running_tasks, 7 | stop_duplicate_check, 8 | limit_checker, 9 | ) 10 | from ...mirror_leech_utils.gdrive_utils.count import GoogleDriveCount 11 | from ...mirror_leech_utils.gdrive_utils.download import GoogleDriveDownload 12 | from ...mirror_leech_utils.status_utils.gdrive_status import GoogleDriveStatus 13 | from ...mirror_leech_utils.status_utils.queue_status import QueueStatus 14 | from ...telegram_helper.message_utils import send_status_message 15 | 16 | 17 | async def add_gd_download(listener, path): 18 | drive = GoogleDriveCount() 19 | name, mime_type, listener.size, _, _ = await sync_to_async( 20 | drive.count, listener.link, listener.user_id 21 | ) 22 | if mime_type is None: 23 | await listener.on_download_error(name) 24 | return 25 | 26 | listener.name = listener.name or name 27 | gid = token_hex(5) 28 | 29 | msg, button = await stop_duplicate_check(listener) 30 | if msg: 31 | await listener.on_download_error(msg, button) 32 | return 33 | 34 | if limit_exceeded := await limit_checker(listener): 35 | await listener.on_download_error(limit_exceeded, is_limit=True) 36 | return 37 | 38 | add_to_queue, event = await check_running_tasks(listener) 39 | if add_to_queue: 40 | LOGGER.info(f"Added to Queue/Download: {listener.name}") 41 | async with task_dict_lock: 42 | task_dict[listener.mid] = QueueStatus(listener, gid, "dl") 43 | await listener.on_download_start() 44 | if listener.multi <= 1: 45 | await send_status_message(listener.message) 46 | await event.wait() 47 | if listener.is_cancelled: 48 | return 49 | 50 | drive = GoogleDriveDownload(listener, path) 51 | async with task_dict_lock: 52 | task_dict[listener.mid] = GoogleDriveStatus(listener, drive, gid, "dl") 53 | 54
| if add_to_queue: 55 | LOGGER.info(f"Start Queued Download from GDrive: {listener.name}") 56 | else: 57 | LOGGER.info(f"Download from GDrive: {listener.name}") 58 | await listener.on_download_start() 59 | if listener.multi <= 1: 60 | await send_status_message(listener.message) 61 | 62 | await sync_to_async(drive.download) 63 | -------------------------------------------------------------------------------- /myjd/__init__.py: -------------------------------------------------------------------------------- 1 | from .exception import ( 2 | MYJDException, 3 | MYJDConnectionException, 4 | MYJDDeviceNotFoundException, 5 | MYJDDecodeException, 6 | MYJDApiException, 7 | MYJDApiCommandNotFoundException, 8 | MYJDApiInterfaceNotFoundException, 9 | MYJDAuthFailedException, 10 | MYJDBadParametersException, 11 | MYJDBadRequestException, 12 | MYJDChallengeFailedException, 13 | MYJDEmailForbiddenException, 14 | MYJDEmailInvalidException, 15 | MYJDErrorEmailNotConfirmedException, 16 | MYJDFailedException, 17 | MYJDFileNotFoundException, 18 | MYJDInternalServerErrorException, 19 | MYJDMaintenanceException, 20 | MYJDMethodForbiddenException, 21 | MYJDOfflineException, 22 | MYJDOutdatedException, 23 | MYJDOverloadException, 24 | MYJDSessionException, 25 | MYJDStorageAlreadyExistsException, 26 | MYJDStorageInvalidKeyException, 27 | MYJDStorageInvalidStorageIdException, 28 | MYJDStorageKeyNotFoundException, 29 | MYJDStorageLimitReachedException, 30 | MYJDStorageNotFoundException, 31 | MYJDTokenInvalidException, 32 | MYJDTooManyRequestsException, 33 | MYJDUnknownException, 34 | ) 35 | from .myjdapi import MyJdApi 36 | 37 | __version__ = "1.1.7" 38 | 39 | __all__ = [ 40 | "MYJDException", 41 | "MYJDConnectionException", 42 | "MYJDDeviceNotFoundException", 43 | "MYJDDecodeException", 44 | "MYJDApiException", 45 | "MYJDApiCommandNotFoundException", 46 | "MYJDApiInterfaceNotFoundException", 47 | "MYJDAuthFailedException", 48 | "MYJDBadParametersException", 49 | "MYJDBadRequestException", 50 | "MYJDChallengeFailedException", 51 | "MYJDEmailForbiddenException", 52 | "MYJDEmailInvalidException", 53 | "MYJDErrorEmailNotConfirmedException", 54 | "MYJDFailedException", 55 | "MYJDFileNotFoundException", 56 | "MYJDInternalServerErrorException", 57 | "MYJDMaintenanceException", 58 | "MYJDMethodForbiddenException", 59 | "MYJDOfflineException", 60 | "MYJDOutdatedException", 61 | "MYJDOverloadException", 62 | "MYJDSessionException", 63 | "MYJDStorageAlreadyExistsException", 64 | "MYJDStorageInvalidKeyException", 65 | "MYJDStorageInvalidStorageIdException", 66 | "MYJDStorageKeyNotFoundException", 67 | "MYJDStorageLimitReachedException", 68 | "MYJDStorageNotFoundException", 69 | "MYJDTokenInvalidException", 70 | "MYJDTooManyRequestsException", 71 | "MYJDUnknownException", 72 | "MyJdApi", 73 | ] 74 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/download_utils/direct_downloader.py: -------------------------------------------------------------------------------- 1 | from secrets import token_hex 2 | 3 | from .... 
import ( 4 | LOGGER, 5 | task_dict, 6 | task_dict_lock, 7 | ) 8 | from ...ext_utils.task_manager import ( 9 | check_running_tasks, 10 | stop_duplicate_check, 11 | limit_checker, 12 | ) 13 | from ...listeners.direct_listener import DirectListener 14 | from ...mirror_leech_utils.status_utils.direct_status import DirectStatus 15 | from ...mirror_leech_utils.status_utils.queue_status import QueueStatus 16 | from ...telegram_helper.message_utils import send_status_message 17 | 18 | 19 | async def add_direct_download(listener, path): 20 | details = listener.link 21 | if not (contents := details.get("contents")): 22 | await listener.on_download_error("There is nothing to download!") 23 | return 24 | listener.size = details["total_size"] 25 | 26 | if not listener.name: 27 | listener.name = details["title"] 28 | path = f"{path}/{listener.name}" 29 | 30 | msg, button = await stop_duplicate_check(listener) 31 | if msg: 32 | await listener.on_download_error(msg, button) 33 | return 34 | 35 | if limit_exceeded := await limit_checker(listener): 36 | await listener.on_download_error(limit_exceeded, is_limit=True) 37 | return 38 | 39 | gid = token_hex(5) 40 | add_to_queue, event = await check_running_tasks(listener) 41 | if add_to_queue: 42 | LOGGER.info(f"Added to Queue/Download: {listener.name}") 43 | async with task_dict_lock: 44 | task_dict[listener.mid] = QueueStatus(listener, gid, "dl") 45 | await listener.on_download_start() 46 | if listener.multi <= 1: 47 | await send_status_message(listener.message) 48 | await event.wait() 49 | if listener.is_cancelled: 50 | return 51 | 52 | a2c_opt = {"follow-torrent": "false", "follow-metalink": "false"} 53 | if header := details.get("header"): 54 | a2c_opt["header"] = header 55 | direct_listener = DirectListener(path, listener, a2c_opt) 56 | 57 | async with task_dict_lock: 58 | task_dict[listener.mid] = DirectStatus(listener, direct_listener, gid) 59 | 60 | if add_to_queue: 61 | LOGGER.info(f"Start Queued Download from Direct Download: {listener.name}") 62 | else: 63 | LOGGER.info(f"Download from Direct Download: {listener.name}") 64 | await listener.on_download_start() 65 | if listener.multi <= 1: 66 | await send_status_message(listener.message) 67 | 68 | await direct_listener.download(contents) 69 | -------------------------------------------------------------------------------- /bot/__init__.py: -------------------------------------------------------------------------------- 1 | # ruff: noqa: E402 2 | 3 | from uvloop import install 4 | 5 | install() 6 | 7 | from subprocess import run as srun 8 | from os import getcwd 9 | from asyncio import Lock, new_event_loop, set_event_loop 10 | from logging import ( 11 | ERROR, 12 | INFO, 13 | WARNING, 14 | FileHandler, 15 | StreamHandler, 16 | basicConfig, 17 | getLogger, 18 | ) 19 | from os import cpu_count 20 | from time import time 21 | 22 | from apscheduler.schedulers.asyncio import AsyncIOScheduler 23 | 24 | from .core.config_manager import BinConfig 25 | from sabnzbdapi import SabnzbdClient 26 | 27 | getLogger("requests").setLevel(WARNING) 28 | getLogger("urllib3").setLevel(WARNING) 29 | getLogger("pyrogram").setLevel(ERROR) 30 | getLogger("aiohttp").setLevel(WARNING) 31 | getLogger("apscheduler").setLevel(ERROR) 32 | getLogger("httpx").setLevel(WARNING) 33 | getLogger("pymongo").setLevel(WARNING) 34 | 35 | 36 | 37 | bot_start_time = time() 38 | 39 | bot_loop = new_event_loop() 40 | set_event_loop(bot_loop) 41 | 42 | basicConfig( 43 | format="[%(asctime)s] [%(levelname)s] - 
%(message)s", # [%(filename)s:%(lineno)d] 44 | datefmt="%d-%b-%y %I:%M:%S %p", 45 | handlers=[FileHandler("log.txt"), StreamHandler()], 46 | level=INFO, 47 | ) 48 | 49 | LOGGER = getLogger(__name__) 50 | cpu_no = cpu_count() 51 | threads = max(1, cpu_no // 2) 52 | cores = ",".join(str(i) for i in range(threads)) 53 | 54 | bot_cache = {} 55 | DOWNLOAD_DIR = "/usr/src/app/downloads/" 56 | intervals = {"status": {}, "qb": "", "jd": "", "nzb": "", "stopAll": False} 57 | qb_torrents = {} 58 | jd_downloads = {} 59 | nzb_jobs = {} 60 | user_data = {} 61 | aria2_options = {} 62 | qbit_options = {} 63 | nzb_options = {} 64 | queued_dl = {} 65 | queued_up = {} 66 | status_dict = {} 67 | task_dict = {} 68 | rss_dict = {} 69 | shortener_dict = {} 70 | var_list = [ 71 | "BOT_TOKEN", 72 | "TELEGRAM_API", 73 | "TELEGRAM_HASH", 74 | "OWNER_ID", 75 | "DATABASE_URL", 76 | "BASE_URL", 77 | "UPSTREAM_REPO", 78 | "UPSTREAM_BRANCH", 79 | "UPDATE_PKGS", 80 | ] 81 | auth_chats = {} 82 | excluded_extensions = ["aria2", "!qB"] 83 | drives_names = [] 84 | drives_ids = [] 85 | index_urls = [] 86 | sudo_users = [] 87 | non_queued_dl = set() 88 | non_queued_up = set() 89 | multi_tags = set() 90 | task_dict_lock = Lock() 91 | queue_dict_lock = Lock() 92 | qb_listener_lock = Lock() 93 | nzb_listener_lock = Lock() 94 | jd_listener_lock = Lock() 95 | cpu_eater_lock = Lock() 96 | same_directory_lock = Lock() 97 | 98 | sabnzbd_client = SabnzbdClient( 99 | host="http://localhost", 100 | api_key="admin", 101 | port="8070", 102 | ) 103 | srun([BinConfig.QBIT_NAME, "-d", f"--profile={getcwd()}"], check=False) 104 | 105 | scheduler = AsyncIOScheduler(event_loop=bot_loop) 106 | -------------------------------------------------------------------------------- /bot/helper/telegram_helper/filters.py: -------------------------------------------------------------------------------- 1 | from pyrogram.filters import create 2 | from pyrogram.enums import ChatType 3 | 4 | from ... 
import auth_chats, sudo_users, user_data 5 | from ...core.config_manager import Config 6 | from .tg_utils import chat_info 7 | 8 | 9 | class CustomFilters: 10 | async def owner_filter(self, _, update): 11 | user = update.from_user or update.sender_chat 12 | return user.id == Config.OWNER_ID 13 | 14 | owner = create(owner_filter) 15 | 16 | async def authorized_user(self, _, update): 17 | uid = (update.from_user or update.sender_chat).id 18 | chat_id = update.chat.id 19 | thread_id = update.message_thread_id if update.is_topic_message else None 20 | return bool( 21 | uid == Config.OWNER_ID 22 | or ( 23 | uid in user_data 24 | and ( 25 | user_data[uid].get("AUTH", False) 26 | or user_data[uid].get("SUDO", False) 27 | ) 28 | ) 29 | or ( 30 | chat_id in user_data 31 | and user_data[chat_id].get("AUTH", False) 32 | and ( 33 | thread_id is None 34 | or thread_id in user_data[chat_id].get("thread_ids", []) 35 | ) 36 | ) 37 | or uid in sudo_users 38 | or uid in auth_chats 39 | or chat_id in auth_chats 40 | and ( 41 | auth_chats[chat_id] 42 | and thread_id 43 | and thread_id in auth_chats[chat_id] 44 | or not auth_chats[chat_id] 45 | ) 46 | ) 47 | 48 | authorized = create(authorized_user) 49 | 50 | async def authorized_usetting(self, _, update): 51 | uid = (update.from_user or update.sender_chat).id 52 | is_exists = False 53 | if await CustomFilters.authorized("", update): 54 | is_exists = True 55 | elif update.chat.type == ChatType.PRIVATE: 56 | for channel_id in user_data: 57 | if not ( 58 | user_data[channel_id].get("is_auth") 59 | and str(channel_id).startswith("-100") 60 | ): 61 | continue 62 | try: 63 | if await (await chat_info(str(channel_id))).get_member(uid): 64 | is_exists = True 65 | break 66 | except Exception: 67 | continue 68 | return is_exists 69 | 70 | authorized_uset = create(authorized_usetting) 71 | 72 | async def sudo_user(self, _, update): 73 | user = update.from_user or update.sender_chat 74 | uid = user.id 75 | return bool( 76 | uid == Config.OWNER_ID 77 | or uid in user_data 78 | and user_data[uid].get("SUDO") 79 | or uid in sudo_users 80 | ) 81 | 82 | sudo = create(sudo_user) 83 | -------------------------------------------------------------------------------- /gen_scripts/driveid.py: -------------------------------------------------------------------------------- 1 | from os import path 2 | from re import match 3 | 4 | 5 | def main(): 6 | info = ( 7 | "\n\n" 8 | " Bot can search files recursively, but you have to add the list of drives you want to search.\n" 9 | " Use the following format: (You can use 'root' in the ID in case you want to use main drive.)\n" 10 | " teamdrive NAME --> anything that you like\n" 11 | " teamdrive ID --> id of teamdrives in which you like to search ('root' for main drive)\n" 12 | " teamdrive INDEX URL --> enter index url for this drive.\n" 13 | " go to the respective drive and copy the url from address bar\n" 14 | ) 15 | print(info) 16 | msg = "" 17 | filename = "list_drives.txt" 18 | 19 | if path.exists(filename): 20 | try: 21 | with open(filename, "r") as f: 22 | lines = f.read() 23 | except Exception as e: 24 | print(f"Error reading {filename}: {e}") 25 | lines = "" 26 | if lines and not match(r"^\s*$", lines): 27 | print(lines) 28 | print( 29 | "\n\n" 30 | " DO YOU WISH TO KEEP THE ABOVE DETAILS THAT YOU PREVIOUSLY ADDED? 
ENTER (y/n)\n" 31 | " IF NOTHING SHOWS ENTER n" 32 | ) 33 | while True: 34 | choice = input().strip() 35 | if choice.lower() == "y": 36 | msg = lines 37 | break 38 | elif choice.lower() == "n": 39 | break 40 | else: 41 | print( 42 | "\n\n Invalid input. Please enter 'y' for yes or 'n' for no." 43 | ) 44 | while True: 45 | try: 46 | num = int(input(" How Many Drive/Folder You Like To Add : ")) 47 | break 48 | except ValueError: 49 | print(" Invalid number. Please enter an integer.") 50 | 51 | for count in range(1, num + 1): 52 | print(f"\n > DRIVE - {count}\n") 53 | name = input(" Enter Drive NAME (anything) : ").strip() 54 | drive_id = input(" Enter Drive ID : ").strip() 55 | index = input(" Enter Drive INDEX URL (optional) : ").strip() 56 | 57 | if not name or not drive_id: 58 | print("\n\n ERROR: Don't leave the name/ID empty.") 59 | exit(1) 60 | name = name.replace(" ", "_") 61 | if index: 62 | index = index.rstrip("/") 63 | else: 64 | index = "" 65 | msg += f"{name} {drive_id} {index}\n" 66 | 67 | try: 68 | with open(filename, "w") as file: 69 | file.write(msg) 70 | except Exception as e: 71 | print(f"Error writing to {filename}: {e}") 72 | exit(1) 73 | print("\n\n Done!") 74 | 75 | 76 | if __name__ == "__main__": 77 | main() 78 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/gdrive_utils/count.py: -------------------------------------------------------------------------------- 1 | from logging import getLogger 2 | from tenacity import RetryError 3 | 4 | from ...mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper 5 | 6 | LOGGER = getLogger(__name__) 7 | 8 | 9 | class GoogleDriveCount(GoogleDriveHelper): 10 | def __init__(self): 11 | super().__init__() 12 | 13 | def count(self, link, user_id): 14 | try: 15 | file_id = self.get_id_from_url(link, user_id) 16 | except (KeyError, IndexError): 17 | return ( 18 | "Google Drive ID could not be found in the provided link", 19 | None, 20 | None, 21 | None, 22 | None, 23 | ) 24 | self.service = self.authorize() 25 | LOGGER.info(f"File ID: {file_id}") 26 | try: 27 | return self._proceed_count(file_id) 28 | except Exception as err: 29 | if isinstance(err, RetryError): 30 | LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") 31 | err = err.last_attempt.exception() 32 | err = str(err).replace(">", "").replace("<", "") 33 | if "File not found" in err: 34 | if not self.alt_auth and self.use_sa: 35 | self.alt_auth = True 36 | self.use_sa = False 37 | LOGGER.error("File not found. Trying with token.pickle...") 38 | return self.count(link, user_id) 39 | msg = "File not found." 
40 | else: 41 | msg = f"Error.\n{err}" 42 | return msg, None, None, None, None 43 | 44 | def _proceed_count(self, file_id): 45 | meta = self.get_file_metadata(file_id) 46 | name = meta["name"] 47 | LOGGER.info(f"Counting: {name}") 48 | mime_type = meta.get("mimeType") 49 | if mime_type == self.G_DRIVE_DIR_MIME_TYPE: 50 | self._gdrive_directory(meta) 51 | mime_type = "Folder" 52 | else: 53 | if mime_type is None: 54 | mime_type = "File" 55 | self.total_files += 1 56 | self._gdrive_file(meta) 57 | return name, mime_type, self.proc_bytes, self.total_files, self.total_folders 58 | 59 | def _gdrive_file(self, filee): 60 | size = int(filee.get("size", 0)) 61 | self.proc_bytes += size 62 | 63 | def _gdrive_directory(self, drive_folder): 64 | files = self.get_files_by_folder_id(drive_folder["id"]) 65 | if len(files) == 0: 66 | return 67 | for filee in files: 68 | shortcut_details = filee.get("shortcutDetails") 69 | if shortcut_details is not None: 70 | mime_type = shortcut_details["targetMimeType"] 71 | file_id = shortcut_details["targetId"] 72 | filee = self.get_file_metadata(file_id) 73 | else: 74 | mime_type = filee.get("mimeType") 75 | if mime_type == self.G_DRIVE_DIR_MIME_TYPE: 76 | self.total_folders += 1 77 | self._gdrive_directory(filee) 78 | else: 79 | self.total_files += 1 80 | self._gdrive_file(filee) 81 | -------------------------------------------------------------------------------- /bot/modules/help.py: -------------------------------------------------------------------------------- 1 | from ..helper.ext_utils.bot_utils import COMMAND_USAGE, new_task 2 | from ..helper.ext_utils.help_messages import ( 3 | YT_HELP_DICT, 4 | MIRROR_HELP_DICT, 5 | CLONE_HELP_DICT, 6 | ) 7 | from ..helper.telegram_helper.button_build import ButtonMaker 8 | from ..helper.telegram_helper.message_utils import ( 9 | edit_message, 10 | delete_message, 11 | send_message, 12 | ) 13 | from ..helper.ext_utils.help_messages import help_string 14 | 15 | 16 | @new_task 17 | async def arg_usage(_, query): 18 | data = query.data.split() 19 | message = query.message 20 | await query.answer() 21 | if data[1] == "close": 22 | return await delete_message(message, message.reply_to_message) 23 | pg_no = int(data[3]) 24 | if data[1] == "nex": 25 | if data[2] == "mirror": 26 | await edit_message( 27 | message, COMMAND_USAGE["mirror"][0], COMMAND_USAGE["mirror"][pg_no + 1] 28 | ) 29 | elif data[2] == "yt": 30 | await edit_message( 31 | message, COMMAND_USAGE["yt"][0], COMMAND_USAGE["yt"][pg_no + 1] 32 | ) 33 | elif data[2] == "clone": 34 | await edit_message( 35 | message, COMMAND_USAGE["clone"][0], COMMAND_USAGE["clone"][pg_no + 1] 36 | ) 37 | elif data[1] == "pre": 38 | if data[2] == "mirror": 39 | await edit_message( 40 | message, COMMAND_USAGE["mirror"][0], COMMAND_USAGE["mirror"][pg_no + 1] 41 | ) 42 | elif data[2] == "yt": 43 | await edit_message( 44 | message, COMMAND_USAGE["yt"][0], COMMAND_USAGE["yt"][pg_no + 1] 45 | ) 46 | elif data[2] == "clone": 47 | await edit_message( 48 | message, COMMAND_USAGE["clone"][0], COMMAND_USAGE["clone"][pg_no + 1] 49 | ) 50 | elif data[1] == "back": 51 | if data[2] == "m": 52 | await edit_message( 53 | message, COMMAND_USAGE["mirror"][0], COMMAND_USAGE["mirror"][pg_no + 1] 54 | ) 55 | elif data[2] == "y": 56 | await edit_message( 57 | message, COMMAND_USAGE["yt"][0], COMMAND_USAGE["yt"][pg_no + 1] 58 | ) 59 | elif data[2] == "c": 60 | await edit_message( 61 | message, COMMAND_USAGE["clone"][0], COMMAND_USAGE["clone"][pg_no + 1] 62 | ) 63 | elif data[1] == "mirror": 64 | buttons = 
ButtonMaker() 65 | buttons.data_button("Back", f"help back m {pg_no}") 66 | button = buttons.build_menu() 67 | await edit_message(message, MIRROR_HELP_DICT[data[2]], button) 68 | elif data[1] == "yt": 69 | buttons = ButtonMaker() 70 | buttons.data_button("Back", f"help back y {pg_no}") 71 | button = buttons.build_menu() 72 | await edit_message(message, YT_HELP_DICT[data[2]], button) 73 | elif data[1] == "clone": 74 | buttons = ButtonMaker() 75 | buttons.data_button("Back", f"help back c {pg_no}") 76 | button = buttons.build_menu() 77 | await edit_message(message, CLONE_HELP_DICT[data[2]], button) 78 | 79 | 80 | @new_task 81 | async def bot_help(_, message): 82 | await send_message(message, help_string) 83 | -------------------------------------------------------------------------------- /bot/modules/__init__.py: -------------------------------------------------------------------------------- 1 | from .bot_settings import send_bot_settings, edit_bot_settings 2 | from .cancel_task import cancel, cancel_multi, cancel_all_buttons, cancel_all_update 3 | from .chat_permission import authorize, unauthorize, add_sudo, remove_sudo 4 | from .clone import clone_node 5 | from .exec import aioexecute, execute, clear 6 | from .file_selector import select, confirm_selection 7 | from .force_start import remove_from_queue 8 | from .gd_count import count_node 9 | from .gd_delete import delete_file 10 | from .gd_search import gdrive_search, select_type 11 | from .help import arg_usage, bot_help 12 | from .mediainfo import mediainfo 13 | from .broadcast import broadcast 14 | from .uphoster import uphoster 15 | from .mirror_leech import ( 16 | mirror, 17 | leech, 18 | qb_leech, 19 | qb_mirror, 20 | jd_leech, 21 | jd_mirror, 22 | nzb_leech, 23 | nzb_mirror, 24 | ) 25 | from .restart import ( 26 | restart_bot, 27 | restart_notification, 28 | confirm_restart, 29 | restart_sessions, 30 | ) 31 | from .imdb import imdb_search, imdb_callback 32 | from .rss import get_rss_menu, rss_listener 33 | from .search import torrent_search, torrent_search_update, initiate_search_tools 34 | from .nzb_search import hydra_search 35 | from .services import start, start_cb, login, ping, log, log_cb 36 | from .shell import run_shell 37 | from .stats import bot_stats, stats_pages, get_packages_version 38 | from .status import task_status, status_pages 39 | from .users_settings import get_users_settings, edit_user_settings, send_user_settings 40 | from .ytdlp import ytdl, ytdl_leech 41 | 42 | __all__ = [ 43 | "send_bot_settings", 44 | "edit_bot_settings", 45 | "cancel", 46 | "cancel_multi", 47 | "cancel_all_buttons", 48 | "cancel_all_update", 49 | "authorize", 50 | "unauthorize", 51 | "add_sudo", 52 | "remove_sudo", 53 | "clone_node", 54 | "aioexecute", 55 | "execute", 56 | "hydra_search", 57 | "clear", 58 | "select", 59 | "confirm_selection", 60 | "remove_from_queue", 61 | "count_node", 62 | "delete_file", 63 | "gdrive_search", 64 | "select_type", 65 | "arg_usage", 66 | "uphoster", 67 | "mirror", 68 | "leech", 69 | "qb_leech", 70 | "qb_mirror", 71 | "jd_leech", 72 | "jd_mirror", 73 | "nzb_leech", 74 | "nzb_mirror", 75 | "restart_bot", 76 | "restart_notification", 77 | "confirm_restart", 78 | "restart_sessions", 79 | "imdb_search", 80 | "imdb_callback", 81 | "get_rss_menu", 82 | "rss_listener", 83 | "torrent_search", 84 | "torrent_search_update", 85 | "initiate_search_tools", 86 | "start", 87 | "start_cb", 88 | "login", 89 | "bot_help", 90 | "mediainfo", 91 | "broadcast", 92 | "ping", 93 | "log", 94 | "log_cb", 95 | "run_shell", 96 | 
"bot_stats", 97 | "stats_pages", 98 | "get_packages_version", 99 | "task_status", 100 | "status_pages", 101 | "get_users_settings", 102 | "edit_user_settings", 103 | "send_user_settings", 104 | "ytdl", 105 | "ytdl_leech", 106 | ] 107 | -------------------------------------------------------------------------------- /bot/helper/ext_utils/telegraph_helper.py: -------------------------------------------------------------------------------- 1 | from asyncio import sleep 2 | from secrets import token_hex 3 | from telegraph.aio import Telegraph 4 | from telegraph.exceptions import RetryAfterError 5 | 6 | from ... import LOGGER 7 | from ...core.config_manager import Config 8 | 9 | 10 | class TelegraphHelper: 11 | def __init__(self, author_name=None, author_url=None): 12 | self._telegraph = Telegraph(domain="graph.org") 13 | self._author_name = author_name 14 | self._author_url = author_url 15 | 16 | async def create_account(self): 17 | LOGGER.info("Creating Telegraph Account") 18 | try: 19 | await self._telegraph.create_account( 20 | short_name=token_hex(5), 21 | author_name=self._author_name, 22 | author_url=self._author_url, 23 | ) 24 | except Exception as e: 25 | LOGGER.error(f"Failed to create Telegraph Account: {e}") 26 | 27 | async def create_page(self, title, content): 28 | try: 29 | return await self._telegraph.create_page( 30 | title=title, 31 | author_name=self._author_name, 32 | author_url=self._author_url, 33 | html_content=content, 34 | ) 35 | except RetryAfterError as st: 36 | LOGGER.warning( 37 | f"Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds." 38 | ) 39 | await sleep(st.retry_after) 40 | return await self.create_page(title, content) 41 | 42 | async def edit_page(self, path, title, content): 43 | try: 44 | return await self._telegraph.edit_page( 45 | path=path, 46 | title=title, 47 | author_name=self._author_name, 48 | author_url=self._author_url, 49 | html_content=content, 50 | ) 51 | except RetryAfterError as st: 52 | LOGGER.warning( 53 | f"Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds." 
54 | ) 55 | await sleep(st.retry_after) 56 | return await self.edit_page(path, title, content) 57 | 58 | async def edit_telegraph(self, path, telegraph_content): 59 | nxt_page = 1 60 | prev_page = 0 61 | num_of_path = len(path) 62 | for content in telegraph_content: 63 | if nxt_page == 1: 64 | content += ( 65 | f'<b><a href="https://telegra.ph/{path[nxt_page]}">Next</a></b>' 66 | ) 67 | nxt_page += 1 68 | else: 69 | if prev_page <= num_of_path: 70 | content += f'<b><a href="https://telegra.ph/{path[prev_page]}">Prev</a></b>' 71 | prev_page += 1 72 | if nxt_page < num_of_path: 73 | content += f'<b> | <a href="https://telegra.ph/{path[nxt_page]}">Next</a></b>' 74 | nxt_page += 1 75 | await self.edit_page( 76 | path=path[prev_page], 77 | title="WZML-X Torrent Search", 78 | content=content, 79 | ) 80 | return 81 | 82 | 83 | telegraph = TelegraphHelper(Config.AUTHOR_NAME, Config.AUTHOR_URL) 84 | 85 | 86 | -------------------------------------------------------------------------------- /sabnzbdapi/requests.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | from json import JSONDecodeError 3 | from httpx import AsyncClient, AsyncHTTPTransport, HTTPError, Timeout 4 | from urllib3 import disable_warnings 5 | from urllib3.exceptions import InsecureRequestWarning 6 | 7 | from .exception import APIConnectionError, APIResponseError 8 | from .job_functions import JobFunctions 9 | 10 | 11 | class SabnzbdSession(AsyncClient): 12 | @wraps(AsyncClient.request) 13 | async def request(self, method: str, url: str, **kwargs): 14 | kwargs.setdefault("timeout", Timeout(connect=30, read=60, write=60, pool=None)) 15 | kwargs.setdefault("follow_redirects", True) 16 | return await super().request(method, url, **kwargs) 17 | 18 | 19 | class SabnzbdClient(JobFunctions): 20 | LOGGED_IN = False 21 | 22 | def __init__( 23 | self, 24 | host: str, 25 | api_key: str, 26 | port: str = "8070", 27 | VERIFY_CERTIFICATE: bool = False, 28 | RETRIES: int = 10, 29 | HTTPX_REQUESTS_ARGS: dict = None, 30 | ): 31 | if HTTPX_REQUESTS_ARGS is None: 32 | HTTPX_REQUESTS_ARGS = {} 33 | self._base_url = f"{host.rstrip('/')}:{port}/sabnzbd/api" 34 | self._default_params = {"apikey": api_key, "output": "json"} 35 | self._VERIFY_CERTIFICATE = VERIFY_CERTIFICATE 36 | self._RETRIES = RETRIES 37 | self._HTTPX_REQUESTS_ARGS = HTTPX_REQUESTS_ARGS 38 | self._http_session = None 39 | if not self._VERIFY_CERTIFICATE: 40 | disable_warnings(InsecureRequestWarning) 41 | super().__init__() 42 | 43 | def _session(self): 44 | if self._http_session is not None: 45 | return self._http_session 46 | 47 | transport = AsyncHTTPTransport( 48 | retries=self._RETRIES, verify=self._VERIFY_CERTIFICATE 49 | ) 50 | 51 | self._http_session = SabnzbdSession(transport=transport) 52 | self._http_session.verify = self._VERIFY_CERTIFICATE 53 | 54 | return self._http_session 55 | 56 | async def call( 57 | self, 58 | params: dict = None, 59 | api_method: str = "GET", 60 | requests_args: dict = None, 61 | **kwargs, 62 | ): 63 | if requests_args is None: 64 | requests_args = {} 65 | session = self._session() 66 | params |= kwargs 67 | requests_kwargs = {**self._HTTPX_REQUESTS_ARGS, **requests_args} 68 | retries = 5 69 | response = None 70 | for retry_count in range(retries): 71 | try: 72 | res = await session.request( 73 | method=api_method, 74 | url=self._base_url, 75 | params={**self._default_params, **params}, 76 | **requests_kwargs, 77 | ) 78 | response = res.json() 79 | break 80 | except JSONDecodeError as err: 81 | raise APIResponseError( 82 | f"Failed to decode response!: {res.text}" 83 | ) from err 84 | except HTTPError as err: 85 | if retry_count >= (retries - 1): 86 | raise APIConnectionError(str(err)) from err 87 | if response
is None: 88 | raise APIConnectionError("Failed to connect to API!") 89 | return response 90 | 91 | async def close(self): 92 | if self._http_session is not None: 93 | await self._http_session.aclose() 94 | self._http_session = None 95 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/metadata_status.py: -------------------------------------------------------------------------------- 1 | from .... import LOGGER 2 | from ...ext_utils.status_utils import ( 3 | get_readable_file_size, 4 | EngineStatus, 5 | MirrorStatus, 6 | get_readable_time, 7 | ) 8 | 9 | 10 | class MetadataStatus: 11 | def __init__(self, listener, obj, gid, status=""): 12 | self.listener = listener 13 | self._obj = obj 14 | self._gid = gid 15 | self._cstatus = status 16 | self.engine = EngineStatus().STATUS_METADATA 17 | 18 | def speed(self): 19 | try: 20 | return ( 21 | f"{get_readable_file_size(self._obj.speed_raw)}/s" 22 | if hasattr(self._obj, "speed_raw") and self._obj.speed_raw 23 | else "0 B/s" 24 | ) 25 | except Exception: 26 | return "0 B/s" 27 | 28 | def processed_bytes(self): 29 | try: 30 | return ( 31 | get_readable_file_size(self._obj.processed_bytes) 32 | if hasattr(self._obj, "processed_bytes") and self._obj.processed_bytes 33 | else "0 B" 34 | ) 35 | except Exception: 36 | return "0 B" 37 | 38 | def progress(self): 39 | try: 40 | if ( 41 | hasattr(self._obj, "progress_raw") 42 | and self._obj.progress_raw is not None 43 | ): 44 | return f"{round(self._obj.progress_raw, 2)}%" 45 | elif hasattr(self._obj, "_progress") and self._obj._progress is not None: 46 | return f"{round(self._obj._progress, 2)}%" 47 | return "0%" 48 | except Exception: 49 | return "0%" 50 | 51 | def gid(self): 52 | return self._gid 53 | 54 | def name(self): 55 | try: 56 | return getattr(self.listener, "subname", None) or self.listener.name 57 | except Exception: 58 | return "Processing..." 
59 | 60 | def size(self): 61 | try: 62 | size = getattr(self.listener, "subsize", None) or self.listener.size 63 | return get_readable_file_size(size) if size else "Unknown" 64 | except Exception: 65 | return "Unknown" 66 | 67 | def eta(self): 68 | try: 69 | if ( 70 | hasattr(self._obj, "eta_raw") 71 | and self._obj.eta_raw is not None 72 | and self._obj.eta_raw > 0 73 | ): 74 | return get_readable_time(self._obj.eta_raw) 75 | elif hasattr(self._obj, "_eta") and self._obj._eta != "-": 76 | return self._obj._eta 77 | return "-" 78 | except Exception: 79 | return "-" 80 | 81 | def status(self): 82 | if self._cstatus == "Convert": 83 | return MirrorStatus.STATUS_CONVERT 84 | else: 85 | return MirrorStatus.STATUS_METADATA 86 | 87 | def task(self): 88 | return self 89 | 90 | async def cancel_task(self): 91 | LOGGER.info(f"Cancelling {self._cstatus}: {self.listener.name}") 92 | self.listener.is_cancelled = True 93 | if ( 94 | self.listener.subproc is not None 95 | and self.listener.subproc.returncode is None 96 | ): 97 | try: 98 | self.listener.subproc.kill() 99 | except Exception: 100 | pass 101 | await self.listener.on_upload_error(f"{self._cstatus} stopped by user!") 102 | -------------------------------------------------------------------------------- /bot/helper/ext_utils/shortener_utils.py: -------------------------------------------------------------------------------- 1 | from base64 import b64encode 2 | from random import choice, random 3 | from asyncio import sleep as asleep 4 | from urllib.parse import quote 5 | 6 | from cloudscraper import create_scraper 7 | from urllib3 import disable_warnings 8 | 9 | from ... import LOGGER, shortener_dict 10 | from ...core.config_manager import Config 11 | 12 | 13 | async def short_url(longurl, attempt=0): 14 | if not shortener_dict and not Config.PROTECTED_API: 15 | return longurl 16 | if attempt >= 4: 17 | return longurl 18 | 19 | cget = create_scraper().request 20 | disable_warnings() 21 | try: 22 | if Config.PROTECTED_API: 23 | res = cget("GET", Config.PROTECTED_API, params={"url": longurl}).json() 24 | if res.get("status") == "success": 25 | return res["url"] 26 | raise Exception(f"Protected API Error: {res}") 27 | 28 | _shortener, _shortener_api = choice(list(shortener_dict.items())) 29 | if "shorte.st" in _shortener: 30 | headers = {"public-api-token": _shortener_api} 31 | data = {"urlToShorten": quote(longurl)} 32 | return cget( 33 | "PUT", "https://api.shorte.st/v1/data/url", headers=headers, data=data 34 | ).json()["shortenedUrl"] 35 | elif "linkvertise" in _shortener: 36 | url = quote(b64encode(longurl.encode("utf-8"))) 37 | linkvertise = [ 38 | f"https://link-to.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}", 39 | f"https://up-to-down.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}", 40 | f"https://direct-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}", 41 | f"https://file-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}", 42 | ] 43 | return choice(linkvertise) 44 | elif "bitly.com" in _shortener: 45 | headers = {"Authorization": f"Bearer {_shortener_api}"} 46 | return cget( 47 | "POST", 48 | "https://api-ssl.bit.ly/v4/shorten", 49 | json={"long_url": longurl}, 50 | headers=headers, 51 | ).json()["link"] 52 | elif "ouo.io" in _shortener: 53 | return cget( 54 | "GET", f"http://ouo.io/api/{_shortener_api}?s={longurl}", verify=False 55 | ).text 56 | elif "cutt.ly" in _shortener: 57 | return cget( 58 | "GET", 59 | f"http://cutt.ly/api/api.php?key={_shortener_api}&short={longurl}", 60 | 
).json()["url"]["shortLink"] 61 | else: 62 | res = cget( 63 | "GET", 64 | f"https://{_shortener}/api?api={_shortener_api}&url={quote(longurl)}", 65 | ).json() 66 | shorted = res["shortenedUrl"] 67 | if not shorted: 68 | shrtco_res = cget( 69 | "GET", f"https://api.shrtco.de/v2/shorten?url={quote(longurl)}" 70 | ).json() 71 | shrtco_link = shrtco_res["result"]["full_short_link"] 72 | res = cget( 73 | "GET", 74 | f"https://{_shortener}/api?api={_shortener_api}&url={shrtco_link}", 75 | ).json() 76 | shorted = res["shortenedUrl"] 77 | if not shorted: 78 | shorted = longurl 79 | return shorted 80 | except Exception as e: 81 | LOGGER.error(e) 82 | await asleep(0.8) 83 | attempt += 1 84 | return await short_url(longurl, attempt) 85 | -------------------------------------------------------------------------------- /plugins/speedtest_plugin.py: -------------------------------------------------------------------------------- 1 | from speedtest import Speedtest, ConfigRetrievalError 2 | 3 | from pyrogram import Client 4 | from pyrogram.filters import command 5 | from pyrogram.handlers import MessageHandler 6 | from pyrogram.types import Message 7 | 8 | from bot.core.plugin_manager import PluginBase, PluginInfo 9 | from bot.helper.ext_utils.bot_utils import new_task, sync_to_async 10 | from bot.helper.ext_utils.status_utils import get_readable_file_size 11 | from bot.helper.telegram_helper.filters import CustomFilters 12 | from bot.helper.telegram_helper.message_utils import ( 13 | send_message, 14 | edit_message, 15 | delete_message, 16 | ) 17 | 18 | 19 | class SpeedtestPlugin(PluginBase): 20 | PLUGIN_INFO = PluginInfo( 21 | name="speedtest_plugin", 22 | version="1.0.0", 23 | author="WZML-X", 24 | description="Speedtest plugin for testing internet speed", 25 | enabled=True, 26 | handlers=[], 27 | commands=["speedtest"], 28 | dependencies=[], 29 | ) 30 | 31 | async def on_load(self) -> bool: 32 | from bot import LOGGER 33 | 34 | LOGGER.info("Speedtest plugin loaded") 35 | return True 36 | 37 | async def on_unload(self) -> bool: 38 | from bot import LOGGER 39 | 40 | LOGGER.info("Speedtest plugin unloaded") 41 | return True 42 | 43 | async def on_enable(self) -> bool: 44 | from bot import LOGGER 45 | 46 | LOGGER.info("Speedtest plugin enabled") 47 | return True 48 | 49 | async def on_disable(self) -> bool: 50 | from bot import LOGGER 51 | 52 | LOGGER.info("Speedtest plugin disabled") 53 | return True 54 | 55 | 56 | @new_task 57 | async def speedtest_command(client: Client, message: Message): 58 | speed = await send_message(message, "Initiating Speedtest...") 59 | try: 60 | speed_results = await sync_to_async(Speedtest) 61 | await sync_to_async(speed_results.get_best_server) 62 | await sync_to_async(speed_results.download) 63 | await sync_to_async(speed_results.upload) 64 | except ConfigRetrievalError: 65 | await edit_message( 66 | speed, 67 | "ERROR: Can't connect to Server at the Moment, Try Again Later !", 68 | ) 69 | return 70 | speed_results.results.share() 71 | result = speed_results.results.dict() 72 | string_speed = f""" 73 | ➲ SPEEDTEST INFO 74 | ┠ Upload: {get_readable_file_size(result['upload'] / 8)}/s 75 | ┠ Download: {get_readable_file_size(result['download'] / 8)}/s 76 | ┠ Ping: {result['ping']} ms 77 | ┠ Time: {result['timestamp']} 78 | ┠ Data Sent: {get_readable_file_size(int(result['bytes_sent']))} 79 | ┖ Data Received: {get_readable_file_size(int(result['bytes_received']))} 80 | 81 | ➲ SPEEDTEST SERVER 82 | ┠ Name: {result['server']['name']} 83 | ┠ Country: 
{result['server']['country']}, {result['server']['cc']} 84 | ┠ Sponsor: {result['server']['sponsor']} 85 | ┠ Latency: {result['server']['latency']} 86 | ┠ Latitude: {result['server']['lat']} 87 | ┖ Longitude: {result['server']['lon']} 88 | """ 89 | try: 90 | await send_message(message, string_speed, photo=result["share"]) 91 | await delete_message(speed) 92 | except Exception as e: 93 | from bot import LOGGER 94 | 95 | LOGGER.error(str(e)) 96 | await edit_message(speed, string_speed) 97 | -------------------------------------------------------------------------------- /bot/helper/listeners/direct_listener.py: -------------------------------------------------------------------------------- 1 | from asyncio import sleep, TimeoutError 2 | from aiohttp.client_exceptions import ClientError 3 | 4 | from ... import LOGGER 5 | from ...core.torrent_manager import TorrentManager, aria2_name 6 | 7 | 8 | class DirectListener: 9 | def __init__(self, path, listener, a2c_opt): 10 | self.listener = listener 11 | self._path = path 12 | self._a2c_opt = a2c_opt 13 | self._proc_bytes = 0 14 | self._failed = 0 15 | self.download_task = None 16 | self.name = self.listener.name 17 | 18 | @property 19 | def processed_bytes(self): 20 | if self.download_task: 21 | return self._proc_bytes + int( 22 | self.download_task.get("completedLength", "0") 23 | ) 24 | return self._proc_bytes 25 | 26 | @property 27 | def speed(self): 28 | return ( 29 | int(self.download_task.get("downloadSpeed", "0")) 30 | if self.download_task 31 | else 0 32 | ) 33 | 34 | async def download(self, contents): 35 | self.is_downloading = True 36 | for content in contents: 37 | if self.listener.is_cancelled: 38 | break 39 | if content["path"]: 40 | self._a2c_opt["dir"] = f"{self._path}/{content['path']}" 41 | else: 42 | self._a2c_opt["dir"] = self._path 43 | filename = content["filename"] 44 | self._a2c_opt["out"] = filename 45 | try: 46 | gid = await TorrentManager.aria2.addUri( 47 | uris=[content["url"]], options=self._a2c_opt, position=0 48 | ) 49 | except (TimeoutError, ClientError, Exception) as e: 50 | self._failed += 1 51 | LOGGER.error(f"Unable to download {filename} due to: {e}") 52 | continue 53 | self.download_task = await TorrentManager.aria2.tellStatus(gid) 54 | while True: 55 | if self.listener.is_cancelled: 56 | if self.download_task: 57 | await TorrentManager.aria2_remove(self.download_task) 58 | break 59 | self.download_task = await TorrentManager.aria2.tellStatus(gid) 60 | if error_message := self.download_task.get("errorMessage"): 61 | self._failed += 1 62 | LOGGER.error( 63 | f"Unable to download {aria2_name(self.download_task)} due to: {error_message}" 64 | ) 65 | await TorrentManager.aria2_remove(self.download_task) 66 | break 67 | elif self.download_task.get("status", "") == "complete": 68 | self._proc_bytes += int(self.download_task.get("totalLength", "0")) 69 | await TorrentManager.aria2_remove(self.download_task) 70 | break 71 | await sleep(1) 72 | self.download_task = None 73 | if self.listener.is_cancelled: 74 | return 75 | if self._failed == len(contents): 76 | await self.listener.on_download_error("All files are failed to download!") 77 | return 78 | await self.listener.on_download_complete() 79 | return 80 | 81 | async def cancel_task(self): 82 | self.listener.is_cancelled = True 83 | LOGGER.info(f"Cancelling Download: {self.listener.name}") 84 | await self.listener.on_download_error("Download Cancelled by User!") 85 | if self.download_task: 86 | await TorrentManager.aria2_remove(self.download_task) 87 | 
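
A note on the payload this listener consumes: download() indexes each entry as content["path"], content["filename"] and content["url"], and add_direct_download() in direct_downloader.py above reads details["contents"], details["total_size"], details["title"] and the optional details["header"]. A minimal sketch of that structure, with hypothetical names and URLs (not values the bot actually emits):

# Sketch only: the key names mirror how DirectListener.download() and
# add_direct_download() index the data above; every value is a placeholder.
details = {
    "title": "Example Pack",  # used as listener.name when none is set
    "total_size": 734003200,  # total bytes; becomes listener.size
    "header": "Referer: https://example.com",  # optional, forwarded as a2c_opt["header"]
    "contents": [
        {"path": "Season 1", "filename": "episode_01.mkv", "url": "https://example.com/dl/ep01"},
        {"path": "", "filename": "readme.txt", "url": "https://example.com/dl/readme"},
    ],
}
# Each entry becomes one aria2 addUri() call: "path" selects the subfolder
# under the task directory (empty string means the task root) and "filename"
# is passed to aria2 as the "out" option.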
-------------------------------------------------------------------------------- /bot/helper/listeners/jdownloader_listener.py: -------------------------------------------------------------------------------- 1 | from asyncio import sleep 2 | 3 | from ... import intervals, jd_listener_lock, jd_downloads 4 | from ..ext_utils.bot_utils import new_task 5 | from ...core.jdownloader_booter import jdownloader 6 | from ..ext_utils.status_utils import get_task_by_gid 7 | 8 | 9 | @new_task 10 | async def remove_download(gid): 11 | if intervals["stopAll"]: 12 | return 13 | await jdownloader.device.downloads.remove_links( 14 | package_ids=jd_downloads[gid]["ids"] 15 | ) 16 | if task := await get_task_by_gid(gid): 17 | await task.listener.on_download_error("Download removed manually!") 18 | async with jd_listener_lock: 19 | del jd_downloads[gid] 20 | 21 | 22 | @new_task 23 | async def _on_download_complete(gid): 24 | if task := await get_task_by_gid(gid): 25 | if task.listener.select: 26 | async with jd_listener_lock: 27 | await jdownloader.device.downloads.cleanup( 28 | "DELETE_DISABLED", 29 | "REMOVE_LINKS_AND_DELETE_FILES", 30 | "ALL", 31 | package_ids=jd_downloads[gid]["ids"], 32 | ) 33 | await task.listener.on_download_complete() 34 | if intervals["stopAll"]: 35 | return 36 | async with jd_listener_lock: 37 | if gid in jd_downloads: 38 | await jdownloader.device.downloads.remove_links( 39 | package_ids=jd_downloads[gid]["ids"], 40 | ) 41 | del jd_downloads[gid] 42 | 43 | 44 | @new_task 45 | async def _jd_listener(): 46 | while True: 47 | await sleep(3) 48 | async with jd_listener_lock: 49 | if len(jd_downloads) == 0: 50 | intervals["jd"] = "" 51 | break 52 | try: 53 | packages = await jdownloader.device.downloads.query_packages( 54 | [{"finished": True, "saveTo": True}] 55 | ) 56 | except Exception: 57 | continue 58 | 59 | all_packages = {pack["uuid"]: pack for pack in packages} 60 | for d_gid, d_dict in list(jd_downloads.items()): 61 | if d_dict["status"] == "down": 62 | for index, pid in enumerate(d_dict["ids"]): 63 | if pid not in all_packages: 64 | del jd_downloads[d_gid]["ids"][index] 65 | if len(jd_downloads[d_gid]["ids"]) == 0: 66 | path = jd_downloads[d_gid]["path"] 67 | jd_downloads[d_gid]["ids"] = [ 68 | uid 69 | for uid, pk in all_packages.items() 70 | if pk["saveTo"].startswith(path) 71 | ] 72 | if len(jd_downloads[d_gid]["ids"]) == 0: 73 | await remove_download(d_gid) 74 | 75 | if completed_packages := [ 76 | pack["uuid"] for pack in packages if pack.get("finished", False) 77 | ]: 78 | for d_gid, d_dict in list(jd_downloads.items()): 79 | if d_dict["status"] == "down": 80 | is_finished = all( 81 | did in completed_packages for did in d_dict["ids"] 82 | ) 83 | if is_finished: 84 | jd_downloads[d_gid]["status"] = "done" 85 | await _on_download_complete(d_gid) 86 | 87 | 88 | async def on_download_start(): 89 | async with jd_listener_lock: 90 | if not intervals["jd"]: 91 | intervals["jd"] = await _jd_listener() 92 | -------------------------------------------------------------------------------- /bot/helper/telegram_helper/bot_commands.py: -------------------------------------------------------------------------------- 1 | from ...core.config_manager import Config 2 | from ...core.plugin_manager import get_plugin_manager 3 | 4 | 5 | class BotCommands: 6 | StartCommand = "start" 7 | LoginCommand = "login" 8 | 9 | _static_commands = { 10 | "Mirror": ["mirror", "m"], 11 | "QbMirror": ["qbmirror", "qm"], 12 | "JdMirror": ["jdmirror", "jm"], 13 | "Ytdl": ["ytdl", "y"], 14 | "UpHoster": 
["uphoster", "up"], 15 | "NzbMirror": ["nzbmirror", "nm"], 16 | "Leech": ["leech", "l"], 17 | "QbLeech": ["qbleech", "ql"], 18 | "JdLeech": ["jdleech", "jl"], 19 | "YtdlLeech": ["ytdlleech", "yl"], 20 | "NzbLeech": ["nzbleech", "nl"], 21 | "Clone": ["clone", "cl"], 22 | "Count": "count", 23 | "Delete": "del", 24 | "List": "list", 25 | "Search": "search", 26 | "Users": "users", 27 | "CancelTask": ["cancel", "c"], 28 | "CancelAll": ["cancelall", "call"], 29 | "ForceStart": ["forcestart", "fs"], 30 | "Status": ["status", "s", "statusall"], 31 | "MediaInfo": ["mediainfo", "mi"], 32 | "Ping": "ping", 33 | "Restart": ["restart", "r", "restartall"], 34 | "RestartSessions": ["restartses", "rses"], 35 | "Broadcast": ["broadcast", "bc"], 36 | "Stats": ["stats", "st"], 37 | "Help": ["help", "h"], 38 | "Log": "log", 39 | "Shell": "shell", 40 | "AExec": "aexec", 41 | "Exec": "exec", 42 | "ClearLocals": "clearlocals", 43 | "IMDB": "imdb", 44 | "Rss": "rss", 45 | "Authorize": ["authorize", "a"], 46 | "UnAuthorize": ["unauthorize", "ua"], 47 | "AddSudo": ["addsudo", "as"], 48 | "RmSudo": ["rmsudo", "rs"], 49 | "BotSet": ["bsetting", "bs"], 50 | "UserSet": ["usetting", "us"], 51 | "Select": ["select", "sel"], 52 | "NzbSearch": ["nzbsearch", "ns"], 53 | "Plugins": "plugins", 54 | } 55 | 56 | @classmethod 57 | def get_commands(cls): 58 | commands = cls._static_commands.copy() 59 | 60 | plugin_manager = get_plugin_manager() 61 | if plugin_manager: 62 | for plugin_info in plugin_manager.list_plugins(): 63 | if plugin_info.enabled and plugin_info.commands: 64 | for cmd in plugin_info.commands: 65 | if cmd == "speedtest": 66 | commands["SpeedTest"] = ["speedtest", "stest"] 67 | elif cmd == "stest": 68 | if "SpeedTest" not in commands: 69 | commands["SpeedTest"] = ["speedtest", "stest"] 70 | elif "stest" not in commands["SpeedTest"]: 71 | commands["SpeedTest"].append("stest") 72 | 73 | return commands 74 | 75 | @classmethod 76 | def _build_command_vars(cls): 77 | commands = cls.get_commands() 78 | 79 | for key, cmds in commands.items(): 80 | setattr( 81 | cls, 82 | f"{key}Command", 83 | ( 84 | [ 85 | ( 86 | f"{cmd}{Config.CMD_SUFFIX}" 87 | if cmd not in ["restartall", "statusall"] 88 | else cmd 89 | ) 90 | for cmd in cmds 91 | ] 92 | if isinstance(cmds, list) 93 | else f"{cmds}{Config.CMD_SUFFIX}" 94 | ), 95 | ) 96 | 97 | @classmethod 98 | def refresh_commands(cls): 99 | cls._build_command_vars() 100 | 101 | 102 | BotCommands._build_command_vars() 103 | -------------------------------------------------------------------------------- /bot/modules/force_start.py: -------------------------------------------------------------------------------- 1 | from .. 
import ( 2 | task_dict, 3 | task_dict_lock, 4 | user_data, 5 | queued_up, 6 | queued_dl, 7 | queue_dict_lock, 8 | ) 9 | from ..core.config_manager import Config 10 | from ..helper.ext_utils.bot_utils import new_task 11 | from ..helper.ext_utils.status_utils import get_task_by_gid 12 | from ..helper.telegram_helper.bot_commands import BotCommands 13 | from ..helper.telegram_helper.message_utils import send_message 14 | from ..helper.ext_utils.task_manager import start_dl_from_queued, start_up_from_queued 15 | 16 | 17 | @new_task 18 | async def remove_from_queue(_, message): 19 | user_id = (message.from_user or message.sender_chat).id 20 | msg = message.text.split() 21 | status = msg[1] if len(msg) > 1 and msg[1] in ["fd", "fu"] else "" 22 | if (status and len(msg) > 2) or (not status and len(msg) > 1): 23 | gid = msg[2] if status else msg[1] 24 | task = await get_task_by_gid(gid) 25 | if task is None: 26 | await send_message(message, f"GID: {gid} Not Found.") 27 | return 28 | elif reply_to_id := message.reply_to_message_id: 29 | async with task_dict_lock: 30 | task = task_dict.get(reply_to_id) 31 | if task is None: 32 | await send_message(message, "This is not an active task!") 33 | return 34 | elif len(msg) in {1, 2}: 35 | msg = f"""Reply to the command message that was used to start the download/upload. 36 | /{BotCommands.ForceStartCommand[0]} fd (to remove it from the download queue), fu (to remove it from the upload queue), or nothing to remove it from both queues. 37 | You can also send /{BotCommands.ForceStartCommand[0]} GID fu|fd, or only the GID, to force start a task by removing it from the queue! 38 | Examples: 39 | /{BotCommands.ForceStartCommand[1]} GID fu (force upload) 40 | /{BotCommands.ForceStartCommand[1]} GID (force download and upload) 41 | By replying to the task command: 42 | /{BotCommands.ForceStartCommand[1]} (force download and upload) 43 | /{BotCommands.ForceStartCommand[1]} fd (force download) 44 | """ 45 | await send_message(message, msg) 46 | return 47 | if ( 48 | Config.OWNER_ID != user_id 49 | and task.listener.user_id != user_id 50 | and (user_id not in user_data or not user_data[user_id].get("SUDO")) 51 | ): 52 | await send_message(message, "This task is not for you!") 53 | return 54 | listener = task.listener 55 | msg = "" 56 | async with queue_dict_lock: 57 | if status == "fu": 58 | listener.force_upload = True 59 | if listener.mid in queued_up: 60 | await start_up_from_queued(listener.mid) 61 | msg = "Task has been force started to upload!" 62 | else: 63 | msg = "Force upload enabled for this task!" 64 | elif status == "fd": 65 | listener.force_download = True 66 | if listener.mid in queued_dl: 67 | await start_dl_from_queued(listener.mid) 68 | msg = "Task has been force started to download only!" 69 | else: 70 | msg = "This task is not in the download queue!" 71 | else: 72 | listener.force_download = True 73 | listener.force_upload = True 74 | if listener.mid in queued_up: 75 | await start_up_from_queued(listener.mid) 76 | msg = "Task has been force started to upload!" 77 | elif listener.mid in queued_dl: 78 | await start_dl_from_queued(listener.mid) 79 | msg = "Task has been force started to download; upload will start once the download finishes!" 80 | else: 81 | msg = "This task is not in any queue!"
82 | if msg: 83 | await send_message(message, msg) 84 | -------------------------------------------------------------------------------- /bot/modules/exec.py: -------------------------------------------------------------------------------- 1 | from aiofiles import open as aiopen 2 | from contextlib import redirect_stdout, suppress 3 | from io import StringIO, BytesIO 4 | from os import path as ospath, getcwd, chdir 5 | from textwrap import indent 6 | from traceback import format_exc 7 | from re import match 8 | 9 | from .. import LOGGER 10 | from ..core.tg_client import TgClient 11 | from ..helper.ext_utils.bot_utils import sync_to_async, new_task 12 | from ..helper.telegram_helper.message_utils import send_file, send_message 13 | 14 | namespaces = {} 15 | 16 | 17 | def namespace_of(message): 18 | if message.chat.id not in namespaces: 19 | namespaces[message.chat.id] = { 20 | "__name__": "__main__", 21 | "__file__": "", 22 | "__builtins__": globals()["__builtins__"], 23 | "bot": TgClient.bot, 24 | "message": message, 25 | "user": message.from_user or message.sender_chat, 26 | "chat": message.chat, 27 | } 28 | 29 | return namespaces[message.chat.id] 30 | 31 | 32 | def log_input(message): 33 | LOGGER.info( 34 | f"IN: {message.text} (user={(message.from_user or message.sender_chat).id}, chat={message.chat.id})" 35 | ) 36 | 37 | 38 | async def send(msg, message): 39 | if len(str(msg)) > 2000: 40 | with BytesIO(str.encode(msg)) as out_file: 41 | out_file.name = "output.txt" 42 | await send_file(message, out_file) 43 | else: 44 | LOGGER.info(f"OUT: '{msg}'") 45 | if not msg or msg == "\n": 46 | msg = "MessageEmpty" 47 | elif not bool(match(r"<(spoiler|b|i|code|s|u|/a)>", msg)): 48 | msg = f"{msg}" 49 | await send_message(message, msg) 50 | 51 | 52 | @new_task 53 | async def aioexecute(_, message): 54 | await send(await do("aexec", message), message) 55 | 56 | 57 | @new_task 58 | async def execute(_, message): 59 | await send(await do("exec", message), message) 60 | 61 | 62 | def cleanup_code(code): 63 | if code.startswith("```") and code.endswith("```"): 64 | return "\n".join(code.split("\n")[1:-1]) 65 | return code.strip("` \n") 66 | 67 | 68 | async def do(func, message): 69 | log_input(message) 70 | content = message.text.split(maxsplit=1)[-1] 71 | body = cleanup_code(content) 72 | env = namespace_of(message) 73 | 74 | chdir(getcwd()) 75 | async with aiopen(ospath.join(getcwd(), "bot/modules/temp.txt"), "w") as temp: 76 | await temp.write(body) 77 | 78 | stdout = StringIO() 79 | 80 | try: 81 | if func == "exec": 82 | exec(f"def func():\n{indent(body, ' ')}", env) 83 | else: 84 | exec(f"async def func():\n{indent(body, ' ')}", env) 85 | except Exception as e: 86 | return f"{e.__class__.__name__}: {e}" 87 | 88 | rfunc = env["func"] 89 | 90 | try: 91 | with redirect_stdout(stdout): 92 | func_return = ( 93 | await sync_to_async(rfunc) if func == "exec" else await rfunc() 94 | ) 95 | except Exception: 96 | value = stdout.getvalue() 97 | return f"{value}{format_exc()}" 98 | else: 99 | value = stdout.getvalue() 100 | result = None 101 | if func_return is None: 102 | if value: 103 | result = f"{value}" 104 | else: 105 | with suppress(Exception): 106 | result = f"{repr(await sync_to_async(eval, body, env))}" 107 | else: 108 | result = f"{value}{func_return}" 109 | if result: 110 | return result 111 | 112 | 113 | @new_task 114 | async def clear(_, message): 115 | log_input(message) 116 | global namespaces 117 | if message.chat.id in namespaces: 118 | del namespaces[message.chat.id] 119 | await 
send("Locals Cleared.", message) 120 | -------------------------------------------------------------------------------- /bot/core/jdownloader_booter.py: -------------------------------------------------------------------------------- 1 | from json import dumps 2 | from random import randint 3 | from re import match 4 | 5 | from aiofiles import open as aiopen 6 | from aiofiles.os import listdir, makedirs, path, rename 7 | from aioshutil import rmtree 8 | 9 | from myjd import MyJdApi 10 | 11 | from .. import LOGGER 12 | from ..helper.ext_utils.bot_utils import cmd_exec, new_task 13 | from .config_manager import Config 14 | from .tg_client import TgClient 15 | 16 | 17 | class JDownloader(MyJdApi): 18 | def __init__(self): 19 | super().__init__() 20 | self._username = "" 21 | self._password = "" 22 | self._device_name = "" 23 | self.is_connected = False 24 | self.error = "JDownloader Credentials not provided!" 25 | 26 | async def _write_config(self, path, data): 27 | async with aiopen(path, "w") as f: 28 | await f.write(dumps(data)) 29 | 30 | @new_task 31 | async def boot(self): 32 | await cmd_exec(["pkill", "-9", "-f", "java"]) 33 | if not Config.JD_EMAIL or not Config.JD_PASS: 34 | self.is_connected = False 35 | self.error = "JDownloader Credentials not provided!" 36 | return 37 | self.error = "Connecting... Try agin after couple of seconds" 38 | self._device_name = f"{randint(0, 1000)}@{TgClient.BNAME}" 39 | if await path.exists("/JDownloader/logs"): 40 | LOGGER.info( 41 | "Starting JDownloader... This might take up to 10 sec and might restart once if update available!" 42 | ) 43 | else: 44 | LOGGER.info( 45 | "Starting JDownloader... This might take up to 8 sec and might restart once after build!" 46 | ) 47 | jdata = { 48 | "autoconnectenabledv2": True, 49 | "password": Config.JD_PASS, 50 | "devicename": f"{self._device_name}", 51 | "email": Config.JD_EMAIL, 52 | } 53 | remote_data = { 54 | "localapiserverheaderaccesscontrollalloworigin": "", 55 | "deprecatedapiport": 3128, 56 | "localapiserverheaderxcontenttypeoptions": "nosniff", 57 | "localapiserverheaderxframeoptions": "DENY", 58 | "externinterfaceenabled": True, 59 | "deprecatedapilocalhostonly": True, 60 | "localapiserverheaderreferrerpolicy": "no-referrer", 61 | "deprecatedapienabled": True, 62 | "localapiserverheadercontentsecuritypolicy": "default-src 'self'", 63 | "jdanywhereapienabled": True, 64 | "externinterfacelocalhostonly": False, 65 | "localapiserverheaderxxssprotection": "1; mode=block", 66 | } 67 | await makedirs("/JDownloader/cfg", exist_ok=True) 68 | await self._write_config( 69 | "/JDownloader/cfg/org.jdownloader.api.myjdownloader.MyJDownloaderSettings.json", 70 | jdata, 71 | ) 72 | await self._write_config( 73 | "/JDownloader/cfg/org.jdownloader.api.RemoteAPIConfig.json", 74 | remote_data, 75 | ) 76 | if not await path.exists("/JDownloader/JDownloader.jar"): 77 | pattern = r"JDownloader\.jar\.backup.\d$" 78 | for filename in await listdir("/JDownloader"): 79 | if match(pattern, filename): 80 | await rename( 81 | f"/JDownloader/{filename}", "/JDownloader/JDownloader.jar" 82 | ) 83 | break 84 | await rmtree("/JDownloader/update") 85 | await rmtree("/JDownloader/tmp") 86 | cmd = "cpulimit -l 20 -- java -Xms256m -Xmx500m -Dsun.jnu.encoding=UTF-8 -Dfile.encoding=UTF-8 -Djava.awt.headless=true -jar /JDownloader/JDownloader.jar" 87 | self.is_connected = True 88 | _, __, code = await cmd_exec(cmd, shell=True) 89 | self.is_connected = False 90 | if code != -9: 91 | await self.boot() 92 | 93 | 94 | jdownloader = JDownloader() 
95 | -------------------------------------------------------------------------------- /bot/modules/gd_search.py: -------------------------------------------------------------------------------- 1 | from .. import LOGGER, user_data 2 | from ..helper.ext_utils.bot_utils import ( 3 | sync_to_async, 4 | get_telegraph_list, 5 | new_task, 6 | ) 7 | from ..helper.mirror_leech_utils.gdrive_utils.search import GoogleDriveSearch 8 | from ..helper.telegram_helper.button_build import ButtonMaker 9 | from ..helper.telegram_helper.message_utils import send_message, edit_message 10 | 11 | 12 | async def list_buttons(user_id, is_recursive=True, user_token=False): 13 | buttons = ButtonMaker() 14 | buttons.data_button( 15 | f"{'✅️' if user_token else '❌️'} User Token", 16 | f"list_types {user_id} ut {is_recursive} {user_token}", 17 | "header", 18 | ) 19 | buttons.data_button( 20 | f"{'✅️' if is_recursive else '❌️'} Recursive", 21 | f"list_types {user_id} rec {is_recursive} {user_token}", 22 | "header", 23 | ) 24 | buttons.data_button( 25 | "Folders", f"list_types {user_id} folders {is_recursive} {user_token}" 26 | ) 27 | buttons.data_button( 28 | "Files", f"list_types {user_id} files {is_recursive} {user_token}" 29 | ) 30 | buttons.data_button( 31 | "Both", f"list_types {user_id} both {is_recursive} {user_token}" 32 | ) 33 | 34 | buttons.data_button("Cancel", f"list_types {user_id} cancel", "footer") 35 | return buttons.build_menu(2) 36 | 37 | 38 | async def _list_drive(key, message, item_type, is_recursive, user_token, user_id): 39 | LOGGER.info(f"GD Listing: {key}") 40 | if user_token: 41 | user_dict = user_data.get(user_id, {}) 42 | target_id = user_dict.get("GDRIVE_ID", "") or "" 43 | LOGGER.info(f"GD Listing target id: {target_id}") 44 | else: 45 | target_id = "" 46 | telegraph_content, contents_no = await sync_to_async( 47 | GoogleDriveSearch(is_recursive=is_recursive, item_type=item_type).drive_list, 48 | key, 49 | target_id, 50 | user_id, 51 | ) 52 | if telegraph_content: 53 | try: 54 | button = await get_telegraph_list(telegraph_content) 55 | except Exception as e: 56 | await edit_message(message, e) 57 | return 58 | msg = f"Found {contents_no} result(s) for {key}" 59 | await edit_message(message, msg, button) 60 | else: 61 | await edit_message(message, f"No result found for {key}") 62 | 63 | 64 | @new_task 65 | async def select_type(_, query): 66 | user_id = query.from_user.id 67 | message = query.message 68 | key = message.reply_to_message.text.split(maxsplit=1)[1].strip() 69 | data = query.data.split() 70 | if user_id != int(data[1]): 71 | return await query.answer(text="Not Yours!", show_alert=True) 72 | elif data[2] == "rec": 73 | await query.answer() 74 | is_recursive = not bool(eval(data[3])) 75 | buttons = await list_buttons(user_id, is_recursive, eval(data[4])) 76 | return await edit_message(message, "Choose list options:", buttons) 77 | elif data[2] == "ut": 78 | await query.answer() 79 | user_token = not bool(eval(data[4])) 80 | buttons = await list_buttons(user_id, eval(data[3]), user_token) 81 | return await edit_message(message, "Choose list options:", buttons) 82 | elif data[2] == "cancel": 83 | await query.answer() 84 | return await edit_message(message, "List has been canceled!") 85 | await query.answer() 86 | item_type = data[2] 87 | is_recursive = eval(data[3]) 88 | user_token = eval(data[4]) 89 | await edit_message(message, f"Searching for {key}...") 90 | await _list_drive(key, message, item_type, is_recursive, user_token, user_id) 91 | 92 | 93 | @new_task 94 | async def gdrive_search(_, message): 95 | if len(message.text.split()) == 1: 96 | return await send_message( 97 | message, "Send a search query along with the list command" 98 | ) 99 | user_id = message.from_user.id 100 | buttons = await list_buttons(user_id) 101 | await send_message(message, "Choose list options:", buttons) 102 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/uphoster_utils/multi_upload.py: -------------------------------------------------------------------------------- 1 | from asyncio import gather 2 | from logging import getLogger 3 | 4 | from bot.helper.mirror_leech_utils.uphoster_utils.gofile_utils.upload import ( 5 | GoFileUpload, 6 | ) 7 | from bot.helper.mirror_leech_utils.uphoster_utils.buzzheavier_utils.upload import ( 8 | BuzzHeavierUpload, 9 | ) 10 | from bot.helper.mirror_leech_utils.uphoster_utils.pixeldrain_utils.upload import ( 11 | PixelDrainUpload, 12 | ) 13 | 14 | LOGGER = getLogger(__name__) 15 | 16 | 17 | class MultiUphosterUpload: 18 | def __init__(self, listener, path, services): 19 | self.listener = listener 20 | self.path = path 21 | self.services = services 22 | self.uploaders = [] 23 | self._processed_bytes = 0 24 | self._speed = 0 25 | self.is_cancelled = False 26 | self.results = {} 27 | self.failed = [] 28 | 29 | for service in services: 30 | if service == "gofile": 31 | self.uploaders.append(GoFileUpload(ProxyListener(self, "gofile"), path)) 32 | elif service == "buzzheavier": 33 | self.uploaders.append( 34 | BuzzHeavierUpload(ProxyListener(self, "buzzheavier"), path) 35 | ) 36 | elif service == "pixeldrain": 37 | self.uploaders.append( 38 | PixelDrainUpload(ProxyListener(self, "pixeldrain"), path) 39 | ) 40 | 41 | @property 42 | def speed(self): 43 | return sum(u.speed for u in self.uploaders) 44 | 45 | @property 46 | def processed_bytes(self): 47 | if not self.uploaders: 48 | return 0 49 | return sum(u.processed_bytes for u in self.uploaders) / len(self.uploaders) 50 | 51 | async def upload(self): 52 | tasks = [u.upload() for u in self.uploaders] 53 | await gather(*tasks) 54 | 55 | async def cancel_task(self): 56 | self.is_cancelled = True 57 | tasks = [u.cancel_task() for u in self.uploaders] 58 | await gather(*tasks) 59 | 60 | async def on_upload_complete( 61 | self, service, link, files, folders, mime_type, dir_id 62 | ): 63 | self.results[service] = { 64 | "link": link, 65 | "files": files, 66 | "folders": folders, 67 | "mime_type": mime_type, 68 | "dir_id": dir_id, 69 | } 70 | await self._check_completion() 71 | 72 | async def on_upload_error(self, service, error): 73 | LOGGER.error(f"Upload failed for {service}: {error}") 74 | self.failed.append(service) 75 | self.results[service] = {"error": error} 76 | await self._check_completion() 77 | 78 | async def _check_completion(self): 79 | if len(self.results) == len(self.uploaders): 80 | if len(self.failed) == len(self.uploaders): 81 | await self.listener.on_upload_error("All uploads failed.") 82 | else: 83 | successful_result = next( 84 | v for k, v in self.results.items() if "error" not in v 85 | ) 86 | await self.listener.on_upload_complete( 87 | self.results, 88 | successful_result["files"], 89 | successful_result["folders"], 90 | successful_result["mime_type"], 91 | successful_result["dir_id"], 92 | ) 93 | 94 | 95 | class ProxyListener: 96 | def __init__(self, multi_uploader, service): 97 |
self.multi_uploader = multi_uploader 98 | self.service = service 99 | self.is_cancelled = False 100 | 101 | def __getattr__(self, name): 102 | return getattr(self.multi_uploader.listener, name) 103 | 104 | async def on_upload_complete(self, link, files, folders, mime_type, dir_id=""): 105 | await self.multi_uploader.on_upload_complete( 106 | self.service, link, files, folders, mime_type, dir_id 107 | ) 108 | 109 | async def on_upload_error(self, error): 110 | await self.multi_uploader.on_upload_error(self.service, error) 111 | -------------------------------------------------------------------------------- /bot/__main__.py: -------------------------------------------------------------------------------- 1 | # ruff: noqa: E402 2 | 3 | from .core.config_manager import Config 4 | 5 | Config.load() 6 | 7 | from datetime import datetime 8 | from logging import Formatter 9 | from time import localtime 10 | 11 | from pytz import timezone 12 | 13 | from . import LOGGER, bot_loop 14 | from .core.tg_client import TgClient 15 | 16 | 17 | async def main(): 18 | from asyncio import gather 19 | 20 | from .core.startup import ( 21 | load_configurations, 22 | load_settings, 23 | save_settings, 24 | update_aria2_options, 25 | update_nzb_options, 26 | update_qb_options, 27 | update_variables, 28 | ) 29 | 30 | await load_settings() 31 | 32 | try: 33 | tz = timezone(Config.TIMEZONE) 34 | except Exception: 35 | from pytz import utc 36 | 37 | tz = utc 38 | 39 | def changetz(*args): 40 | try: 41 | return datetime.now(tz).timetuple() 42 | except Exception: 43 | return localtime() 44 | 45 | Formatter.converter = changetz 46 | 47 | await gather( 48 | TgClient.start_bot(), TgClient.start_user(), TgClient.start_helper_bots() 49 | ) 50 | await gather(load_configurations(), update_variables()) 51 | 52 | from .core.torrent_manager import TorrentManager 53 | 54 | await TorrentManager.initiate() 55 | await gather( 56 | update_qb_options(), 57 | update_aria2_options(), 58 | update_nzb_options(), 59 | ) 60 | from .core.jdownloader_booter import jdownloader 61 | from .helper.ext_utils.files_utils import clean_all 62 | from .helper.ext_utils.telegraph_helper import telegraph 63 | from .helper.mirror_leech_utils.rclone_utils.serve import rclone_serve_booter 64 | from .modules import ( 65 | get_packages_version, 66 | initiate_search_tools, 67 | restart_notification, 68 | ) 69 | 70 | await gather( 71 | save_settings(), 72 | jdownloader.boot(), 73 | clean_all(), 74 | initiate_search_tools(), 75 | get_packages_version(), 76 | restart_notification(), 77 | telegraph.create_account(), 78 | rclone_serve_booter(), 79 | ) 80 | 81 | 82 | bot_loop.run_until_complete(main()) 83 | 84 | from .core.handlers import add_handlers 85 | from .helper.ext_utils.bot_utils import create_help_buttons 86 | from .helper.listeners.aria2_listener import add_aria2_callbacks 87 | 88 | add_aria2_callbacks() 89 | create_help_buttons() 90 | add_handlers() 91 | 92 | from .core.plugin_manager import get_plugin_manager 93 | from .modules.plugin_manager import register_plugin_commands 94 | 95 | plugin_manager = get_plugin_manager() 96 | plugin_manager.bot = TgClient.bot 97 | register_plugin_commands() 98 | 99 | from pyrogram.filters import regex 100 | from pyrogram.handlers import CallbackQueryHandler 101 | 102 | from .core.handlers import add_handlers 103 | from .helper.ext_utils.bot_utils import new_task 104 | from .helper.telegram_helper.filters import CustomFilters 105 | from .helper.telegram_helper.message_utils import ( 106 | delete_message, 107 | 
107 | edit_message, 108 | send_message, 109 | ) 110 | 111 | 112 | @new_task 113 | async def restart_sessions_confirm(_, query): 114 | data = query.data.split() 115 | message = query.message 116 | if data[1] == "confirm": 117 | reply_to = message.reply_to_message 118 | restart_message = await send_message(reply_to, "Restarting Session(s)...") 119 | await delete_message(message) 120 | await TgClient.reload() 121 | add_handlers() 122 | TgClient.bot.add_handler( 123 | CallbackQueryHandler( 124 | restart_sessions_confirm, 125 | filters=regex("^sessionrestart") & CustomFilters.sudo, 126 | ) 127 | ) 128 | await edit_message(restart_message, "Session(s) Restarted Successfully!") 129 | else: 130 | await delete_message(message) 131 | 132 | 133 | TgClient.bot.add_handler( 134 | CallbackQueryHandler( 135 | restart_sessions_confirm, 136 | filters=regex("^sessionrestart") & CustomFilters.sudo, 137 | ) 138 | ) 139 | 140 | LOGGER.info("WZ Client(s) & Services Started!") 141 | bot_loop.run_forever() 142 | -------------------------------------------------------------------------------- /bot/modules/chat_permission.py: -------------------------------------------------------------------------------- 1 | from .. import user_data 2 | from ..helper.ext_utils.bot_utils import update_user_ldata, new_task 3 | from ..helper.ext_utils.db_handler import database 4 | from ..helper.telegram_helper.message_utils import send_message 5 | 6 | 7 | @new_task 8 | async def authorize(_, message): 9 | msg = message.text.split() 10 | thread_id = None 11 | if len(msg) > 1: 12 | if "|" in msg[1]: 13 | chat_id, thread_id = list(map(int, msg[1].split("|"))) 14 | else: 15 | chat_id = int(msg[1].strip()) 16 | elif reply_to := message.reply_to_message: 17 | chat_id = (reply_to.from_user or reply_to.sender_chat).id 18 | else: 19 | if message.is_topic_message: 20 | thread_id = message.message_thread_id 21 | chat_id = message.chat.id 22 | if chat_id in user_data and user_data[chat_id].get("AUTH"): 23 | if ( 24 | thread_id is not None 25 | and thread_id in user_data[chat_id].get("thread_ids", []) 26 | or thread_id is None 27 | ): 28 | msg = "Already Authorized!" 29 | else: 30 | if "thread_ids" in user_data[chat_id]: 31 | user_data[chat_id]["thread_ids"].append(thread_id) 32 | else: 33 | user_data[chat_id]["thread_ids"] = [thread_id] 34 | msg = "Authorized" 35 | else: 36 | update_user_ldata(chat_id, "AUTH", True) 37 | if thread_id is not None: 38 | update_user_ldata(chat_id, "thread_ids", [thread_id]) 39 | await database.update_user_data(chat_id) 40 | msg = "Authorized" 41 | await send_message(message, msg) 42 | 43 | 44 | @new_task 45 | async def unauthorize(_, message): 46 | msg = message.text.split() 47 | thread_id = None 48 | if len(msg) > 1: 49 | if "|" in msg[1]: 50 | chat_id, thread_id = list(map(int, msg[1].split("|"))) 51 | else: 52 | chat_id = int(msg[1].strip()) 53 | elif reply_to := message.reply_to_message: 54 | chat_id = (reply_to.from_user or reply_to.sender_chat).id 55 | else: 56 | if message.is_topic_message: 57 | thread_id = message.message_thread_id 58 | chat_id = message.chat.id 59 | if chat_id in user_data and user_data[chat_id].get("AUTH"): 60 | if thread_id is not None and thread_id in user_data[chat_id].get( 61 | "thread_ids", [] 62 | ): 63 | user_data[chat_id]["thread_ids"].remove(thread_id) 64 | else: 65 | update_user_ldata(chat_id, "AUTH", False) 66 | await database.update_user_data(chat_id) 67 | msg = "Unauthorized" 68 | else: 69 | msg = "Already Unauthorized!"
70 | await send_message(message, msg) 71 | 72 | 73 | @new_task 74 | async def add_sudo(_, message): 75 | id_ = "" 76 | msg = message.text.split() 77 | if len(msg) > 1: 78 | id_ = int(msg[1].strip()) 79 | elif reply_to := message.reply_to_message: 80 | id_ = (reply_to.from_user or reply_to.sender_chat).id 81 | if id_: 82 | if id_ in user_data and user_data[id_].get("SUDO"): 83 | msg = "Already Sudo!" 84 | else: 85 | update_user_ldata(id_, "SUDO", True) 86 | await database.update_user_data(id_) 87 | msg = "Promoted as Sudo" 88 | else: 89 | msg = "Give an ID or reply to a message of the user you want to promote." 90 | await send_message(message, msg) 91 | 92 | 93 | @new_task 94 | async def remove_sudo(_, message): 95 | id_ = "" 96 | msg = message.text.split() 97 | if len(msg) > 1: 98 | id_ = int(msg[1].strip()) 99 | elif reply_to := message.reply_to_message: 100 | id_ = (reply_to.from_user or reply_to.sender_chat).id 101 | if id_: 102 | if id_ in user_data and user_data[id_].get("SUDO"): 103 | update_user_ldata(id_, "SUDO", False) 104 | await database.update_user_data(id_) 105 | msg = "Demoted" 106 | else: 107 | msg = "Already not sudo! Sudo users added from config must be removed from the config." 108 | else: 109 | msg = "Give an ID or reply to a message of the user you want to remove from sudo." 110 | await send_message(message, msg) 111 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/qbit_status.py: -------------------------------------------------------------------------------- 1 | from asyncio import sleep, gather 2 | 3 | from .... import LOGGER, qb_torrents, qb_listener_lock 4 | from ....core.torrent_manager import TorrentManager 5 | from ...ext_utils.status_utils import ( 6 | MirrorStatus, 7 | EngineStatus, 8 | get_readable_file_size, 9 | get_readable_time, 10 | ) 11 | 12 | 13 | async def get_download(tag, old_info=None): 14 | try: 15 | res = (await TorrentManager.qbittorrent.torrents.info(tag=tag))[0] 16 | return res or old_info 17 | except Exception as e: 18 | LOGGER.error(f"{e}: Qbittorrent, while getting torrent info. Tag: {tag}") 19 | return old_info 20 | 21 | 22 | class QbittorrentStatus: 23 | def __init__(self, listener, seeding=False, queued=False): 24 | self.queued = queued 25 | self.seeding = seeding 26 | self.listener = listener 27 | self._info = None 28 | self.engine = EngineStatus().STATUS_QBIT 29 | 30 | async def update(self): 31 | self._info = await get_download(f"{self.listener.mid}", self._info) 32 | 33 | def progress(self): 34 | return f"{round(self._info.progress * 100, 2)}%" 35 | 36 | def processed_bytes(self): 37 | return get_readable_file_size(self._info.downloaded) 38 | 39 | def speed(self): 40 | return f"{get_readable_file_size(self._info.dlspeed)}/s" 41 | 42 | def name(self): 43 | if self._info.state in ["metaDL", "checkingResumeData"]: 44 | return f"[METADATA]{self.listener.name}" 45 | else: 46 | return self.listener.name 47 | 48 | def size(self): 49 | return get_readable_file_size(self._info.size) 50 | 51 | def eta(self): 52 | return get_readable_time(self._info.eta.total_seconds()) 53 | 54 | async def status(self): 55 | await self.update() 56 | state = self._info.state 57 | if state == "queuedDL" or self.queued: 58 | return MirrorStatus.STATUS_QUEUEDL 59 | elif state == "queuedUP": 60 | return MirrorStatus.STATUS_QUEUEUP 61 | elif state in ["stoppedDL", "stoppedUP"]: 62 | return MirrorStatus.STATUS_PAUSED 63 | elif state in ["checkingUP", "checkingDL"]: 64 | return MirrorStatus.STATUS_CHECK 65 | elif state in ["stalledUP", "uploading"] and self.seeding: 66 | return MirrorStatus.STATUS_SEED 67 | else: 68 | return MirrorStatus.STATUS_DOWNLOAD 69 | 70 | def seeders_num(self): 71 | return self._info.num_seeds 72 | 73 | def leechers_num(self): 74 | return self._info.num_leechs 75 | 76 | def uploaded_bytes(self): 77 | return get_readable_file_size(self._info.uploaded) 78 | 79 | def seed_speed(self): 80 | return f"{get_readable_file_size(self._info.upspeed)}/s" 81 | 82 | def ratio(self): 83 | return f"{round(self._info.ratio, 3)}" 84 | 85 | def seeding_time(self): 86 | return get_readable_time(int(self._info.seeding_time.total_seconds())) 87 | 88 | def task(self): 89 | return self 90 | 91 | def gid(self): 92 | return self.hash()[:12] 93 | 94 | def hash(self): 95 | return self._info.hash 96 | 97 | async def cancel_task(self): 98 | self.listener.is_cancelled = True 99 | await self.update() 100 | await TorrentManager.qbittorrent.torrents.stop([self._info.hash]) 101 | if not self.seeding: 102 | if self.queued: 103 | LOGGER.info(f"Cancelling QueueDL: {self.name()}") 104 | msg = "Task has been removed from queue/download" 105 | else: 106 | LOGGER.info(f"Cancelling Download: {self._info.name}") 107 | msg = "Stopped by user!"
108 | await sleep(0.3) 109 | await gather( 110 | self.listener.on_download_error(msg), 111 | TorrentManager.qbittorrent.torrents.delete([self._info.hash], True), 112 | TorrentManager.qbittorrent.torrents.delete_tags( 113 | tags=[self._info.tags[0]] 114 | ), 115 | ) 116 | async with qb_listener_lock: 117 | if self._info.tags[0] in qb_torrents: 118 | del qb_torrents[self._info.tags[0]] 119 | -------------------------------------------------------------------------------- /update.py: -------------------------------------------------------------------------------- 1 | from sys import exit 2 | from importlib import import_module 3 | from logging import ( 4 | FileHandler, 5 | StreamHandler, 6 | INFO, 7 | basicConfig, 8 | error as log_error, 9 | info as log_info, 10 | getLogger, 11 | ERROR, 12 | ) 13 | from os import path, remove, environ 14 | from pymongo.mongo_client import MongoClient 15 | from pymongo.server_api import ServerApi 16 | from subprocess import run as srun, call as scall 17 | 18 | getLogger("pymongo").setLevel(ERROR) 19 | 20 | var_list = [ 21 | "BOT_TOKEN", 22 | "TELEGRAM_API", 23 | "TELEGRAM_HASH", 24 | "OWNER_ID", 25 | "DATABASE_URL", 26 | "BASE_URL", 27 | "UPSTREAM_REPO", 28 | "UPSTREAM_BRANCH", 29 | "UPDATE_PKGS", 30 | ] 31 | 32 | if path.exists("log.txt"): 33 | with open("log.txt", "r+") as f: 34 | f.truncate(0) 35 | 36 | if path.exists("rlog.txt"): 37 | remove("rlog.txt") 38 | 39 | basicConfig( 40 | format="[%(asctime)s] [%(levelname)s] - %(message)s", 41 | datefmt="%d-%b-%y %I:%M:%S %p", 42 | handlers=[FileHandler("log.txt"), StreamHandler()], 43 | level=INFO, 44 | ) 45 | try: 46 | settings = import_module("config") 47 | config_file = { 48 | key: value.strip() if isinstance(value, str) else value 49 | for key, value in vars(settings).items() 50 | if not key.startswith("__") 51 | } 52 | except ModuleNotFoundError: 53 | log_info("config.py file not added! Checking ENVs...") 54 | config_file = {} 55 | 56 | env_updates = { 57 | key: value.strip() if isinstance(value, str) else value 58 | for key, value in environ.items() 59 | if key in var_list 60 | } 61 | if env_updates: 62 | log_info("Config data is updated with ENVs!") 63 | config_file.update(env_updates) 64 | 65 | BOT_TOKEN = config_file.get("BOT_TOKEN", "") 66 | if not BOT_TOKEN: 67 | log_error("BOT_TOKEN variable is missing! Exiting now") 68 | exit(1) 69 | 70 | BOT_ID = BOT_TOKEN.split(":", 1)[0] 71 | 72 | if DATABASE_URL := config_file.get("DATABASE_URL", "").strip(): 73 | try: 74 | conn = MongoClient(DATABASE_URL, server_api=ServerApi("1")) 75 | db = conn.wzmlx 76 | old_config = db.settings.deployConfig.find_one({"_id": BOT_ID}, {"_id": 0}) 77 | config_dict = db.settings.config.find_one({"_id": BOT_ID}) 78 | if ( 79 | old_config is not None and old_config == config_file or old_config is None 80 | ) and config_dict is not None: 81 | config_file["UPSTREAM_REPO"] = config_dict["UPSTREAM_REPO"] 82 | config_file["UPSTREAM_BRANCH"] = config_dict.get("UPSTREAM_BRANCH", "wzv3") 83 | config_file["UPDATE_PKGS"] = config_dict.get("UPDATE_PKGS", "True") 84 | conn.close() 85 | except Exception as e: 86 | log_error(f"Database ERROR: {e}") 87 | 88 | UPSTREAM_REPO = config_file.get("UPSTREAM_REPO", "").strip() 89 | UPSTREAM_BRANCH = config_file.get("UPSTREAM_BRANCH", "").strip() or "wzv3" 90 | 91 | if UPSTREAM_REPO: 92 | if path.exists(".git"): 93 | srun(["rm", "-rf", ".git"]) 94 | 95 | update = srun( 96 | [ 97 | f"git init -q \ 98 | && git config --global user.email 105407900+SilentDemonSD@users.noreply.github.com \ 99 | && git config --global user.name SilentDemonSD \ 100 | && git add . \ 101 | && git commit -sm update -q \ 102 | && git remote add origin {UPSTREAM_REPO} \ 103 | && git fetch origin -q \ 104 | && git reset --hard origin/{UPSTREAM_BRANCH} -q" 105 | ], 106 | shell=True, 107 | ) 108 | 109 | repo = UPSTREAM_REPO.split("/") 110 | UPSTREAM_REPO = f"https://github.com/{repo[-2]}/{repo[-1]}" 111 | if update.returncode == 0: 112 | log_info("Successfully updated with the latest updates!") 113 | else: 114 | log_error("Something went wrong! Recheck your details or ask for support!") 115 | log_info(f"UPSTREAM_REPO: {UPSTREAM_REPO} | UPSTREAM_BRANCH: {UPSTREAM_BRANCH}") 116 | 117 | 118 | UPDATE_PKGS = config_file.get("UPDATE_PKGS", "True") 119 | if (UPDATE_PKGS.lower() == "true" if isinstance(UPDATE_PKGS, str) else UPDATE_PKGS): 120 | scall("uv pip install -U -r requirements.txt", shell=True) 121 | log_info("Successfully updated all the packages!") 122 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/jdownloader_status.py: -------------------------------------------------------------------------------- 1 | from time import time 2 | 3 | from .... 
import LOGGER, jd_listener_lock, jd_downloads 4 | from ....core.jdownloader_booter import jdownloader 5 | from ...ext_utils.status_utils import ( 6 | MirrorStatus, 7 | EngineStatus, 8 | get_readable_file_size, 9 | get_readable_time, 10 | ) 11 | 12 | 13 | def _get_combined_info(result, old_info): 14 | name = result[0].get("name") 15 | hosts = result[0].get("hosts") 16 | bytesLoaded = 0 17 | bytesTotal = 0 18 | speed = 0 19 | status = "" 20 | for res in result: 21 | if res.get("enabled"): 22 | st = res.get("status", "") 23 | if st and st.lower() != "finished": 24 | status = st 25 | bytesLoaded += res.get("bytesLoaded", 0) 26 | bytesTotal += res.get("bytesTotal", 0) 27 | speed += res.get("speed", 0) 28 | try: 29 | if not speed: 30 | speed = (bytesLoaded - old_info.get("bytesLoaded", 0)) / ( 31 | time() - old_info.get("last_update", 0) 32 | ) 33 | eta = (bytesTotal - bytesLoaded) / speed 34 | except Exception: 35 | eta = 0 36 | return { 37 | "name": name, 38 | "status": status, 39 | "speed": speed, 40 | "eta": eta, 41 | "hosts": hosts, 42 | "bytesLoaded": bytesLoaded, 43 | "bytesTotal": bytesTotal, 44 | "last_update": time(), 45 | } 46 | 47 | 48 | async def get_download(gid, old_info): 49 | try: 50 | result = await jdownloader.device.downloads.query_packages( 51 | [ 52 | { 53 | "bytesLoaded": True, 54 | "bytesTotal": True, 55 | "enabled": True, 56 | "packageUUIDs": jd_downloads[gid]["ids"], 57 | "maxResults": -1, 58 | "running": True, 59 | "speed": True, 60 | "eta": True, 61 | "status": True, 62 | "hosts": True, 63 | } 64 | ] 65 | ) 66 | return _get_combined_info(result, old_info) if len(result) > 1 else result[0] 67 | except Exception: 68 | return old_info 69 | 70 | 71 | class JDownloaderStatus: 72 | def __init__(self, listener, gid): 73 | self.listener = listener 74 | self._gid = gid 75 | self._info = {} 76 | self.engine = EngineStatus().STATUS_JD 77 | 78 | async def _update(self): 79 | self._info = await get_download(self._gid, self._info) 80 | 81 | def progress(self): 82 | try: 83 | return f"{round((self._info.get('bytesLoaded', 0) / self._info.get('bytesTotal', 0)) * 100, 2)}%" 84 | except ZeroDivisionError: 85 | return "0%" 86 | 87 | def processed_bytes(self): 88 | return get_readable_file_size(self._info.get("bytesLoaded", 0)) 89 | 90 | def speed(self): 91 | return f"{get_readable_file_size(self._info.get('speed', 0))}/s" 92 | 93 | def name(self): 94 | return self._info.get("name") or self.listener.name 95 | 96 | def size(self): 97 | return get_readable_file_size(self._info.get("bytesTotal", 0)) 98 | 99 | def eta(self): 100 | return get_readable_time(eta) if (eta := self._info.get("eta", False)) else "-" 101 | 102 | async def status(self): 103 | await self._update() 104 | state = self._info.get("status", "jdlimit").capitalize() 105 | if len(state) == 0: 106 | if self._info.get("bytesLoaded", 0) == 0: 107 | return MirrorStatus.STATUS_QUEUEDL 108 | else: 109 | return MirrorStatus.STATUS_DOWNLOAD 110 | return MirrorStatus.STATUS_QUEUEDL if state == "Jdlimit" else state 111 | 112 | def task(self): 113 | return self 114 | 115 | def gid(self): 116 | return self._gid 117 | 118 | async def cancel_task(self): 119 | self.listener.is_cancelled = True 120 | LOGGER.info(f"Cancelling Download: {self.name()}") 121 | await jdownloader.device.downloads.remove_links( 122 | package_ids=jd_downloads[self._gid]["ids"] 123 | ) 124 | async with jd_listener_lock: 125 | del jd_downloads[self._gid] 126 | await self.listener.on_download_error("Cancelled by user!") 127 | 
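# --- Interface note (illustrative sketch; not part of the original file) ---
# Status classes like JDownloaderStatus above share a duck-typed contract with
# the other *_status modules (name/size/progress/speed/eta/gid/status/
# cancel_task), so the generic status updater can treat them uniformly.
# A hypothetical caller might look like:
#
#     status = JDownloaderStatus(listener, gid)
#     state = await status.status()   # refreshes self._info via get_download()
#     summary = f"{status.name()} | {status.progress()} @ {status.speed()}"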
-------------------------------------------------------------------------------- /bot/helper/telegram_helper/tg_utils.py: -------------------------------------------------------------------------------- 1 | from time import time 2 | from uuid import uuid4 3 | 4 | from pyrogram.enums import ChatAction 5 | from pyrogram.errors import ChannelInvalid, PeerIdInvalid, RPCError, UserNotParticipant 6 | 7 | from ..ext_utils.links_utils import encode_slink 8 | 9 | from ... import LOGGER, user_data 10 | from ...core.config_manager import Config 11 | from ...core.tg_client import TgClient 12 | from ..ext_utils.shortener_utils import short_url 13 | from ..ext_utils.status_utils import get_readable_time 14 | from .button_build import ButtonMaker 15 | 16 | 17 | async def chat_info(channel_id): 18 | channel_id = str(channel_id).strip() 19 | if channel_id.startswith("-100"): 20 | channel_id = int(channel_id) 21 | elif channel_id.startswith("@"): 22 | channel_id = channel_id.replace("@", "") 23 | else: 24 | return None 25 | try: 26 | return await TgClient.bot.get_chat(channel_id) 27 | except (PeerIdInvalid, ChannelInvalid) as e: 28 | LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") 29 | return None 30 | 31 | 32 | async def forcesub(message, ids, button=None): 33 | join_button = {} 34 | _msg = "" 35 | for channel_id in ids.split(): 36 | chat = await chat_info(channel_id) 37 | try: 38 | await chat.get_member(message.from_user.id) 39 | except UserNotParticipant: 40 | if username := chat.username: 41 | invite_link = f"https://t.me/{username}" 42 | else: 43 | invite_link = chat.invite_link 44 | join_button[chat.title] = invite_link 45 | except RPCError as e: 46 | LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") 47 | except Exception as e: 48 | LOGGER.error(f"{e} for {channel_id}") 49 | if join_button: 50 | if button is None: 51 | button = ButtonMaker() 52 | _msg = "┠ Channel(s) pending to be joined, Join Now!" 
53 | for key, value in join_button.items(): 54 | button.url_button(f"Join {key}", value, "footer") 55 | return _msg, button 56 | 57 | 58 | async def user_info(user_id): 59 | try: 60 | return await TgClient.bot.get_users(user_id) 61 | except Exception: 62 | return "" 63 | 64 | 65 | async def check_botpm(message, button=None): 66 | try: 67 | await TgClient.bot.send_chat_action(message.from_user.id, ChatAction.TYPING) 68 | return None, button 69 | except Exception: 70 | if button is None: 71 | button = ButtonMaker() 72 | _msg = "┠ Bot isn't started in PM or Inbox (Private)" 73 | button.url_button( 74 | "Start Bot Now", f"https://t.me/{TgClient.BNAME}?start=start", "header" 75 | ) 76 | return _msg, button 77 | 78 | 79 | async def verify_token(user_id, button=None): 80 | if not Config.VERIFY_TIMEOUT or bool( 81 | user_id == Config.OWNER_ID 82 | or user_id in user_data 83 | and user_data[user_id].get("is_sudo") 84 | ): 85 | return None, button 86 | user_data.setdefault(user_id, {}) 87 | data = user_data[user_id] 88 | expire = data.get("VERIFY_TIME") 89 | if Config.LOGIN_PASS and data.get("VERIFY_TOKEN", "") == Config.LOGIN_PASS: 90 | return None, button 91 | isExpired = ( 92 | expire is None 93 | or expire is not None 94 | and (time() - expire) > Config.VERIFY_TIMEOUT 95 | ) 96 | if isExpired: 97 | token = ( 98 | data["VERIFY_TOKEN"] 99 | if expire is None and "VERIFY_TOKEN" in data 100 | else str(uuid4()) 101 | ) 102 | if expire is not None: 103 | del data["VERIFY_TIME"] 104 | data["VERIFY_TOKEN"] = token 105 | user_data[user_id].update(data) 106 | if button is None: 107 | button = ButtonMaker() 108 | encrypt_url = encode_slink(f"{token}&&{user_id}") 109 | button.url_button( 110 | "Verify Access Token", 111 | await short_url(f"https://t.me/{TgClient.BNAME}?start={encrypt_url}"), 112 | ) 113 | return ( 114 | f"┠ Your access token has expired. Kindly validate a new access token to start using the bot again.\n┃\n┖ Validity : {get_readable_time(Config.VERIFY_TIMEOUT)}", 115 | button, 116 | ) 117 | return None, button 118 | -------------------------------------------------------------------------------- /bot/modules/nzb_search.py: -------------------------------------------------------------------------------- 1 | from xml.etree import ElementTree as ET 2 | from aiohttp import ClientSession 3 | 4 | from .. import LOGGER 5 | from ..core.config_manager import Config 6 | from ..helper.ext_utils.bot_utils import new_task 7 | from ..helper.ext_utils.status_utils import get_readable_file_size 8 | from ..helper.ext_utils.telegraph_helper import telegraph 9 | from ..helper.telegram_helper.button_build import ButtonMaker 10 | from ..helper.telegram_helper.message_utils import edit_message, send_message 11 | 12 | 13 | @new_task 14 | async def hydra_search(_, message): 15 | key = message.text.split() 16 | if len(key) == 1: 17 | await send_message( 18 | message, 19 | "Please provide a search query. Example: `/nzbsearch movie title`.",
20 | ) 21 | return 22 | 23 | query = " ".join(key[1:]).strip() 24 | message = await send_message(message, f"Searching for '{query}'...") 25 | try: 26 | items = await search_nzbhydra(query) 27 | if not items: 28 | await edit_message(message, "No results found.") 29 | LOGGER.info(f"No results found for search query: {query}") 30 | return 31 | 32 | page_url = await create_telegraph_page(query, items) 33 | buttons = ButtonMaker() 34 | buttons.url_button("Results", page_url) 35 | button = buttons.build_menu() 36 | await edit_message( 37 | message, 38 | f"Search results for '{query}' are available here", 39 | button, 40 | ) 41 | except Exception as e: 42 | LOGGER.error(f"Error in hydra_search: {e!s}") 43 | await edit_message(message, "Something went wrong.") 44 | 45 | 46 | async def search_nzbhydra(query, limit=50): 47 | search_url = f"{Config.HYDRA_IP}/api" 48 | params = { 49 | "apikey": Config.HYDRA_API_KEY, 50 | "t": "search", 51 | "q": query, 52 | "limit": limit, 53 | } 54 | 55 | headers = { 56 | "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3", 57 | } 58 | 59 | async with ClientSession() as session: 60 | try: 61 | async with session.get( 62 | search_url, 63 | params=params, 64 | headers=headers, 65 | ) as response: 66 | if response.status == 200: 67 | content = await response.text() 68 | root = ET.fromstring(content) 69 | return root.findall(".//item") 70 | 71 | LOGGER.error( 72 | f"Failed to search NZBHydra. Status Code: {response.status}", 73 | ) 74 | LOGGER.error(f"Response Text: {await response.text()}") 75 | return None 76 | except ET.ParseError: 77 | LOGGER.error("Failed to parse the XML response.") 78 | return None 79 | except Exception as e: 80 | LOGGER.error(f"Error in search_nzbhydra: {e!s}") 81 | return None 82 | 83 | 84 | async def create_telegraph_page(query, items): 85 | content = "Search Results:<br><br>" 86 | sorted_items = sorted( 87 | [ 88 | ( 89 | int(item.find("size").text) if item.find("size") is not None else 0, 90 | item, 91 | ) 92 | for item in items[:100] 93 | ], 94 | reverse=True, 95 | key=lambda x: x[0], 96 | ) 97 | 98 | for idx, (size_bytes, item) in enumerate(sorted_items, 1): 99 | title = ( 100 | item.find("title").text 101 | if item.find("title") is not None 102 | else "No Title Available" 103 | ) 104 | download_url = ( 105 | item.find("link").text 106 | if item.find("link") is not None 107 | else "No Link Available" 108 | ) 109 | size = get_readable_file_size(size_bytes) 110 | 111 | content += ( 112 | f"{idx}. {title}<br>" 113 | f"<a href='{download_url}'>Download URL</a> | <a href='https://t.me/share/url?url={download_url}'>Share Download URL</a><br>" 114 | f"Size: {size}<br>" 115 | f"━━━━━━━━━━━━━━━━━━━━━━<br><br>" 116 | ) 117 | 118 | response = await telegraph.create_page( 119 | title=f"Search Results for '{query}'", 120 | content=content, 121 | ) 122 | LOGGER.info(f"Telegraph page created for search: {query}") 123 | return f"https://telegra.ph/{response['path']}" 124 | -------------------------------------------------------------------------------- /bot/core/tg_client.py: -------------------------------------------------------------------------------- 1 | from pyrogram import Client, enums 2 | from asyncio import Lock, gather 3 | from inspect import signature 4 | 5 | from .. import LOGGER 6 | from .config_manager import Config 7 | 8 | 9 | class TgClient: 10 | _lock = Lock() 11 | _hlock = Lock() 12 | 13 | bot = None 14 | user = None 15 | helper_bots = {} 16 | helper_loads = {} 17 | 18 | BNAME = "" 19 | ID = 0 20 | IS_PREMIUM_USER = False 21 | MAX_SPLIT_SIZE = 2097152000 22 | 23 | @classmethod 24 | def wztgClient(cls, *args, **kwargs): 25 | kwargs["api_id"] = Config.TELEGRAM_API 26 | kwargs["api_hash"] = Config.TELEGRAM_HASH 27 | kwargs["proxy"] = Config.TG_PROXY 28 | kwargs["parse_mode"] = enums.ParseMode.HTML 29 | kwargs["in_memory"] = True 30 | for param, value in { 31 | "max_concurrent_transmissions": 100, 32 | "skip_updates": False, 33 | }.items(): 34 | if param in signature(Client.__init__).parameters: 35 | kwargs[param] = value 36 | return Client(*args, **kwargs) 37 | 38 | @classmethod 39 | async def start_hclient(cls, no, b_token): 40 | try: 41 | hbot = await cls.wztgClient( 42 | f"WZ-HBot{no}", 43 | bot_token=b_token, 44 | no_updates=True, 45 | ).start() 46 | LOGGER.info(f"Helper Bot [@{hbot.me.username}] Started!") 47 | cls.helper_bots[no], cls.helper_loads[no] = hbot, 0 48 | except Exception as e: 49 | LOGGER.error(f"Failed to start helper bot {no} from HELPER_TOKENS. {e}") 50 | cls.helper_bots.pop(no, None) 51 | 52 | @classmethod 53 | async def start_helper_bots(cls): 54 | if not Config.HELPER_TOKENS: 55 | return 56 | LOGGER.info("Generating helper client from HELPER_TOKENS") 57 | async with cls._hlock: 58 | await gather( 59 | *( 60 | cls.start_hclient(no, b_token) 61 | for no, b_token in enumerate(Config.HELPER_TOKENS.split(), start=1) 62 | ) 63 | ) 64 | 65 | @classmethod 66 | async def start_bot(cls): 67 | LOGGER.info("Generating client from BOT_TOKEN") 68 | cls.ID = Config.BOT_TOKEN.split(":", 1)[0] 69 | cls.bot = cls.wztgClient( 70 | f"WZ-Bot{cls.ID}", 71 | bot_token=Config.BOT_TOKEN, 72 | workdir="/usr/src/app", 73 | ) 74 | await cls.bot.start() 75 | cls.BNAME = cls.bot.me.username 76 | cls.ID = Config.BOT_TOKEN.split(":", 1)[0] 77 | LOGGER.info(f"WZ Bot : [@{cls.BNAME}] Started!") 78 | 79 | @classmethod 80 | async def start_user(cls): 81 | if Config.USER_SESSION_STRING: 82 | LOGGER.info("Generating client from USER_SESSION_STRING") 83 | try: 84 | cls.user = cls.wztgClient( 85 | "WZ-User", 86 | session_string=Config.USER_SESSION_STRING, 87 | sleep_threshold=60, 88 | no_updates=True, 89 | ) 90 | await cls.user.start() 91 | cls.IS_PREMIUM_USER = cls.user.me.is_premium 92 | if cls.IS_PREMIUM_USER: 93 | cls.MAX_SPLIT_SIZE = 4194304000 94 | uname = cls.user.me.username or cls.user.me.first_name 95 | LOGGER.info(f"WZ User : [{uname}] Started!") 96 | except Exception as e: 97 | LOGGER.error(f"Failed to start client from USER_SESSION_STRING. {e}")
{e}") 98 | cls.IS_PREMIUM_USER = False 99 | cls.user = None 100 | 101 | @classmethod 102 | async def stop(cls): 103 | async with cls._lock: 104 | if cls.bot: 105 | await cls.bot.stop() 106 | cls.bot = None 107 | if cls.user: 108 | await cls.user.stop() 109 | cls.user = None 110 | if cls.helper_bots: 111 | await gather(*[h_bot.stop() for h_bot in cls.helper_bots.values()]) 112 | cls.helper_bots = {} 113 | LOGGER.info("All Client(s) stopped") 114 | 115 | @classmethod 116 | async def reload(cls): 117 | async with cls._lock: 118 | await cls.bot.restart() 119 | if cls.user: 120 | await cls.user.restart() 121 | if cls.helper_bots: 122 | await gather(*[h_bot.restart() for h_bot in cls.helper_bots.values()]) 123 | LOGGER.info("All Client(s) restarted") 124 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/download_utils/aria2_download.py: -------------------------------------------------------------------------------- 1 | from aiofiles.os import remove, path as aiopath 2 | from aiofiles import open as aiopen 3 | from base64 import b64encode 4 | from aiohttp.client_exceptions import ClientError 5 | from asyncio import TimeoutError 6 | 7 | from .... import task_dict_lock, task_dict, LOGGER 8 | from ....core.config_manager import Config 9 | from ....core.torrent_manager import TorrentManager, is_metadata, aria2_name 10 | from ...ext_utils.bot_utils import bt_selection_buttons 11 | from ...ext_utils.task_manager import check_running_tasks 12 | from ...mirror_leech_utils.status_utils.aria2_status import Aria2Status 13 | from ...telegram_helper.message_utils import send_status_message, send_message 14 | 15 | 16 | async def add_aria2_download(listener, dpath, header, ratio, seed_time): 17 | if Config.DISABLE_TORRENTS and ( 18 | listener.link.startswith("magnet:") or listener.link.endswith(".torrent") 19 | ): 20 | await listener.on_download_error("Torrent and magnet downloads are disabled.") 21 | return 22 | a2c_opt = {"dir": dpath} 23 | if listener.name: 24 | a2c_opt["out"] = listener.name 25 | if header: 26 | a2c_opt["header"] = header 27 | if ratio: 28 | a2c_opt["seed-ratio"] = ratio 29 | if seed_time: 30 | a2c_opt["seed-time"] = seed_time 31 | if TORRENT_TIMEOUT := Config.TORRENT_TIMEOUT: 32 | a2c_opt["bt-stop-timeout"] = f"{TORRENT_TIMEOUT}" 33 | 34 | add_to_queue, event = await check_running_tasks(listener) 35 | if add_to_queue: 36 | if listener.link.startswith("magnet:"): 37 | a2c_opt["pause-metadata"] = "true" 38 | else: 39 | a2c_opt["pause"] = "true" 40 | 41 | try: 42 | if await aiopath.exists(listener.link): 43 | async with aiopen(listener.link, "rb") as tf: 44 | torrent = await tf.read() 45 | encoded = b64encode(torrent).decode() 46 | params = [encoded, [], a2c_opt] 47 | gid = await TorrentManager.aria2.jsonrpc("addTorrent", params) 48 | """gid = await TorrentManager.aria2.add_torrent(path=listener.link, options=a2c_opt)""" 49 | else: 50 | gid = await TorrentManager.aria2.addUri( 51 | uris=[listener.link], options=a2c_opt 52 | ) 53 | except (TimeoutError, ClientError, Exception) as e: 54 | LOGGER.info(f"Aria2c Download Error: {e}") 55 | await listener.on_download_error(f"{e}") 56 | return 57 | download = await TorrentManager.aria2.tellStatus(gid) 58 | if download.get("errorMessage"): 59 | error = str(download["errorMessage"]).replace("<", " ").replace(">", " ") 60 | LOGGER.info(f"Aria2c Download Error: {error}") 61 | await TorrentManager.aria2_remove(download) 62 | await listener.on_download_error(error) 63 | return 64 | if await 
aiopath.exists(listener.link): 65 | await remove(listener.link) 66 | 67 | name = aria2_name(download) 68 | async with task_dict_lock: 69 | task_dict[listener.mid] = Aria2Status(listener, gid, queued=add_to_queue) 70 | if add_to_queue: 71 | LOGGER.info(f"Added to Queue/Download: {name}. Gid: {gid}") 72 | if ( 73 | not listener.select or "bittorrent" not in download 74 | ) and listener.multi <= 1: 75 | await send_status_message(listener.message) 76 | else: 77 | LOGGER.info(f"Aria2Download started: {name}. Gid: {gid}") 78 | 79 | await listener.on_download_start() 80 | 81 | if ( 82 | not add_to_queue 83 | and (not listener.select or not Config.BASE_URL) 84 | and listener.multi <= 1 85 | ): 86 | await send_status_message(listener.message) 87 | elif listener.select and "bittorrent" in download and not is_metadata(download): 88 | if not add_to_queue: 89 | await TorrentManager.aria2.forcePause(gid) 90 | SBUTTONS = bt_selection_buttons(gid) 91 | msg = "Your download paused. Choose files then press Done Selecting button to start downloading." 92 | await send_message(listener.message, msg, SBUTTONS) 93 | 94 | if add_to_queue: 95 | await event.wait() 96 | if listener.is_cancelled: 97 | return 98 | async with task_dict_lock: 99 | task = task_dict[listener.mid] 100 | task.queued = False 101 | await task.update() 102 | new_gid = task.gid() 103 | 104 | await TorrentManager.aria2.unpause(new_gid) 105 | LOGGER.info(f"Start Queued Download from Aria2c: {name}. Gid: {new_gid}") 106 | -------------------------------------------------------------------------------- /bot/helper/listeners/nzb_listener.py: -------------------------------------------------------------------------------- 1 | from asyncio import sleep, gather 2 | 3 | from ... import ( 4 | intervals, 5 | sabnzbd_client, 6 | nzb_jobs, 7 | nzb_listener_lock, 8 | LOGGER, 9 | ) 10 | from ..ext_utils.bot_utils import new_task 11 | from ..ext_utils.status_utils import get_task_by_gid, get_raw_file_size 12 | from ..ext_utils.task_manager import stop_duplicate_check, limit_checker 13 | 14 | 15 | async def _remove_job(nzo_id, mid): 16 | res1, _ = await gather( 17 | sabnzbd_client.delete_history(nzo_id, delete_files=True), 18 | sabnzbd_client.delete_category(f"{mid}"), 19 | ) 20 | if not res1: 21 | await sabnzbd_client.delete_job(nzo_id, True) 22 | async with nzb_listener_lock: 23 | if nzo_id in nzb_jobs: 24 | del nzb_jobs[nzo_id] 25 | 26 | 27 | @new_task 28 | async def _on_download_error(err, nzo_id, button=None, is_limit=False): 29 | if task := await get_task_by_gid(nzo_id): 30 | LOGGER.info(f"Cancelling Download: {task.name()}") 31 | await gather( 32 | task.listener.on_download_error(err, button, is_limit), 33 | _remove_job(nzo_id, task.listener.mid), 34 | ) 35 | 36 | 37 | @new_task 38 | async def _stop_duplicate(nzo_id): 39 | if task := await get_task_by_gid(nzo_id): 40 | await task.update() 41 | task.listener.name = task.name() 42 | msg, button = await stop_duplicate_check(task.listener) 43 | if msg: 44 | await _on_download_error(msg, nzo_id, button) 45 | 46 | 47 | @new_task 48 | async def _size_check(nzo_id): 49 | if task := await get_task_by_gid(nzo_id): 50 | await task.update() 51 | task.listener.size = get_raw_file_size(task.size()) 52 | mmsg = await limit_checker(task.listener) 53 | if mmsg: 54 | await _on_download_error(mmsg, nzo_id, is_limit=True) 55 | 56 | 57 | @new_task 58 | async def _on_download_complete(nzo_id): 59 | if task := await get_task_by_gid(nzo_id): 60 | await task.listener.on_download_complete() 61 | if intervals["stopAll"]: 62 | 
return 63 | await _remove_job(nzo_id, task.listener.mid) 64 | 65 | 66 | @new_task 67 | async def _nzb_listener(): 68 | while not intervals["stopAll"]: 69 | async with nzb_listener_lock: 70 | try: 71 | jobs = (await sabnzbd_client.get_history())["history"]["slots"] 72 | downloads = (await sabnzbd_client.get_downloads())["queue"]["slots"] 73 | if len(nzb_jobs) == 0: 74 | intervals["nzb"] = "" 75 | break 76 | for job in jobs: 77 | nzo_id = job["nzo_id"] 78 | if nzo_id not in nzb_jobs: 79 | continue 80 | if job["status"] == "Completed": 81 | if not nzb_jobs[nzo_id]["uploaded"]: 82 | nzb_jobs[nzo_id]["uploaded"] = True 83 | await _on_download_complete(nzo_id) 84 | nzb_jobs[nzo_id]["status"] = "Completed" 85 | elif job["status"] == "Failed": 86 | await _on_download_error(job["fail_message"], nzo_id) 87 | for dl in downloads: 88 | nzo_id = dl["nzo_id"] 89 | if nzo_id not in nzb_jobs: 90 | continue 91 | if dl["labels"] and dl["labels"][0] == "ALTERNATIVE": 92 | await _on_download_error("Duplicated Job!", nzo_id) 93 | continue 94 | if dl["status"] == "Downloading" and not dl["filename"].startswith( 95 | "Trying" 96 | ): 97 | if not nzb_jobs[nzo_id]["stop_dup_check"]: 98 | nzb_jobs[nzo_id]["stop_dup_check"] = True 99 | await _stop_duplicate(nzo_id) 100 | if not nzb_jobs[nzo_id]["size_check"]: 101 | nzb_jobs[nzo_id]["size_check"] = True 102 | await _size_check(nzo_id) 103 | except Exception as e: 104 | LOGGER.error(str(e)) 105 | await sleep(3) 106 | 107 | 108 | async def on_download_start(nzo_id): 109 | async with nzb_listener_lock: 110 | nzb_jobs[nzo_id] = { 111 | "uploaded": False, 112 | "stop_dup_check": False, 113 | "size_check": False, 114 | "status": "Downloading", 115 | } 116 | if not intervals["nzb"]: 117 | intervals["nzb"] = await _nzb_listener() 118 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/download_utils/direct_link_generator_license.md: -------------------------------------------------------------------------------- 1 | RAPHIELSCAPE PUBLIC LICENSE 2 | Version 1.c, June 2019 3 | 4 | Copyright (C) 2019 Raphielscape LLC. 5 | Copyright (C) 2019 Devscapes Open Source Holding GmbH. 6 | 7 | Everyone is permitted to copy and distribute verbatim or modified 8 | copies of this license document, and changing it is allowed as long 9 | as the name is changed. 10 | 11 | RAPHIELSCAPE PUBLIC LICENSE 12 | A-1. DEFINITIONS 13 | 14 | 0. “This License” refers to version 1.c of the Raphielscape Public License. 15 | 16 | 1. “Copyright” also means copyright-like laws that apply to other kinds of works. 17 | 18 | 2. “The Work" refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”. 19 | “Licensees” and “recipients” may be individuals or organizations. 20 | 21 | 3. To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, 22 | other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work 23 | or a work “based on” the earlier work. 24 | 25 | 4. Source Form. The “source form” for a work means the preferred form of the work for making modifications to it. 26 | “Object code” means any non-source form of a work. 27 | 28 | The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and 29 | (for an executable work) run the object code and to modify the work, including scripts to control those activities. 
30 | 31 | The Corresponding Source need not include anything that users can regenerate automatically from other parts of the 32 | Corresponding Source. 33 | The Corresponding Source for a work in source code form is that same work. 34 | 35 | 5. "The author" refers to "author" of the code, which is the one that made the particular code which exists inside of 36 | the Corresponding Source. 37 | 38 | 6. "Owner" refers to any parties which is made the early form of the Corresponding Source. 39 | 40 | A-2. TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 41 | 42 | 0. You must give any other recipients of the Work or Derivative Works a copy of this License; and 43 | 44 | 1. You must cause any modified files to carry prominent notices stating that You changed the files; and 45 | 46 | 2. You must retain, in the Source form of any Derivative Works that You distribute, 47 | this license, all copyright, patent, trademark, authorships and attribution notices 48 | from the Source form of the Work; and 49 | 50 | 3. Respecting the author and owner of works that are distributed in any way. 51 | 52 | You may add Your own copyright statement to Your modifications and may provide 53 | additional or different license terms and conditions for use, reproduction, 54 | or distribution of Your modifications, or for any such Derivative Works as a whole, 55 | provided Your use, reproduction, and distribution of the Work otherwise complies 56 | with the conditions stated in this License. 57 | 58 | B. DISCLAIMER OF WARRANTY 59 | 60 | THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR 61 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 62 | FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS 63 | BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 64 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, 65 | OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 66 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT 67 | OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 68 | 69 | C. REVISED VERSION OF THIS LICENSE 70 | 71 | The Devscapes Open Source Holding GmbH. may publish revised and/or new versions of the 72 | Raphielscape Public License from time to time. Such new versions will be similar in spirit 73 | to the present version, but may differ in detail to address new problems or concerns. 74 | 75 | Each version is given a distinguishing version number. If the Program specifies that a 76 | certain numbered version of the Raphielscape Public License "or any later version" applies to it, 77 | you have the option of following the terms and conditions either of that numbered version or of 78 | any later version published by the Devscapes Open Source Holding GmbH. If the Program does not specify a 79 | version number of the Raphielscape Public License, you may choose any version ever published 80 | by the Devscapes Open Source Holding GmbH. 81 | 82 | END OF LICENSE 83 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/aria2_status.py: -------------------------------------------------------------------------------- 1 | from time import time 2 | 3 | from .... 
import LOGGER 4 | from ....core.torrent_manager import TorrentManager, aria2_name 5 | from ...ext_utils.status_utils import ( 6 | EngineStatus, 7 | MirrorStatus, 8 | get_readable_time, 9 | get_readable_file_size, 10 | ) 11 | 12 | 13 | async def get_download(gid, old_info=None): 14 | try: 15 | res = await TorrentManager.aria2.tellStatus(gid) 16 | return res or old_info 17 | except Exception as e: 18 | LOGGER.error(f"{e}: Aria2c, Error while getting torrent info") 19 | return old_info 20 | 21 | 22 | class Aria2Status: 23 | def __init__(self, listener, gid, seeding=False, queued=False): 24 | self._gid = gid 25 | self._download = {} 26 | self.listener = listener 27 | self.queued = queued 28 | self.start_time = 0 29 | self.seeding = seeding 30 | self.engine = EngineStatus().STATUS_ARIA2 31 | 32 | async def update(self): 33 | self._download = await get_download(self._gid, self._download) 34 | if self._download.get("followedBy", []): 35 | self._gid = self._download["followedBy"][0] 36 | self._download = await get_download(self._gid) 37 | 38 | def progress(self): 39 | try: 40 | return f"{round(int(self._download.get("completedLength", "0")) / int(self._download.get("totalLength", "0")) * 100, 2)}%" 41 | except ZeroDivisionError: 42 | return "0%" 43 | 44 | def processed_bytes(self): 45 | return get_readable_file_size(int(self._download.get("completedLength", "0"))) 46 | 47 | def speed(self): 48 | return ( 49 | f"{get_readable_file_size(int(self._download.get("downloadSpeed", "0")))}/s" 50 | ) 51 | 52 | def name(self): 53 | return aria2_name(self._download) 54 | 55 | def size(self): 56 | return get_readable_file_size(int(self._download.get("totalLength", "0"))) 57 | 58 | def eta(self): 59 | try: 60 | return get_readable_time( 61 | ( 62 | int(self._download.get("totalLength", "0")) 63 | - int(self._download.get("completedLength", "0")) 64 | ) 65 | / int(self._download.get("downloadSpeed", "0")) 66 | ) 67 | except ZeroDivisionError: 68 | return "-" 69 | 70 | async def status(self): 71 | await self.update() 72 | if self._download.get("status", "") == "waiting" or self.queued: 73 | if self.seeding: 74 | return MirrorStatus.STATUS_QUEUEUP 75 | else: 76 | return MirrorStatus.STATUS_QUEUEDL 77 | elif self._download.get("status", "") == "paused": 78 | return MirrorStatus.STATUS_PAUSED 79 | elif self._download.get("seeder", "") == "true" and self.seeding: 80 | return MirrorStatus.STATUS_SEED 81 | else: 82 | return MirrorStatus.STATUS_DOWNLOAD 83 | 84 | def seeders_num(self): 85 | return self._download.get("numSeeders", 0) 86 | 87 | def leechers_num(self): 88 | return self._download.get("connections", 0) 89 | 90 | def uploaded_bytes(self): 91 | return get_readable_file_size(int(self._download.get("uploadLength", "0"))) 92 | 93 | def seed_speed(self): 94 | return ( 95 | f"{get_readable_file_size(int(self._download.get("uploadSpeed", "0")))}/s" 96 | ) 97 | 98 | def ratio(self): 99 | try: 100 | return round( 101 | int(self._download.get("uploadLength", "0")) 102 | / int(self._download.get("completedLength", "0")), 103 | 3, 104 | ) 105 | except ZeroDivisionError: 106 | return 0 107 | 108 | def seeding_time(self): 109 | return get_readable_time(time() - self.start_time) 110 | 111 | def task(self): 112 | return self 113 | 114 | def gid(self): 115 | return self._gid 116 | 117 | async def cancel_task(self): 118 | self.listener.is_cancelled = True 119 | await self.update() 120 | await TorrentManager.aria2_remove(self._download) 121 | if self._download.get("seeder", "") == "true" and self.seeding: 122 | 
LOGGER.info(f"Cancelling Seed: {self.name()}") 123 | await self.listener.on_upload_error( 124 | f"Seeding stopped with Ratio: {self.ratio()} and Time: {self.seeding_time()}" 125 | ) 126 | else: 127 | if self.queued: 128 | LOGGER.info(f"Cancelling QueueDl: {self.name()}") 129 | msg = "task have been removed from queue/download" 130 | else: 131 | LOGGER.info(f"Cancelling Download: {self.name()}") 132 | msg = "Stopped by user!" 133 | await self.listener.on_download_error(msg) 134 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/youtube_utils/youtube_helper.py: -------------------------------------------------------------------------------- 1 | from logging import ERROR, getLogger 2 | from os import path as ospath 3 | from pickle import load as pload 4 | from urllib.parse import parse_qs, urlparse 5 | 6 | from google_auth_httplib2 import AuthorizedHttp 7 | from googleapiclient.discovery import build 8 | from googleapiclient.http import build_http 9 | from tenacity import ( 10 | retry, 11 | retry_if_exception_type, 12 | stop_after_attempt, 13 | wait_exponential, 14 | ) 15 | 16 | LOGGER = getLogger(__name__) 17 | getLogger("googleapiclient.discovery").setLevel(ERROR) 18 | 19 | 20 | class YouTubeHelper: 21 | def __init__(self): 22 | self._OAUTH_SCOPE = [ 23 | "https://www.googleapis.com/auth/youtube.upload", 24 | "https://www.googleapis.com/auth/youtube", 25 | ] 26 | self.token_path = "token.pickle" 27 | self.is_uploading = False 28 | self.service = None 29 | self.total_files = 0 30 | self.file_processed_bytes = 0 31 | self.proc_bytes = 0 32 | self.total_time = 0 33 | self.status = None 34 | self.update_interval = 3 35 | self.upload_progress = 0 36 | 37 | @property 38 | def speed(self): 39 | try: 40 | return self.proc_bytes / self.total_time 41 | except Exception: 42 | return 0 43 | 44 | @property 45 | def processed_bytes(self): 46 | return self.proc_bytes 47 | 48 | async def progress(self): 49 | if self.status is not None: 50 | if hasattr(self.status, "total_size") and hasattr(self.status, "progress"): 51 | chunk_size = ( 52 | self.status.total_size * self.status.progress() 53 | - self.file_processed_bytes 54 | ) 55 | self.file_processed_bytes = ( 56 | self.status.total_size * self.status.progress() 57 | ) 58 | self.proc_bytes += chunk_size 59 | self.total_time += self.update_interval 60 | else: 61 | # For YouTube uploads, we'll track progress differently 62 | self.total_time += self.update_interval 63 | 64 | def authorize(self, user_id=""): 65 | credentials = None 66 | token_path = self.token_path 67 | 68 | if user_id: 69 | token_path = f"tokens/{user_id}.pickle" 70 | 71 | if ospath.exists(token_path): 72 | LOGGER.info(f"Authorize YouTube with {token_path}") 73 | with open(token_path, "rb") as f: 74 | credentials = pload(f) 75 | else: 76 | LOGGER.error(f"YouTube token file {token_path} not found!") 77 | raise FileNotFoundError(f"YouTube token file {token_path} not found!") 78 | 79 | authorized_http = AuthorizedHttp(credentials, http=build_http()) 80 | authorized_http.http.disable_ssl_certificate_validation = True 81 | return build("youtube", "v3", http=authorized_http, cache_discovery=False) 82 | 83 | def get_video_id_from_url(self, url): 84 | """Extract video ID from YouTube URL""" 85 | if "youtube.com/watch?v=" in url: 86 | parsed = urlparse(url) 87 | return parse_qs(parsed.query)["v"][0] 88 | if "youtu.be/" in url: 89 | return url.split("youtu.be/")[1].split("?")[0] 90 | return url # Assume it's already a video ID 91 | 92 | 
@retry( 93 | wait=wait_exponential(multiplier=2, min=3, max=6), 94 | stop=stop_after_attempt(3), 95 | retry=retry_if_exception_type(Exception), 96 | ) 97 | def get_video_info(self, video_id): 98 | """Get video information""" 99 | return ( 100 | self.service.videos() 101 | .list(part="snippet,statistics,status", id=video_id) 102 | .execute() 103 | ) 104 | 105 | @retry( 106 | wait=wait_exponential(multiplier=2, min=3, max=6), 107 | stop=stop_after_attempt(3), 108 | retry=retry_if_exception_type(Exception), 109 | ) 110 | def get_channel_info(self): 111 | """Get channel information""" 112 | return ( 113 | self.service.channels().list(part="snippet,statistics", mine=True).execute() 114 | ) 115 | 116 | def escapes(self, estr): 117 | """Escape special characters in strings""" 118 | chars = ["\\", "'", '"', r"\a", r"\b", r"\f", r"\n", r"\r", r"\t"] 119 | for char in chars: 120 | estr = estr.replace(char, f"\\{char}") 121 | return estr.strip() 122 | 123 | async def cancel_task(self): 124 | """Cancel the current upload task""" 125 | self.listener.is_cancelled = True 126 | if self.is_uploading: 127 | LOGGER.info(f"Cancelling YouTube Upload: {self.listener.name}") 128 | await self.listener.on_upload_error( 129 | "Your YouTube upload has been cancelled!" 130 | ) 131 | -------------------------------------------------------------------------------- /bot/modules/mediainfo.py: -------------------------------------------------------------------------------- 1 | from os import getcwd, path as ospath 2 | from re import search 3 | from shlex import split 4 | 5 | from aiofiles import open as aiopen 6 | from aiofiles.os import mkdir, path as aiopath, remove as aioremove 7 | from aiohttp import ClientSession 8 | 9 | from .. import LOGGER 10 | from ..core.tg_client import TgClient 11 | from ..helper.ext_utils.bot_utils import cmd_exec 12 | from ..helper.ext_utils.telegraph_helper import telegraph 13 | from ..helper.telegram_helper.bot_commands import BotCommands 14 | from ..helper.telegram_helper.message_utils import send_message, edit_message 15 | 16 | 17 | async def gen_mediainfo(message, link=None, media=None, mmsg=None): 18 | temp_send = await send_message(message, "Generating MediaInfo...") 19 | try: 20 | path = "mediainfo/" 21 | if not await aiopath.isdir(path): 22 | await mkdir(path) 23 | file_size = 0 24 | if link: 25 | filename = search(".+/(.+)", link).group(1) 26 | des_path = ospath.join(path, filename) 27 | headers = { 28 | "user-agent": "Mozilla/5.0 (Linux; Android 12; 2201116PI) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Mobile Safari/537.36" 29 | } 30 | async with ClientSession() as session: 31 | async with session.get(link, headers=headers) as response: 32 | file_size = int(response.headers.get("Content-Length", 0)) 33 | async with aiopen(des_path, "wb") as f: 34 | async for chunk in response.content.iter_chunked(10000000): 35 | await f.write(chunk) 36 | break 37 | elif media: 38 | des_path = ospath.join(path, media.file_name) 39 | file_size = media.file_size 40 | if file_size <= 50000000: 41 | await mmsg.download(ospath.join(getcwd(), des_path)) 42 | else: 43 | async for chunk in TgClient.bot.stream_media(media, limit=5): 44 | async with aiopen(des_path, "ab") as f: 45 | await f.write(chunk) 46 | stdout, _, _ = await cmd_exec(split(f'mediainfo "{des_path}"')) 47 | tc = f"
<blockquote><h4>📌 {ospath.basename(des_path)}</h4></blockquote><br><br>
" 48 | if len(stdout) != 0: 49 | tc += parseinfo(stdout, file_size) 50 | except Exception as e: 51 | LOGGER.error(e) 52 | await edit_message(temp_send, f"MediaInfo Stopped due to {str(e)}") 53 | finally: 54 | await aioremove(des_path) 55 | link_id = (await telegraph.create_page(title="MediaInfo X", content=tc))["path"] 56 | await temp_send.edit( 57 | f"MediaInfo:\n\n➲ Link : https://graph.org/{link_id}", 58 | disable_web_page_preview=False, 59 | ) 60 | 61 | 62 | section_dict = {"General": "🗒", "Video": "🎞", "Audio": "🔊", "Text": "🔠", "Menu": "🗃"} 63 | 64 | 65 | def parseinfo(out, size): 66 | tc, trigger = "", False 67 | size_line = ( 68 | f"File size : {size / (1024 * 1024):.2f} MiB" 69 | ) 70 | for line in out.split("\n"): 71 | for section, emoji in section_dict.items(): 72 | if line.startswith(section): 73 | trigger = True 74 | if not line.startswith("General"): 75 | tc += "
" 76 | tc += f"

{emoji} {line.replace('Text', 'Subtitle')}

" 77 | break 78 | if line.startswith("File size"): 79 | line = size_line 80 | if trigger: 81 | tc += "
"
 82 |             trigger = False
 83 |         else:
 84 |             tc += line + "\n"
 85 |     tc += "

" 86 | return tc 87 | 88 | 89 | async def mediainfo(_, message): 90 | rply = message.reply_to_message 91 | help_msg = f""" 92 | By replying to media: 93 | /{BotCommands.MediaInfoCommand[0]} or /{BotCommands.MediaInfoCommand[1]} [media] 94 | 95 | By reply/sending download link: 96 | /{BotCommands.MediaInfoCommand[0]} or /{BotCommands.MediaInfoCommand[1]} [link] 97 | """ 98 | if len(message.command) > 1 or rply and rply.text: 99 | link = rply.text if rply else message.command[1] 100 | return await gen_mediainfo(message, link) 101 | elif rply: 102 | if file := next( 103 | ( 104 | i 105 | for i in [ 106 | rply.document, 107 | rply.video, 108 | rply.audio, 109 | rply.voice, 110 | rply.animation, 111 | rply.video_note, 112 | ] 113 | if i is not None 114 | ), 115 | None, 116 | ): 117 | return await gen_mediainfo(message, None, file, rply) 118 | else: 119 | return await send_message(message, help_msg) 120 | else: 121 | return await send_message(message, help_msg) 122 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/download_utils/rclone_download.py: -------------------------------------------------------------------------------- 1 | from asyncio import gather 2 | from json import loads 3 | from secrets import token_hex 4 | from aiofiles.os import remove 5 | 6 | from .... import task_dict, task_dict_lock, LOGGER 7 | from ....core.config_manager import BinConfig 8 | from ...ext_utils.bot_utils import cmd_exec 9 | from ...ext_utils.task_manager import ( 10 | check_running_tasks, 11 | stop_duplicate_check, 12 | limit_checker, 13 | ) 14 | from ...mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper 15 | from ...mirror_leech_utils.status_utils.queue_status import QueueStatus 16 | from ...mirror_leech_utils.status_utils.rclone_status import RcloneStatus 17 | from ...telegram_helper.message_utils import send_status_message 18 | 19 | 20 | async def add_rclone_download(listener, path): 21 | if listener.link.startswith("mrcc:"): 22 | listener.link = listener.link.split("mrcc:", 1)[1] 23 | config_path = f"rclone/{listener.user_id}.conf" 24 | else: 25 | config_path = "rclone.conf" 26 | 27 | remote, listener.link = listener.link.split(":", 1) 28 | listener.link = listener.link.strip("/") 29 | rclone_select = False 30 | if listener.link.startswith("rclone_select"): 31 | rclone_select = True 32 | rpath = "" 33 | else: 34 | rpath = listener.link 35 | 36 | cmd1 = [ 37 | BinConfig.RCLONE_NAME, 38 | "lsjson", 39 | "--fast-list", 40 | "--stat", 41 | "--no-mimetype", 42 | "--no-modtime", 43 | "--config", 44 | config_path, 45 | f"{remote}:{rpath}", 46 | ] 47 | cmd2 = [ 48 | BinConfig.RCLONE_NAME, 49 | "size", 50 | "--fast-list", 51 | "--json", 52 | "--config", 53 | config_path, 54 | f"{remote}:{rpath}", 55 | ] 56 | if rclone_select: 57 | cmd2.extend(("--files-from", listener.link)) 58 | res = await cmd_exec(cmd2) 59 | if res[2] != 0: 60 | if res[2] != -9: 61 | msg = f"Error: While getting rclone stat/size. Path: {remote}:{listener.link}. 
Stderr: {res[1][:4000]}" 62 | await listener.on_download_error(msg) 63 | return 64 | try: 65 | rsize = loads(res[0]) 66 | except Exception as err: 67 | await listener.on_download_error(f"RcloneDownload JsonLoad: {err}") 68 | return 69 | if not listener.name: 70 | listener.name = listener.link 71 | path += listener.name 72 | else: 73 | res1, res2 = await gather(cmd_exec(cmd1), cmd_exec(cmd2)) 74 | if res1[2] != 0 or res2[2] != 0: 75 | if res1[2] != -9: 76 | err = res1[1] or res2[1] 77 | msg = f"Error: While getting rclone stat/size. Path: {remote}:{listener.link}. Stderr: {err[:4000]}" 78 | await listener.on_download_error(msg) 79 | return 80 | try: 81 | rstat = loads(res1[0]) 82 | rsize = loads(res2[0]) 83 | except Exception as err: 84 | await listener.on_download_error(f"RcloneDownload JsonLoad: {err}") 85 | return 86 | if rstat["IsDir"]: 87 | if not listener.name: 88 | listener.name = ( 89 | listener.link.rsplit("/", 1)[-1] if listener.link else remote 90 | ) 91 | path += listener.name 92 | else: 93 | listener.name = listener.link.rsplit("/", 1)[-1] 94 | listener.size = rsize["bytes"] 95 | gid = token_hex(5) 96 | 97 | if not rclone_select: 98 | msg, button = await stop_duplicate_check(listener) 99 | if msg: 100 | await listener.on_download_error(msg, button) 101 | return 102 | if limit_exceeded := await limit_checker(listener): 103 | await listener.on_download_error(limit_exceeded, is_limit=True) 104 | return 105 | 106 | add_to_queue, event = await check_running_tasks(listener) 107 | if add_to_queue: 108 | LOGGER.info(f"Added to Queue/Download: {listener.name}") 109 | async with task_dict_lock: 110 | task_dict[listener.mid] = QueueStatus(listener, gid, "dl") 111 | await listener.on_download_start() 112 | if listener.multi <= 1: 113 | await send_status_message(listener.message) 114 | await event.wait() 115 | if listener.is_cancelled: 116 | return 117 | 118 | RCTransfer = RcloneTransferHelper(listener) 119 | async with task_dict_lock: 120 | task_dict[listener.mid] = RcloneStatus(listener, RCTransfer, gid, "dl") 121 | 122 | if add_to_queue: 123 | LOGGER.info(f"Start Queued Download with rclone: {listener.link}") 124 | else: 125 | await listener.on_download_start() 126 | if listener.multi <= 1: 127 | await send_status_message(listener.message) 128 | LOGGER.info(f"Download with rclone: {listener.link}") 129 | 130 | await RCTransfer.download(remote, config_path, path) 131 | if rclone_select: 132 | await remove(listener.link) 133 | -------------------------------------------------------------------------------- /bot/helper/mirror_leech_utils/status_utils/nzb_status.py: -------------------------------------------------------------------------------- 1 | from asyncio import gather 2 | from collections import defaultdict 3 | 4 | from .... 
import LOGGER, sabnzbd_client, nzb_jobs, nzb_listener_lock 5 | from ...ext_utils.status_utils import ( 6 | MirrorStatus, 7 | EngineStatus, 8 | get_readable_file_size, 9 | get_readable_time, 10 | time_to_seconds, 11 | ) 12 | 13 | 14 | async def get_download(nzo_id, old_info=None): 15 | if old_info is None: 16 | old_info = {} 17 | try: 18 | queue = await sabnzbd_client.get_downloads(nzo_ids=nzo_id) 19 | if res := queue["queue"]["slots"]: 20 | slot = res[0] 21 | if msg := slot["labels"]: 22 | LOGGER.warning(" | ".join(msg)) 23 | return slot 24 | else: 25 | history = await sabnzbd_client.get_history(nzo_ids=nzo_id) 26 | if res := history["history"]["slots"]: 27 | slot = res[0] 28 | if slot["status"] == "Verifying": 29 | percentage = slot["action_line"].split("Verifying: ")[-1].split("/") 30 | percentage = round( 31 | (int(float(percentage[0])) / int(float(percentage[1]))) * 100, 2 32 | ) 33 | old_info["percentage"] = percentage 34 | elif slot["status"] == "Repairing": 35 | action = slot["action_line"].split("Repairing: ")[-1].split() 36 | percentage = action[0].strip("%") 37 | eta = action[2] 38 | old_info["percentage"] = percentage 39 | old_info["timeleft"] = eta 40 | elif slot["status"] == "Extracting": 41 | if "Unpacking" in slot["action_line"]: 42 | action = slot["action_line"].split("Unpacking: ")[-1].split() 43 | else: 44 | action = ( 45 | slot["action_line"].split("Direct Unpack: ")[-1].split() 46 | ) 47 | percentage = action[0].split("/") 48 | percentage = round( 49 | (int(float(percentage[0])) / int(float(percentage[1]))) * 100, 2 50 | ) 51 | eta = action[2] 52 | old_info["percentage"] = percentage 53 | old_info["timeleft"] = eta 54 | old_info["status"] = slot["status"] 55 | return old_info 56 | except Exception as e: 57 | LOGGER.error(f"{e}: Sabnzbd, while getting job info. 
ID: {nzo_id}") 58 | return old_info 59 | 60 | 61 | class SabnzbdStatus: 62 | def __init__(self, listener, gid, queued=False): 63 | self.queued = queued 64 | self.listener = listener 65 | self._gid = gid 66 | self._info = {} 67 | self.engine = EngineStatus().STATUS_SABNZBD 68 | 69 | async def update(self): 70 | self._info = await get_download(self._gid, self._info) 71 | 72 | def progress(self): 73 | return f"{self._info.get('percentage', '0')}%" 74 | 75 | def processed_raw(self): 76 | return ( 77 | float(self._info.get("mb", "0")) - float(self._info.get("mbleft", "0")) 78 | ) * 1048576 79 | 80 | def processed_bytes(self): 81 | return get_readable_file_size(self.processed_raw()) 82 | 83 | def speed_raw(self): 84 | if self._info.get("mb", "0") == self._info.get("mbleft", "0"): 85 | return 0 86 | try: 87 | return int(float(self._info.get("mbleft", "0")) * 1048576) / self.eta_raw() 88 | except Exception: 89 | return 0 90 | 91 | def speed(self): 92 | return f"{get_readable_file_size(self.speed_raw())}/s" 93 | 94 | def name(self): 95 | return self._info.get("filename", "") 96 | 97 | def size(self): 98 | return self._info.get("size", 0) 99 | 100 | def eta_raw(self): 101 | return int(time_to_seconds(self._info.get("timeleft", "0"))) 102 | 103 | def eta(self): 104 | return get_readable_time(self.eta_raw()) 105 | 106 | async def status(self): 107 | await self.update() 108 | if self._info.get("mb", "0") == self._info.get("mbleft", "0"): 109 | return MirrorStatus.STATUS_QUEUEDL 110 | state = self._info.get("status") 111 | if state == "Paused" and self.queued: 112 | return MirrorStatus.STATUS_QUEUEDL 113 | elif state in [ 114 | "QuickCheck", 115 | "Verifying", 116 | "Repairing", 117 | "Fetching", 118 | "Moving", 119 | "Extracting", 120 | ]: 121 | return state 122 | else: 123 | return MirrorStatus.STATUS_DOWNLOAD 124 | 125 | def task(self): 126 | return self 127 | 128 | def gid(self): 129 | return self._gid 130 | 131 | async def cancel_task(self): 132 | self.listener.is_cancelled = True 133 | await self.update() 134 | LOGGER.info(f"Cancelling Download: {self.name()}") 135 | await gather( 136 | self.listener.on_download_error("Stopped by user!"), 137 | sabnzbd_client.delete_job(self._gid, delete_files=True), 138 | sabnzbd_client.delete_category(f"{self.listener.mid}"), 139 | sabnzbd_client.delete_history(self._gid, delete_files=True), 140 | ) 141 | async with nzb_listener_lock: 142 | if self._gid in nzb_jobs: 143 | del nzb_jobs[self._gid] 144 | -------------------------------------------------------------------------------- /gen_scripts/add_to_team_drive.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | from argparse import ArgumentParser 3 | from glob import glob 4 | from json import load, JSONDecodeError 5 | from os import path 6 | from pickle import load as pickle_load, dump as pickle_dump 7 | from sys import exit 8 | from time import time 9 | 10 | from googleapiclient.discovery import build 11 | from progress.bar import Bar 12 | from google.auth.transport.requests import Request 13 | from google_auth_oauthlib.flow import InstalledAppFlow 14 | 15 | 16 | def parse_args(): 17 | parser = ArgumentParser( 18 | description="Add service accounts to a shared drive using credentials files in a folder." 
19 | ) 20 | parser.add_argument( 21 | "--path", 22 | "-p", 23 | default="accounts", 24 | help="Path to the service accounts folder.", 25 | ) 26 | parser.add_argument( 27 | "--credentials", 28 | "-c", 29 | default="./credentials.json", 30 | help="Path for the credentials file.", 31 | ) 32 | parser.add_argument("--yes", "-y", action="store_true", help="Skips the prompt.") 33 | req = parser.add_argument_group("required arguments") 34 | req.add_argument( 35 | "--drive-id", "-d", required=True, help="The ID of the Shared Drive." 36 | ) 37 | return parser.parse_args() 38 | 39 | 40 | def load_credentials_file(credentials_pattern): 41 | credentials_files = glob(credentials_pattern) 42 | if not credentials_files: 43 | print(">> No credentials found.") 44 | exit(0) 45 | credentials_file = credentials_files[0] 46 | try: 47 | with open(credentials_file, "r") as f: 48 | load(f) 49 | print(">> Found credentials.") 50 | except (IOError, JSONDecodeError) as e: 51 | print(">> Error reading credentials:", e) 52 | exit(1) 53 | return credentials_file 54 | 55 | 56 | def authenticate(creds_file): 57 | token_path = "token_sa.pickle" 58 | creds = None 59 | try: 60 | if path.exists(token_path): 61 | with open(token_path, "rb") as token_file: 62 | creds = pickle_load(token_file) 63 | except Exception as e: 64 | print(">> Failed to load existing token:", e) 65 | try: 66 | if not creds or not getattr(creds, "valid", False): 67 | if ( 68 | creds 69 | and getattr(creds, "expired", False) 70 | and getattr(creds, "refresh_token", None) 71 | ): 72 | creds.refresh(Request()) 73 | else: 74 | flow = InstalledAppFlow.from_client_secrets_file( 75 | creds_file, 76 | scopes=[ 77 | "https://www.googleapis.com/auth/admin.directory.group", 78 | "https://www.googleapis.com/auth/admin.directory.group.member", 79 | ], 80 | ) 81 | creds = flow.run_console() 82 | with open(token_path, "wb") as token_file: 83 | pickle_dump(creds, token_file) 84 | except Exception as e: 85 | print(">> Authentication failed:", e) 86 | exit(1) 87 | return creds 88 | 89 | 90 | def add_service_accounts(drive_client, account_dir, drive_id): 91 | account_files = glob(path.join(account_dir, "*.json")) 92 | if not account_files: 93 | print(">> No service accounts found in:", account_dir) 94 | exit(0) 95 | batch = drive_client.new_batch_http_request() 96 | pbar = Bar("Readying accounts", max=len(account_files)) 97 | for acc_file in account_files: 98 | try: 99 | with open(acc_file, "r") as f: 100 | data = load(f) 101 | client_email = data["client_email"] 102 | batch.add( 103 | drive_client.permissions().create( 104 | fileId=drive_id, 105 | supportsAllDrives=True, 106 | body={ 107 | "role": "organizer", 108 | "type": "user", 109 | "emailAddress": client_email, 110 | }, 111 | ) 112 | ) 113 | except Exception as e: 114 | print(">> Error processing file {}: {}".format(acc_file, e)) 115 | pbar.next() 116 | pbar.finish() 117 | print("Adding...") 118 | try: 119 | batch.execute() 120 | except Exception as e: 121 | print(">> Batch execution failed:", e) 122 | exit(1) 123 | 124 | 125 | def main(): 126 | start_time = time() 127 | args = parse_args() 128 | credentials_file = load_credentials_file(args.credentials) 129 | 130 | if not args.yes: 131 | try: 132 | input( 133 | ">> Ensure the Google account that generated credentials.json has " 134 | "Manager access on your Team Drive.\n>> (Press any key to continue)" 135 | ) 136 | except Exception as e: 137 | print(">> User prompt failed:", e) 138 | exit(1) 139 | 140 | creds = authenticate(credentials_file) 141 | drive_client = 
build("drive", "v3", credentials=creds) 142 | add_service_accounts(drive_client, args.path, args.drive_id) 143 | 144 | elapsed = time() - start_time 145 | hours, rem = divmod(elapsed, 3600) 146 | minutes, seconds = divmod(rem, 60) 147 | print("Complete.") 148 | print( 149 | "Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format( 150 | int(hours), int(minutes), seconds 151 | ) 152 | ) 153 | 154 | 155 | if __name__ == "__main__": 156 | main() 157 | -------------------------------------------------------------------------------- /bot/core/torrent_manager.py: -------------------------------------------------------------------------------- 1 | from asyncio import TimeoutError, gather 2 | from contextlib import suppress 3 | from inspect import iscoroutinefunction 4 | from pathlib import Path 5 | 6 | from aioaria2 import Aria2WebsocketClient 7 | from aiohttp import ClientError 8 | from aioqbt.client import create_client 9 | from tenacity import ( 10 | retry, 11 | retry_if_exception_type, 12 | stop_after_attempt, 13 | wait_exponential, 14 | ) 15 | 16 | from .. import LOGGER, aria2_options 17 | from .config_manager import Config 18 | 19 | 20 | def wrap_with_retry(obj, max_retries=3): 21 | for attr_name in dir(obj): 22 | if attr_name.startswith("_"): 23 | continue 24 | 25 | attr = getattr(obj, attr_name) 26 | if iscoroutinefunction(attr): 27 | retry_policy = retry( 28 | stop=stop_after_attempt(max_retries), 29 | wait=wait_exponential(multiplier=1, min=1, max=5), 30 | retry=retry_if_exception_type( 31 | (ClientError, TimeoutError, RuntimeError) 32 | ), 33 | ) 34 | wrapped = retry_policy(attr) 35 | setattr(obj, attr_name, wrapped) 36 | return obj 37 | 38 | 39 | class TorrentManager: 40 | aria2 = None 41 | qbittorrent = None 42 | 43 | @classmethod 44 | async def initiate(cls): 45 | if cls.aria2: 46 | return 47 | try: 48 | cls.aria2 = await Aria2WebsocketClient.new("http://localhost:6800/jsonrpc") 49 | LOGGER.info("Aria2 initialized successfully.") 50 | 51 | if Config.DISABLE_TORRENTS: 52 | LOGGER.info("Torrents are disabled.") 53 | return 54 | 55 | cls.qbittorrent = await create_client("http://localhost:8090/api/v2/") 56 | cls.qbittorrent = wrap_with_retry(cls.qbittorrent) 57 | 58 | except Exception as e: 59 | LOGGER.error(f"Error during initialization: {e}") 60 | await cls.close_all() 61 | raise 62 | 63 | @classmethod 64 | async def close_all(cls): 65 | close_tasks = [] 66 | if cls.aria2: 67 | close_tasks.append(cls.aria2.close()) 68 | cls.aria2 = None 69 | if cls.qbittorrent: 70 | close_tasks.append(cls.qbittorrent.close()) 71 | cls.qbittorrent = None 72 | if close_tasks: 73 | await gather(*close_tasks) 74 | 75 | @classmethod 76 | async def aria2_remove(cls, download): 77 | if download.get("status", "") in ["active", "paused", "waiting"]: 78 | await cls.aria2.forceRemove(download.get("gid", "")) 79 | else: 80 | with suppress(Exception): 81 | await cls.aria2.removeDownloadResult(download.get("gid", "")) 82 | 83 | @classmethod 84 | async def remove_all(cls): 85 | await cls.pause_all() 86 | if cls.qbittorrent: 87 | await gather( 88 | cls.qbittorrent.torrents.delete("all", False), 89 | cls.aria2.purgeDownloadResult(), 90 | ) 91 | else: 92 | await gather( 93 | cls.aria2.purgeDownloadResult(), 94 | ) 95 | downloads = [] 96 | results = await gather(cls.aria2.tellActive(), cls.aria2.tellWaiting(0, 1000)) 97 | for res in results: 98 | downloads.extend(res) 99 | tasks = [] 100 | tasks.extend( 101 | cls.aria2.forceRemove(download.get("gid")) for download in downloads 102 | ) 103 | with suppress(Exception): 104 | await 
gather(*tasks) 105 | 106 |     @classmethod 107 |     async def overall_speed(cls): 108 |         aria2_speed = await cls.aria2.getGlobalStat() 109 |         download_speed = int(aria2_speed.get("downloadSpeed", "0")) 110 |         upload_speed = int(aria2_speed.get("uploadSpeed", "0")) 111 | 112 |         if cls.qbittorrent: 113 |             qb_speed = await cls.qbittorrent.transfer.info() 114 |             download_speed += qb_speed.dl_info_speed 115 |             upload_speed += qb_speed.up_info_speed 116 | 117 |         return download_speed, upload_speed 118 | 119 |     @classmethod 120 |     async def pause_all(cls): 121 |         pause_tasks = [cls.aria2.forcePauseAll()] 122 |         if cls.qbittorrent: 123 |             pause_tasks.append(cls.qbittorrent.torrents.stop("all")) 124 |         await gather(*pause_tasks) 125 | 126 |     @classmethod 127 |     async def change_aria2_option(cls, key, value): 128 |         downloads = [] 129 |         results = await gather(cls.aria2.tellActive(), cls.aria2.tellWaiting(0, 1000)) 130 |         for res in results: 131 |             downloads.extend(res) 132 |         tasks = [ 133 |             cls.aria2.changeOption(download.get("gid"), {key: value}) 134 |             for download in downloads 135 |             if download.get("status", "") != "complete" 136 |         ] 137 |         if tasks: 138 |             try: 139 |                 await gather(*tasks) 140 |             except Exception as e: 141 |                 LOGGER.error(e) 142 |         if key not in ["checksum", "index-out", "out", "pause", "select-file"]: 143 |             await cls.aria2.changeGlobalOption({key: value}) 144 |         aria2_options[key] = value 145 | 146 | 147 | def aria2_name(download_info): 148 |     if "bittorrent" in download_info and download_info["bittorrent"].get("info"): 149 |         return download_info["bittorrent"]["info"]["name"] 150 |     elif download_info.get("files"): 151 |         if download_info["files"][0]["path"].startswith("[METADATA]"): 152 |             return download_info["files"][0]["path"] 153 |         file_path = download_info["files"][0]["path"] 154 |         dir_path = download_info["dir"] 155 |         if file_path.startswith(dir_path): 156 |             return Path(file_path[len(dir_path) + 1 :]).parts[0] 157 |         else: 158 |             return "" 159 |     else: 160 |         return "" 161 | 162 | 163 | def is_metadata(download_info): 164 |     return any( 165 |         f["path"].startswith("[METADATA]") for f in download_info.get("files", []) 166 |     ) 167 | -------------------------------------------------------------------------------- /config_sample.py: -------------------------------------------------------------------------------- 1 | # REQUIRED CONFIG 2 | BOT_TOKEN = "" 3 | OWNER_ID = 0 4 | TELEGRAM_API = 0 5 | TELEGRAM_HASH = "" 6 | DATABASE_URL = "" 7 | 8 | # OPTIONAL CONFIG 9 | DEFAULT_LANG = "en" 10 | TG_PROXY = ( 11 |     {} 12 | )  # {"scheme": "socks5", "hostname": "", "port": 1234, "username": "user", "password": "pass"} 13 | USER_SESSION_STRING = "" 14 | CMD_SUFFIX = "" 15 | AUTHORIZED_CHATS = "" 16 | SUDO_USERS = "" 17 | STATUS_LIMIT = 10 18 | DEFAULT_UPLOAD = "rc" 19 | STATUS_UPDATE_INTERVAL = 15 20 | FILELION_API = "" 21 | STREAMWISH_API = "" 22 | EXCLUDED_EXTENSIONS = "" 23 | INCOMPLETE_TASK_NOTIFIER = False 24 | YT_DLP_OPTIONS = "" 25 | USE_SERVICE_ACCOUNTS = False 26 | NAME_SWAP = "" 27 | FFMPEG_CMDS = {} 28 | UPLOAD_PATHS = {} 29 | 30 | # Hyper Tg Downloader 31 | HELPER_TOKENS = "" 32 | 33 | # MegaAPI v4.30 34 | MEGA_EMAIL = "" 35 | MEGA_PASSWORD = "" 36 | 37 | # Disable Options 38 | DISABLE_TORRENTS = False 39 | DISABLE_LEECH = False 40 | DISABLE_BULK = False 41 | DISABLE_MULTI = False 42 | DISABLE_SEED = False 43 | DISABLE_FF_MODE = False 44 | 45 | # Telegraph 46 | AUTHOR_NAME = "WZML-X" 47 | AUTHOR_URL = "https://t.me/WZML_X" 48 | 49 | # Task Limits 50 | DIRECT_LIMIT = 0 51 | MEGA_LIMIT = 0 52 | TORRENT_LIMIT = 0 53 | 
GD_DL_LIMIT = 0 54 | RC_DL_LIMIT = 0 55 | CLONE_LIMIT = 0 56 | JD_LIMIT = 0 57 | NZB_LIMIT = 0 58 | YTDLP_LIMIT = 0 59 | PLAYLIST_LIMIT = 0 60 | LEECH_LIMIT = 0 61 | EXTRACT_LIMIT = 0 62 | ARCHIVE_LIMIT = 0 63 | STORAGE_LIMIT = 0 64 | 65 | # Insta video downloader api 66 | INSTADL_API = "" 67 | 68 | # Nzb search 69 | HYDRA_IP = "" 70 | HYDRA_API_KEY = "" 71 | 72 | # Media Search 73 | IMDB_TEMPLATE = """Title: {title} [{year}] 74 | Also Known As: {aka} 75 | Rating ⭐️: {rating} 76 | Release Info: {release_date} 77 | Genre: {genres} 78 | IMDb URL: {url} 79 | Language: {languages} 80 | Country of Origin : {countries} 81 | 82 | Story Line: {plot} 83 | 84 | Read More ...""" 85 | 86 | # Task Tools 87 | FORCE_SUB_IDS = "" 88 | MEDIA_STORE = True 89 | DELETE_LINKS = False 90 | CLEAN_LOG_MSG = False 91 | 92 | # Limiters 93 | BOT_MAX_TASKS = 0 94 | USER_MAX_TASKS = 0 95 | USER_TIME_INTERVAL = 0 96 | VERIFY_TIMEOUT = 0 97 | LOGIN_PASS = "" 98 | 99 | # Bot Settings 100 | BOT_PM = False 101 | SET_COMMANDS = True 102 | TIMEZONE = "Asia/Kolkata" 103 | 104 | # GDrive Tools 105 | GDRIVE_ID = "" 106 | GD_DESP = "Uploaded with WZ Bot" 107 | IS_TEAM_DRIVE = False 108 | STOP_DUPLICATE = False 109 | INDEX_URL = "" 110 | 111 | # YT Tools 112 | YT_DESP = "Uploaded to YouTube by WZML-X bot" 113 | YT_TAGS = ["telegram", "bot", "youtube"] # or as a comma-separated string 114 | YT_CATEGORY_ID = 22 115 | YT_PRIVACY_STATUS = "unlisted" 116 | 117 | # Rclone 118 | RCLONE_PATH = "" 119 | RCLONE_FLAGS = "" 120 | RCLONE_SERVE_URL = "" 121 | SHOW_CLOUD_LINK = True 122 | RCLONE_SERVE_PORT = 0 123 | RCLONE_SERVE_USER = "" 124 | RCLONE_SERVE_PASS = "" 125 | 126 | # JDownloader 127 | JD_EMAIL = "" 128 | JD_PASS = "" 129 | 130 | # Sabnzbd 131 | USENET_SERVERS = [ 132 | { 133 | "name": "main", 134 | "host": "", 135 | "port": 563, 136 | "timeout": 60, 137 | "username": "", 138 | "password": "", 139 | "connections": 8, 140 | "ssl": 1, 141 | "ssl_verify": 2, 142 | "ssl_ciphers": "", 143 | "enable": 1, 144 | "required": 0, 145 | "optional": 0, 146 | "retention": 0, 147 | "send_group": 0, 148 | "priority": 0, 149 | } 150 | ] 151 | 152 | # Update 153 | UPSTREAM_REPO = "" 154 | UPSTREAM_BRANCH = "master" 155 | UPDATE_PKGS = True 156 | 157 | # Leech 158 | LEECH_SPLIT_SIZE = 0 159 | AS_DOCUMENT = False 160 | EQUAL_SPLITS = False 161 | MEDIA_GROUP = False 162 | USER_TRANSMISSION = True 163 | HYBRID_LEECH = True 164 | LEECH_PREFIX = "" 165 | LEECH_SUFFIX = "" 166 | LEECH_FONT = "" 167 | LEECH_CAPTION = "" 168 | THUMBNAIL_LAYOUT = "" 169 | 170 | # Log Channels 171 | LEECH_DUMP_CHAT = "" 172 | LINKS_LOG_ID = "" 173 | MIRROR_LOG_ID = "" 174 | 175 | # qBittorrent/Aria2c 176 | TORRENT_TIMEOUT = 0 177 | BASE_URL = "" 178 | BASE_URL_PORT = 0 179 | WEB_PINCODE = True 180 | 181 | # Queueing system 182 | QUEUE_ALL = 0 183 | QUEUE_DOWNLOAD = 0 184 | QUEUE_UPLOAD = 0 185 | 186 | # RSS 187 | RSS_DELAY = 600 188 | RSS_CHAT = "" 189 | RSS_SIZE_LIMIT = 0 190 | 191 | # Torrent Search 192 | SEARCH_API_LINK = "" 193 | SEARCH_LIMIT = 0 194 | SEARCH_PLUGINS = [ 195 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/piratebay.py", 196 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/limetorrents.py", 197 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torlock.py", 198 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentscsv.py", 199 | 
"https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/eztv.py", 200 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentproject.py", 201 | "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/kickass_torrent.py", 202 | "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/yts_am.py", 203 | "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/linuxtracker.py", 204 | "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/nyaasi.py", 205 | "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/ettv.py", 206 | "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/glotorrents.py", 207 | "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/thepiratebay.py", 208 | "https://raw.githubusercontent.com/v1k45/1337x-qBittorrent-search-plugin/master/leetx.py", 209 | "https://raw.githubusercontent.com/nindogo/qbtSearchScripts/master/magnetdl.py", 210 | "https://raw.githubusercontent.com/msagca/qbittorrent_plugins/main/uniondht.py", 211 | "https://raw.githubusercontent.com/khensolomon/leyts/master/yts.py", 212 | ] 213 | --------------------------------------------------------------------------------