",
22 | "__builtins__": globals()["__builtins__"],
23 | "bot": TgClient.bot,
24 | "message": message,
25 | "user": message.from_user or message.sender_chat,
26 | "chat": message.chat,
27 | }
28 |
29 | return namespaces[message.chat.id]
30 |
31 |
32 | def log_input(message):
33 | LOGGER.info(
34 | f"IN: {message.text} (user={(message.from_user or message.sender_chat).id}, chat={message.chat.id})"
35 | )
36 |
37 |
38 | async def send(msg, message):
39 | if len(str(msg)) > 2000:
40 | with BytesIO(str(msg).encode()) as out_file:
41 | out_file.name = "output.txt"
42 | await send_file(message, out_file)
43 | else:
44 | LOGGER.info(f"OUT: '{msg}'")
45 | if not msg or msg == "\n":
46 | msg = "MessageEmpty"
47 | elif not bool(match(r"<(spoiler|b|i|code|s|u|/a)>", msg)):
48 | msg = f"<code>{msg}</code>"
49 | await send_message(message, msg)
50 |
51 |
52 | @new_task
53 | async def aioexecute(_, message):
54 | await send(await do("aexec", message), message)
55 |
56 |
57 | @new_task
58 | async def execute(_, message):
59 | await send(await do("exec", message), message)
60 |
61 |
62 | def cleanup_code(code):
63 | if code.startswith("```") and code.endswith("```"):
64 | return "\n".join(code.split("\n")[1:-1])
65 | return code.strip("` \n")
66 |
67 |
68 | async def do(func, message):
69 | log_input(message)
70 | content = message.text.split(maxsplit=1)[-1]
71 | body = cleanup_code(content)
72 | env = namespace_of(message)
73 |
74 | chdir(getcwd())
75 | async with aiopen(ospath.join(getcwd(), "bot/modules/temp.txt"), "w") as temp:
76 | await temp.write(body)
77 |
78 | stdout = StringIO()
79 |
80 | try:
81 | if func == "exec":
82 | exec(f"def func():\n{indent(body, ' ')}", env)
83 | else:
84 | exec(f"async def func():\n{indent(body, ' ')}", env)
85 | except Exception as e:
86 | return f"{e.__class__.__name__}: {e}"
87 |
88 | rfunc = env["func"]
89 |
90 | try:
91 | with redirect_stdout(stdout):
92 | func_return = (
93 | await sync_to_async(rfunc) if func == "exec" else await rfunc()
94 | )
95 | except Exception:
96 | value = stdout.getvalue()
97 | return f"{value}{format_exc()}"
98 | else:
99 | value = stdout.getvalue()
100 | result = None
101 | if func_return is None:
102 | if value:
103 | result = f"{value}"
104 | else:
105 | with suppress(Exception):
106 | result = f"{repr(await sync_to_async(eval, body, env))}"
107 | else:
108 | result = f"{value}{func_return}"
109 | if result:
110 | return result
111 |
112 |
113 | @new_task
114 | async def clear(_, message):
115 | log_input(message)
116 | global namespaces
117 | if message.chat.id in namespaces:
118 | del namespaces[message.chat.id]
119 | await send("Locals Cleared.", message)
120 |
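
For reference, a minimal standalone sketch of the wrap-and-exec technique `do()` relies on: the snippet is indented into a generated function body so multi-line code and `return` work, then called with stdout captured. `run_snippet` and its sample input are illustrative, not part of the module:

```python
from contextlib import redirect_stdout
from io import StringIO
from textwrap import indent

def run_snippet(body: str, env: dict) -> str:
    # Wrap the snippet in a function so multi-line code and `return` work,
    # then call it with stdout redirected into a buffer.
    exec(f"def func():\n{indent(body, ' ')}", env)
    buf = StringIO()
    with redirect_stdout(buf):
        ret = env["func"]()
    return f"{buf.getvalue()}{ret if ret is not None else ''}"

print(run_snippet("print('hi')\nreturn 1 + 1", {}))  # prints "hi" then "2"
```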
--------------------------------------------------------------------------------
/bot/core/jdownloader_booter.py:
--------------------------------------------------------------------------------
1 | from json import dumps
2 | from random import randint
3 | from re import match
4 |
5 | from aiofiles import open as aiopen
6 | from aiofiles.os import listdir, makedirs, path, rename
7 | from aioshutil import rmtree
8 |
9 | from myjd import MyJdApi
10 |
11 | from .. import LOGGER
12 | from ..helper.ext_utils.bot_utils import cmd_exec, new_task
13 | from .config_manager import Config
14 | from .tg_client import TgClient
15 |
16 |
17 | class JDownloader(MyJdApi):
18 | def __init__(self):
19 | super().__init__()
20 | self._username = ""
21 | self._password = ""
22 | self._device_name = ""
23 | self.is_connected = False
24 | self.error = "JDownloader Credentials not provided!"
25 |
26 | async def _write_config(self, path, data):
27 | async with aiopen(path, "w") as f:
28 | await f.write(dumps(data))
29 |
30 | @new_task
31 | async def boot(self):
32 | await cmd_exec(["pkill", "-9", "-f", "java"])
33 | if not Config.JD_EMAIL or not Config.JD_PASS:
34 | self.is_connected = False
35 | self.error = "JDownloader Credentials not provided!"
36 | return
37 | self.error = "Connecting... Try again after a couple of seconds"
38 | self._device_name = f"{randint(0, 1000)}@{TgClient.BNAME}"
39 | if await path.exists("/JDownloader/logs"):
40 | LOGGER.info(
41 | "Starting JDownloader... This might take up to 10 sec and might restart once if update available!"
42 | )
43 | else:
44 | LOGGER.info(
45 | "Starting JDownloader... This might take up to 8 sec and might restart once after build!"
46 | )
47 | jdata = {
48 | "autoconnectenabledv2": True,
49 | "password": Config.JD_PASS,
50 | "devicename": f"{self._device_name}",
51 | "email": Config.JD_EMAIL,
52 | }
53 | remote_data = {
54 | "localapiserverheaderaccesscontrollalloworigin": "",
55 | "deprecatedapiport": 3128,
56 | "localapiserverheaderxcontenttypeoptions": "nosniff",
57 | "localapiserverheaderxframeoptions": "DENY",
58 | "externinterfaceenabled": True,
59 | "deprecatedapilocalhostonly": True,
60 | "localapiserverheaderreferrerpolicy": "no-referrer",
61 | "deprecatedapienabled": True,
62 | "localapiserverheadercontentsecuritypolicy": "default-src 'self'",
63 | "jdanywhereapienabled": True,
64 | "externinterfacelocalhostonly": False,
65 | "localapiserverheaderxxssprotection": "1; mode=block",
66 | }
67 | await makedirs("/JDownloader/cfg", exist_ok=True)
68 | await self._write_config(
69 | "/JDownloader/cfg/org.jdownloader.api.myjdownloader.MyJDownloaderSettings.json",
70 | jdata,
71 | )
72 | await self._write_config(
73 | "/JDownloader/cfg/org.jdownloader.api.RemoteAPIConfig.json",
74 | remote_data,
75 | )
76 | if not await path.exists("/JDownloader/JDownloader.jar"):
77 | pattern = r"JDownloader\.jar\.backup.\d$"
78 | for filename in await listdir("/JDownloader"):
79 | if match(pattern, filename):
80 | await rename(
81 | f"/JDownloader/{filename}", "/JDownloader/JDownloader.jar"
82 | )
83 | break
84 | await rmtree("/JDownloader/update")
85 | await rmtree("/JDownloader/tmp")
86 | cmd = "cpulimit -l 20 -- java -Xms256m -Xmx500m -Dsun.jnu.encoding=UTF-8 -Dfile.encoding=UTF-8 -Djava.awt.headless=true -jar /JDownloader/JDownloader.jar"
87 | self.is_connected = True
88 | _, __, code = await cmd_exec(cmd, shell=True)
89 | self.is_connected = False
90 | if code != -9:
91 | await self.boot()
92 |
93 |
94 | jdownloader = JDownloader()
95 |
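
A synchronous sketch of the config-writing step in `boot()`: the cfg directory is created, then the MyJDownloader autoconnect settings are dumped as JSON. The `/tmp` path and the sample values here are illustrative:

```python
import json
from pathlib import Path

def write_config(path: str, data: dict) -> None:
    # Ensure the cfg directory exists, then dump the settings as JSON,
    # mirroring makedirs() + _write_config() above.
    Path(path).parent.mkdir(parents=True, exist_ok=True)
    Path(path).write_text(json.dumps(data))

write_config(
    "/tmp/JDownloader/cfg/org.jdownloader.api.myjdownloader.MyJDownloaderSettings.json",
    {"autoconnectenabledv2": True, "devicename": "42@MyBot", "email": "user@example.com"},
)
```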
--------------------------------------------------------------------------------
/bot/modules/gd_search.py:
--------------------------------------------------------------------------------
1 | from .. import LOGGER, user_data
2 | from ..helper.ext_utils.bot_utils import (
3 | sync_to_async,
4 | get_telegraph_list,
5 | new_task,
6 | )
7 | from ..helper.mirror_leech_utils.gdrive_utils.search import GoogleDriveSearch
8 | from ..helper.telegram_helper.button_build import ButtonMaker
9 | from ..helper.telegram_helper.message_utils import send_message, edit_message
10 |
11 |
12 | async def list_buttons(user_id, is_recursive=True, user_token=False):
13 | buttons = ButtonMaker()
14 | buttons.data_button(
15 | f"{'✅️' if user_token else '❌️'} User Token",
16 | f"list_types {user_id} ut {is_recursive} {user_token}",
17 | "header",
18 | )
19 | buttons.data_button(
20 | f"{'✅️' if is_recursive else '❌️'} Recursive",
21 | f"list_types {user_id} rec {is_recursive} {user_token}",
22 | "header",
23 | )
24 | buttons.data_button(
25 | "Folders", f"list_types {user_id} folders {is_recursive} {user_token}"
26 | )
27 | buttons.data_button(
28 | "Files", f"list_types {user_id} files {is_recursive} {user_token}"
29 | )
30 | buttons.data_button(
31 | "Both", f"list_types {user_id} both {is_recursive} {user_token}"
32 | )
33 |
34 | buttons.data_button("Cancel", f"list_types {user_id} cancel", "footer")
35 | return buttons.build_menu(2)
36 |
37 |
38 | async def _list_drive(key, message, item_type, is_recursive, user_token, user_id):
39 | LOGGER.info(f"GD Listing: {key}")
40 | if user_token:
41 | user_dict = user_data.get(user_id, {})
42 | target_id = user_dict.get("GDRIVE_ID", "") or ""
43 | LOGGER.info(target_id)
44 | else:
45 | target_id = ""
46 | telegraph_content, contents_no = await sync_to_async(
47 | GoogleDriveSearch(is_recursive=is_recursive, item_type=item_type).drive_list,
48 | key,
49 | target_id,
50 | user_id,
51 | )
52 | if telegraph_content:
53 | try:
54 | button = await get_telegraph_list(telegraph_content)
55 | except Exception as e:
56 | await edit_message(message, e)
57 | return
58 | msg = f"Found {contents_no} results for {key}"
59 | await edit_message(message, msg, button)
60 | else:
61 | await edit_message(message, f"No results found for {key}")
62 |
63 |
64 | @new_task
65 | async def select_type(_, query):
66 | user_id = query.from_user.id
67 | message = query.message
68 | key = message.reply_to_message.text.split(maxsplit=1)[1].strip()
69 | data = query.data.split()
70 | if user_id != int(data[1]):
71 | return await query.answer(text="Not Yours!", show_alert=True)
72 | elif data[2] == "rec":
73 | await query.answer()
74 | is_recursive = not bool(eval(data[3]))
75 | buttons = await list_buttons(user_id, is_recursive, eval(data[4]))
76 | return await edit_message(message, "Choose list options:", buttons)
77 | elif data[2] == "ut":
78 | await query.answer()
79 | user_token = not bool(eval(data[4]))
80 | buttons = await list_buttons(user_id, eval(data[3]), user_token)
81 | return await edit_message(message, "Choose list options:", buttons)
82 | elif data[2] == "cancel":
83 | await query.answer()
84 | return await edit_message(message, "List has been canceled!")
85 | await query.answer()
86 | item_type = data[2]
87 | is_recursive = eval(data[3])
88 | user_token = eval(data[4])
89 | await edit_message(message, f"Searching for {key}...")
90 | await _list_drive(key, message, item_type, is_recursive, user_token, user_id)
91 |
92 |
93 | @new_task
94 | async def gdrive_search(_, message):
95 | if len(message.text.split()) == 1:
96 | return await send_message(
97 | message, "Send a search query along with list command"
98 | )
99 | user_id = message.from_user.id
100 | buttons = await list_buttons(user_id)
101 | await send_message(message, "Choose list options:", buttons)
102 |
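
A sketch of the callback-data scheme used by `list_buttons()`/`select_type()`: all state is packed into the space-separated `query.data` payload and parsed back with `str.split()`. `build_cb` is a hypothetical helper for illustration:

```python
def build_cb(user_id: int, action: str, is_recursive: bool, user_token: bool) -> str:
    # Everything the handler needs to rebuild the menu travels in query.data.
    return f"list_types {user_id} {action} {is_recursive} {user_token}"

data = build_cb(123, "rec", True, False).split()
assert int(data[1]) == 123
# The handler flips the parsed boolean; comparing against the literal "True"
# avoids the eval() call used above.
is_recursive = not (data[3] == "True")
assert is_recursive is False
```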
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/uphoster_utils/multi_upload.py:
--------------------------------------------------------------------------------
1 | from asyncio import gather
2 | from logging import getLogger
3 |
4 | from bot.helper.mirror_leech_utils.uphoster_utils.gofile_utils.upload import (
5 | GoFileUpload,
6 | )
7 | from bot.helper.mirror_leech_utils.uphoster_utils.buzzheavier_utils.upload import (
8 | BuzzHeavierUpload,
9 | )
10 | from bot.helper.mirror_leech_utils.uphoster_utils.pixeldrain_utils.upload import (
11 | PixelDrainUpload,
12 | )
13 |
14 | LOGGER = getLogger(__name__)
15 |
16 |
17 | class MultiUphosterUpload:
18 | def __init__(self, listener, path, services):
19 | self.listener = listener
20 | self.path = path
21 | self.services = services
22 | self.uploaders = []
23 | self._processed_bytes = 0
24 | self._speed = 0
25 | self.is_cancelled = False
26 | self.results = {}
27 | self.failed = []
28 |
29 | for service in services:
30 | if service == "gofile":
31 | self.uploaders.append(GoFileUpload(ProxyListener(self, "gofile"), path))
32 | elif service == "buzzheavier":
33 | self.uploaders.append(
34 | BuzzHeavierUpload(ProxyListener(self, "buzzheavier"), path)
35 | )
36 | elif service == "pixeldrain":
37 | self.uploaders.append(
38 | PixelDrainUpload(ProxyListener(self, "pixeldrain"), path)
39 | )
40 |
41 | @property
42 | def speed(self):
43 | return sum(u.speed for u in self.uploaders)
44 |
45 | @property
46 | def processed_bytes(self):
47 | if not self.uploaders:
48 | return 0
49 | return sum(u.processed_bytes for u in self.uploaders) / len(self.uploaders)
50 |
51 | async def upload(self):
52 | tasks = [u.upload() for u in self.uploaders]
53 | await gather(*tasks)
54 |
55 | async def cancel_task(self):
56 | self.is_cancelled = True
57 | tasks = [u.cancel_task() for u in self.uploaders]
58 | await gather(*tasks)
59 |
60 | async def on_upload_complete(
61 | self, service, link, files, folders, mime_type, dir_id
62 | ):
63 | self.results[service] = {
64 | "link": link,
65 | "files": files,
66 | "folders": folders,
67 | "mime_type": mime_type,
68 | "dir_id": dir_id,
69 | }
70 | await self._check_completion()
71 |
72 | async def on_upload_error(self, service, error):
73 | LOGGER.error(f"Upload failed for {service}: {error}")
74 | self.failed.append(service)
75 | self.results[service] = {"error": error}
76 | await self._check_completion()
77 |
78 | async def _check_completion(self):
79 | if len(self.results) == len(self.uploaders):
80 | if len(self.failed) == len(self.uploaders):
81 | await self.listener.on_upload_error("All uploads failed.")
82 | else:
83 | successful_result = next(
84 | v for k, v in self.results.items() if "error" not in v
85 | )
86 | await self.listener.on_upload_complete(
87 | self.results,
88 | successful_result["files"],
89 | successful_result["folders"],
90 | successful_result["mime_type"],
91 | successful_result["dir_id"],
92 | )
93 |
94 |
95 | class ProxyListener:
96 | def __init__(self, multi_uploader, service):
97 | self.multi_uploader = multi_uploader
98 | self.service = service
99 | self.is_cancelled = False
100 |
101 | def __getattr__(self, name):
102 | return getattr(self.multi_uploader.listener, name)
103 |
104 | async def on_upload_complete(self, link, files, folders, mime_type, dir_id=""):
105 | await self.multi_uploader.on_upload_complete(
106 | self.service, link, files, folders, mime_type, dir_id
107 | )
108 |
109 | async def on_upload_error(self, error):
110 | await self.multi_uploader.on_upload_error(self.service, error)
111 |
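
A minimal sketch of the proxy-listener fan-out pattern above: each service gets its own proxy that reports back to an aggregator, which fires a single completion callback once every upload has reported in. All names and URLs here are illustrative:

```python
import asyncio

class Aggregator:
    def __init__(self, total: int):
        self.total, self.results = total, {}

    async def on_done(self, service: str, link: str):
        # Fires the final callback only once every per-service proxy reported in.
        self.results[service] = link
        if len(self.results) == self.total:
            print("all uploads finished:", self.results)

async def fake_upload(agg: Aggregator, service: str):
    await asyncio.sleep(0.01)  # stand-in for the real upload
    await agg.on_done(service, f"https://{service}.example/file")

async def main():
    agg = Aggregator(total=2)
    await asyncio.gather(fake_upload(agg, "gofile"), fake_upload(agg, "pixeldrain"))

asyncio.run(main())
```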
--------------------------------------------------------------------------------
/bot/__main__.py:
--------------------------------------------------------------------------------
1 | # ruff: noqa: E402
2 |
3 | from .core.config_manager import Config
4 |
5 | Config.load()
6 |
7 | from datetime import datetime
8 | from logging import Formatter
9 | from time import localtime
10 |
11 | from pytz import timezone
12 |
13 | from . import LOGGER, bot_loop
14 | from .core.tg_client import TgClient
15 |
16 |
17 | async def main():
18 | from asyncio import gather
19 |
20 | from .core.startup import (
21 | load_configurations,
22 | load_settings,
23 | save_settings,
24 | update_aria2_options,
25 | update_nzb_options,
26 | update_qb_options,
27 | update_variables,
28 | )
29 |
30 | await load_settings()
31 |
32 | try:
33 | tz = timezone(Config.TIMEZONE)
34 | except Exception:
35 | from pytz import utc
36 |
37 | tz = utc
38 |
39 | def changetz(*args):
40 | try:
41 | return datetime.now(tz).timetuple()
42 | except Exception:
43 | return localtime()
44 |
45 | Formatter.converter = changetz
46 |
47 | await gather(
48 | TgClient.start_bot(), TgClient.start_user(), TgClient.start_helper_bots()
49 | )
50 | await gather(load_configurations(), update_variables())
51 |
52 | from .core.torrent_manager import TorrentManager
53 |
54 | await TorrentManager.initiate()
55 | await gather(
56 | update_qb_options(),
57 | update_aria2_options(),
58 | update_nzb_options(),
59 | )
60 | from .core.jdownloader_booter import jdownloader
61 | from .helper.ext_utils.files_utils import clean_all
62 | from .helper.ext_utils.telegraph_helper import telegraph
63 | from .helper.mirror_leech_utils.rclone_utils.serve import rclone_serve_booter
64 | from .modules import (
65 | get_packages_version,
66 | initiate_search_tools,
67 | restart_notification,
68 | )
69 |
70 | await gather(
71 | save_settings(),
72 | jdownloader.boot(),
73 | clean_all(),
74 | initiate_search_tools(),
75 | get_packages_version(),
76 | restart_notification(),
77 | telegraph.create_account(),
78 | rclone_serve_booter(),
79 | )
80 |
81 |
82 | bot_loop.run_until_complete(main())
83 |
84 | from .core.handlers import add_handlers
85 | from .helper.ext_utils.bot_utils import create_help_buttons
86 | from .helper.listeners.aria2_listener import add_aria2_callbacks
87 |
88 | add_aria2_callbacks()
89 | create_help_buttons()
90 | add_handlers()
91 |
92 | from .core.plugin_manager import get_plugin_manager
93 | from .modules.plugin_manager import register_plugin_commands
94 |
95 | plugin_manager = get_plugin_manager()
96 | plugin_manager.bot = TgClient.bot
97 | register_plugin_commands()
98 |
99 | from pyrogram.filters import regex
100 | from pyrogram.handlers import CallbackQueryHandler
101 |
102 | from .core.handlers import add_handlers
103 | from .helper.ext_utils.bot_utils import new_task
104 | from .helper.telegram_helper.filters import CustomFilters
105 | from .helper.telegram_helper.message_utils import (
106 | delete_message,
107 | edit_message,
108 | send_message,
109 | )
110 |
111 |
112 | @new_task
113 | async def restart_sessions_confirm(_, query):
114 | data = query.data.split()
115 | message = query.message
116 | if data[1] == "confirm":
117 | reply_to = message.reply_to_message
118 | restart_message = await send_message(reply_to, "Restarting Session(s)...")
119 | await delete_message(message)
120 | await TgClient.reload()
121 | add_handlers()
122 | TgClient.bot.add_handler(
123 | CallbackQueryHandler(
124 | restart_sessions_confirm,
125 | filters=regex("^sessionrestart") & CustomFilters.sudo,
126 | )
127 | )
128 | await edit_message(restart_message, "Session(s) Restarted Successfully!")
129 | else:
130 | await delete_message(message)
131 |
132 |
133 | TgClient.bot.add_handler(
134 | CallbackQueryHandler(
135 | restart_sessions_confirm,
136 | filters=regex("^sessionrestart") & CustomFilters.sudo,
137 | )
138 | )
139 |
140 | LOGGER.info("WZ Client(s) & Services Started!")
141 | bot_loop.run_forever()
142 |
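
A standalone sketch of the logging-timezone trick in `main()`: patching `Formatter.converter` makes every handler render timestamps in the chosen zone. The `*args` absorption matters because a plain Python function assigned to the class attribute gets bound as a method; the zone here is illustrative:

```python
import logging
from datetime import datetime
from logging import Formatter

from pytz import timezone  # already a dependency of the bot

tz = timezone("Asia/Kolkata")  # illustrative zone
Formatter.converter = lambda *args: datetime.now(tz).timetuple()
logging.basicConfig(format="[%(asctime)s] %(message)s", level=logging.INFO)
logging.info("timestamps now render in the configured zone")
```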
--------------------------------------------------------------------------------
/bot/modules/chat_permission.py:
--------------------------------------------------------------------------------
1 | from .. import user_data
2 | from ..helper.ext_utils.bot_utils import update_user_ldata, new_task
3 | from ..helper.ext_utils.db_handler import database
4 | from ..helper.telegram_helper.message_utils import send_message
5 |
6 |
7 | @new_task
8 | async def authorize(_, message):
9 | msg = message.text.split()
10 | thread_id = None
11 | if len(msg) > 1:
12 | if "|" in msg:
13 | chat_id, thread_id = list(map(int, msg[1].split("|")))
14 | else:
15 | chat_id = int(msg[1].strip())
16 | elif reply_to := message.reply_to_message:
17 | chat_id = (reply_to.from_user or reply_to.sender_chat).id
18 | else:
19 | if message.is_topic_message:
20 | thread_id = message.message_thread_id
21 | chat_id = message.chat.id
22 | if chat_id in user_data and user_data[chat_id].get("AUTH"):
23 | if (
24 | thread_id is not None
25 | and thread_id in user_data[chat_id].get("thread_ids", [])
26 | or thread_id is None
27 | ):
28 | msg = "Already Authorized!"
29 | else:
30 | if "thread_ids" in user_data[chat_id]:
31 | user_data[chat_id]["thread_ids"].append(thread_id)
32 | else:
33 | user_data[chat_id]["thread_ids"] = [thread_id]
34 | msg = "Authorized"
35 | else:
36 | update_user_ldata(chat_id, "AUTH", True)
37 | if thread_id is not None:
38 | update_user_ldata(chat_id, "thread_ids", [thread_id])
39 | await database.update_user_data(chat_id)
40 | msg = "Authorized"
41 | await send_message(message, msg)
42 |
43 |
44 | @new_task
45 | async def unauthorize(_, message):
46 | msg = message.text.split()
47 | thread_id = None
48 | if len(msg) > 1:
49 | if "|" in msg:
50 | chat_id, thread_id = list(map(int, msg[1].split("|")))
51 | else:
52 | chat_id = int(msg[1].strip())
53 | elif reply_to := message.reply_to_message:
54 | chat_id = (reply_to.from_user or reply_to.sender_chat).id
55 | else:
56 | if message.is_topic_message:
57 | thread_id = message.message_thread_id
58 | chat_id = message.chat.id
59 | if chat_id in user_data and user_data[chat_id].get("AUTH"):
60 | if thread_id is not None and thread_id in user_data[chat_id].get(
61 | "thread_ids", []
62 | ):
63 | user_data[chat_id]["thread_ids"].remove(thread_id)
64 | else:
65 | update_user_ldata(chat_id, "AUTH", False)
66 | await database.update_user_data(chat_id)
67 | msg = "Unauthorized"
68 | else:
69 | msg = "Already Unauthorized!"
70 | await send_message(message, msg)
71 |
72 |
73 | @new_task
74 | async def add_sudo(_, message):
75 | id_ = ""
76 | msg = message.text.split()
77 | if len(msg) > 1:
78 | id_ = int(msg[1].strip())
79 | elif reply_to := message.reply_to_message:
80 | id_ = (reply_to.from_user or reply_to.sender_chat).id
81 | if id_:
82 | if id_ in user_data and user_data[id_].get("SUDO"):
83 | msg = "Already Sudo!"
84 | else:
85 | update_user_ldata(id_, "SUDO", True)
86 | await database.update_user_data(id_)
87 | msg = "Promoted as Sudo"
88 | else:
89 | msg = "Give ID or Reply To message of whom you want to Promote."
90 | await send_message(message, msg)
91 |
92 |
93 | @new_task
94 | async def remove_sudo(_, message):
95 | id_ = ""
96 | msg = message.text.split()
97 | if len(msg) > 1:
98 | id_ = int(msg[1].strip())
99 | elif reply_to := message.reply_to_message:
100 | id_ = (reply_to.from_user or reply_to.sender_chat).id
101 | if id_:
102 | if id_ in user_data and user_data[id_].get("SUDO"):
103 | update_user_ldata(id_, "SUDO", False)
104 | await database.update_user_data(id_)
105 | msg = "Demoted"
106 | else:
107 | msg = "Already Not Sudo! Sudo users added from config must be removed from config."
108 | else:
109 | msg = "Give ID or Reply To message of whom you want to remove from Sudo"
110 | await send_message(message, msg)
111 |
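
A sketch of the `<chat_id>|<thread_id>` argument format accepted by `authorize`/`unauthorize` (after the fix above, the `|` check is done on the argument itself, not on the split word list). `parse_target` is a hypothetical helper:

```python
def parse_target(arg: str) -> tuple[int, int | None]:
    # "chat|thread" carries a topic thread id; a bare id means the whole chat.
    if "|" in arg:
        chat_id, thread_id = map(int, arg.split("|"))
        return chat_id, thread_id
    return int(arg), None

assert parse_target("-1001234|56") == (-1001234, 56)
assert parse_target("-1001234") == (-1001234, None)
```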
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/qbit_status.py:
--------------------------------------------------------------------------------
1 | from asyncio import sleep, gather
2 |
3 | from .... import LOGGER, qb_torrents, qb_listener_lock
4 | from ....core.torrent_manager import TorrentManager
5 | from ...ext_utils.status_utils import (
6 | MirrorStatus,
7 | EngineStatus,
8 | get_readable_file_size,
9 | get_readable_time,
10 | )
11 |
12 |
13 | async def get_download(tag, old_info=None):
14 | try:
15 | res = (await TorrentManager.qbittorrent.torrents.info(tag=tag))[0]
16 | return res or old_info
17 | except Exception as e:
18 | LOGGER.error(f"{e}: Qbittorrent, while getting torrent info. Tag: {tag}")
19 | return old_info
20 |
21 |
22 | class QbittorrentStatus:
23 | def __init__(self, listener, seeding=False, queued=False):
24 | self.queued = queued
25 | self.seeding = seeding
26 | self.listener = listener
27 | self._info = None
28 | self.engine = EngineStatus().STATUS_QBIT
29 |
30 | async def update(self):
31 | self._info = await get_download(f"{self.listener.mid}", self._info)
32 |
33 | def progress(self):
34 | return f"{round(self._info.progress * 100, 2)}%"
35 |
36 | def processed_bytes(self):
37 | return get_readable_file_size(self._info.downloaded)
38 |
39 | def speed(self):
40 | return f"{get_readable_file_size(self._info.dlspeed)}/s"
41 |
42 | def name(self):
43 | if self._info.state in ["metaDL", "checkingResumeData"]:
44 | return f"[METADATA]{self.listener.name}"
45 | else:
46 | return self.listener.name
47 |
48 | def size(self):
49 | return get_readable_file_size(self._info.size)
50 |
51 | def eta(self):
52 | return get_readable_time(self._info.eta.total_seconds())
53 |
54 | async def status(self):
55 | await self.update()
56 | state = self._info.state
57 | if state == "queuedDL" or self.queued:
58 | return MirrorStatus.STATUS_QUEUEDL
59 | elif state == "queuedUP":
60 | return MirrorStatus.STATUS_QUEUEUP
61 | elif state in ["stoppedDL", "stoppedUP"]:
62 | return MirrorStatus.STATUS_PAUSED
63 | elif state in ["checkingUP", "checkingDL"]:
64 | return MirrorStatus.STATUS_CHECK
65 | elif state in ["stalledUP", "uploading"] and self.seeding:
66 | return MirrorStatus.STATUS_SEED
67 | else:
68 | return MirrorStatus.STATUS_DOWNLOAD
69 |
70 | def seeders_num(self):
71 | return self._info.num_seeds
72 |
73 | def leechers_num(self):
74 | return self._info.num_leechs
75 |
76 | def uploaded_bytes(self):
77 | return get_readable_file_size(self._info.uploaded)
78 |
79 | def seed_speed(self):
80 | return f"{get_readable_file_size(self._info.upspeed)}/s"
81 |
82 | def ratio(self):
83 | return f"{round(self._info.ratio, 3)}"
84 |
85 | def seeding_time(self):
86 | return get_readable_time(int(self._info.seeding_time.total_seconds()))
87 |
88 | def task(self):
89 | return self
90 |
91 | def gid(self):
92 | return self.hash()[:12]
93 |
94 | def hash(self):
95 | return self._info.hash
96 |
97 | async def cancel_task(self):
98 | self.listener.is_cancelled = True
99 | await self.update()
100 | await TorrentManager.qbittorrent.torrents.stop([self._info.hash])
101 | if not self.seeding:
102 | if self.queued:
103 | LOGGER.info(f"Cancelling QueueDL: {self.name()}")
104 | msg = "task have been removed from queue/download"
105 | else:
106 | LOGGER.info(f"Cancelling Download: {self._info.name}")
107 | msg = "Stopped by user!"
108 | await sleep(0.3)
109 | await gather(
110 | self.listener.on_download_error(msg),
111 | TorrentManager.qbittorrent.torrents.delete([self._info.hash], True),
112 | TorrentManager.qbittorrent.torrents.delete_tags(
113 | tags=[self._info.tags[0]]
114 | ),
115 | )
116 | async with qb_listener_lock:
117 | if self._info.tags[0] in qb_torrents:
118 | del qb_torrents[self._info.tags[0]]
119 |
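
An illustrative distillation of the state mapping in `QbittorrentStatus.status()`; plain strings stand in for the `MirrorStatus` constants:

```python
def map_state(state: str, queued: bool = False, seeding: bool = False) -> str:
    # Same branching as QbittorrentStatus.status() above.
    if state == "queuedDL" or queued:
        return "QueueDL"
    if state == "queuedUP":
        return "QueueUP"
    if state in ("stoppedDL", "stoppedUP"):
        return "Paused"
    if state in ("checkingUP", "checkingDL"):
        return "Checking"
    if state in ("stalledUP", "uploading") and seeding:
        return "Seeding"
    return "Downloading"

assert map_state("stalledUP", seeding=True) == "Seeding"
assert map_state("stalledUP") == "Downloading"
```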
--------------------------------------------------------------------------------
/update.py:
--------------------------------------------------------------------------------
1 | from sys import exit
2 | from importlib import import_module
3 | from logging import (
4 | FileHandler,
5 | StreamHandler,
6 | INFO,
7 | basicConfig,
8 | error as log_error,
9 | info as log_info,
10 | getLogger,
11 | ERROR,
12 | )
13 | from os import path, remove, environ
14 | from pymongo.mongo_client import MongoClient
15 | from pymongo.server_api import ServerApi
16 | from subprocess import run as srun, call as scall
17 |
18 | getLogger("pymongo").setLevel(ERROR)
19 |
20 | var_list = [
21 | "BOT_TOKEN",
22 | "TELEGRAM_API",
23 | "TELEGRAM_HASH",
24 | "OWNER_ID",
25 | "DATABASE_URL",
26 | "BASE_URL",
27 | "UPSTREAM_REPO",
28 | "UPSTREAM_BRANCH",
29 | "UPDATE_PKGS",
30 | ]
31 |
32 | if path.exists("log.txt"):
33 | with open("log.txt", "r+") as f:
34 | f.truncate(0)
35 |
36 | if path.exists("rlog.txt"):
37 | remove("rlog.txt")
38 |
39 | basicConfig(
40 | format="[%(asctime)s] [%(levelname)s] - %(message)s",
41 | datefmt="%d-%b-%y %I:%M:%S %p",
42 | handlers=[FileHandler("log.txt"), StreamHandler()],
43 | level=INFO,
44 | )
45 | try:
46 | settings = import_module("config")
47 | config_file = {
48 | key: value.strip() if isinstance(value, str) else value
49 | for key, value in vars(settings).items()
50 | if not key.startswith("__")
51 | }
52 | except ModuleNotFoundError:
53 | log_info("Config.py file is not Added! Checking ENVs..")
54 | config_file = {}
55 |
56 | env_updates = {
57 | key: value.strip() if isinstance(value, str) else value
58 | for key, value in environ.items()
59 | if key in var_list
60 | }
61 | if env_updates:
62 | log_info("Config data is updated with ENVs!")
63 | config_file.update(env_updates)
64 |
65 | BOT_TOKEN = config_file.get("BOT_TOKEN", "")
66 | if not BOT_TOKEN:
67 | log_error("BOT_TOKEN variable is missing! Exiting now")
68 | exit(1)
69 |
70 | BOT_ID = BOT_TOKEN.split(":", 1)[0]
71 |
72 | if DATABASE_URL := config_file.get("DATABASE_URL", "").strip():
73 | try:
74 | conn = MongoClient(DATABASE_URL, server_api=ServerApi("1"))
75 | db = conn.wzmlx
76 | old_config = db.settings.deployConfig.find_one({"_id": BOT_ID}, {"_id": 0})
77 | config_dict = db.settings.config.find_one({"_id": BOT_ID})
78 | if (
79 | (old_config is None or old_config == config_file)
80 | ) and config_dict is not None:
81 | config_file["UPSTREAM_REPO"] = config_dict["UPSTREAM_REPO"]
82 | config_file["UPSTREAM_BRANCH"] = config_dict.get("UPSTREAM_BRANCH", "wzv3")
83 | config_file["UPDATE_PKGS"] = config_dict.get("UPDATE_PKGS", "True")
84 | conn.close()
85 | except Exception as e:
86 | log_error(f"Database ERROR: {e}")
87 |
88 | UPSTREAM_REPO = config_file.get("UPSTREAM_REPO", "").strip()
89 | UPSTREAM_BRANCH = config_file.get("UPSTREAM_BRANCH", "").strip() or "wzv3"
90 |
91 | if UPSTREAM_REPO:
92 | if path.exists(".git"):
93 | srun(["rm", "-rf", ".git"])
94 |
95 | update = srun(
96 | [
97 | f"git init -q \
98 | && git config --global user.email 105407900+SilentDemonSD@users.noreply.github.com \
99 | && git config --global user.name SilentDemonSD \
100 | && git add . \
101 | && git commit -sm update -q \
102 | && git remote add origin {UPSTREAM_REPO} \
103 | && git fetch origin -q \
104 | && git reset --hard origin/{UPSTREAM_BRANCH} -q"
105 | ],
106 | shell=True,
107 | )
108 |
109 | repo = UPSTREAM_REPO.split("/")
110 | UPSTREAM_REPO = f"https://github.com/{repo[-2]}/{repo[-1]}"
111 | if update.returncode == 0:
112 | log_info("Successfully updated with Latest Updates !")
113 | else:
114 | log_error("Something went Wrong ! Recheck your details or Ask Support !")
115 | log_info(f"UPSTREAM_REPO: {UPSTREAM_REPO} | UPSTREAM_BRANCH: {UPSTREAM_BRANCH}")
116 |
117 |
118 | UPDATE_PKGS = config_file.get("UPDATE_PKGS", "True")
119 | if str(UPDATE_PKGS).lower() == "true":
120 | scall("uv pip install -U -r requirements.txt", shell=True)
121 | log_info("Successfully updated all packages!")
122 |
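
A sketch of the config precedence implemented above: values from `config.py` are loaded first, then overridden by any matching environment variables. `merge_config` is a hypothetical helper:

```python
from os import environ

def merge_config(file_cfg: dict, allowed: list) -> dict:
    # config.py values first; matching ENVs win.
    cfg = dict(file_cfg)
    cfg.update({
        k: v.strip() if isinstance(v, str) else v
        for k, v in environ.items()
        if k in allowed
    })
    return cfg

cfg = merge_config({"UPSTREAM_BRANCH": "wzv3"}, ["UPSTREAM_BRANCH", "BOT_TOKEN"])
```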
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/jdownloader_status.py:
--------------------------------------------------------------------------------
1 | from time import time
2 |
3 | from .... import LOGGER, jd_listener_lock, jd_downloads
4 | from ....core.jdownloader_booter import jdownloader
5 | from ...ext_utils.status_utils import (
6 | MirrorStatus,
7 | EngineStatus,
8 | get_readable_file_size,
9 | get_readable_time,
10 | )
11 |
12 |
13 | def _get_combined_info(result, old_info):
14 | name = result[0].get("name")
15 | hosts = result[0].get("hosts")
16 | bytesLoaded = 0
17 | bytesTotal = 0
18 | speed = 0
19 | status = ""
20 | for res in result:
21 | if res.get("enabled"):
22 | st = res.get("status", "")
23 | if st and st.lower() != "finished":
24 | status = st
25 | bytesLoaded += res.get("bytesLoaded", 0)
26 | bytesTotal += res.get("bytesTotal", 0)
27 | speed += res.get("speed", 0)
28 | try:
29 | if not speed:
30 | speed = (bytesLoaded - old_info.get("bytesLoaded", 0)) / (
31 | time() - old_info.get("last_update", 0)
32 | )
33 | eta = (bytesTotal - bytesLoaded) / speed
34 | except Exception:
35 | eta = 0
36 | return {
37 | "name": name,
38 | "status": status,
39 | "speed": speed,
40 | "eta": eta,
41 | "hosts": hosts,
42 | "bytesLoaded": bytesLoaded,
43 | "bytesTotal": bytesTotal,
44 | "last_update": time(),
45 | }
46 |
47 |
48 | async def get_download(gid, old_info):
49 | try:
50 | result = await jdownloader.device.downloads.query_packages(
51 | [
52 | {
53 | "bytesLoaded": True,
54 | "bytesTotal": True,
55 | "enabled": True,
56 | "packageUUIDs": jd_downloads[gid]["ids"],
57 | "maxResults": -1,
58 | "running": True,
59 | "speed": True,
60 | "eta": True,
61 | "status": True,
62 | "hosts": True,
63 | }
64 | ]
65 | )
66 | return _get_combined_info(result, old_info) if len(result) > 1 else result[0]
67 | except Exception:
68 | return old_info
69 |
70 |
71 | class JDownloaderStatus:
72 | def __init__(self, listener, gid):
73 | self.listener = listener
74 | self._gid = gid
75 | self._info = {}
76 | self.engine = EngineStatus().STATUS_JD
77 |
78 | async def _update(self):
79 | self._info = await get_download(self._gid, self._info)
80 |
81 | def progress(self):
82 | try:
83 | return f"{round((self._info.get('bytesLoaded', 0) / self._info.get('bytesTotal', 0)) * 100, 2)}%"
84 | except ZeroDivisionError:
85 | return "0%"
86 |
87 | def processed_bytes(self):
88 | return get_readable_file_size(self._info.get("bytesLoaded", 0))
89 |
90 | def speed(self):
91 | return f"{get_readable_file_size(self._info.get('speed', 0))}/s"
92 |
93 | def name(self):
94 | return self._info.get("name") or self.listener.name
95 |
96 | def size(self):
97 | return get_readable_file_size(self._info.get("bytesTotal", 0))
98 |
99 | def eta(self):
100 | return get_readable_time(eta) if (eta := self._info.get("eta", False)) else "-"
101 |
102 | async def status(self):
103 | await self._update()
104 | state = self._info.get("status", "jdlimit").capitalize()
105 | if len(state) == 0:
106 | if self._info.get("bytesLoaded", 0) == 0:
107 | return MirrorStatus.STATUS_QUEUEDL
108 | else:
109 | return MirrorStatus.STATUS_DOWNLOAD
110 | return MirrorStatus.STATUS_QUEUEDL if state == "Jdlimit" else state
111 |
112 | def task(self):
113 | return self
114 |
115 | def gid(self):
116 | return self._gid
117 |
118 | async def cancel_task(self):
119 | self.listener.is_cancelled = True
120 | LOGGER.info(f"Cancelling Download: {self.name()}")
121 | await jdownloader.device.downloads.remove_links(
122 | package_ids=jd_downloads[self._gid]["ids"]
123 | )
124 | async with jd_listener_lock:
125 | del jd_downloads[self._gid]
126 | await self.listener.on_download_error("Cancelled by user!")
127 |
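
A worked example of the fallback speed/ETA math in `_get_combined_info()`: when the API reports no speed, it is derived from bytes gained since the previous poll. The byte counts are made up for illustration:

```python
from time import time

old = {"bytesLoaded": 1_000_000, "last_update": time() - 10}
loaded, total = 6_000_000, 16_000_000

# No speed reported by the API, so derive it from bytes gained since the
# last poll, then use it for the ETA, as _get_combined_info() does.
speed = (loaded - old["bytesLoaded"]) / (time() - old["last_update"])  # ~500 KB/s
eta = (total - loaded) / speed                                          # ~20 s
```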
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/tg_utils.py:
--------------------------------------------------------------------------------
1 | from time import time
2 | from uuid import uuid4
3 |
4 | from pyrogram.enums import ChatAction
5 | from pyrogram.errors import ChannelInvalid, PeerIdInvalid, RPCError, UserNotParticipant
6 |
7 | from ..ext_utils.links_utils import encode_slink
8 |
9 | from ... import LOGGER, user_data
10 | from ...core.config_manager import Config
11 | from ...core.tg_client import TgClient
12 | from ..ext_utils.shortener_utils import short_url
13 | from ..ext_utils.status_utils import get_readable_time
14 | from .button_build import ButtonMaker
15 |
16 |
17 | async def chat_info(channel_id):
18 | channel_id = str(channel_id).strip()
19 | if channel_id.startswith("-100"):
20 | channel_id = int(channel_id)
21 | elif channel_id.startswith("@"):
22 | channel_id = channel_id.replace("@", "")
23 | else:
24 | return None
25 | try:
26 | return await TgClient.bot.get_chat(channel_id)
27 | except (PeerIdInvalid, ChannelInvalid) as e:
28 | LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}")
29 | return None
30 |
31 |
32 | async def forcesub(message, ids, button=None):
33 | join_button = {}
34 | _msg = ""
35 | for channel_id in ids.split():
36 | chat = await chat_info(channel_id)
37 | try:
38 | await chat.get_member(message.from_user.id)
39 | except UserNotParticipant:
40 | if username := chat.username:
41 | invite_link = f"https://t.me/{username}"
42 | else:
43 | invite_link = chat.invite_link
44 | join_button[chat.title] = invite_link
45 | except RPCError as e:
46 | LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}")
47 | except Exception as e:
48 | LOGGER.error(f"{e} for {channel_id}")
49 | if join_button:
50 | if button is None:
51 | button = ButtonMaker()
52 | _msg = "┠ Channel(s) pending to be joined, Join Now!"
53 | for key, value in join_button.items():
54 | button.url_button(f"Join {key}", value, "footer")
55 | return _msg, button
56 |
57 |
58 | async def user_info(user_id):
59 | try:
60 | return await TgClient.bot.get_users(user_id)
61 | except Exception:
62 | return ""
63 |
64 |
65 | async def check_botpm(message, button=None):
66 | try:
67 | await TgClient.bot.send_chat_action(message.from_user.id, ChatAction.TYPING)
68 | return None, button
69 | except Exception:
70 | if button is None:
71 | button = ButtonMaker()
72 | _msg = "┠ Bot isn't Started in PM or Inbox (Private)"
73 | button.url_button(
74 | "Start Bot Now", f"https://t.me/{TgClient.BNAME}?start=start", "header"
75 | )
76 | return _msg, button
77 |
78 |
79 | async def verify_token(user_id, button=None):
80 | if not Config.VERIFY_TIMEOUT or bool(
81 | user_id == Config.OWNER_ID
82 | or user_id in user_data
83 | and user_data[user_id].get("is_sudo")
84 | ):
85 | return None, button
86 | user_data.setdefault(user_id, {})
87 | data = user_data[user_id]
88 | expire = data.get("VERIFY_TIME")
89 | if Config.LOGIN_PASS and data.get("VERIFY_TOKEN", "") == Config.LOGIN_PASS:
90 | return None, button
91 | isExpired = (
92 | expire is None
93 | or expire is not None
94 | and (time() - expire) > Config.VERIFY_TIMEOUT
95 | )
96 | if isExpired:
97 | token = (
98 | data["VERIFY_TOKEN"]
99 | if expire is None and "VERIFY_TOKEN" in data
100 | else str(uuid4())
101 | )
102 | if expire is not None:
103 | del data["VERIFY_TIME"]
104 | data["VERIFY_TOKEN"] = token
105 | user_data[user_id].update(data)
106 | if button is None:
107 | button = ButtonMaker()
108 | encrypt_url = encode_slink(f"{token}&&{user_id}")
109 | button.url_button(
110 | "Verify Access Token",
111 | await short_url(f"https://t.me/{TgClient.BNAME}?start={encrypt_url}"),
112 | )
113 | return (
114 | f"┠ Verify Access Token has been expired, Kindly validate a new access token to start using bot again.\n┃\n┖ Validity : {get_readable_time(Config.VERIFY_TIMEOUT)}",
115 | button,
116 | )
117 | return None, button
118 |
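
A distilled sketch of the expiry check in `verify_token()`: a token is stale when no timestamp was stored or the stored timestamp is older than `VERIFY_TIMEOUT`. The 21600-second timeout is illustrative:

```python
from time import time
from uuid import uuid4

def is_expired(expire, timeout: int) -> bool:
    # Stale when never stamped, or the stamp is older than the timeout.
    return expire is None or (time() - expire) > timeout

token = str(uuid4()) if is_expired(None, 21600) else "reuse-existing-token"
```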
--------------------------------------------------------------------------------
/bot/modules/nzb_search.py:
--------------------------------------------------------------------------------
1 | from xml.etree import ElementTree as ET
2 | from aiohttp import ClientSession
3 |
4 | from .. import LOGGER
5 | from ..core.config_manager import Config
6 | from ..helper.ext_utils.bot_utils import new_task
7 | from ..helper.ext_utils.status_utils import get_readable_file_size
8 | from ..helper.ext_utils.telegraph_helper import telegraph
9 | from ..helper.telegram_helper.button_build import ButtonMaker
10 | from ..helper.telegram_helper.message_utils import edit_message, send_message
11 |
12 |
13 | @new_task
14 | async def hydra_search(_, message):
15 | key = message.text.split()
16 | if len(key) == 1:
17 | await send_message(
18 | message,
19 | "Please provide a search query. Example: `/nzbsearch movie title`.",
20 | )
21 | return
22 |
23 | query = " ".join(key[1:]).strip()
24 | message = await send_message(message, f"Searching for '{query}'...")
25 | try:
26 | items = await search_nzbhydra(query)
27 | if not items:
28 | await edit_message(message, "No results found.")
29 | LOGGER.info(f"No results found for search query: {query}")
30 | return
31 |
32 | page_url = await create_telegraph_page(query, items)
33 | buttons = ButtonMaker()
34 | buttons.url_button("Results", page_url)
35 | button = buttons.build_menu()
36 | await edit_message(
37 | message,
38 | f"Search results for '{query}' are available here",
39 | button,
40 | )
41 | except Exception as e:
42 | LOGGER.error(f"Error in hydra_search: {e!s}")
43 | await edit_message(message, "Something went wrong.")
44 |
45 |
46 | async def search_nzbhydra(query, limit=50):
47 | search_url = f"{Config.HYDRA_IP}/api"
48 | params = {
49 | "apikey": Config.HYDRA_API_KEY,
50 | "t": "search",
51 | "q": query,
52 | "limit": limit,
53 | }
54 |
55 | headers = {
56 | "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3",
57 | }
58 |
59 | async with ClientSession() as session:
60 | try:
61 | async with session.get(
62 | search_url,
63 | params=params,
64 | headers=headers,
65 | ) as response:
66 | if response.status == 200:
67 | content = await response.text()
68 | root = ET.fromstring(content)
69 | return root.findall(".//item")
70 |
71 | LOGGER.error(
72 | f"Failed to search NZBHydra. Status Code: {response.status}",
73 | )
74 | LOGGER.error(f"Response Text: {await response.text()}")
75 | return None
76 | except ET.ParseError:
77 | LOGGER.error("Failed to parse the XML response.")
78 | return None
79 | except Exception as e:
80 | LOGGER.error(f"Error in search_nzbhydra: {e!s}")
81 | return None
82 |
83 |
84 | async def create_telegraph_page(query, items):
85 | content = "Search Results:
"
86 | sorted_items = sorted(
87 | [
88 | (
89 | int(item.find("size").text) if item.find("size") is not None else 0,
90 | item,
91 | )
92 | for item in items[:100]
93 | ],
94 | reverse=True,
95 | key=lambda x: x[0],
96 | )
97 |
98 | for idx, (size_bytes, item) in enumerate(sorted_items, 1):
99 | title = (
100 | item.find("title").text
101 | if item.find("title") is not None
102 | else "No Title Available"
103 | )
104 | download_url = (
105 | item.find("link").text
106 | if item.find("link") is not None
107 | else "No Link Available"
108 | )
109 | size = get_readable_file_size(size_bytes)
110 |
111 | content += (
112 | f"{idx}. {title}<br>"
113 | f"<a href='{download_url}'>Download URL</a> | <a href='https://t.me/share/url?url={download_url}'>Share Download URL</a><br>"
114 | f"Size: {size}<br>"
115 | "━━━━━━━━━━━━━━━━━━━━━━<br>"
116 | )
117 |
118 | response = await telegraph.create_page(
119 | title=f"Search Results for '{query}'",
120 | content=content,
121 | )
122 | LOGGER.info(f"Telegraph page created for search: {query}")
123 | return f"https://telegra.ph/{response['path']}"
124 |
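
A worked example of the size-descending sort in `create_telegraph_page()`: `(size, item)` pairs are built with a 0 fallback for items missing a `<size>` tag, then sorted on the numeric size. The dicts stand in for XML elements:

```python
items = [{"size": "700"}, {"size": None}, {"size": "1500"}]
pairs = sorted(
    ((int(i["size"]) if i["size"] else 0, i) for i in items),
    key=lambda x: x[0],
    reverse=True,
)
assert [p[0] for p in pairs] == [1500, 700, 0]
```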
--------------------------------------------------------------------------------
/bot/core/tg_client.py:
--------------------------------------------------------------------------------
1 | from pyrogram import Client, enums
2 | from asyncio import Lock, gather
3 | from inspect import signature
4 |
5 | from .. import LOGGER
6 | from .config_manager import Config
7 |
8 |
9 | class TgClient:
10 | _lock = Lock()
11 | _hlock = Lock()
12 |
13 | bot = None
14 | user = None
15 | helper_bots = {}
16 | helper_loads = {}
17 |
18 | BNAME = ""
19 | ID = 0
20 | IS_PREMIUM_USER = False
21 | MAX_SPLIT_SIZE = 2097152000
22 |
23 | @classmethod
24 | def wztgClient(cls, *args, **kwargs):
25 | kwargs["api_id"] = Config.TELEGRAM_API
26 | kwargs["api_hash"] = Config.TELEGRAM_HASH
27 | kwargs["proxy"] = Config.TG_PROXY
28 | kwargs["parse_mode"] = enums.ParseMode.HTML
29 | kwargs["in_memory"] = True
30 | for param, value in {
31 | "max_concurrent_transmissions": 100,
32 | "skip_updates": False,
33 | }.items():
34 | if param in signature(Client.__init__).parameters:
35 | kwargs[param] = value
36 | return Client(*args, **kwargs)
37 |
38 | @classmethod
39 | async def start_hclient(cls, no, b_token):
40 | try:
41 | hbot = await cls.wztgClient(
42 | f"WZ-HBot{no}",
43 | bot_token=b_token,
44 | no_updates=True,
45 | ).start()
46 | LOGGER.info(f"Helper Bot [@{hbot.me.username}] Started!")
47 | cls.helper_bots[no], cls.helper_loads[no] = hbot, 0
48 | except Exception as e:
49 | LOGGER.error(f"Failed to start helper bot {no} from HELPER_TOKENS. {e}")
50 | cls.helper_bots.pop(no, None)
51 |
52 | @classmethod
53 | async def start_helper_bots(cls):
54 | if not Config.HELPER_TOKENS:
55 | return
56 | LOGGER.info("Generating helper client from HELPER_TOKENS")
57 | async with cls._hlock:
58 | await gather(
59 | *(
60 | cls.start_hclient(no, b_token)
61 | for no, b_token in enumerate(Config.HELPER_TOKENS.split(), start=1)
62 | )
63 | )
64 |
65 | @classmethod
66 | async def start_bot(cls):
67 | LOGGER.info("Generating client from BOT_TOKEN")
68 | cls.ID = Config.BOT_TOKEN.split(":", 1)[0]
69 | cls.bot = cls.wztgClient(
70 | f"WZ-Bot{cls.ID}",
71 | bot_token=Config.BOT_TOKEN,
72 | workdir="/usr/src/app",
73 | )
74 | await cls.bot.start()
75 | cls.BNAME = cls.bot.me.username
76 | cls.ID = Config.BOT_TOKEN.split(":", 1)[0]
77 | LOGGER.info(f"WZ Bot : [@{cls.BNAME}] Started!")
78 |
79 | @classmethod
80 | async def start_user(cls):
81 | if Config.USER_SESSION_STRING:
82 | LOGGER.info("Generating client from USER_SESSION_STRING")
83 | try:
84 | cls.user = cls.wztgClient(
85 | "WZ-User",
86 | session_string=Config.USER_SESSION_STRING,
87 | sleep_threshold=60,
88 | no_updates=True,
89 | )
90 | await cls.user.start()
91 | cls.IS_PREMIUM_USER = cls.user.me.is_premium
92 | if cls.IS_PREMIUM_USER:
93 | cls.MAX_SPLIT_SIZE = 4194304000
94 | uname = cls.user.me.username or cls.user.me.first_name
95 | LOGGER.info(f"WZ User : [{uname}] Started!")
96 | except Exception as e:
97 | LOGGER.error(f"Failed to start client from USER_SESSION_STRING. {e}")
98 | cls.IS_PREMIUM_USER = False
99 | cls.user = None
100 |
101 | @classmethod
102 | async def stop(cls):
103 | async with cls._lock:
104 | if cls.bot:
105 | await cls.bot.stop()
106 | cls.bot = None
107 | if cls.user:
108 | await cls.user.stop()
109 | cls.user = None
110 | if cls.helper_bots:
111 | await gather(*[h_bot.stop() for h_bot in cls.helper_bots.values()])
112 | cls.helper_bots = {}
113 | LOGGER.info("All Client(s) stopped")
114 |
115 | @classmethod
116 | async def reload(cls):
117 | async with cls._lock:
118 | await cls.bot.restart()
119 | if cls.user:
120 | await cls.user.restart()
121 | if cls.helper_bots:
122 | await gather(*[h_bot.restart() for h_bot in cls.helper_bots.values()])
123 | LOGGER.info("All Client(s) restarted")
124 |
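
A sketch of the signature-probe technique in `wztgClient()`: optional kwargs are passed only when the installed Pyrogram build's `__init__` actually declares them, so the code survives API differences between versions. `connect`/`supported_kwargs` are illustrative stand-ins:

```python
from inspect import signature

def supported_kwargs(func, candidates: dict) -> dict:
    # Keep only kwargs the callable actually declares.
    params = signature(func).parameters
    return {k: v for k, v in candidates.items() if k in params}

def connect(host: str, timeout: int = 10):  # stand-in for Client.__init__
    pass

print(supported_kwargs(connect, {"timeout": 5, "retries": 3}))  # {'timeout': 5}
```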
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/download_utils/aria2_download.py:
--------------------------------------------------------------------------------
1 | from aiofiles.os import remove, path as aiopath
2 | from aiofiles import open as aiopen
3 | from base64 import b64encode
4 | from aiohttp.client_exceptions import ClientError
5 | from asyncio import TimeoutError
6 |
7 | from .... import task_dict_lock, task_dict, LOGGER
8 | from ....core.config_manager import Config
9 | from ....core.torrent_manager import TorrentManager, is_metadata, aria2_name
10 | from ...ext_utils.bot_utils import bt_selection_buttons
11 | from ...ext_utils.task_manager import check_running_tasks
12 | from ...mirror_leech_utils.status_utils.aria2_status import Aria2Status
13 | from ...telegram_helper.message_utils import send_status_message, send_message
14 |
15 |
16 | async def add_aria2_download(listener, dpath, header, ratio, seed_time):
17 | if Config.DISABLE_TORRENTS and (
18 | listener.link.startswith("magnet:") or listener.link.endswith(".torrent")
19 | ):
20 | await listener.on_download_error("Torrent and magnet downloads are disabled.")
21 | return
22 | a2c_opt = {"dir": dpath}
23 | if listener.name:
24 | a2c_opt["out"] = listener.name
25 | if header:
26 | a2c_opt["header"] = header
27 | if ratio:
28 | a2c_opt["seed-ratio"] = ratio
29 | if seed_time:
30 | a2c_opt["seed-time"] = seed_time
31 | if TORRENT_TIMEOUT := Config.TORRENT_TIMEOUT:
32 | a2c_opt["bt-stop-timeout"] = f"{TORRENT_TIMEOUT}"
33 |
34 | add_to_queue, event = await check_running_tasks(listener)
35 | if add_to_queue:
36 | if listener.link.startswith("magnet:"):
37 | a2c_opt["pause-metadata"] = "true"
38 | else:
39 | a2c_opt["pause"] = "true"
40 |
41 | try:
42 | if await aiopath.exists(listener.link):
43 | async with aiopen(listener.link, "rb") as tf:
44 | torrent = await tf.read()
45 | encoded = b64encode(torrent).decode()
46 | params = [encoded, [], a2c_opt]
47 | gid = await TorrentManager.aria2.jsonrpc("addTorrent", params)
48 | """gid = await TorrentManager.aria2.add_torrent(path=listener.link, options=a2c_opt)"""
49 | else:
50 | gid = await TorrentManager.aria2.addUri(
51 | uris=[listener.link], options=a2c_opt
52 | )
53 | except Exception as e:  # also covers TimeoutError and ClientError
54 | LOGGER.info(f"Aria2c Download Error: {e}")
55 | await listener.on_download_error(f"{e}")
56 | return
57 | download = await TorrentManager.aria2.tellStatus(gid)
58 | if download.get("errorMessage"):
59 | error = str(download["errorMessage"]).replace("<", " ").replace(">", " ")
60 | LOGGER.info(f"Aria2c Download Error: {error}")
61 | await TorrentManager.aria2_remove(download)
62 | await listener.on_download_error(error)
63 | return
64 | if await aiopath.exists(listener.link):
65 | await remove(listener.link)
66 |
67 | name = aria2_name(download)
68 | async with task_dict_lock:
69 | task_dict[listener.mid] = Aria2Status(listener, gid, queued=add_to_queue)
70 | if add_to_queue:
71 | LOGGER.info(f"Added to Queue/Download: {name}. Gid: {gid}")
72 | if (
73 | not listener.select or "bittorrent" not in download
74 | ) and listener.multi <= 1:
75 | await send_status_message(listener.message)
76 | else:
77 | LOGGER.info(f"Aria2Download started: {name}. Gid: {gid}")
78 |
79 | await listener.on_download_start()
80 |
81 | if (
82 | not add_to_queue
83 | and (not listener.select or not Config.BASE_URL)
84 | and listener.multi <= 1
85 | ):
86 | await send_status_message(listener.message)
87 | elif listener.select and "bittorrent" in download and not is_metadata(download):
88 | if not add_to_queue:
89 | await TorrentManager.aria2.forcePause(gid)
90 | SBUTTONS = bt_selection_buttons(gid)
91 | msg = "Your download paused. Choose files then press Done Selecting button to start downloading."
92 | await send_message(listener.message, msg, SBUTTONS)
93 |
94 | if add_to_queue:
95 | await event.wait()
96 | if listener.is_cancelled:
97 | return
98 | async with task_dict_lock:
99 | task = task_dict[listener.mid]
100 | task.queued = False
101 | await task.update()
102 | new_gid = task.gid()
103 |
104 | await TorrentManager.aria2.unpause(new_gid)
105 | LOGGER.info(f"Start Queued Download from Aria2c: {name}. Gid: {new_gid}")
106 |
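
A sketch of how a local `.torrent` is fed to aria2's JSON-RPC `addTorrent` above: the raw file bytes are base64-encoded and sent as the first positional parameter, followed by a (here empty) webseed URI list and the options dict. The torrent bytes and paths are illustrative:

```python
from base64 import b64encode

torrent_bytes = b"d8:announce...e"  # raw .torrent file contents (illustrative)
encoded = b64encode(torrent_bytes).decode()
# aria2 addTorrent positional params: [torrent, uris, options]
params = [encoded, [], {"dir": "/downloads"}]
```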
--------------------------------------------------------------------------------
/bot/helper/listeners/nzb_listener.py:
--------------------------------------------------------------------------------
1 | from asyncio import sleep, gather
2 |
3 | from ... import (
4 | intervals,
5 | sabnzbd_client,
6 | nzb_jobs,
7 | nzb_listener_lock,
8 | LOGGER,
9 | )
10 | from ..ext_utils.bot_utils import new_task
11 | from ..ext_utils.status_utils import get_task_by_gid, get_raw_file_size
12 | from ..ext_utils.task_manager import stop_duplicate_check, limit_checker
13 |
14 |
15 | async def _remove_job(nzo_id, mid):
16 | res1, _ = await gather(
17 | sabnzbd_client.delete_history(nzo_id, delete_files=True),
18 | sabnzbd_client.delete_category(f"{mid}"),
19 | )
20 | if not res1:
21 | await sabnzbd_client.delete_job(nzo_id, True)
22 | async with nzb_listener_lock:
23 | if nzo_id in nzb_jobs:
24 | del nzb_jobs[nzo_id]
25 |
26 |
27 | @new_task
28 | async def _on_download_error(err, nzo_id, button=None, is_limit=False):
29 | if task := await get_task_by_gid(nzo_id):
30 | LOGGER.info(f"Cancelling Download: {task.name()}")
31 | await gather(
32 | task.listener.on_download_error(err, button, is_limit),
33 | _remove_job(nzo_id, task.listener.mid),
34 | )
35 |
36 |
37 | @new_task
38 | async def _stop_duplicate(nzo_id):
39 | if task := await get_task_by_gid(nzo_id):
40 | await task.update()
41 | task.listener.name = task.name()
42 | msg, button = await stop_duplicate_check(task.listener)
43 | if msg:
44 | await _on_download_error(msg, nzo_id, button)
45 |
46 |
47 | @new_task
48 | async def _size_check(nzo_id):
49 | if task := await get_task_by_gid(nzo_id):
50 | await task.update()
51 | task.listener.size = get_raw_file_size(task.size())
52 | mmsg = await limit_checker(task.listener)
53 | if mmsg:
54 | await _on_download_error(mmsg, nzo_id, is_limit=True)
55 |
56 |
57 | @new_task
58 | async def _on_download_complete(nzo_id):
59 | if task := await get_task_by_gid(nzo_id):
60 | await task.listener.on_download_complete()
61 | if intervals["stopAll"]:
62 | return
63 | await _remove_job(nzo_id, task.listener.mid)
64 |
65 |
66 | @new_task
67 | async def _nzb_listener():
68 | while not intervals["stopAll"]:
69 | async with nzb_listener_lock:
70 | try:
71 | jobs = (await sabnzbd_client.get_history())["history"]["slots"]
72 | downloads = (await sabnzbd_client.get_downloads())["queue"]["slots"]
73 | if len(nzb_jobs) == 0:
74 | intervals["nzb"] = ""
75 | break
76 | for job in jobs:
77 | nzo_id = job["nzo_id"]
78 | if nzo_id not in nzb_jobs:
79 | continue
80 | if job["status"] == "Completed":
81 | if not nzb_jobs[nzo_id]["uploaded"]:
82 | nzb_jobs[nzo_id]["uploaded"] = True
83 | await _on_download_complete(nzo_id)
84 | nzb_jobs[nzo_id]["status"] = "Completed"
85 | elif job["status"] == "Failed":
86 | await _on_download_error(job["fail_message"], nzo_id)
87 | for dl in downloads:
88 | nzo_id = dl["nzo_id"]
89 | if nzo_id not in nzb_jobs:
90 | continue
91 | if dl["labels"] and dl["labels"][0] == "ALTERNATIVE":
92 | await _on_download_error("Duplicated Job!", nzo_id)
93 | continue
94 | if dl["status"] == "Downloading" and not dl["filename"].startswith(
95 | "Trying"
96 | ):
97 | if not nzb_jobs[nzo_id]["stop_dup_check"]:
98 | nzb_jobs[nzo_id]["stop_dup_check"] = True
99 | await _stop_duplicate(nzo_id)
100 | if not nzb_jobs[nzo_id]["size_check"]:
101 | nzb_jobs[nzo_id]["size_check"] = True
102 | await _size_check(nzo_id)
103 | except Exception as e:
104 | LOGGER.error(str(e))
105 | await sleep(3)
106 |
107 |
108 | async def on_download_start(nzo_id):
109 | async with nzb_listener_lock:
110 | nzb_jobs[nzo_id] = {
111 | "uploaded": False,
112 | "stop_dup_check": False,
113 | "size_check": False,
114 | "status": "Downloading",
115 | }
116 | if not intervals["nzb"]:
117 | intervals["nzb"] = await _nzb_listener()
118 |
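
A minimal sketch of the poll-until-empty pattern used by `_nzb_listener()`: a single background task polls while jobs exist and exits once the job table empties; a much shorter poll interval is used here so the example finishes quickly:

```python
import asyncio

jobs = {"job1": "Downloading"}

async def listener(poll: float = 0.01):
    # Poll the job table, dropping finished jobs, until none remain.
    while jobs:
        for jid, status in list(jobs.items()):
            if status == "Completed":
                jobs.pop(jid)
        await asyncio.sleep(poll)

async def main():
    task = asyncio.create_task(listener())
    await asyncio.sleep(0.02)
    jobs["job1"] = "Completed"  # simulate the download finishing
    await task                  # listener drains the table and exits

asyncio.run(main())
```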
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/download_utils/direct_link_generator_license.md:
--------------------------------------------------------------------------------
1 | RAPHIELSCAPE PUBLIC LICENSE
2 | Version 1.c, June 2019
3 |
4 | Copyright (C) 2019 Raphielscape LLC.
5 | Copyright (C) 2019 Devscapes Open Source Holding GmbH.
6 |
7 | Everyone is permitted to copy and distribute verbatim or modified
8 | copies of this license document, and changing it is allowed as long
9 | as the name is changed.
10 |
11 | RAPHIELSCAPE PUBLIC LICENSE
12 | A-1. DEFINITIONS
13 |
14 | 0. “This License” refers to version 1.c of the Raphielscape Public License.
15 |
16 | 1. “Copyright” also means copyright-like laws that apply to other kinds of works.
17 |
18 | 2. “The Work" refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”.
19 | “Licensees” and “recipients” may be individuals or organizations.
20 |
21 | 3. To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission,
22 | other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work
23 | or a work “based on” the earlier work.
24 |
25 | 4. Source Form. The “source form” for a work means the preferred form of the work for making modifications to it.
26 | “Object code” means any non-source form of a work.
27 |
28 | The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and
29 | (for an executable work) run the object code and to modify the work, including scripts to control those activities.
30 |
31 | The Corresponding Source need not include anything that users can regenerate automatically from other parts of the
32 | Corresponding Source.
33 | The Corresponding Source for a work in source code form is that same work.
34 |
35 | 5. "The author" refers to "author" of the code, which is the one that made the particular code which exists inside of
36 | the Corresponding Source.
37 |
38 | 6. "Owner" refers to any parties which is made the early form of the Corresponding Source.
39 |
40 | A-2. TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
41 |
42 | 0. You must give any other recipients of the Work or Derivative Works a copy of this License; and
43 |
44 | 1. You must cause any modified files to carry prominent notices stating that You changed the files; and
45 |
46 | 2. You must retain, in the Source form of any Derivative Works that You distribute,
47 | this license, all copyright, patent, trademark, authorships and attribution notices
48 | from the Source form of the Work; and
49 |
50 | 3. Respecting the author and owner of works that are distributed in any way.
51 |
52 | You may add Your own copyright statement to Your modifications and may provide
53 | additional or different license terms and conditions for use, reproduction,
54 | or distribution of Your modifications, or for any such Derivative Works as a whole,
55 | provided Your use, reproduction, and distribution of the Work otherwise complies
56 | with the conditions stated in this License.
57 |
58 | B. DISCLAIMER OF WARRANTY
59 |
60 | THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR
61 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
62 | FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS
63 | BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
64 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
65 | OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
66 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
67 | OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
68 |
69 | C. REVISED VERSION OF THIS LICENSE
70 |
71 | The Devscapes Open Source Holding GmbH. may publish revised and/or new versions of the
72 | Raphielscape Public License from time to time. Such new versions will be similar in spirit
73 | to the present version, but may differ in detail to address new problems or concerns.
74 |
75 | Each version is given a distinguishing version number. If the Program specifies that a
76 | certain numbered version of the Raphielscape Public License "or any later version" applies to it,
77 | you have the option of following the terms and conditions either of that numbered version or of
78 | any later version published by the Devscapes Open Source Holding GmbH. If the Program does not specify a
79 | version number of the Raphielscape Public License, you may choose any version ever published
80 | by the Devscapes Open Source Holding GmbH.
81 |
82 | END OF LICENSE
83 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/aria2_status.py:
--------------------------------------------------------------------------------
1 | from time import time
2 |
3 | from .... import LOGGER
4 | from ....core.torrent_manager import TorrentManager, aria2_name
5 | from ...ext_utils.status_utils import (
6 | EngineStatus,
7 | MirrorStatus,
8 | get_readable_time,
9 | get_readable_file_size,
10 | )
11 |
12 |
13 | async def get_download(gid, old_info=None):
14 | try:
15 | res = await TorrentManager.aria2.tellStatus(gid)
16 | return res or old_info
17 | except Exception as e:
18 | LOGGER.error(f"Aria2c: error while getting download info: {e}")
19 | return old_info
20 |
21 |
22 | class Aria2Status:
23 | def __init__(self, listener, gid, seeding=False, queued=False):
24 | self._gid = gid
25 | self._download = {}
26 | self.listener = listener
27 | self.queued = queued
28 | self.start_time = 0
29 | self.seeding = seeding
30 | self.engine = EngineStatus().STATUS_ARIA2
31 |
32 | async def update(self):
33 | self._download = await get_download(self._gid, self._download)
34 | if self._download.get("followedBy", []):
35 | self._gid = self._download["followedBy"][0]
36 | self._download = await get_download(self._gid)
37 |
38 | def progress(self):
39 | try:
40 | return f"{round(int(self._download.get("completedLength", "0")) / int(self._download.get("totalLength", "0")) * 100, 2)}%"
41 | except ZeroDivisionError:
42 | return "0%"
43 |
44 | def processed_bytes(self):
45 | return get_readable_file_size(int(self._download.get("completedLength", "0")))
46 |
47 | def speed(self):
48 | return (
49 | f"{get_readable_file_size(int(self._download.get("downloadSpeed", "0")))}/s"
50 | )
51 |
52 | def name(self):
53 | return aria2_name(self._download)
54 |
55 | def size(self):
56 | return get_readable_file_size(int(self._download.get("totalLength", "0")))
57 |
58 | def eta(self):
59 | try:
60 | return get_readable_time(
61 | (
62 | int(self._download.get("totalLength", "0"))
63 | - int(self._download.get("completedLength", "0"))
64 | )
65 | / int(self._download.get("downloadSpeed", "0"))
66 | )
67 | except ZeroDivisionError:
68 | return "-"
69 |
70 | async def status(self):
71 | await self.update()
72 | if self._download.get("status", "") == "waiting" or self.queued:
73 | if self.seeding:
74 | return MirrorStatus.STATUS_QUEUEUP
75 | else:
76 | return MirrorStatus.STATUS_QUEUEDL
77 | elif self._download.get("status", "") == "paused":
78 | return MirrorStatus.STATUS_PAUSED
79 | elif self._download.get("seeder", "") == "true" and self.seeding:
80 | return MirrorStatus.STATUS_SEED
81 | else:
82 | return MirrorStatus.STATUS_DOWNLOAD
83 |
84 | def seeders_num(self):
85 | return self._download.get("numSeeders", 0)
86 |
87 | def leechers_num(self):
88 | return self._download.get("connections", 0)
89 |
90 | def uploaded_bytes(self):
91 | return get_readable_file_size(int(self._download.get("uploadLength", "0")))
92 |
93 | def seed_speed(self):
94 | return (
95 | f"{get_readable_file_size(int(self._download.get("uploadSpeed", "0")))}/s"
96 | )
97 |
98 | def ratio(self):
99 | try:
100 | return round(
101 | int(self._download.get("uploadLength", "0"))
102 | / int(self._download.get("completedLength", "0")),
103 | 3,
104 | )
105 | except ZeroDivisionError:
106 | return 0
107 |
108 | def seeding_time(self):
109 | return get_readable_time(time() - self.start_time)
110 |
111 | def task(self):
112 | return self
113 |
114 | def gid(self):
115 | return self._gid
116 |
117 | async def cancel_task(self):
118 | self.listener.is_cancelled = True
119 | await self.update()
120 | await TorrentManager.aria2_remove(self._download)
121 | if self._download.get("seeder", "") == "true" and self.seeding:
122 | LOGGER.info(f"Cancelling Seed: {self.name()}")
123 | await self.listener.on_upload_error(
124 | f"Seeding stopped with Ratio: {self.ratio()} and Time: {self.seeding_time()}"
125 | )
126 | else:
127 | if self.queued:
128 | LOGGER.info(f"Cancelling QueueDl: {self.name()}")
129 | msg = "Task has been removed from queue/download"
130 | else:
131 | LOGGER.info(f"Cancelling Download: {self.name()}")
132 | msg = "Stopped by user!"
133 | await self.listener.on_download_error(msg)
134 |
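A quick standalone sketch of the arithmetic above: Aria2Status reads completedLength, totalLength, and downloadSpeed straight out of the tellStatus dict, so the progress and ETA math can be reproduced with a hypothetical sample response (the values below are made up):

# Hypothetical tellStatus-style response; aria2 reports these fields as strings.
sample = {"completedLength": "52428800", "totalLength": "104857600", "downloadSpeed": "1048576"}

completed = int(sample["completedLength"])
total = int(sample["totalLength"])
speed = int(sample["downloadSpeed"])

progress = round(completed / total * 100, 2) if total else 0  # -> 50.0
eta_seconds = (total - completed) / speed if speed else 0     # -> 50.0
print(f"{progress}% done, ETA {eta_seconds:.0f}s")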
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/youtube_utils/youtube_helper.py:
--------------------------------------------------------------------------------
1 | from logging import ERROR, getLogger
2 | from os import path as ospath
3 | from pickle import load as pload
4 | from urllib.parse import parse_qs, urlparse
5 |
6 | from google_auth_httplib2 import AuthorizedHttp
7 | from googleapiclient.discovery import build
8 | from googleapiclient.http import build_http
9 | from tenacity import (
10 | retry,
11 | retry_if_exception_type,
12 | stop_after_attempt,
13 | wait_exponential,
14 | )
15 |
16 | LOGGER = getLogger(__name__)
17 | getLogger("googleapiclient.discovery").setLevel(ERROR)
18 |
19 |
20 | class YouTubeHelper:
21 | def __init__(self):
22 | self._OAUTH_SCOPE = [
23 | "https://www.googleapis.com/auth/youtube.upload",
24 | "https://www.googleapis.com/auth/youtube",
25 | ]
26 | self.token_path = "token.pickle"
27 | self.is_uploading = False
28 | self.service = None
29 | self.total_files = 0
30 | self.file_processed_bytes = 0
31 | self.proc_bytes = 0
32 | self.total_time = 0
33 | self.status = None
34 | self.update_interval = 3
35 | self.upload_progress = 0
36 |
37 | @property
38 | def speed(self):
39 | try:
40 | return self.proc_bytes / self.total_time
41 | except Exception:
42 | return 0
43 |
44 | @property
45 | def processed_bytes(self):
46 | return self.proc_bytes
47 |
48 | async def progress(self):
49 | if self.status is not None:
50 | if hasattr(self.status, "total_size") and hasattr(self.status, "progress"):
51 | chunk_size = (
52 | self.status.total_size * self.status.progress()
53 | - self.file_processed_bytes
54 | )
55 | self.file_processed_bytes = (
56 | self.status.total_size * self.status.progress()
57 | )
58 | self.proc_bytes += chunk_size
59 | self.total_time += self.update_interval
60 | else:
61 | # No byte-level progress info is exposed here; just advance the elapsed-time clock
62 | self.total_time += self.update_interval
63 |
64 | def authorize(self, user_id=""):
65 | credentials = None
66 | token_path = self.token_path
67 |
68 | if user_id:
69 | token_path = f"tokens/{user_id}.pickle"
70 |
71 | if ospath.exists(token_path):
72 | LOGGER.info(f"Authorize YouTube with {token_path}")
73 | with open(token_path, "rb") as f:
74 | credentials = pload(f)
75 | else:
76 | LOGGER.error(f"YouTube token file {token_path} not found!")
77 | raise FileNotFoundError(f"YouTube token file {token_path} not found!")
78 |
79 | authorized_http = AuthorizedHttp(credentials, http=build_http())
80 | authorized_http.http.disable_ssl_certificate_validation = True
81 | return build("youtube", "v3", http=authorized_http, cache_discovery=False)
82 |
83 | def get_video_id_from_url(self, url):
84 | """Extract video ID from YouTube URL"""
85 | if "youtube.com/watch?v=" in url:
86 | parsed = urlparse(url)
87 | return parse_qs(parsed.query)["v"][0]
88 | if "youtu.be/" in url:
89 | return url.split("youtu.be/")[1].split("?")[0]
90 | return url # Assume it's already a video ID
91 |
92 | @retry(
93 | wait=wait_exponential(multiplier=2, min=3, max=6),
94 | stop=stop_after_attempt(3),
95 | retry=retry_if_exception_type(Exception),
96 | )
97 | def get_video_info(self, video_id):
98 | """Get video information"""
99 | return (
100 | self.service.videos()
101 | .list(part="snippet,statistics,status", id=video_id)
102 | .execute()
103 | )
104 |
105 | @retry(
106 | wait=wait_exponential(multiplier=2, min=3, max=6),
107 | stop=stop_after_attempt(3),
108 | retry=retry_if_exception_type(Exception),
109 | )
110 | def get_channel_info(self):
111 | """Get channel information"""
112 | return (
113 | self.service.channels().list(part="snippet,statistics", mine=True).execute()
114 | )
115 |
116 | def escapes(self, estr):
117 | """Escape special characters in strings"""
118 | chars = ["\\", "'", '"', r"\a", r"\b", r"\f", r"\n", r"\r", r"\t"]
119 | for char in chars:
120 | estr = estr.replace(char, f"\\{char}")
121 | return estr.strip()
122 |
123 | async def cancel_task(self):
124 | """Cancel the current upload task"""
125 | self.listener.is_cancelled = True  # listener is expected to be set by the uploader subclass
126 | if self.is_uploading:
127 | LOGGER.info(f"Cancelling YouTube Upload: {self.listener.name}")
128 | await self.listener.on_upload_error(
129 | "Your YouTube upload has been cancelled!"
130 | )
131 |
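get_video_id_from_url is a pure function, so its URL handling can be exercised standalone; a minimal sketch with sample URLs (the IDs are arbitrary):

from urllib.parse import parse_qs, urlparse

def video_id(url: str) -> str:
    # Mirrors get_video_id_from_url: watch-page URLs, short links, bare IDs.
    if "youtube.com/watch?v=" in url:
        return parse_qs(urlparse(url).query)["v"][0]
    if "youtu.be/" in url:
        return url.split("youtu.be/")[1].split("?")[0]
    return url  # assume it is already a video ID

assert video_id("https://www.youtube.com/watch?v=dQw4w9WgXcQ") == "dQw4w9WgXcQ"
assert video_id("https://youtu.be/dQw4w9WgXcQ?t=42") == "dQw4w9WgXcQ"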
--------------------------------------------------------------------------------
/bot/modules/mediainfo.py:
--------------------------------------------------------------------------------
1 | from os import getcwd, path as ospath
2 | from re import search
3 | from shlex import split
4 |
5 | from aiofiles import open as aiopen
6 | from aiofiles.os import mkdir, path as aiopath, remove as aioremove
7 | from aiohttp import ClientSession
8 |
9 | from .. import LOGGER
10 | from ..core.tg_client import TgClient
11 | from ..helper.ext_utils.bot_utils import cmd_exec
12 | from ..helper.ext_utils.telegraph_helper import telegraph
13 | from ..helper.telegram_helper.bot_commands import BotCommands
14 | from ..helper.telegram_helper.message_utils import send_message, edit_message
15 |
16 |
17 | async def gen_mediainfo(message, link=None, media=None, mmsg=None):
18 | temp_send = await send_message(message, "Generating MediaInfo...")
19 | try:
20 | path = "mediainfo/"
21 | if not await aiopath.isdir(path):
22 | await mkdir(path)
23 | des_path, file_size = "", 0
24 | if link:
25 | filename = search(".+/(.+)", link).group(1)
26 | des_path = ospath.join(path, filename)
27 | headers = {
28 | "user-agent": "Mozilla/5.0 (Linux; Android 12; 2201116PI) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Mobile Safari/537.36"
29 | }
30 | async with ClientSession() as session:
31 | async with session.get(link, headers=headers) as response:
32 | file_size = int(response.headers.get("Content-Length", 0))
33 | async with aiopen(des_path, "wb") as f:
34 | async for chunk in response.content.iter_chunked(10000000):
35 | await f.write(chunk)
36 | break
37 | elif media:
38 | des_path = ospath.join(path, media.file_name)
39 | file_size = media.file_size
40 | if file_size <= 50000000:
41 | await mmsg.download(ospath.join(getcwd(), des_path))
42 | else:
43 | async for chunk in TgClient.bot.stream_media(media, limit=5):
44 | async with aiopen(des_path, "ab") as f:
45 | await f.write(chunk)
46 | stdout, _, _ = await cmd_exec(split(f'mediainfo "{des_path}"'))
47 | tc = f"<h4>📌 {ospath.basename(des_path)}</h4><br><br>"
48 | if len(stdout) != 0:
49 | tc += parseinfo(stdout, file_size)
50 | except Exception as e:
51 | LOGGER.error(e)
52 | return await edit_message(temp_send, f"MediaInfo stopped due to {str(e)}")
53 | finally:
54 | if des_path and await aiopath.exists(des_path): await aioremove(des_path)
55 | link_id = (await telegraph.create_page(title="MediaInfo X", content=tc))["path"]
56 | await temp_send.edit(
57 | f"MediaInfo:\n\n➲ Link : https://graph.org/{link_id}",
58 | disable_web_page_preview=False,
59 | )
60 |
61 |
62 | section_dict = {"General": "🗒", "Video": "🎞", "Audio": "🔊", "Text": "🔠", "Menu": "🗃"}
63 |
64 |
65 | def parseinfo(out, size):
66 | tc, trigger = "", False
67 | size_line = (
68 | f"File size : {size / (1024 * 1024):.2f} MiB"
69 | )
70 | for line in out.split("\n"):
71 | for section, emoji in section_dict.items():
72 | if line.startswith(section):
73 | trigger = True
74 | if not line.startswith("General"):
75 | tc += "</pre><br>"
76 | tc += f"<h4>{emoji} {line.replace('Text', 'Subtitle')}</h4>"
77 | break
78 | if line.startswith("File size"):
79 | line = size_line
80 | if trigger:
81 | tc += "<br><pre>"
82 | trigger = False
83 | else:
84 | tc += line + "\n"
85 | tc += "</pre>"
86 | return tc
87 |
88 |
89 | async def mediainfo(_, message):
90 | rply = message.reply_to_message
91 | help_msg = f"""
92 | By replying to media:
93 | /{BotCommands.MediaInfoCommand[0]} or /{BotCommands.MediaInfoCommand[1]} [media]
94 |
95 | By reply/sending download link:
96 | /{BotCommands.MediaInfoCommand[0]} or /{BotCommands.MediaInfoCommand[1]} [link]
97 | """
98 | if len(message.command) > 1 or rply and rply.text:
99 | link = rply.text if rply else message.command[1]
100 | return await gen_mediainfo(message, link)
101 | elif rply:
102 | if file := next(
103 | (
104 | i
105 | for i in [
106 | rply.document,
107 | rply.video,
108 | rply.audio,
109 | rply.voice,
110 | rply.animation,
111 | rply.video_note,
112 | ]
113 | if i is not None
114 | ),
115 | None,
116 | ):
117 | return await gen_mediainfo(message, None, file, rply)
118 | else:
119 | return await send_message(message, help_msg)
120 | else:
121 | return await send_message(message, help_msg)
122 |
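parseinfo walks the raw mediainfo text line by line and opens a new emoji-tagged section whenever a line starts with a known section name; a minimal sketch of that grouping, fed a hypothetical two-section sample instead of real mediainfo output:

section_dict = {"General": "🗒", "Video": "🎞", "Audio": "🔊", "Text": "🔠", "Menu": "🗃"}
sample_out = "General\nComplete name : demo.mkv\n\nVideo\nWidth : 1920 pixels\n"

for line in sample_out.split("\n"):
    for section, emoji in section_dict.items():
        if line.startswith(section):
            print(f"{emoji} {line}")  # section header gets its emoji
            break
    else:
        if line:
            print(f"  {line}")  # ordinary detail line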
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/download_utils/rclone_download.py:
--------------------------------------------------------------------------------
1 | from asyncio import gather
2 | from json import loads
3 | from secrets import token_hex
4 | from aiofiles.os import remove
5 |
6 | from .... import task_dict, task_dict_lock, LOGGER
7 | from ....core.config_manager import BinConfig
8 | from ...ext_utils.bot_utils import cmd_exec
9 | from ...ext_utils.task_manager import (
10 | check_running_tasks,
11 | stop_duplicate_check,
12 | limit_checker,
13 | )
14 | from ...mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper
15 | from ...mirror_leech_utils.status_utils.queue_status import QueueStatus
16 | from ...mirror_leech_utils.status_utils.rclone_status import RcloneStatus
17 | from ...telegram_helper.message_utils import send_status_message
18 |
19 |
20 | async def add_rclone_download(listener, path):
21 | if listener.link.startswith("mrcc:"):
22 | listener.link = listener.link.split("mrcc:", 1)[1]
23 | config_path = f"rclone/{listener.user_id}.conf"
24 | else:
25 | config_path = "rclone.conf"
26 |
27 | remote, listener.link = listener.link.split(":", 1)
28 | listener.link = listener.link.strip("/")
29 | rclone_select = False
30 | if listener.link.startswith("rclone_select"):
31 | rclone_select = True
32 | rpath = ""
33 | else:
34 | rpath = listener.link
35 |
36 | cmd1 = [
37 | BinConfig.RCLONE_NAME,
38 | "lsjson",
39 | "--fast-list",
40 | "--stat",
41 | "--no-mimetype",
42 | "--no-modtime",
43 | "--config",
44 | config_path,
45 | f"{remote}:{rpath}",
46 | ]
47 | cmd2 = [
48 | BinConfig.RCLONE_NAME,
49 | "size",
50 | "--fast-list",
51 | "--json",
52 | "--config",
53 | config_path,
54 | f"{remote}:{rpath}",
55 | ]
56 | if rclone_select:
57 | cmd2.extend(("--files-from", listener.link))
58 | res = await cmd_exec(cmd2)
59 | if res[2] != 0:
60 | if res[2] != -9:
61 | msg = f"Error while getting rclone stat/size. Path: {remote}:{listener.link}. Stderr: {res[1][:4000]}"
62 | await listener.on_download_error(msg)
63 | return
64 | try:
65 | rsize = loads(res[0])
66 | except Exception as err:
67 | await listener.on_download_error(f"RcloneDownload JsonLoad: {err}")
68 | return
69 | if not listener.name:
70 | listener.name = listener.link
71 | path += listener.name
72 | else:
73 | res1, res2 = await gather(cmd_exec(cmd1), cmd_exec(cmd2))
74 | if res1[2] != 0 or res2[2] != 0:
75 | if res1[2] != -9:
76 | err = res1[1] or res2[1]
77 | msg = f"Error while getting rclone stat/size. Path: {remote}:{listener.link}. Stderr: {err[:4000]}"
78 | await listener.on_download_error(msg)
79 | return
80 | try:
81 | rstat = loads(res1[0])
82 | rsize = loads(res2[0])
83 | except Exception as err:
84 | await listener.on_download_error(f"RcloneDownload JsonLoad: {err}")
85 | return
86 | if rstat["IsDir"]:
87 | if not listener.name:
88 | listener.name = (
89 | listener.link.rsplit("/", 1)[-1] if listener.link else remote
90 | )
91 | path += listener.name
92 | else:
93 | listener.name = listener.link.rsplit("/", 1)[-1]
94 | listener.size = rsize["bytes"]
95 | gid = token_hex(5)
96 |
97 | if not rclone_select:
98 | msg, button = await stop_duplicate_check(listener)
99 | if msg:
100 | await listener.on_download_error(msg, button)
101 | return
102 | if limit_exceeded := await limit_checker(listener):
103 | await listener.on_download_error(limit_exceeded, is_limit=True)
104 | return
105 |
106 | add_to_queue, event = await check_running_tasks(listener)
107 | if add_to_queue:
108 | LOGGER.info(f"Added to Queue/Download: {listener.name}")
109 | async with task_dict_lock:
110 | task_dict[listener.mid] = QueueStatus(listener, gid, "dl")
111 | await listener.on_download_start()
112 | if listener.multi <= 1:
113 | await send_status_message(listener.message)
114 | await event.wait()
115 | if listener.is_cancelled:
116 | return
117 |
118 | RCTransfer = RcloneTransferHelper(listener)
119 | async with task_dict_lock:
120 | task_dict[listener.mid] = RcloneStatus(listener, RCTransfer, gid, "dl")
121 |
122 | if add_to_queue:
123 | LOGGER.info(f"Start Queued Download with rclone: {listener.link}")
124 | else:
125 | await listener.on_download_start()
126 | if listener.multi <= 1:
127 | await send_status_message(listener.message)
128 | LOGGER.info(f"Download with rclone: {listener.link}")
129 |
130 | await RCTransfer.download(remote, config_path, path)
131 | if rclone_select:
132 | await remove(listener.link)
133 |
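The size probe in cmd2 is plain rclone; a standalone sketch of the same call via subprocess, assuming an rclone binary on PATH, a local rclone.conf, and a remote named gdrive (all three are assumptions here, not part of the module):

from json import loads
from subprocess import run

cmd = ["rclone", "size", "--fast-list", "--json", "--config", "rclone.conf", "gdrive:path/to/dir"]
proc = run(cmd, capture_output=True, text=True)
if proc.returncode == 0:
    stats = loads(proc.stdout)  # e.g. {"count": 12, "bytes": 3456789}
    print(f"{stats['count']} objects, {stats['bytes']} bytes")
else:
    print(f"rclone failed: {proc.stderr[:200]}")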
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/nzb_status.py:
--------------------------------------------------------------------------------
1 | from asyncio import gather
2 | from collections import defaultdict
3 |
4 | from .... import LOGGER, sabnzbd_client, nzb_jobs, nzb_listener_lock
5 | from ...ext_utils.status_utils import (
6 | MirrorStatus,
7 | EngineStatus,
8 | get_readable_file_size,
9 | get_readable_time,
10 | time_to_seconds,
11 | )
12 |
13 |
14 | async def get_download(nzo_id, old_info=None):
15 | if old_info is None:
16 | old_info = {}
17 | try:
18 | queue = await sabnzbd_client.get_downloads(nzo_ids=nzo_id)
19 | if res := queue["queue"]["slots"]:
20 | slot = res[0]
21 | if msg := slot["labels"]:
22 | LOGGER.warning(" | ".join(msg))
23 | return slot
24 | else:
25 | history = await sabnzbd_client.get_history(nzo_ids=nzo_id)
26 | if res := history["history"]["slots"]:
27 | slot = res[0]
28 | if slot["status"] == "Verifying":
29 | percentage = slot["action_line"].split("Verifying: ")[-1].split("/")
30 | percentage = round(
31 | (int(float(percentage[0])) / int(float(percentage[1]))) * 100, 2
32 | )
33 | old_info["percentage"] = percentage
34 | elif slot["status"] == "Repairing":
35 | action = slot["action_line"].split("Repairing: ")[-1].split()
36 | percentage = action[0].strip("%")
37 | eta = action[2]
38 | old_info["percentage"] = percentage
39 | old_info["timeleft"] = eta
40 | elif slot["status"] == "Extracting":
41 | if "Unpacking" in slot["action_line"]:
42 | action = slot["action_line"].split("Unpacking: ")[-1].split()
43 | else:
44 | action = (
45 | slot["action_line"].split("Direct Unpack: ")[-1].split()
46 | )
47 | percentage = action[0].split("/")
48 | percentage = round(
49 | (int(float(percentage[0])) / int(float(percentage[1]))) * 100, 2
50 | )
51 | eta = action[2]
52 | old_info["percentage"] = percentage
53 | old_info["timeleft"] = eta
54 | old_info["status"] = slot["status"]
55 | return old_info
56 | except Exception as e:
57 | LOGGER.error(f"Sabnzbd: error while getting job info (ID: {nzo_id}): {e}")
58 | return old_info
59 |
60 |
61 | class SabnzbdStatus:
62 | def __init__(self, listener, gid, queued=False):
63 | self.queued = queued
64 | self.listener = listener
65 | self._gid = gid
66 | self._info = {}
67 | self.engine = EngineStatus().STATUS_SABNZBD
68 |
69 | async def update(self):
70 | self._info = await get_download(self._gid, self._info)
71 |
72 | def progress(self):
73 | return f"{self._info.get('percentage', '0')}%"
74 |
75 | def processed_raw(self):
76 | return (
77 | float(self._info.get("mb", "0")) - float(self._info.get("mbleft", "0"))
78 | ) * 1048576
79 |
80 | def processed_bytes(self):
81 | return get_readable_file_size(self.processed_raw())
82 |
83 | def speed_raw(self):
84 | if self._info.get("mb", "0") == self._info.get("mbleft", "0"):
85 | return 0
86 | try:
87 | return int(float(self._info.get("mbleft", "0")) * 1048576) / self.eta_raw()
88 | except Exception:
89 | return 0
90 |
91 | def speed(self):
92 | return f"{get_readable_file_size(self.speed_raw())}/s"
93 |
94 | def name(self):
95 | return self._info.get("filename", "")
96 |
97 | def size(self):
98 | return self._info.get("size", 0)
99 |
100 | def eta_raw(self):
101 | return int(time_to_seconds(self._info.get("timeleft", "0")))
102 |
103 | def eta(self):
104 | return get_readable_time(self.eta_raw())
105 |
106 | async def status(self):
107 | await self.update()
108 | if self._info.get("mb", "0") == self._info.get("mbleft", "0"):
109 | return MirrorStatus.STATUS_QUEUEDL
110 | state = self._info.get("status")
111 | if state == "Paused" and self.queued:
112 | return MirrorStatus.STATUS_QUEUEDL
113 | elif state in [
114 | "QuickCheck",
115 | "Verifying",
116 | "Repairing",
117 | "Fetching",
118 | "Moving",
119 | "Extracting",
120 | ]:
121 | return state
122 | else:
123 | return MirrorStatus.STATUS_DOWNLOAD
124 |
125 | def task(self):
126 | return self
127 |
128 | def gid(self):
129 | return self._gid
130 |
131 | async def cancel_task(self):
132 | self.listener.is_cancelled = True
133 | await self.update()
134 | LOGGER.info(f"Cancelling Download: {self.name()}")
135 | await gather(
136 | self.listener.on_download_error("Stopped by user!"),
137 | sabnzbd_client.delete_job(self._gid, delete_files=True),
138 | sabnzbd_client.delete_category(f"{self.listener.mid}"),
139 | sabnzbd_client.delete_history(self._gid, delete_files=True),
140 | )
141 | async with nzb_listener_lock:
142 | if self._gid in nzb_jobs:
143 | del nzb_jobs[self._gid]
144 |
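SABnzbd reports sizes in MiB (mb/mbleft) and the time left as a clock string, which is why processed_raw multiplies by 1048576 and speed_raw divides the remaining bytes by the ETA; a sketch with hypothetical slot values (timeleft is hand-parsed here, where the module uses the time_to_seconds helper):

slot = {"mb": "1024.0", "mbleft": "256.0", "timeleft": "0:04:16"}

processed = (float(slot["mb"]) - float(slot["mbleft"])) * 1048576  # MiB -> bytes
h, m, s = (int(x) for x in slot["timeleft"].split(":"))
eta = h * 3600 + m * 60 + s
speed = float(slot["mbleft"]) * 1048576 / eta if eta else 0
print(f"{processed:.0f} bytes done, ETA {eta}s, ~{speed:.0f} B/s")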
--------------------------------------------------------------------------------
/gen_scripts/add_to_team_drive.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | from argparse import ArgumentParser
3 | from glob import glob
4 | from json import load, JSONDecodeError
5 | from os import path
6 | from pickle import load as pickle_load, dump as pickle_dump
7 | from sys import exit
8 | from time import time
9 |
10 | from googleapiclient.discovery import build
11 | from progress.bar import Bar
12 | from google.auth.transport.requests import Request
13 | from google_auth_oauthlib.flow import InstalledAppFlow
14 |
15 |
16 | def parse_args():
17 | parser = ArgumentParser(
18 | description="Add service accounts to a shared drive using credentials files in a folder."
19 | )
20 | parser.add_argument(
21 | "--path",
22 | "-p",
23 | default="accounts",
24 | help="Path to the service accounts folder.",
25 | )
26 | parser.add_argument(
27 | "--credentials",
28 | "-c",
29 | default="./credentials.json",
30 | help="Path for the credentials file.",
31 | )
32 | parser.add_argument("--yes", "-y", action="store_true", help="Skips the prompt.")
33 | req = parser.add_argument_group("required arguments")
34 | req.add_argument(
35 | "--drive-id", "-d", required=True, help="The ID of the Shared Drive."
36 | )
37 | return parser.parse_args()
38 |
39 |
40 | def load_credentials_file(credentials_pattern):
41 | credentials_files = glob(credentials_pattern)
42 | if not credentials_files:
43 | print(">> No credentials found.")
44 | exit(0)
45 | credentials_file = credentials_files[0]
46 | try:
47 | with open(credentials_file, "r") as f:
48 | load(f)
49 | print(">> Found credentials.")
50 | except (IOError, JSONDecodeError) as e:
51 | print(">> Error reading credentials:", e)
52 | exit(1)
53 | return credentials_file
54 |
55 |
56 | def authenticate(creds_file):
57 | token_path = "token_sa.pickle"
58 | creds = None
59 | try:
60 | if path.exists(token_path):
61 | with open(token_path, "rb") as token_file:
62 | creds = pickle_load(token_file)
63 | except Exception as e:
64 | print(">> Failed to load existing token:", e)
65 | try:
66 | if not creds or not getattr(creds, "valid", False):
67 | if (
68 | creds
69 | and getattr(creds, "expired", False)
70 | and getattr(creds, "refresh_token", None)
71 | ):
72 | creds.refresh(Request())
73 | else:
74 | flow = InstalledAppFlow.from_client_secrets_file(
75 | creds_file,
76 | scopes=[
77 | "https://www.googleapis.com/auth/admin.directory.group",
78 | "https://www.googleapis.com/auth/admin.directory.group.member",
79 | ],
80 | )
81 | creds = flow.run_console()
82 | with open(token_path, "wb") as token_file:
83 | pickle_dump(creds, token_file)
84 | except Exception as e:
85 | print(">> Authentication failed:", e)
86 | exit(1)
87 | return creds
88 |
89 |
90 | def add_service_accounts(drive_client, account_dir, drive_id):
91 | account_files = glob(path.join(account_dir, "*.json"))
92 | if not account_files:
93 | print(">> No service accounts found in:", account_dir)
94 | exit(0)
95 | batch = drive_client.new_batch_http_request()
96 | pbar = Bar("Readying accounts", max=len(account_files))
97 | for acc_file in account_files:
98 | try:
99 | with open(acc_file, "r") as f:
100 | data = load(f)
101 | client_email = data["client_email"]
102 | batch.add(
103 | drive_client.permissions().create(
104 | fileId=drive_id,
105 | supportsAllDrives=True,
106 | body={
107 | "role": "organizer",
108 | "type": "user",
109 | "emailAddress": client_email,
110 | },
111 | )
112 | )
113 | except Exception as e:
114 | print(">> Error processing file {}: {}".format(acc_file, e))
115 | pbar.next()
116 | pbar.finish()
117 | print("Adding...")
118 | try:
119 | batch.execute()
120 | except Exception as e:
121 | print(">> Batch execution failed:", e)
122 | exit(1)
123 |
124 |
125 | def main():
126 | start_time = time()
127 | args = parse_args()
128 | credentials_file = load_credentials_file(args.credentials)
129 |
130 | if not args.yes:
131 | try:
132 | input(
133 | ">> Ensure the Google account that generated credentials.json has "
134 | "Manager access on your Team Drive.\n>> (Press Enter to continue)"
135 | )
136 | except Exception as e:
137 | print(">> User prompt failed:", e)
138 | exit(1)
139 |
140 | creds = authenticate(credentials_file)
141 | drive_client = build("drive", "v3", credentials=creds)
142 | add_service_accounts(drive_client, args.path, args.drive_id)
143 |
144 | elapsed = time() - start_time
145 | hours, rem = divmod(elapsed, 3600)
146 | minutes, seconds = divmod(rem, 60)
147 | print("Complete.")
148 | print(
149 | "Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(
150 | int(hours), int(minutes), seconds
151 | )
152 | )
153 |
154 |
155 | if __name__ == "__main__":
156 | main()
157 |
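Typical invocation, given the argument definitions above (service-account JSON files in the default accounts/ folder): python3 gen_scripts/add_to_team_drive.py --drive-id <SHARED_DRIVE_ID> --yes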
--------------------------------------------------------------------------------
/bot/core/torrent_manager.py:
--------------------------------------------------------------------------------
1 | from asyncio import TimeoutError, gather
2 | from contextlib import suppress
3 | from inspect import iscoroutinefunction
4 | from pathlib import Path
5 |
6 | from aioaria2 import Aria2WebsocketClient
7 | from aiohttp import ClientError
8 | from aioqbt.client import create_client
9 | from tenacity import (
10 | retry,
11 | retry_if_exception_type,
12 | stop_after_attempt,
13 | wait_exponential,
14 | )
15 |
16 | from .. import LOGGER, aria2_options
17 | from .config_manager import Config
18 |
19 |
20 | def wrap_with_retry(obj, max_retries=3):
21 | for attr_name in dir(obj):
22 | if attr_name.startswith("_"):
23 | continue
24 |
25 | attr = getattr(obj, attr_name)
26 | if iscoroutinefunction(attr):
27 | retry_policy = retry(
28 | stop=stop_after_attempt(max_retries),
29 | wait=wait_exponential(multiplier=1, min=1, max=5),
30 | retry=retry_if_exception_type(
31 | (ClientError, TimeoutError, RuntimeError)
32 | ),
33 | )
34 | wrapped = retry_policy(attr)
35 | setattr(obj, attr_name, wrapped)
36 | return obj
37 |
38 |
39 | class TorrentManager:
40 | aria2 = None
41 | qbittorrent = None
42 |
43 | @classmethod
44 | async def initiate(cls):
45 | if cls.aria2:
46 | return
47 | try:
48 | cls.aria2 = await Aria2WebsocketClient.new("http://localhost:6800/jsonrpc")
49 | LOGGER.info("Aria2 initialized successfully.")
50 |
51 | if Config.DISABLE_TORRENTS:
52 | LOGGER.info("Torrents are disabled.")
53 | return
54 |
55 | cls.qbittorrent = await create_client("http://localhost:8090/api/v2/")
56 | cls.qbittorrent = wrap_with_retry(cls.qbittorrent)
57 |
58 | except Exception as e:
59 | LOGGER.error(f"Error during initialization: {e}")
60 | await cls.close_all()
61 | raise
62 |
63 | @classmethod
64 | async def close_all(cls):
65 | close_tasks = []
66 | if cls.aria2:
67 | close_tasks.append(cls.aria2.close())
68 | cls.aria2 = None
69 | if cls.qbittorrent:
70 | close_tasks.append(cls.qbittorrent.close())
71 | cls.qbittorrent = None
72 | if close_tasks:
73 | await gather(*close_tasks)
74 |
75 | @classmethod
76 | async def aria2_remove(cls, download):
77 | if download.get("status", "") in ["active", "paused", "waiting"]:
78 | await cls.aria2.forceRemove(download.get("gid", ""))
79 | else:
80 | with suppress(Exception):
81 | await cls.aria2.removeDownloadResult(download.get("gid", ""))
82 |
83 | @classmethod
84 | async def remove_all(cls):
85 | await cls.pause_all()
86 | if cls.qbittorrent:
87 | await gather(
88 | cls.qbittorrent.torrents.delete("all", False),
89 | cls.aria2.purgeDownloadResult(),
90 | )
91 | else:
92 | await gather(
93 | cls.aria2.purgeDownloadResult(),
94 | )
95 | downloads = []
96 | results = await gather(cls.aria2.tellActive(), cls.aria2.tellWaiting(0, 1000))
97 | for res in results:
98 | downloads.extend(res)
99 | tasks = []
100 | tasks.extend(
101 | cls.aria2.forceRemove(download.get("gid")) for download in downloads
102 | )
103 | with suppress(Exception):
104 | await gather(*tasks)
105 |
106 | @classmethod
107 | async def overall_speed(cls):
108 | aria2_speed = await cls.aria2.getGlobalStat()
109 | download_speed = int(aria2_speed.get("downloadSpeed", "0"))
110 | upload_speed = int(aria2_speed.get("uploadSpeed", "0"))
111 |
112 | if cls.qbittorrent:
113 | qb_speed = await cls.qbittorrent.transfer.info()
114 | download_speed += qb_speed.dl_info_speed
115 | upload_speed += qb_speed.up_info_speed
116 |
117 | return download_speed, upload_speed
118 |
119 | @classmethod
120 | async def pause_all(cls):
121 | pause_tasks = [cls.aria2.forcePauseAll()]
122 | if cls.qbittorrent:
123 | pause_tasks.append(cls.qbittorrent.torrents.stop("all"))
124 | await gather(*pause_tasks)
125 |
126 | @classmethod
127 | async def change_aria2_option(cls, key, value):
128 | downloads = []
129 | results = await gather(cls.aria2.tellActive(), cls.aria2.tellWaiting(0, 1000))
130 | for res in results:
131 | downloads.extend(res)
132 | tasks = [
133 | cls.aria2.changeOption(download.get("gid"), {key: value})
134 | for download in downloads
135 | if download.get("status", "") != "complete"
136 | ]
137 | if tasks:
138 | try:
139 | await gather(*tasks)
140 | except Exception as e:
141 | LOGGER.error(e)
142 | if key not in ["checksum", "index-out", "out", "pause", "select-file"]:
143 | await cls.aria2.changeGlobalOption({key: value})
144 | aria2_options[key] = value
145 |
146 |
147 | def aria2_name(download_info):
148 | if "bittorrent" in download_info and download_info["bittorrent"].get("info"):
149 | return download_info["bittorrent"]["info"]["name"]
150 | elif download_info.get("files"):
151 | if download_info["files"][0]["path"].startswith("[METADATA]"):
152 | return download_info["files"][0]["path"]
153 | file_path = download_info["files"][0]["path"]
154 | dir_path = download_info["dir"]
155 | if file_path.startswith(dir_path):
156 | return Path(file_path[len(dir_path) + 1 :]).parts[0]
157 | else:
158 | return ""
159 | else:
160 | return ""
161 |
162 |
163 | def is_metadata(download_info):
164 | return any(
165 | f["path"].startswith("[METADATA]") for f in download_info.get("files", [])
166 | )
167 |
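wrap_with_retry rebinds every public coroutine method on an object with a tenacity retry policy; the pattern can be seen in isolation with a toy client (FlakyClient below is hypothetical, not part of the bot, and the exception filter is omitted):

from asyncio import run
from inspect import iscoroutinefunction
from tenacity import retry, stop_after_attempt, wait_exponential

class FlakyClient:
    def __init__(self):
        self.calls = 0

    async def ping(self):
        self.calls += 1
        if self.calls < 3:
            raise RuntimeError("transient failure")
        return "pong"

client = FlakyClient()
for name in dir(client):
    if name.startswith("_"):
        continue
    attr = getattr(client, name)
    if iscoroutinefunction(attr):
        # Same policy shape as wrap_with_retry above.
        policy = retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=1, max=5))
        setattr(client, name, policy(attr))

print(run(client.ping()))  # succeeds on the third internal attempt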
--------------------------------------------------------------------------------
/config_sample.py:
--------------------------------------------------------------------------------
1 | # REQUIRED CONFIG
2 | BOT_TOKEN = ""
3 | OWNER_ID = 0
4 | TELEGRAM_API = 0
5 | TELEGRAM_HASH = ""
6 | DATABASE_URL = ""
7 |
8 | # OPTIONAL CONFIG
9 | DEFAULT_LANG = "en"
10 | TG_PROXY = (
11 | {}
12 | ) # {"scheme": "socks5", "hostname": "", "port": 1234, "username": "user", "password": "pass"}
13 | USER_SESSION_STRING = ""
14 | CMD_SUFFIX = ""
15 | AUTHORIZED_CHATS = ""
16 | SUDO_USERS = ""
17 | STATUS_LIMIT = 10
18 | DEFAULT_UPLOAD = "rc"
19 | STATUS_UPDATE_INTERVAL = 15
20 | FILELION_API = ""
21 | STREAMWISH_API = ""
22 | EXCLUDED_EXTENSIONS = ""
23 | INCOMPLETE_TASK_NOTIFIER = False
24 | YT_DLP_OPTIONS = ""
25 | USE_SERVICE_ACCOUNTS = False
26 | NAME_SWAP = ""
27 | FFMPEG_CMDS = {}
28 | UPLOAD_PATHS = {}
29 |
30 | # Hyper Tg Downloader
31 | HELPER_TOKENS = ""
32 |
33 | # MegaAPI v4.30
34 | MEGA_EMAIL = ""
35 | MEGA_PASSWORD = ""
36 |
37 | # Disable Options
38 | DISABLE_TORRENTS = False
39 | DISABLE_LEECH = False
40 | DISABLE_BULK = False
41 | DISABLE_MULTI = False
42 | DISABLE_SEED = False
43 | DISABLE_FF_MODE = False
44 |
45 | # Telegraph
46 | AUTHOR_NAME = "WZML-X"
47 | AUTHOR_URL = "https://t.me/WZML_X"
48 |
49 | # Task Limits
50 | DIRECT_LIMIT = 0
51 | MEGA_LIMIT = 0
52 | TORRENT_LIMIT = 0
53 | GD_DL_LIMIT = 0
54 | RC_DL_LIMIT = 0
55 | CLONE_LIMIT = 0
56 | JD_LIMIT = 0
57 | NZB_LIMIT = 0
58 | YTDLP_LIMIT = 0
59 | PLAYLIST_LIMIT = 0
60 | LEECH_LIMIT = 0
61 | EXTRACT_LIMIT = 0
62 | ARCHIVE_LIMIT = 0
63 | STORAGE_LIMIT = 0
64 |
65 | # Insta video downloader api
66 | INSTADL_API = ""
67 |
68 | # Nzb search
69 | HYDRA_IP = ""
70 | HYDRA_API_KEY = ""
71 |
72 | # Media Search
73 | IMDB_TEMPLATE = """Title: {title} [{year}]
74 | Also Known As: {aka}
75 | Rating ⭐️: {rating}
76 | Release Info: {release_date}
77 | Genre: {genres}
78 | IMDb URL: {url}
79 | Language: {languages}
80 | Country of Origin: {countries}
81 |
82 | Story Line: {plot}
83 |
84 | Read More ..."""
85 |
86 | # Task Tools
87 | FORCE_SUB_IDS = ""
88 | MEDIA_STORE = True
89 | DELETE_LINKS = False
90 | CLEAN_LOG_MSG = False
91 |
92 | # Limiters
93 | BOT_MAX_TASKS = 0
94 | USER_MAX_TASKS = 0
95 | USER_TIME_INTERVAL = 0
96 | VERIFY_TIMEOUT = 0
97 | LOGIN_PASS = ""
98 |
99 | # Bot Settings
100 | BOT_PM = False
101 | SET_COMMANDS = True
102 | TIMEZONE = "Asia/Kolkata"
103 |
104 | # GDrive Tools
105 | GDRIVE_ID = ""
106 | GD_DESP = "Uploaded with WZ Bot"
107 | IS_TEAM_DRIVE = False
108 | STOP_DUPLICATE = False
109 | INDEX_URL = ""
110 |
111 | # YT Tools
112 | YT_DESP = "Uploaded to YouTube by WZML-X bot"
113 | YT_TAGS = ["telegram", "bot", "youtube"] # or as a comma-separated string
114 | YT_CATEGORY_ID = 22
115 | YT_PRIVACY_STATUS = "unlisted"
116 |
117 | # Rclone
118 | RCLONE_PATH = ""
119 | RCLONE_FLAGS = ""
120 | RCLONE_SERVE_URL = ""
121 | SHOW_CLOUD_LINK = True
122 | RCLONE_SERVE_PORT = 0
123 | RCLONE_SERVE_USER = ""
124 | RCLONE_SERVE_PASS = ""
125 |
126 | # JDownloader
127 | JD_EMAIL = ""
128 | JD_PASS = ""
129 |
130 | # Sabnzbd
131 | USENET_SERVERS = [
132 | {
133 | "name": "main",
134 | "host": "",
135 | "port": 563,
136 | "timeout": 60,
137 | "username": "",
138 | "password": "",
139 | "connections": 8,
140 | "ssl": 1,
141 | "ssl_verify": 2,
142 | "ssl_ciphers": "",
143 | "enable": 1,
144 | "required": 0,
145 | "optional": 0,
146 | "retention": 0,
147 | "send_group": 0,
148 | "priority": 0,
149 | }
150 | ]
151 |
152 | # Update
153 | UPSTREAM_REPO = ""
154 | UPSTREAM_BRANCH = "master"
155 | UPDATE_PKGS = True
156 |
157 | # Leech
158 | LEECH_SPLIT_SIZE = 0
159 | AS_DOCUMENT = False
160 | EQUAL_SPLITS = False
161 | MEDIA_GROUP = False
162 | USER_TRANSMISSION = True
163 | HYBRID_LEECH = True
164 | LEECH_PREFIX = ""
165 | LEECH_SUFFIX = ""
166 | LEECH_FONT = ""
167 | LEECH_CAPTION = ""
168 | THUMBNAIL_LAYOUT = ""
169 |
170 | # Log Channels
171 | LEECH_DUMP_CHAT = ""
172 | LINKS_LOG_ID = ""
173 | MIRROR_LOG_ID = ""
174 |
175 | # qBittorrent/Aria2c
176 | TORRENT_TIMEOUT = 0
177 | BASE_URL = ""
178 | BASE_URL_PORT = 0
179 | WEB_PINCODE = True
180 |
181 | # Queueing system
182 | QUEUE_ALL = 0
183 | QUEUE_DOWNLOAD = 0
184 | QUEUE_UPLOAD = 0
185 |
186 | # RSS
187 | RSS_DELAY = 600
188 | RSS_CHAT = ""
189 | RSS_SIZE_LIMIT = 0
190 |
191 | # Torrent Search
192 | SEARCH_API_LINK = ""
193 | SEARCH_LIMIT = 0
194 | SEARCH_PLUGINS = [
195 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/piratebay.py",
196 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/limetorrents.py",
197 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torlock.py",
198 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentscsv.py",
199 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/eztv.py",
200 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentproject.py",
201 | "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/kickass_torrent.py",
202 | "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/yts_am.py",
203 | "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/linuxtracker.py",
204 | "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/nyaasi.py",
205 | "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/ettv.py",
206 | "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/glotorrents.py",
207 | "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/thepiratebay.py",
208 | "https://raw.githubusercontent.com/v1k45/1337x-qBittorrent-search-plugin/master/leetx.py",
209 | "https://raw.githubusercontent.com/nindogo/qbtSearchScripts/master/magnetdl.py",
210 | "https://raw.githubusercontent.com/msagca/qbittorrent_plugins/main/uniondht.py",
211 | "https://raw.githubusercontent.com/khensolomon/leyts/master/yts.py",
212 | ]
213 |
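A hedged sketch of how a module-style config like this can be collected into a dict, keyed on the upper-case names (the bot's real loader lives in bot/core/config_manager.py and may differ):

import config_sample

settings = {k: v for k, v in vars(config_sample).items() if k.isupper()}
print(settings["DEFAULT_UPLOAD"], settings["STATUS_LIMIT"])  # -> rc 10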
--------------------------------------------------------------------------------