├── .github
│   └── workflows
│       └── deploy.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── README.md
├── add_to_team_drive.py
├── aria.sh
├── bot
│   ├── __init__.py
│   ├── __main__.py
│   ├── conv_pyrogram.py
│   ├── helper
│   │   ├── __init__.py
│   │   ├── ext_utils
│   │   │   ├── __init__.py
│   │   │   ├── batch_helper.py
│   │   │   ├── bot_utils.py
│   │   │   ├── db_handler.py
│   │   │   ├── exceptions.py
│   │   │   ├── help_messages.py
│   │   │   ├── human_format.py
│   │   │   ├── media_utils.py
│   │   │   ├── menu_utils.py
│   │   │   ├── misc_utils.py
│   │   │   ├── rclone_data_holder.py
│   │   │   ├── rclone_utils.py
│   │   │   └── telegraph_helper.py
│   │   ├── mirror_leech_utils
│   │   │   ├── __init__.py
│   │   │   ├── debrid_utils
│   │   │   │   └── debrid_helper.py
│   │   │   ├── download_utils
│   │   │   │   ├── __init__.py
│   │   │   │   ├── aria2_download.py
│   │   │   │   ├── direct_link_generator.py
│   │   │   │   ├── direct_link_generator_license.md
│   │   │   │   ├── gd_downloader.py
│   │   │   │   ├── mega_download.py
│   │   │   │   ├── qbit_downloader.py
│   │   │   │   ├── rclone_copy.py
│   │   │   │   ├── rclone_leech.py
│   │   │   │   ├── telegram_downloader.py
│   │   │   │   └── yt_dlp_helper.py
│   │   │   ├── gd_utils
│   │   │   │   ├── clone.py
│   │   │   │   ├── count.py
│   │   │   │   ├── download.py
│   │   │   │   └── helper.py
│   │   │   ├── status_utils
│   │   │   │   ├── __init__.py
│   │   │   │   ├── aria_status.py
│   │   │   │   ├── clone_status.py
│   │   │   │   ├── extract_status.py
│   │   │   │   ├── gdrive_status.py
│   │   │   │   ├── mega_status.py
│   │   │   │   ├── qbit_status.py
│   │   │   │   ├── rclone_status.py
│   │   │   │   ├── split_status.py
│   │   │   │   ├── status_utils.py
│   │   │   │   ├── sync_status.py
│   │   │   │   ├── tg_download_status.py
│   │   │   │   ├── tg_upload_status.py
│   │   │   │   ├── yt_dlp_status.py
│   │   │   │   └── zip_status.py
│   │   │   └── upload_utils
│   │   │       ├── __init__.py
│   │   │       ├── rclone_mirror.py
│   │   │       └── telegram_uploader.py
│   │   └── telegram_helper
│   │       ├── bot_commands.py
│   │       ├── button_build.py
│   │       ├── filters.py
│   │       └── message_utils.py
│   └── modules
│       ├── __init__.py
│       ├── batch.py
│       ├── bisync.py
│       ├── botfiles.py
│       ├── cancel.py
│       ├── cleanup.py
│       ├── clone.py
│       ├── copy.py
│       ├── debrid.py
│       ├── exec.py
│       ├── gd_count.py
│       ├── leech.py
│       ├── mirror_leech.py
│       ├── mirror_select.py
│       ├── myfilesset.py
│       ├── owner_settings.py
│       ├── queue.py
│       ├── rcfm.py
│       ├── rss.py
│       ├── serve.py
│       ├── shell.py
│       ├── stats.py
│       ├── status.py
│       ├── storage.py
│       ├── sync.py
│       ├── tasks_listener.py
│       ├── tmdb.py
│       ├── torr_search.py
│       ├── torr_select.py
│       ├── user_settings.py
│       └── ytdlp.py
├── docker-compose.yml
├── gen_sa_accounts.py
├── generate_drive_token.py
├── qBittorrent
│   └── config
│       └── qBittorrent.conf
├── qbitweb
│   ├── __init__.py
│   ├── nodes.py
│   └── wserver.py
├── requirements-cli.txt
├── requirements.txt
├── sample_config.env
├── screenshot.png
├── session_generator.py
├── start.sh
└── update.py
/.github/workflows/deploy.yml:
--------------------------------------------------------------------------------
1 | name: Manually Deploy to Heroku
2 |
3 | on: workflow_dispatch
4 |
5 | jobs:
6 | deploy:
7 | runs-on: ubuntu-latest
8 | steps:
9 | - uses: actions/checkout@v2
10 | - uses: akhileshns/heroku-deploy@v3.12.12
11 | with:
12 | heroku_api_key: ${{secrets.HEROKU_API_KEY}}
13 | heroku_app_name: ${{secrets.HEROKU_APP_NAME}}
14 | heroku_email: ${{secrets.HEROKU_EMAIL}}
15 | usedocker: true
16 | docker_heroku_process_type: web
17 | stack: "container"
18 | region: "us"
19 | env:
20 | HD_CONFIG_FILE_URL: ${{secrets.CONFIG_FILE_URL}}
21 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | venv
2 | downloads
3 | users
4 | botlog.txt
5 | cookies.txt
6 | rclone.conf
7 | config.env
8 | *.json
9 | *.pickle
10 | *.pyc
11 | .netrc
12 | .vscode
13 | pyrogram.session
14 | pyrogram_session.session
15 | Thumbnails/*
16 |
17 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM sammax23/rcmltb
2 |
3 | WORKDIR /usr/src/app
4 | RUN chmod 777 /usr/src/app
5 |
6 | COPY requirements.txt .
7 | RUN pip3 install --no-cache-dir -r requirements.txt
8 |
9 | COPY . .
10 |
11 | CMD ["bash","start.sh"]
12 |
--------------------------------------------------------------------------------
/add_to_team_drive.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | from google.oauth2.service_account import Credentials
3 | import googleapiclient.discovery, json, progress.bar, glob, sys, argparse, time
4 | from google_auth_oauthlib.flow import InstalledAppFlow
5 | from google.auth.transport.requests import Request
6 | import os, pickle
7 |
8 | stt = time.time()
9 |
10 | parse = argparse.ArgumentParser(
11 | description='A tool to add service accounts to a shared drive from a folder containing credential files.')
12 | parse.add_argument('--path', '-p', default='accounts',
13 | help='Specify an alternative path to the service accounts folder.')
14 | parse.add_argument('--credentials', '-c', default='./credentials.json',
15 | help='Specify the relative path for the credentials file.')
16 | parse.add_argument('--yes', '-y', default=False, action='store_true', help='Skips the sanity prompt.')
17 | parsereq = parse.add_argument_group('required arguments')
18 | parsereq.add_argument('--drive-id', '-d', help='The ID of the Shared Drive.', required=True)
19 |
20 | args = parse.parse_args()
21 | acc_dir = args.path
22 | did = args.drive_id
23 | credentials = glob.glob(args.credentials)
24 |
25 | try:
26 | open(credentials[0], 'r')
27 | print('>> Found credentials.')
28 | except IndexError:
29 | print('>> No credentials found.')
30 |     sys.exit(1)
31 |
32 | if not args.yes:
33 | # input('Make sure the following client id is added to the shared drive as Manager:\n' + json.loads((open(
34 | # credentials[0],'r').read()))['installed']['client_id'])
35 |     input('>> Make sure the **Google account** that generated credentials.json\n is added to your Team Drive '
36 |           '(shared drive) as Manager\n>> (Press Enter to continue)')
37 |
38 | creds = None
39 | if os.path.exists('token_sa.pickle'):
40 | with open('token_sa.pickle', 'rb') as token:
41 | creds = pickle.load(token)
42 | # If there are no (valid) credentials available, let the user log in.
43 | if not creds or not creds.valid:
44 | if creds and creds.expired and creds.refresh_token:
45 | creds.refresh(Request())
46 | else:
47 | flow = InstalledAppFlow.from_client_secrets_file(credentials[0], scopes=[
48 | 'https://www.googleapis.com/auth/admin.directory.group',
49 | 'https://www.googleapis.com/auth/admin.directory.group.member'
50 | ])
51 | # creds = flow.run_local_server(port=0)
52 | creds = flow.run_console()
53 | # Save the credentials for the next run
54 | with open('token_sa.pickle', 'wb') as token:
55 | pickle.dump(creds, token)
56 |
57 | drive = googleapiclient.discovery.build("drive", "v3", credentials=creds)
58 | batch = drive.new_batch_http_request()
59 |
60 | aa = glob.glob('%s/*.json' % acc_dir)
61 | pbar = progress.bar.Bar("Readying accounts", max=len(aa))
62 | for i in aa:
63 | ce = json.loads(open(i, 'r').read())['client_email']
64 | batch.add(drive.permissions().create(fileId=did, supportsAllDrives=True, body={
65 | "role": "organizer",
66 | "type": "user",
67 | "emailAddress": ce
68 | }))
69 | pbar.next()
70 | pbar.finish()
71 | print('Adding...')
72 | batch.execute()
73 |
74 | print('Complete.')
75 | hours, rem = divmod((time.time() - stt), 3600)
76 | minutes, sec = divmod(rem, 60)
77 | print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), sec))
78 |
--------------------------------------------------------------------------------
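Usage, as implied by the argparse setup above: `python3 add_to_team_drive.py -d <shared_drive_id>`, with the service-account JSON keys read from ./accounts (override with --path/-p), the OAuth client file from ./credentials.json (override with --credentials/-c), and --yes/-y to skip the confirmation prompt.
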
/aria.sh:
--------------------------------------------------------------------------------
1 | tracker_list=$(curl -Ns https://ngosang.github.io/trackerslist/trackers_all_http.txt | awk '$0' | tr '\n\n' ',')
2 | aria2c --allow-overwrite=true --auto-file-renaming=true --bt-enable-lpd=true --bt-detach-seed-only=true \
3 | --bt-remove-unselected-file=true --bt-tracker="[$tracker_list]" --check-certificate=false \
4 | --check-integrity=true --continue=true --content-disposition-default-utf8=true --daemon=true \
5 | --disk-cache=40M --enable-rpc=true --follow-torrent=mem --force-save=true --http-accept-gzip=true \
6 | --max-connection-per-server=10 --max-concurrent-downloads=1000 --max-file-not-found=0 --max-tries=20 \
7 | --min-split-size=10M --optimize-concurrent-downloads=true --peer-id-prefix=-qB4390- --reuse-uri=true \
8 | --peer-agent=qBittorrent/4.5.2 --quiet=true --rpc-max-request-size=1024M --split=10 \
9 | --summary-interval=0 --user-agent=Wget/1.12
--------------------------------------------------------------------------------
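aria.sh daemonizes aria2c with RPC enabled; aria2c listens on its default RPC port 6800 and no --rpc-secret is set above. A minimal sketch of talking to that daemon with the aria2p package (the bot's own `aria2` object is created in bot/__init__.py, outside this listing, so the exact setup there may differ):

    import aria2p

    # Connect to the daemon started by aria.sh (default port 6800, empty secret).
    aria2 = aria2p.API(aria2p.Client(host="http://localhost", port=6800, secret=""))

    # Queue a download, much like the bot's own aria2.add(...) calls do.
    download = aria2.add_uris(["https://example.com/file.iso"], options={"dir": "/usr/src/app/downloads"})
    print(download.gid, download.name)
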
/bot/__main__.py:
--------------------------------------------------------------------------------
1 | from asyncio import create_subprocess_exec
2 | from signal import signal, SIGINT
3 | from aiofiles import open as aiopen
4 | from time import time
5 | from bot import (
6 | LOGGER,
7 | Interval,
8 | QbInterval,
9 | bot,
10 | bot_loop,
11 | scheduler,
12 | )
13 | from os import path as ospath, remove as osremove, execl as osexecl
14 | from bot.helper.ext_utils.help_messages import (
15 | create_batch_help_buttons,
16 | create_leech_help_buttons,
17 | create_mirror_help_buttons,
18 | create_ytdl_help_buttons,
19 | )
20 | from pyrogram.filters import command
21 | from pyrogram.handlers import MessageHandler
22 | from sys import executable
23 | from bot.helper.telegram_helper.button_build import ButtonMaker
24 | from bot.helper.mirror_leech_utils.download_utils.aria2_download import (
25 | start_aria2_listener,
26 | )
27 | from .helper.telegram_helper.bot_commands import BotCommands
28 | from .helper.ext_utils.bot_utils import cmd_exec, run_sync_to_async
29 | from json import loads
30 | from .helper.telegram_helper.filters import CustomFilters
31 | from .helper.telegram_helper.message_utils import editMessage, sendMarkup, sendMessage
32 | from .helper.ext_utils.misc_utils import clean_all, exit_clean_up, start_cleanup
33 | from .modules import (
34 | batch,
35 | cancel,
36 | botfiles,
37 | copy,
38 | debrid,
39 | leech,
40 | mirror_leech,
41 | mirror_select,
42 | myfilesset,
43 | owner_settings,
44 | rcfm,
45 | stats,
46 | status,
47 | clone,
48 | storage,
49 | cleanup,
50 | torr_search,
51 | torr_select,
52 | user_settings,
53 | ytdlp,
54 | shell,
55 | exec,
56 | rss,
57 | serve,
58 | sync,
59 | gd_count,
60 | queue,
61 | tmdb,
62 | )
63 |
64 |
65 | async def start(_, message):
66 | buttons = ButtonMaker()
67 | buttons.url_buildbutton("Repo", "https://github.com/Sam-Max/rcmltb")
68 | buttons.url_buildbutton("Owner", "https://github.com/Sam-Max")
69 | reply_markup = buttons.build_menu(2)
70 | if CustomFilters.user_filter or CustomFilters.chat_filter:
71 | msg = """
72 | **Hello, ¡Welcome to Rclone-Telegram-Bot!\n
73 | I can help you copy files from one cloud to another.
74 | I can also can mirror-leech files and links to Telegram or cloud**\n\n
75 | """
76 | await sendMarkup(msg, message, reply_markup)
77 | else:
78 | await sendMarkup(
79 | "Not Authorized user, deploy your own version", message, reply_markup
80 | )
81 |
82 |
83 | async def restart(_, message):
84 | restart_msg = await sendMessage("Restarting...", message)
85 | if scheduler.running:
86 | scheduler.shutdown(wait=False)
87 | if Interval:
88 | for intvl in list(Interval.values()):
89 | intvl.cancel()
90 | if QbInterval:
91 | QbInterval[0].cancel()
92 | await clean_all()
93 | await (
94 | await create_subprocess_exec(
95 | "pkill", "-9", "-f", "gunicorn|aria2c|rclone|qbittorrent-nox|ffmpeg"
96 | )
97 | ).wait()
98 | await (await create_subprocess_exec("python3", "update.py")).wait()
99 | async with aiopen(".restartmsg", "w") as f:
100 | await f.write(f"{restart_msg.chat.id}\n{restart_msg.id}\n")
101 | osexecl(executable, executable, "-m", "bot")
102 |
103 |
104 | async def ping(_, message):
105 | start_time = int(round(time() * 1000))
106 | reply = await sendMessage("Starting Ping", message)
107 | end_time = int(round(time() * 1000))
108 | await editMessage(f"{end_time - start_time} ms", reply)
109 |
110 |
111 | async def get_ip(_, message):
112 | stdout, stderr, code = await cmd_exec("curl https://api.ipify.org/", shell=True)
113 | if code == 0:
114 | await message.reply_text(f"Your IP is {stdout.strip()}")
115 | else:
116 | await message.reply_text(f"Error: {stderr}")
117 |
118 |
119 | async def get_log(client, message):
120 | await client.send_document(chat_id=message.chat.id, document="botlog.txt")
121 |
122 |
123 | async def main():
124 | await start_cleanup()
125 |
126 | await create_mirror_help_buttons()
127 | await create_ytdl_help_buttons()
128 | await create_leech_help_buttons()
129 | await create_batch_help_buttons()
130 |
131 | await torr_search.initiate_search_tools()
132 | await debrid.load_debrid_token()
133 | await run_sync_to_async(start_aria2_listener, wait=False)
134 |
135 | if ospath.isfile(".restartmsg"):
136 | with open(".restartmsg") as f:
137 | chat_id, msg_id = map(int, f)
138 | try:
139 | await bot.edit_message_text(chat_id, msg_id, "Restarted successfully!")
140 | except:
141 | pass
142 | osremove(".restartmsg")
143 |
144 | bot.add_handler(MessageHandler(start, filters=command(BotCommands.StartCommand)))
145 | bot.add_handler(
146 | MessageHandler(
147 | restart,
148 | filters=command(BotCommands.RestartCommand)
149 | & (CustomFilters.owner_filter | CustomFilters.sudo_filter),
150 | )
151 | )
152 | bot.add_handler(
153 | MessageHandler(
154 | get_log,
155 | filters=command(BotCommands.LogsCommand)
156 | & (CustomFilters.owner_filter | CustomFilters.sudo_filter),
157 | )
158 | )
159 | bot.add_handler(
160 | MessageHandler(
161 | ping,
162 | filters=command(BotCommands.PingCommand)
163 | & (CustomFilters.user_filter | CustomFilters.chat_filter),
164 | )
165 | )
166 | bot.add_handler(MessageHandler(get_ip, filters=command(BotCommands.IpCommand)))
167 | LOGGER.info("Bot Started!")
168 | signal(SIGINT, exit_clean_up)
169 |
170 |
171 | bot_loop.run_until_complete(main())
172 | bot_loop.run_forever()
173 |
--------------------------------------------------------------------------------
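Registering another command follows the same pattern as the handlers wired up in main() above. A sketch (hedged: `HelpCommand` is a hypothetical BotCommands entry used only for illustration):

    async def help_cmd(_, message):
        await sendMessage("Available commands: /mirror, /leech, /ytdl ...", message)

    bot.add_handler(
        MessageHandler(
            help_cmd,
            filters=command(BotCommands.HelpCommand)
            & (CustomFilters.user_filter | CustomFilters.chat_filter),
        )
    )
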
/bot/conv_pyrogram.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # https://github.com/Ripeey/Conversation-Pyrogram
3 | from collections import OrderedDict
4 | from typing import Union
5 | import pyrogram, asyncio
6 |
7 | class Conversation():
8 | """
9 |     A conversation plugin class for pyrogram using its built-in update handlers.
10 |     Complete list of handlers, to be used without the `Handler` postfix:
11 | https://docs.pyrogram.org/api/handlers#index
12 |
13 |
14 | Usage:
15 | In main.py where `Client` is initialized:
16 |
17 | app = Client('MyBot')
18 | Conversation(app) # That's it!
19 |
20 | Then just use inside any handler `client.listen`:
21 |
22 | @app.on_message()
23 | def button_click(client, update):
24 | answer = client.listen.CallbackQuery(filters.user(update.from_user.id))
25 |
26 |     Method client.listen.Message (or any other handler type)
27 | Parameters:
28 | filters:
29 | Single or combined filters like https://docs.pyrogram.org/topics/use-filters.
30 | Default is `None` but either filter or id is required.
31 |
32 | id:
33 |             An id to uniquely identify each listen; only required if you want to Cancel() it manually.
34 | You can pass any of the three types here:
35 | -> pyrogram.filters.user
36 | -> pyrogram.filters.chat
37 | -> str
38 |             If pyrogram's `user` or `chat` filter is passed as `id`, it gets combined with the rest of `filters`.
39 |
40 | Default is `None` but either filter or id is required.
41 |
42 | timeout:
43 |             Time in seconds (int) to wait for a response.
44 |
45 | Returns:
46 |         `update` (like pyrogram.types.Message ...etc) if the user responded within the given conditions.
47 |         `None` if the listen was cancelled using `listen.Cancel`.
48 |         `Exception` An asyncio.TimeoutError is raised if the waiting timeout occurs.
49 | Example:
50 | @app.on_message(filters.command('start'))
51 | async def start(client, message):
52 |             await client.send_message(message.chat.id, "What's your name?")
53 |             reply_msg = await client.listen.Message(filters.chat(message.chat.id), timeout = None)
54 | if reply_msg:
55 | reply_msg.reply(f'hello {reply_msg.text}')
56 |
57 |
58 | Method client.listen.Cancel
59 | Parameters:
60 | id:
61 |             An id to uniquely identify the listen you want to Cancel() manually.
62 | You can pass any of the three types here:
63 | -> pyrogram.filters.user
64 | -> pyrogram.filters.chat
65 | -> str
66 | Returns:
67 |         `Boolean` True if `id` was present and the listen was cancelled, or False if `id` was invalid.
68 |
69 | Example:
70 | @app.on_message(filters.command('stop'))
71 | async def stop(client, message):
72 | await client.listen.Cancel(message.from_user.id)
73 | """
74 | def __init__(self, client : pyrogram.Client):
75 | client.listen = self
76 | self.client = client
77 | self.handlers = {}
78 | self.hdlr_lock = asyncio.Lock()
79 |
80 | async def __add(self, hdlr, filters = None, id = None, timeout = None):
81 | _id = id
82 |
83 | if type(_id) in [pyrogram.filters.InvertFilter, pyrogram.filters.OrFilter, pyrogram.filters.AndFilter]:
84 |             raise ValueError('Combined filters are not allowed as a unique id.')
85 |
86 | if _id and type(_id) not in [pyrogram.filters.user, pyrogram.filters.chat, str]:
87 | raise TypeError('Unique (id) has to be one of pyrogram\'s filters user/chat or a string.')
88 |
89 | if not (_id or filters):
90 |             raise ValueError('At least one of filters or id is required as a parameter.')
91 |
92 | if str(_id) in self.handlers:
93 | await self.__remove(str(_id))
94 |             # raise ValueError('Duplicate id provided.')
95 |
96 | # callback handler
97 | async def dump(_, update):
98 | await self.__remove(dump._id, update)
99 |
100 | dump._id = str(_id) if _id else hash(dump)
101 | group = -0x3e7
102 | event = asyncio.Event()
103 | filters = (_id & filters) if _id and filters and not isinstance(_id, str) else filters or (filters if isinstance(_id, str) else _id)
104 | handler = hdlr(dump, filters)
105 |
106 |
107 | if group not in self.client.dispatcher.groups:
108 | self.client.dispatcher.groups[group] = []
109 | self.client.dispatcher.groups = OrderedDict(sorted(self.client.dispatcher.groups.items()))
110 |
111 | async with self.hdlr_lock:
112 | self.client.dispatcher.groups[group].append(handler)
113 | self.handlers[dump._id] = (handler, group, event)
114 |
115 | try:
116 | await asyncio.wait_for(event.wait(), timeout)
117 | except asyncio.exceptions.TimeoutError:
118 | await self.__remove(dump._id)
119 | raise asyncio.exceptions.TimeoutError
120 | finally:
121 | result = self.handlers.pop(dump._id, None)
122 | self.hdlr_lock.release()
123 | return result
124 |
125 | async def __remove(self, _id, update = None):
126 | handler, group, event = self.handlers[_id]
127 | self.client.dispatcher.groups[group].remove(handler)
128 | await self.hdlr_lock.acquire()
129 | self.handlers[_id] = update
130 | event.set()
131 |
132 | async def Cancel(self, _id):
133 | if str(_id) in self.handlers:
134 | await self.__remove(str(_id))
135 | return True
136 | else:
137 | return False
138 |
139 | def __getattr__(self, name):
140 | async def wrapper(*args, **kwargs):
141 | return await self.__add(getattr(pyrogram.handlers, f'{name}Handler'), *args, **kwargs)
142 | return wrapper
143 |
144 |
145 | from pyrogram import Client, filters
146 | from pyrogram.types import Message
147 | from asyncio.exceptions import TimeoutError
148 |
149 | async def listen_message(client:Client, chat_id:int, timeout=None) -> Union[Message, None]:
150 | try:
151 | return await client.listen.Message(filters.chat(chat_id), timeout=timeout)
152 | except TimeoutError:
153 | return None
--------------------------------------------------------------------------------
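A compact end-to-end sketch of the plugin above. `client.listen.Message` resolves through __getattr__ to pyrogram's MessageHandler; the /rename command is purely illustrative:

    from pyrogram import Client, filters
    from bot.conv_pyrogram import Conversation, listen_message

    app = Client("MyBot")
    Conversation(app)  # attaches itself as app.listen

    @app.on_message(filters.command("rename"))
    async def rename(client, message):
        await message.reply("Send the new file name (60s timeout):")
        reply = await listen_message(client, message.chat.id, timeout=60)
        if reply is None:
            await message.reply("Timed out or cancelled.")
        else:
            await message.reply(f"New name: {reply.text}")
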
/bot/helper/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sam-Max/rcmltb/49571f29c02f948c2327f1bc5aebbb93b352ef44/bot/helper/__init__.py
--------------------------------------------------------------------------------
/bot/helper/ext_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sam-Max/rcmltb/49571f29c02f948c2327f1bc5aebbb93b352ef44/bot/helper/ext_utils/__init__.py
--------------------------------------------------------------------------------
/bot/helper/ext_utils/batch_helper.py:
--------------------------------------------------------------------------------
1 | # Source: Tg:MaheshChauhan/DroneBots Github.com/Vasusen-code
2 |
3 | from re import findall
4 |
5 |
6 | def get_link(string):
7 | regex = r"(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'\".,<>?«»“”‘’]))"
8 | url = findall(regex, string)
9 | try:
10 | link = [x[0] for x in url][0]
11 | if link:
12 | return link
13 | else:
14 | return False
15 | except Exception:
16 | return False
17 |
--------------------------------------------------------------------------------
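Behaviour of get_link on sample inputs (results follow from the regex above):

    get_link("grab https://t.me/c/123456/789 now")  # -> "https://t.me/c/123456/789"
    get_link("no links in this text")               # -> False
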
/bot/helper/ext_utils/db_handler.py:
--------------------------------------------------------------------------------
1 | from os import path as ospath, makedirs
2 | from motor.motor_asyncio import AsyncIOMotorClient
3 | from pymongo.errors import PyMongoError
4 | from dotenv import dotenv_values
5 | from bot import (
6 | DATABASE_URL,
7 | user_data,
8 | rss_dict,
9 | LOGGER,
10 | bot_id,
11 | config_dict,
12 | aria2_options,
13 | qbit_options,
14 | bot_loop,
15 | )
16 |
17 |
18 | class DbManager:
19 | def __init__(self):
20 | self.__err = False
21 | self.__db = None
22 | self.__conn = None
23 | self.__connect()
24 |
25 | def __connect(self):
26 | try:
27 | self.__conn = AsyncIOMotorClient(DATABASE_URL)
28 | self.__db = self.__conn.rcmltb
29 | except PyMongoError as e:
30 | LOGGER.error(f"Error in DB connection: {e}")
31 | self.__err = True
32 |
33 | async def db_load(self):
34 | if self.__err:
35 | return
36 | # Save bot settings
37 | await self.__db.settings.config.update_one(
38 | {"_id": bot_id}, {"$set": config_dict}, upsert=True
39 | )
40 | # Save Aria2c options
41 | if await self.__db.settings.aria2c.find_one({"_id": bot_id}) is None:
42 | await self.__db.settings.aria2c.update_one(
43 | {"_id": bot_id}, {"$set": aria2_options}, upsert=True
44 | )
45 | # Save qbittorrent options
46 | if await self.__db.settings.qbittorrent.find_one({"_id": bot_id}) is None:
47 | await self.__db.settings.qbittorrent.update_one(
48 | {"_id": bot_id}, {"$set": qbit_options}, upsert=True
49 | )
50 | # User Data
51 | if await self.__db.users.find_one():
52 | rows = self.__db.users.find({})
53 | # user - return a dict ==> {_id, is_sudo, is_auth, as_doc, thumb, yt_opt, equal_splits, split_size, rclone}
54 | # owner - return a dict ==> {_id, is_sudo, is_auth, as_doc, thumb, yt_opt, equal_splits, split_size, rclone, rclone_global}
55 | async for row in rows:
56 | uid = row["_id"]
57 | del row["_id"]
58 | thumb_path = f"Thumbnails/{uid}.jpg"
59 | rclone_user = f"rclone/{uid}/rclone.conf"
60 |                 rclone_global = "rclone/rclone_global/rclone.conf"
61 | if row.get("thumb"):
62 | if not ospath.exists("Thumbnails"):
63 | makedirs("Thumbnails")
64 | with open(thumb_path, "wb+") as f:
65 | f.write(row["thumb"])
66 | row["thumb"] = thumb_path
67 | if row.get("rclone"):
68 | if not ospath.exists(f"rclone/{uid}"):
69 | makedirs(f"rclone/{uid}")
70 | with open(rclone_user, "wb+") as f:
71 | f.write(row["rclone"])
72 | if row.get("rclone_global"):
73 | if not ospath.exists("rclone/rclone_global"):
74 | makedirs("rclone/rclone_global")
75 | with open(rclone_global, "wb+") as f:
76 | f.write(row["rclone_global"])
77 | LOGGER.info("Users data has been imported from Database")
78 | # Rss Data
79 | if await self.__db.rss[bot_id].find_one():
80 | rows = self.__db.rss[bot_id].find(
81 | {}
82 | ) # return a dict ==> {_id, link, last_feed, last_name, filters}
83 | async for row in rows:
84 | title = row["_id"]
85 | del row["_id"]
86 | rss_dict[title] = row
87 | LOGGER.info("Rss data has been imported from Database.")
88 | self.__conn.close()
89 |
90 | async def update_deploy_config(self):
91 | if self.__err:
92 | return
93 | current_config = dict(dotenv_values("config.env"))
94 | await self.__db.settings.deployConfig.replace_one(
95 | {"_id": bot_id}, current_config, upsert=True
96 | )
97 |         self.__conn.close()
98 |
99 | async def update_config(self, dict_):
100 | if self.__err:
101 | return
102 | await self.__db.settings.config.update_one(
103 | {"_id": bot_id}, {"$set": dict_}, upsert=True
104 | )
105 |         self.__conn.close()
106 |
107 | async def update_aria2(self, key, value):
108 | if self.__err:
109 | return
110 | await self.__db.settings.aria2c.update_one(
111 | {"_id": bot_id}, {"$set": {key: value}}, upsert=True
112 | )
113 |         self.__conn.close()
114 |
115 | async def update_qbittorrent(self, key, value):
116 | if self.__err:
117 | return
118 | await self.__db.settings.qbittorrent.update_one(
119 | {"_id": bot_id}, {"$set": {key: value}}, upsert=True
120 | )
121 |         self.__conn.close()
122 |
123 | async def update_private_file(self, path):
124 | if self.__err:
125 | return
126 | if ospath.exists(path):
127 | with open(path, "rb+") as pf:
128 | pf_bin = pf.read()
129 | else:
130 | pf_bin = ""
131 | path = path.replace(".", "__")
132 | await self.__db.settings.files.update_one(
133 | {"_id": bot_id}, {"$set": {path: pf_bin}}, upsert=True
134 | )
135 | if path == "config.env":
136 | await self.update_deploy_config()
137 |         self.__conn.close()
138 |
139 | async def update_user_doc(self, user_id, key, path=""):
140 | if self.__err:
141 | return
142 | if path:
143 | with open(path, "rb+") as doc:
144 | doc_bin = doc.read()
145 | else:
146 | doc_bin = ""
147 | await self.__db.users.update_one(
148 | {"_id": user_id}, {"$set": {key: doc_bin}}, upsert=True
149 | )
150 |         self.__conn.close()
151 |
152 | async def update_user_data(self, user_id):
153 | if self.__err:
154 | return
155 | data = user_data[user_id]
156 | if data.get("thumb"):
157 | del data["thumb"]
158 | await self.__db.users.update_one({"_id": user_id}, {"$set": data}, upsert=True)
159 |         self.__conn.close()
160 |
161 | async def update_thumb(self, user_id, path=None):
162 | if self.__err:
163 | return
164 | if path is not None:
165 | with open(path, "rb+") as image:
166 | image_bin = image.read()
167 | else:
168 | image_bin = ""
169 | await self.__db.users.update_one(
170 | {"_id": user_id}, {"$set": {"thumb": image_bin}}, upsert=True
171 | )
172 |         self.__conn.close()
173 |
174 | async def rss_update(self, user_id):
175 | if self.__err:
176 | return
177 | await self.__db.rss[bot_id].replace_one(
178 | {"_id": user_id}, rss_dict[user_id], upsert=True
179 | )
180 |         self.__conn.close()
181 |
182 | async def rss_update_all(self):
183 | if self.__err:
184 | return
185 | for user_id in list(rss_dict.keys()):
186 | await self.__db.rss[bot_id].replace_one(
187 | {"_id": user_id}, rss_dict[user_id], upsert=True
188 | )
189 |         self.__conn.close()
190 |
191 | async def rss_delete(self, user_id):
192 | if self.__err:
193 | return
194 | await self.__db.rss[bot_id].delete_one({"_id": user_id})
195 |         self.__conn.close()
196 |
197 | async def trunc_table(self, name):
198 | if self.__err:
199 | return
200 | await self.__db[name][bot_id].drop()
201 |         self.__conn.close()
202 |
203 |
204 | if DATABASE_URL:
205 | bot_loop.run_until_complete(DbManager().db_load())
206 |
--------------------------------------------------------------------------------
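A sketch of the intended call pattern for the manager above, assuming the usual flow of a fresh DbManager per operation (mirroring how db_load() is run at import time); `set_as_doc` is a hypothetical caller:

    from bot import DATABASE_URL, user_data
    from bot.helper.ext_utils.db_handler import DbManager

    async def set_as_doc(user_id: int, value: bool):
        # Hypothetical example: flip a per-user setting in memory, then persist it.
        user_data.setdefault(user_id, {})["as_doc"] = value
        if DATABASE_URL:
            await DbManager().update_user_data(user_id)
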
/bot/helper/ext_utils/exceptions.py:
--------------------------------------------------------------------------------
1 | class DirectDownloadLinkException(Exception):
2 |     """No method was found for extracting a direct download link from the HTTP link"""
3 |
4 | pass
5 |
6 |
7 | class NotSupportedExtractionArchive(Exception):
8 |     """The archive format the user is trying to extract is not supported"""
9 |
10 | pass
11 |
12 |
13 | class NotRclonePathFound(Exception):
14 | """Rclone path not found"""
15 |
16 | pass
17 |
18 |
19 | class RssShutdownException(Exception):
20 |     """This exception should be raised when shutdown is called to stop the monitor"""
21 |
22 | pass
23 |
24 |
25 | class ProviderException(Exception):
26 | def __init__(self, message):
27 | self.message = message
28 | super().__init__(self.message)
29 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/human_format.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta
2 |
3 | SIZE_UNITS = ["B", "KB", "MB", "GB", "TB", "PB"]
4 |
5 |
6 | def get_readable_file_size(size_in_bytes) -> str:
7 | if size_in_bytes is None:
8 | return "0B"
9 | index = 0
10 | while size_in_bytes >= 1024 and index < len(SIZE_UNITS) - 1:
11 | size_in_bytes /= 1024
12 | index += 1
13 | return (
14 | f"{size_in_bytes:.2f}{SIZE_UNITS[index]}" if index > 0 else f"{size_in_bytes}B"
15 | )
16 |
17 |
18 | def human_readable_bytes(value, digits=2, delim="", postfix=""):
19 | """Return a human-readable file size."""
20 | if value is None:
21 | return None
22 | chosen_unit = "B"
23 | for unit in ("KiB", "MiB", "GiB", "TiB"):
24 | if value > 1000:
25 | value /= 1024
26 | chosen_unit = unit
27 | else:
28 | break
29 | return f"{value:.{digits}f}" + delim + chosen_unit + postfix
30 |
31 |
32 | def human_readable_timedelta(seconds, precision=0):
33 | """Return a human-readable time delta as a string."""
34 | pieces = []
35 | value = timedelta(seconds=seconds)
36 |
37 | if value.days:
38 | pieces.append(f"{value.days}d")
39 |
40 | seconds = value.seconds
41 |
42 | if seconds >= 3600:
43 | hours = int(seconds / 3600)
44 | pieces.append(f"{hours}h")
45 | seconds -= hours * 3600
46 |
47 | if seconds >= 60:
48 | minutes = int(seconds / 60)
49 | pieces.append(f"{minutes}m")
50 | seconds -= minutes * 60
51 |
52 | if seconds > 0 or not pieces:
53 | pieces.append(f"{seconds}s")
54 |
55 | if not precision:
56 | return "".join(pieces)
57 |
58 | return "".join(pieces[:precision])
59 |
--------------------------------------------------------------------------------
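Worked examples for the helpers above (outputs follow directly from the loops shown):

    get_readable_file_size(1048576)              # -> "1.00MB"
    human_readable_bytes(1048576)                # -> "1.00MiB"
    human_readable_timedelta(3725)               # -> "1h2m5s"
    human_readable_timedelta(3725, precision=2)  # -> "1h2m"
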
/bot/helper/ext_utils/media_utils.py:
--------------------------------------------------------------------------------
1 | from os import path as ospath
2 | from aiofiles.os import path as aiopath, makedirs
3 | from time import time
4 | from bot import LOGGER
5 | from bot.helper.ext_utils.bot_utils import cmd_exec
6 | from bot.helper.ext_utils.misc_utils import get_media_info
7 |
8 |
9 | async def take_ss(video_file, ss_nb) -> list:
10 | ss_nb = min(ss_nb, 10)
11 | duration = (await get_media_info(video_file))[0]
12 | if duration != 0:
13 | dirpath, name = video_file.rsplit("/", 1)
14 | name, _ = ospath.splitext(name)
15 | dirpath = f"{dirpath}/screenshots/"
16 | await makedirs(dirpath, exist_ok=True)
17 | interval = duration // (ss_nb + 1)
18 | cap_time = interval
19 | outputs = []
20 | cmd = ""
21 | for i in range(ss_nb):
22 | output = f"{dirpath}SS.{name}_{i:02}.png"
23 | outputs.append(output)
24 | cmd += f'ffmpeg -hide_banner -loglevel error -ss {cap_time} -i "{video_file}" -q:v 1 -frames:v 1 "{output}"'
25 | cap_time += interval
26 | if i + 1 != ss_nb:
27 | cmd += " && "
28 | _, err, code = await cmd_exec(cmd, True)
29 | if code != 0:
30 | LOGGER.error(
31 |                 f"Error while creating screenshots from video. Path: {video_file} stderr: {err}"
32 | )
33 | return []
34 | return outputs
35 | else:
36 | LOGGER.error("take_ss: Can't get the duration of video")
37 | return []
38 |
39 |
40 | async def create_thumb(video_file, duration):
41 | des_dir = "Thumbnails"
42 | await makedirs(des_dir, exist_ok=True)
43 | des_dir = ospath.join(des_dir, f"{time()}.jpg")
44 | if duration is None:
45 | duration = (await get_media_info(video_file))[0]
46 | if duration == 0:
47 | duration = 3
48 | duration = duration // 2
49 | cmd = [
50 | "ffmpeg",
51 | "-hide_banner",
52 | "-loglevel",
53 | "error",
54 | "-ss",
55 | str(duration),
56 | "-i",
57 | video_file,
58 | "-vf",
59 | "thumbnail",
60 | "-frames:v",
61 | "1",
62 | des_dir,
63 | ]
64 | _, err, code = await cmd_exec(cmd)
65 | if code != 0 or not await aiopath.exists(des_dir):
66 | LOGGER.error(
67 | f"Error while extracting thumbnail from video. Name: {video_file} stderr: {err}"
68 | )
69 | return None
70 | return des_dir
71 |
--------------------------------------------------------------------------------
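Usage sketch for the two helpers above (paths are illustrative):

    screenshots = await take_ss("/downloads/video.mkv", ss_nb=4)
    # -> up to 4 PNGs under /downloads/screenshots/, [] on failure; ss_nb is capped at 10

    thumb = await create_thumb("/downloads/video.mkv", duration=None)
    # -> "Thumbnails/<timestamp>.jpg" taken at the video midpoint, or None on failure
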
/bot/helper/ext_utils/menu_utils.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.human_format import get_readable_file_size
2 | from bot.helper.ext_utils.rclone_data_holder import update_rclone_data
3 |
4 |
5 | class Menus:
6 | LEECH = "leechmenu"
7 | COPY = "copymenu"
8 | MYFILES = "myfilesmenu"
9 | STORAGE = "storagemenu"
10 | CLEANUP = "cleanupmenu"
11 | SYNC = "syncmenu"
12 | REMOTE_SELECT = "remoteselectmenu"
13 | MIRROR_SELECT = "mirrorselectmenu"
14 |
15 |
16 | def rcloneListNextPage(info, offset=0, max_results=10):
17 | start = offset
18 | end = max_results + start
19 | total = len(info)
20 | next_offset = offset + max_results
21 |
22 | if end > total:
23 | next_page = info[start:]
24 | elif start >= total:
25 | next_page = []
26 | else:
27 | next_page = info[start:end]
28 |
29 | return next_page, next_offset
30 |
31 |
32 | def rcloneListButtonMaker(
33 | info, button, menu_type, dir_callback, file_callback, user_id
34 | ):
35 | for index, dir in enumerate(info):
36 | path = dir["Path"]
37 | update_rclone_data(str(index), path, user_id)
38 |
39 | if dir["MimeType"] == "inode/directory":
40 | button.cb_buildbutton(
41 | f"📁{path}", data=f"{menu_type}^{dir_callback}^{index}^{user_id}")
42 | else:
43 | size = get_readable_file_size(dir["Size"])
44 | button.cb_buildbutton(
45 | f"[{size}] {path}",
46 | data=f"{menu_type}^{file_callback}^{index}^True^{user_id}",
47 | )
48 |
--------------------------------------------------------------------------------
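A worked pass through the pagination helper (values follow from the code above):

    info = [f"item{i}" for i in range(25)]
    page, nxt = rcloneListNextPage(info, offset=20, max_results=10)
    # end = 30 > total = 25, so page == info[20:] (the last 5 entries) and nxt == 30
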
/bot/helper/ext_utils/rclone_data_holder.py:
--------------------------------------------------------------------------------
1 | rclone_dict = {}
2 |
3 |
4 | def get_rclone_data(key, user_id):
5 | value_dict = rclone_dict.get(user_id, {})
6 | return value_dict.get(key, "")
7 |
8 |
9 | def update_rclone_data(key, value, user_id):
10 | if user_id in rclone_dict:
11 | rclone_dict[user_id][key] = value
12 | else:
13 | rclone_dict[user_id] = {key: value}
14 |
--------------------------------------------------------------------------------
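Round-trip of the per-user store above (the key name here is illustrative):

    update_rclone_data("MYFILES_REMOTE", "gdrive", user_id=123)
    get_rclone_data("MYFILES_REMOTE", user_id=123)  # -> "gdrive"
    get_rclone_data("unset_key", user_id=123)       # -> "" (missing keys fall back to "")
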
/bot/helper/ext_utils/telegraph_helper.py:
--------------------------------------------------------------------------------
1 | from asyncio import sleep
2 | from string import ascii_letters
3 | from random import SystemRandom
4 | from telegraph.aio import Telegraph
5 | from telegraph.exceptions import RetryAfterError
6 | from bot import LOGGER, bot_loop
7 |
8 |
9 | class TelegraphHelper:
10 | def __init__(self, author_name=None, author_url=None):
11 | self.telegraph = Telegraph(domain="graph.org")
12 | self.short_name = "".join(SystemRandom().choices(ascii_letters, k=8))
13 | self.access_token = None
14 | self.author_name = author_name
15 | self.author_url = author_url
16 |
17 | async def create_account(self):
18 | await self.telegraph.create_account(
19 | short_name=self.short_name,
20 | author_name=self.author_name,
21 | author_url=self.author_url,
22 | )
23 | self.access_token = self.telegraph.get_access_token()
24 | LOGGER.info("Creating Telegraph Account")
25 |
26 | async def create_page(self, title, content):
27 | try:
28 | return await self.telegraph.create_page(
29 | title=title,
30 | author_name=self.author_name,
31 | author_url=self.author_url,
32 | html_content=content,
33 | )
34 | except RetryAfterError as st:
35 | LOGGER.warning(
36 | f"Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds."
37 | )
38 | await sleep(st.retry_after)
39 | return await self.create_page(title, content)
40 |
41 | async def edit_page(self, path, title, content):
42 | try:
43 | return await self.telegraph.edit_page(
44 | path=path,
45 | title=title,
46 | author_name=self.author_name,
47 | author_url=self.author_url,
48 | html_content=content,
49 | )
50 | except RetryAfterError as st:
51 | LOGGER.warning(
52 | f"Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds."
53 | )
54 | await sleep(st.retry_after)
55 | return await self.edit_page(path, title, content)
56 |
57 | async def edit_telegraph(self, path, telegraph_content):
58 | nxt_page = 1
59 | prev_page = 0
60 | num_of_path = len(path)
61 | for content in telegraph_content:
62 | if nxt_page == 1:
63 |                 content += (
64 |                     f'<b><a href="https://telegra.ph/{path[nxt_page]}">Next</a></b>'
65 |                 )
66 | nxt_page += 1
67 | else:
68 | if prev_page <= num_of_path:
69 |                     content += f'<b><a href="https://telegra.ph/{path[prev_page]}">Prev</a></b>'
70 | prev_page += 1
71 | if nxt_page < num_of_path:
72 |                     content += f'<b> | <a href="https://telegra.ph/{path[nxt_page]}">Next</a></b>'
73 | nxt_page += 1
74 | await self.edit_page(
75 | path=path[prev_page],
76 | title="Rclone-Telegram-Bot Torrent Search",
77 | content=content,
78 | )
79 | return
80 |
81 |
82 | telegraph = TelegraphHelper("Rclone-Telegram-Bot", "https://github.com/Sam-Max/rcmltb/")
83 | bot_loop.run_until_complete(telegraph.create_account())
84 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sam-Max/rcmltb/49571f29c02f948c2327f1bc5aebbb93b352ef44/bot/helper/mirror_leech_utils/__init__.py
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/download_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sam-Max/rcmltb/49571f29c02f948c2327f1bc5aebbb93b352ef44/bot/helper/mirror_leech_utils/download_utils/__init__.py
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/download_utils/aria2_download.py:
--------------------------------------------------------------------------------
1 | from asyncio import sleep
2 | from time import time
2 | from os import path as ospath, remove as osremove
3 | from bot import (
4 | LOGGER,
5 | config_dict,
6 | status_dict_lock,
7 | status_dict,
8 | aria2,
9 | aria2c_global,
10 | aria2_options,
11 | )
12 | from bot.helper.ext_utils.bot_utils import clean_unwanted, new_thread, run_sync_to_async
13 | from bot.helper.telegram_helper.message_utils import (
14 | sendMessage,
15 | sendStatusMessage,
16 | update_all_messages,
17 | )
18 | from bot.helper.ext_utils.misc_utils import getTaskByGid
19 | from bot.helper.mirror_leech_utils.status_utils.aria_status import AriaStatus
20 |
21 |
22 | async def add_aria2c_download(link, path, listener, filename, auth):
23 | a2c_opt = {**aria2_options}
24 | [a2c_opt.pop(k) for k in aria2c_global if k in aria2_options]
25 | a2c_opt["dir"] = path
26 | if filename:
27 | a2c_opt["out"] = filename
28 | if auth:
29 | a2c_opt["header"] = f"authorization: {auth}"
30 | if TORRENT_TIMEOUT := config_dict["TORRENT_TIMEOUT"]:
31 | a2c_opt["bt-stop-timeout"] = f"{TORRENT_TIMEOUT}"
32 | try:
33 | download = (await run_sync_to_async(aria2.add, link, a2c_opt))[0]
34 | except Exception as e:
35 | LOGGER.info(f"Aria2c Download Error: {e}")
36 | await sendMessage(f"{e}", listener.message)
37 | return
38 | if ospath.exists(link):
39 | osremove(link)
40 | if download.error_message:
41 | error = str(download.error_message).replace("<", " ").replace(">", " ")
42 | LOGGER.info(f"Aria2c Download Error: {error}")
43 | await sendMessage(error, listener.message)
44 | return
45 |
46 | gid = download.gid
47 | name = download.name
48 | async with status_dict_lock:
49 | status_dict[listener.uid] = AriaStatus(gid, listener)
50 |
51 | if not config_dict["NO_TASKS_LOGS"]:
52 | LOGGER.info(f"Aria2Download started: {name}. Gid: {gid}")
53 |
54 | await listener.onDownloadStart()
55 | await sendStatusMessage(listener.message)
56 |
57 |
58 | ###### ARIA LISTENER #######
59 |
60 |
61 | @new_thread
62 | async def __onDownloadStarted(api, gid):
63 | download = await run_sync_to_async(api.get_download, gid)
64 | if not config_dict["NO_TASKS_LOGS"]:
65 | LOGGER.info(f"onDownloadStarted: {download.name} - Gid: {gid}")
66 |
67 |
68 | @new_thread
69 | async def __onDownloadComplete(api, gid):
70 | try:
71 | download = await run_sync_to_async(api.get_download, gid)
72 | except:
73 | return
74 | if not config_dict["NO_TASKS_LOGS"]:
75 | LOGGER.info(f"onDownloadComplete: {download.name} - Gid: {gid}")
76 | if dl := await getTaskByGid(gid):
77 | listener = dl.listener()
78 | await listener.onDownloadComplete()
79 | await run_sync_to_async(api.remove, [download], force=True, files=True)
80 |
81 |
82 | @new_thread
83 | async def __onBtDownloadComplete(api, gid):
84 | seed_start_time = time()
85 | await sleep(1)
86 | download = await run_sync_to_async(api.get_download, gid)
87 | if not config_dict["NO_TASKS_LOGS"]:
88 | LOGGER.info(f"onBtDownloadComplete: {download.name} - Gid: {gid}")
89 | if dl := await getTaskByGid(gid):
90 | listener = dl.listener()
91 | if listener.select:
92 | res = download.files
93 | for file_o in res:
94 | f_path = file_o.path
95 | if not file_o.selected and ospath.exists(f_path):
96 | try:
97 | osremove(f_path)
98 | except:
99 | pass
100 | await clean_unwanted(download.dir)
101 | if listener.seed:
102 | try:
103 | await run_sync_to_async(
104 | api.set_options, {"max-upload-limit": "0"}, [download]
105 | )
106 | except Exception as e:
107 | LOGGER.error(
108 | f"{e} You are not able to seed because you added global option seed-time=0 without adding specific seed_time for this torrent GID: {gid}"
109 | )
110 | else:
111 | try:
112 | await run_sync_to_async(api.client.force_pause, gid)
113 | except Exception as e:
114 | LOGGER.error(f"{e} GID: {gid}")
115 | await listener.onDownloadComplete()
116 | download = download.live
117 | if listener.seed:
118 | if download.is_complete:
119 | if dl := await getTaskByGid(gid):
120 | if not config_dict["NO_TASKS_LOGS"]:
121 | LOGGER.info(f"Cancelling Seed: {download.name}")
122 | await listener.onUploadError(
123 | f"Seeding stopped with Ratio: {dl.ratio()} and Time: {dl.seeding_time()}"
124 | )
125 | await run_sync_to_async(
126 | api.remove, [download], force=True, files=True
127 | )
128 | else:
129 | async with status_dict_lock:
130 | if listener.uid not in status_dict:
131 | await run_sync_to_async(
132 | api.remove, [download], force=True, files=True
133 | )
134 | return
135 | status_dict[listener.uid] = AriaStatus(gid, listener, True)
136 | status_dict[listener.uid].start_time = seed_start_time
137 | if not config_dict["NO_TASKS_LOGS"]:
138 | LOGGER.info(f"Seeding started: {download.name} - Gid: {gid}")
139 | await update_all_messages()
140 | else:
141 | await run_sync_to_async(api.remove, [download], force=True, files=True)
142 |
143 |
144 | @new_thread
145 | async def __onDownloadStopped(api, gid):
146 | await sleep(6)
147 | if dl := await getTaskByGid(gid):
148 | listener = dl.listener()
149 | await listener.onDownloadError("Dead torrent!")
150 |
151 |
152 | @new_thread
153 | async def __onDownloadError(api, gid):
154 | LOGGER.info(f"onDownloadError: {gid}")
155 | error = "None"
156 | try:
157 | download = await run_sync_to_async(api.get_download, gid)
158 | error = download.error_message
159 | LOGGER.info(f"Download Error: {error}")
160 | except:
161 | pass
162 | if dl := await getTaskByGid(gid):
163 | listener = dl.listener()
164 | await listener.onDownloadError(error)
165 |
166 |
167 | def start_aria2_listener():
168 | aria2.listen_to_notifications(
169 | threaded=False,
170 | on_download_start=__onDownloadStarted,
171 | on_download_error=__onDownloadError,
172 | on_download_stop=__onDownloadStopped,
173 | on_download_complete=__onDownloadComplete,
174 | on_bt_download_complete=__onBtDownloadComplete,
175 | timeout=60,
176 | )
177 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/download_utils/direct_link_generator_license.md:
--------------------------------------------------------------------------------
1 | RAPHIELSCAPE PUBLIC LICENSE
2 | Version 1.c, June 2019
3 |
4 | Copyright (C) 2019 Raphielscape LLC.
5 | Copyright (C) 2019 Devscapes Open Source Holding GmbH.
6 |
7 | Everyone is permitted to copy and distribute verbatim or modified
8 | copies of this license document, and changing it is allowed as long
9 | as the name is changed.
10 |
11 | RAPHIELSCAPE PUBLIC LICENSE
12 | A-1. DEFINITIONS
13 |
14 | 0. “This License” refers to version 1.c of the Raphielscape Public License.
15 |
16 | 1. “Copyright” also means copyright-like laws that apply to other kinds of works.
17 |
18 | 2. “The Work" refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”.
19 | “Licensees” and “recipients” may be individuals or organizations.
20 |
21 | 3. To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission,
22 | other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work
23 | or a work “based on” the earlier work.
24 |
25 | 4. Source Form. The “source form” for a work means the preferred form of the work for making modifications to it.
26 | “Object code” means any non-source form of a work.
27 |
28 | The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and
29 | (for an executable work) run the object code and to modify the work, including scripts to control those activities.
30 |
31 | The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source.
32 | The Corresponding Source for a work in source code form is that same work.
33 |
34 | 5. "The author" refers to "author" of the code, which is the one that made the particular code which exists inside of
35 | the Corresponding Source.
36 |
37 | 6. "Owner" refers to any parties which is made the early form of the Corresponding Source.
38 |
39 | A-2. TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
40 |
41 | 0. You must give any other recipients of the Work or Derivative Works a copy of this License; and
42 |
43 | 1. You must cause any modified files to carry prominent notices stating that You changed the files; and
44 |
45 | 2. You must retain, in the Source form of any Derivative Works that You distribute,
46 | this license, all copyright, patent, trademark, authorships and attribution notices
47 | from the Source form of the Work; and
48 |
49 | 3. Respecting the author and owner of works that are distributed in any way.
50 |
51 | You may add Your own copyright statement to Your modifications and may provide
52 | additional or different license terms and conditions for use, reproduction,
53 | or distribution of Your modifications, or for any such Derivative Works as a whole,
54 | provided Your use, reproduction, and distribution of the Work otherwise complies
55 | with the conditions stated in this License.
56 |
57 | B. DISCLAIMER OF WARRANTY
58 |
59 | THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR
60 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
61 | FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS
62 | BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
63 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
64 | OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
65 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
66 | OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
67 |
68 | C. REVISED VERSION OF THIS LICENSE
69 |
70 | The Devscapes Open Source Holding GmbH. may publish revised and/or new versions of the
71 | Raphielscape Public License from time to time. Such new versions will be similar in spirit
72 | to the present version, but may differ in detail to address new problems or concerns.
73 |
74 | Each version is given a distinguishing version number. If the Program specifies that a
75 | certain numbered version of the Raphielscape Public License "or any later version" applies to it,
76 | you have the option of following the terms and conditions either of that numbered version or of
77 | any later version published by the Devscapes Open Source Holding GmbH. If the Program does not specify a
78 | version number of the Raphielscape Public License, you may choose any version ever published
79 | by the Devscapes Open Source Holding GmbH.
80 |
81 | END OF LICENSE
82 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/download_utils/gd_downloader.py:
--------------------------------------------------------------------------------
1 | from random import SystemRandom
2 | from string import ascii_letters, digits
3 | from bot import status_dict, status_dict_lock, config_dict, LOGGER
4 | from bot.helper.ext_utils.bot_utils import run_sync_to_async
5 | from bot.helper.mirror_leech_utils.gd_utils.count import gdCount
6 | from bot.helper.mirror_leech_utils.gd_utils.download import gdDownload
7 | from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage
8 | from bot.helper.mirror_leech_utils.status_utils.gdrive_status import GdriveStatus
9 |
10 |
11 | async def add_gd_download(link, new_name, path, listener):
12 | drive = gdCount()
13 | name, mime_type, size, _, _ = await run_sync_to_async(
14 | drive.count, link, listener.user_id
15 | )
16 | if mime_type is None:
17 | await sendMessage(name, listener.message)
18 | return
19 |
20 | gid = "".join(SystemRandom().choices(ascii_letters + digits, k=12))
21 | name = new_name or name
22 |
23 | drive = gdDownload(listener, name, link, path)
24 | async with status_dict_lock:
25 | status_dict[listener.uid] = GdriveStatus(drive, size, listener.message, gid)
26 |
27 | if not config_dict["NO_TASKS_LOGS"]:
28 | LOGGER.info(f"Download from GDrive: {name}")
29 |
30 | await listener.onDownloadStart()
31 | await sendStatusMessage(listener.message)
32 | await run_sync_to_async(drive.download)
33 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/download_utils/mega_download.py:
--------------------------------------------------------------------------------
1 | from random import SystemRandom
2 | from string import ascii_letters, digits
3 | from os import makedirs
4 | from threading import Event
5 | from mega import MegaApi, MegaListener, MegaRequest, MegaTransfer, MegaError
6 | from bot import LOGGER, config_dict, status_dict, status_dict_lock
7 | from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage
8 | from bot.helper.mirror_leech_utils.status_utils.mega_status import MegaDownloadStatus
9 | from bot.helper.ext_utils.bot_utils import (
10 | get_mega_link_type,
11 | run_async_to_sync,
12 | run_sync_to_async,
13 | )
14 |
15 |
16 | class MegaAppListener(MegaListener):
17 | _NO_EVENT_ON = (MegaRequest.TYPE_LOGIN, MegaRequest.TYPE_FETCH_NODES)
18 | NO_ERROR = "no error"
19 |
20 | def __init__(self, continue_event: Event, listener):
21 | self.continue_event = continue_event
22 | self.node = None
23 | self.public_node = None
24 | self.listener = listener
25 | self.__bytes_transferred = 0
26 | self.is_cancelled = False
27 | self.completed = False
28 | self.isFile = False
29 | self.__speed = 0
30 | self.__name = ""
31 | self.error = None
32 | super().__init__()
33 |
34 | @property
35 | def speed(self):
36 | return self.__speed
37 |
38 | @property
39 | def downloaded_bytes(self):
40 | return self.__bytes_transferred
41 |
42 | def onRequestFinish(self, api, request, error):
43 | if str(error).lower() != "no error":
44 | self.error = error.copy()
45 | LOGGER.error(f"Mega onRequestFinishError: {self.error}")
46 | self.continue_event.set()
47 | return
48 | request_type = request.getType()
49 | if request_type == MegaRequest.TYPE_LOGIN:
50 | api.fetchNodes()
51 | elif request_type == MegaRequest.TYPE_GET_PUBLIC_NODE:
52 | self.public_node = request.getPublicMegaNode()
53 | self.__name = self.public_node.getName()
54 | elif request_type == MegaRequest.TYPE_FETCH_NODES:
55 | LOGGER.info("Fetching Root Node.")
56 | self.node = api.getRootNode()
57 | self.__name = self.node.getName()
58 | LOGGER.info(f"Node Name: {self.node.getName()}")
59 | if (
60 | request_type not in self._NO_EVENT_ON
61 | or self.node
62 | and "cloud drive" not in self.__name.lower()
63 | ):
64 | self.continue_event.set()
65 |
66 | def onRequestTemporaryError(self, api, request, error: MegaError):
67 |         LOGGER.error(f"Mega request error: {error}")
68 | if not self.is_cancelled:
69 | self.is_cancelled = True
70 | run_async_to_sync(
71 | self.listener.onDownloadError, f"RequestTempError: {error.toString()}"
72 | )
73 | self.error = error.toString()
74 | self.continue_event.set()
75 |
76 | def onTransferUpdate(self, api: MegaApi, transfer: MegaTransfer):
77 | if self.is_cancelled:
78 | api.cancelTransfer(transfer, None)
79 | self.continue_event.set()
80 | return
81 | self.__speed = transfer.getSpeed()
82 | self.__bytes_transferred = transfer.getTransferredBytes()
83 |
84 | def onTransferFinish(self, api: MegaApi, transfer: MegaTransfer, error):
85 | try:
86 | if self.is_cancelled:
87 | self.continue_event.set()
88 | elif transfer.isFinished() and (transfer.isFolderTransfer() or self.isFile):
89 | self.completed = True
90 | self.continue_event.set()
91 | except Exception as e:
92 | LOGGER.error(e)
93 |
94 | def onTransferTemporaryError(self, api, transfer, error):
95 | filen = transfer.getFileName()
96 | state = transfer.getState()
97 | errStr = error.toString()
98 | LOGGER.error(f"Mega download error in file {transfer} {filen}: {error}")
99 | if state in [1, 4]:
100 |             # Sometimes the official MEGA client can't stream a node either and raises a temporary failure.
101 |             # Don't break the transfer queue if the transfer is in queued (1) or retrying (4) state [causes seg fault]
102 | return
103 |
104 | self.error = errStr
105 | if not self.is_cancelled:
106 | self.is_cancelled = True
107 | run_async_to_sync(
108 | self.listener.onDownloadError, f"TransferTempError: {errStr} ({filen})"
109 | )
110 | self.continue_event.set()
111 |
112 | async def cancel_task(self):
113 | self.is_cancelled = True
114 | await self.listener.onDownloadError("Download Canceled by user")
115 |
116 |
117 | class AsyncExecutor:
118 | def __init__(self):
119 | self.continue_event = Event()
120 |
121 | def do(self, function, args):
122 | self.continue_event.clear()
123 | function(*args)
124 | self.continue_event.wait()
125 |
126 |
127 | async def add_mega_download(mega_link, path: str, listener, name: str):
128 | MEGA_EMAIL = config_dict["MEGA_EMAIL"]
129 | MEGA_PASSWORD = config_dict["MEGA_PASSWORD"]
130 |
131 | api = MegaApi(None, None, None, "rcmltb")
132 | executor = AsyncExecutor()
133 | folder_api = None
134 |
135 | mega_listener = MegaAppListener(executor.continue_event, listener)
136 | api.addListener(mega_listener)
137 |
138 | if MEGA_EMAIL and MEGA_PASSWORD:
139 | await run_sync_to_async(executor.do, api.login, (MEGA_EMAIL, MEGA_PASSWORD))
140 |
141 | if get_mega_link_type(mega_link) == "file":
142 | await run_sync_to_async(executor.do, api.getPublicNode, (mega_link,))
143 | node = mega_listener.public_node
144 | mega_listener.isFile = True
145 | else:
146 | folder_api = MegaApi(None, None, None, "rcmltb")
147 | folder_api.addListener(mega_listener)
148 | await run_sync_to_async(executor.do, folder_api.loginToFolder, (mega_link,))
149 | node = await run_sync_to_async(folder_api.authorizeNode, mega_listener.node)
150 | if mega_listener.error is not None:
151 | await sendMessage(str(mega_listener.error), listener.message)
152 | await run_sync_to_async(executor.do, api.logout, ())
153 | if folder_api is not None:
154 | await run_sync_to_async(executor.do, folder_api.logout, ())
155 | return
156 |
157 | gid = "".join(SystemRandom().choices(ascii_letters + digits, k=8))
158 | name = name or node.getName()
159 | size = api.getSize(node)
160 |
161 | async with status_dict_lock:
162 | status_dict[listener.uid] = MegaDownloadStatus(
163 | name, size, gid, mega_listener, listener
164 | )
165 |
166 | await sendStatusMessage(listener.message)
167 |
168 | if not config_dict["NO_TASKS_LOGS"]:
169 | LOGGER.info(f"Download from Mega: {name}")
170 |
171 | makedirs(path, exist_ok=True)
172 | await run_sync_to_async(
173 | executor.do, api.startDownload, (node, path, name, None, False, None)
174 | )
175 | await run_sync_to_async(executor.do, api.logout, ())
176 | if folder_api is not None:
177 | await run_sync_to_async(executor.do, folder_api.logout, ())
178 |
179 | if mega_listener.completed:
180 | await listener.onDownloadComplete()
181 | elif (error := mega_listener.error) and mega_listener.is_cancelled:
182 | await listener.onDownloadError(error)
183 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/download_utils/rclone_copy.py:
--------------------------------------------------------------------------------
1 | from asyncio import create_subprocess_exec
2 | from asyncio.subprocess import PIPE
3 | from configparser import ConfigParser
4 | from os import listdir, path as ospath
5 | from random import SystemRandom, randrange
6 | from string import ascii_letters, digits
7 | from bot import LOGGER, status_dict, status_dict_lock, config_dict
8 | from bot.helper.telegram_helper.message_utils import sendStatusMessage
9 | from bot.helper.ext_utils.rclone_utils import (
10 | get_rclone_path,
11 | is_gdrive_remote,
12 | setRcloneFlags,
13 | )
14 | from bot.helper.mirror_leech_utils.status_utils.rclone_status import RcloneStatus
15 | from bot.helper.mirror_leech_utils.status_utils.status_utils import MirrorStatus
16 |
17 |
18 | class RcloneCopy:
19 | def __init__(self, user_id, listener) -> None:
20 | self.__listener = listener
21 | self._user_id = user_id
22 | self.message = self.__listener.message
23 | self.name = ""
24 | self.sa_error = ""
25 | self.size = 0
26 | self.__sa_count = 0
27 | self.__sa_number = 0
28 | self.__service_account_index = 0
29 | self.process = None
30 | self.__is_gdrive = False
31 | self.__is_cancelled = False
32 | self.status_type = MirrorStatus.STATUS_COPYING
33 |
34 | async def copy(self, origin_remote, origin_dir, dest_remote, dest_dir):
35 | rc_config = await get_rclone_path(self._user_id, self.message)
36 | self.__is_gdrive = await is_gdrive_remote(dest_remote, rc_config)
37 |
38 | if config_dict["USE_SERVICE_ACCOUNTS"] and ospath.exists("accounts"):
39 | self.__sa_number = len(listdir("accounts"))
40 | if self.__sa_count == 0:
41 | self.__service_account_index = randrange(self.__sa_number)
42 | config = ConfigParser()
43 | config.read(rc_config)
44 | if SERVICE_ACCOUNTS_REMOTE := config_dict["SERVICE_ACCOUNTS_REMOTE"]:
45 | if SERVICE_ACCOUNTS_REMOTE in config.sections():
46 | if id := config[SERVICE_ACCOUNTS_REMOTE]["team_drive"]:
47 | self.__create_teamdrive_sa_config(rc_config, id)
48 | LOGGER.info(
49 | f"Using service account remote {SERVICE_ACCOUNTS_REMOTE}"
50 | )
51 | else:
52 |                     LOGGER.info(f"Remote {SERVICE_ACCOUNTS_REMOTE} not found in rclone.conf")
53 | else:
54 | LOGGER.info("No SERVICE_ACCOUNTS_REMOTE found")
55 |
56 | source = f"{origin_remote}:{origin_dir}"
57 |
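58 |         # If the source path looks like a file, name the destination folder after
59 |         # it minus the extension; otherwise reuse the directory name as-is.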
58 | if ospath.splitext(origin_dir)[1]:
59 | folder_name = ospath.splitext(origin_dir)[0]
60 | else:
61 | folder_name = origin_dir
62 |
63 | destination = f"{dest_remote}:{dest_dir}{folder_name}"
64 |
65 | cmd = [
66 | "rclone",
67 | "copy",
68 | f"--config={rc_config}",
69 | "--ignore-case",
70 | source,
71 | destination,
72 | "--drive-acknowledge-abuse",
73 | "-P",
74 | ]
75 |
76 | await setRcloneFlags(cmd, "copy")
77 | self.name = source
78 | gid = "".join(SystemRandom().choices(ascii_letters + digits, k=10))
79 |
80 | self.process = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
81 | async with status_dict_lock:
82 | status = RcloneStatus(self, self.__listener, gid)
83 | status_dict[self.__listener.uid] = status
84 | await sendStatusMessage(self.message)
85 | await status.start()
86 |
87 | return_code = await self.process.wait()
88 |
89 | if self.__is_cancelled:
90 | return
91 |
92 | if return_code == 0:
93 | await self.__listener.onRcloneCopyComplete(
94 | rc_config, destination, folder_name, self.__is_gdrive
95 | )
96 | else:
97 | err = (await self.process.stderr.read()).decode().strip()
98 | LOGGER.info(f"Error: {err}")
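99 |             # On Drive rate-limit errors, rotate to the next service account and
100 |             # retry the whole copy until every account has been tried once.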
99 | if config_dict["USE_SERVICE_ACCOUNTS"] and "RATE_LIMIT_EXCEEDED" in err:
100 | if self.__sa_number != 0 and self.__sa_count < self.__sa_number:
101 | self.__switchServiceAccount()
102 | await self.copy(origin_remote, origin_dir, dest_remote, dest_dir)
103 | return
104 | else:
105 |                     LOGGER.info("Reached maximum number of service accounts")
106 | await self.__listener.onDownloadError(err)
107 |
108 | def __switchServiceAccount(self):
109 | if self.__service_account_index == self.__sa_number - 1:
110 | self.__service_account_index = 0
111 | else:
112 | self.__service_account_index += 1
113 | self.__sa_count += 1
114 | LOGGER.info(f"Switching to {self.__service_account_index}.json service account")
115 |
116 |     def __create_teamdrive_sa_config(self, conf_path, id):
117 |         # rclone requires a [section] header to parse the config; assume the
118 |         # rewritten remote keeps the SERVICE_ACCOUNTS_REMOTE name used above.
119 |         rc_content = f'[{config_dict["SERVICE_ACCOUNTS_REMOTE"]}]\n'
120 |         rc_content += "type = drive\n"
121 |         rc_content += "scope = drive\n"
122 |         rc_content += (
123 |             f"service_account_file = accounts/{self.__service_account_index}.json\n"
124 |         )
125 |         rc_content += f"team_drive = {id}\n\n"
126 |         with open(conf_path, "w") as f:
127 |             f.write(rc_content)
125 |
126 | async def cancel_task(self):
127 | self.__is_cancelled = True
128 | if self.process is not None:
129 | try:
130 | self.process.kill()
131 | except:
132 | pass
133 | await self.__listener.onDownloadError("Copy cancelled!")
134 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/download_utils/rclone_leech.py:
--------------------------------------------------------------------------------
1 | from asyncio import create_subprocess_exec
2 | from asyncio.subprocess import PIPE
3 | from os import path as ospath
4 | from random import SystemRandom
5 | from string import ascii_letters, digits
6 | from bot.helper.telegram_helper.filters import CustomFilters
7 | from bot.helper.telegram_helper.message_utils import sendStatusMessage
8 | from bot.helper.ext_utils.rclone_data_holder import get_rclone_data
9 | from bot import status_dict, status_dict_lock, config_dict
10 | from bot.helper.ext_utils.rclone_utils import get_rclone_path, setRcloneFlags
11 | from bot.helper.mirror_leech_utils.status_utils.rclone_status import RcloneStatus
12 | from bot.helper.mirror_leech_utils.status_utils.status_utils import MirrorStatus
13 |
14 |
15 | class RcloneLeech:
16 | def __init__(self, origin_dir, dest_dir, listener, isFolder=False):
17 | self.__listener = listener
18 | self.__user_id = listener.user_id
19 | self.__origin_path = origin_dir
20 | self.__dest_path = dest_dir
21 | self.__isFolder = isFolder
22 | self.size = 0
23 | self.name = None
24 | self.process = None
25 | self.__is_cancelled = False
26 | self.status_type = MirrorStatus.STATUS_DOWNLOADING
27 |
28 | async def leech(self):
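29 |         # Use the user's own LEECH_REMOTE when multi-config (or sudo) applies;
30 |         # otherwise fall back to the owner's DEFAULT_GLOBAL_REMOTE.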
29 | conf_path = await get_rclone_path(self.__user_id, self.__listener.message)
30 | if config_dict["MULTI_RCLONE_CONFIG"] or CustomFilters.sudo_filter(
31 | "", self.__listener.message
32 | ):
33 | leech_drive = get_rclone_data("LEECH_REMOTE", self.__user_id)
34 | cmd = [
35 | "rclone",
36 | "copy",
37 | f"--config={conf_path}",
38 | f"{leech_drive}:{self.__origin_path}",
39 | f"{self.__dest_path}",
40 | "-P",
41 | ]
42 | else:
43 | if DEFAULT_GLOBAL_REMOTE := config_dict["DEFAULT_GLOBAL_REMOTE"]:
44 | cmd = [
45 | "rclone",
46 | "copy",
47 | f"--config={conf_path}",
48 | f"{DEFAULT_GLOBAL_REMOTE}:{self.__origin_path}",
49 | f"{self.__dest_path}",
50 | "-P",
51 | ]
52 | else:
53 | await self.__listener.onDownloadError("DEFAULT_GLOBAL_REMOTE not found")
54 | return
55 | await setRcloneFlags(cmd, "download")
56 | gid = "".join(SystemRandom().choices(ascii_letters + digits, k=10))
57 | if self.__isFolder:
58 | self.name = ospath.basename(ospath.normpath(self.__dest_path))
59 | else:
60 | self.name = ospath.basename(self.__dest_path)
61 | async with status_dict_lock:
62 | status = RcloneStatus(self, self.__listener, gid)
63 | status_dict[self.__listener.uid] = status
64 | await sendStatusMessage(self.__listener.message)
65 | self.process = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
66 | await status.start()
67 | return_code = await self.process.wait()
68 | if self.__is_cancelled:
69 | return
70 | if return_code == 0:
71 | await self.__listener.onDownloadComplete()
72 | else:
73 | err_message = (await self.process.stderr.read()).decode()
74 | await self.__listener.onDownloadError(f"Error: {err_message}!")
75 |
76 | async def cancel_task(self):
77 | self.__is_cancelled = True
78 | if self.process is not None:
79 | try:
80 | self.process.kill()
81 | except:
82 | pass
83 | await self.__listener.onDownloadError("Download cancelled!")
84 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/download_utils/telegram_downloader.py:
--------------------------------------------------------------------------------
1 | from time import time
2 | from bot import (
3 | IS_PREMIUM_USER,
4 | bot,
5 | app,
6 | status_dict,
7 | config_dict,
8 | status_dict_lock,
9 | LOGGER,
10 | )
11 | from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage
12 | from bot.helper.mirror_leech_utils.status_utils.tg_download_status import TelegramStatus
13 |
14 |
15 | class TelegramDownloader:
16 | def __init__(self, file, client, listener, path, name=""):
17 | self.__client = client
18 | self.__listener = listener
19 | self.name = name
20 | self.gid = ""
21 | self.size = 0
22 | self.progress = 0
23 | self.downloaded_bytes = 0
24 | self.__file = file
25 | self.__path = path
26 | self.__start_time = time()
27 | self.__is_cancelled = False
28 |
29 | @property
30 | def download_speed(self):
31 | return self.downloaded_bytes / (time() - self.__start_time)
32 |
33 | async def __onDownloadStart(self, name, size, file_id):
34 | self.name = name
35 | self.size = size
36 | self.gid = file_id
37 |
38 | async with status_dict_lock:
39 | status_dict[self.__listener.uid] = TelegramStatus(
40 | self, size, self.__listener.message, self.gid
41 | )
42 | await sendStatusMessage(self.__listener.message)
43 |
44 | if not config_dict["NO_TASKS_LOGS"]:
45 | LOGGER.info(f"Download from Telegram: {name}")
46 |
47 | async def onDownloadProgress(self, current, total):
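48 |         # Pyrogram progress callback: stopping the transmission here aborts the
49 |         # pending download_media() call, which then returns None.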
48 | if self.__is_cancelled:
49 | if IS_PREMIUM_USER:
50 | app.stop_transmission()
51 | else:
52 | bot.stop_transmission()
53 | return
54 | self.downloaded_bytes = current
55 | try:
56 | self.progress = current / self.size * 100
57 | except:
58 | pass
59 |
60 | async def download(self):
61 | if IS_PREMIUM_USER and not self.__listener.isSuperGroup:
62 | await sendMessage(
63 | "Use SuperGroup to download with User!", self.__listener.message
64 | )
65 | return
66 | if self.__file is None:
67 | return
68 | if self.name == "":
69 | name = (
70 | self.__file.file_name if hasattr(self.__file, "file_name") else "None"
71 | )
72 | else:
73 | name = self.name
74 | self.__path = self.__path + name
75 | size = self.__file.file_size
76 | gid = self.__file.file_unique_id
77 | await self.__onDownloadStart(name, size, gid)
78 | try:
79 | download = await self.__client.download_media(
80 | message=self.__file,
81 | file_name=self.__path,
82 | progress=self.onDownloadProgress,
83 | )
84 | if self.__is_cancelled:
85 | await self.__onDownloadError("Cancelled by user!")
86 | return
87 | except Exception as e:
88 | LOGGER.error(str(e))
89 | await self.__onDownloadError(str(e))
90 | return
91 | if download is not None:
92 | await self.__listener.onDownloadComplete()
93 | elif not self.__is_cancelled:
94 | await self.__onDownloadError("Internal error occurred")
95 |
96 | async def __onDownloadError(self, error):
97 | await self.__listener.onDownloadError(error)
98 |
99 | async def cancel_task(self):
100 | LOGGER.info("Cancelling download by user request")
101 | self.__is_cancelled = True
102 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/gd_utils/clone.py:
--------------------------------------------------------------------------------
1 | from logging import getLogger
2 | from json import loads
2 | from time import time
3 | from os import path as ospath
4 | from bot.helper.mirror_leech_utils.gd_utils.helper import GoogleDriveHelper
5 | from googleapiclient.errors import HttpError
6 | from tenacity import (
7 | retry,
8 | wait_exponential,
9 | stop_after_attempt,
10 | retry_if_exception_type,
11 | RetryError,
12 | )
13 |
14 | from bot.helper.ext_utils.bot_utils import run_async_to_sync
15 |
16 | LOGGER = getLogger(__name__)
17 |
18 |
19 | class gdClone(GoogleDriveHelper):
20 | def __init__(self, link, listener):
21 | self.link = link
22 | self.listener = listener
23 | self._start_time = time()
24 | super().__init__()
25 | self.is_cloning = True
26 | self.upDest = None
27 | self.user_setting()
28 |
29 |     def user_setting(self):
30 |         # removeprefix() strips only the literal prefix; lstrip() would strip any
31 |         # of the given characters and can eat the start of the destination ID.
32 |         if self.link.startswith("mtp:"):
33 |             self.token_path = f"tokens/{self.listener.user_id}.pickle"
34 |             self.upDest = self.link.removeprefix("mtp:")
35 |             self.use_sa = False
36 |         elif self.link.startswith("tp:"):
37 |             self.upDest = self.link.removeprefix("tp:")
38 |             self.use_sa = False
39 |         elif self.link.startswith("sa:"):
40 |             self.upDest = self.link.removeprefix("sa:")
41 |             self.use_sa = True
40 |
41 | def clone(self):
42 | try:
43 | file_id = self.getIdFromUrl(self.link)
44 | except (KeyError, IndexError):
45 | return (
46 | "Google Drive ID could not be found in the provided link",
47 | None,
48 | None,
49 | None,
50 | None,
51 | None,
52 | )
53 | self.service = self.authorize()
54 | msg = ""
55 | LOGGER.info(f"File ID: {file_id}")
56 | try:
57 | meta = self.getFileMetadata(file_id)
58 | mime_type = meta.get("mimeType")
59 | if mime_type == self.G_DRIVE_DIR_MIME_TYPE:
60 | dir_id = self.create_directory(meta.get("name"), self.upDest)
61 | self._cloneFolder(meta.get("name"), meta.get("id"), dir_id)
62 | durl = self.G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id)
63 | if self.is_cancelled:
64 | LOGGER.info("Deleting cloned data from Drive...")
65 | self.service.files().delete(
66 | fileId=dir_id, supportsAllDrives=True
67 | ).execute()
68 | return None, None, None, None, None, None
69 | mime_type = "Folder"
70 | size = self.proc_bytes
71 | else:
72 | file = self._copyFile(meta.get("id"), self.upDest)
73 |                 msg += f'Name: {file.get("name")}\n'
74 | durl = self.G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id"))
75 | if mime_type is None:
76 | mime_type = "File"
77 | size = int(meta.get("size", 0))
78 | return (
79 | durl,
80 | size,
81 | mime_type,
82 | self.total_files,
83 | self.total_folders,
84 | self.getIdFromUrl(durl),
85 | )
86 | except Exception as err:
87 | if isinstance(err, RetryError):
88 | LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
89 | err = err.last_attempt.exception()
90 | err = str(err).replace(">", "").replace("<", "")
91 | if "User rate limit exceeded" in err:
92 | msg = "User rate limit exceeded."
93 | elif "File not found" in err:
94 | if not self.alt_auth and self.use_sa:
95 | self.alt_auth = True
96 | self.use_sa = False
97 | LOGGER.error("File not found. Trying with token.pickle...")
98 | return self.clone()
99 | msg = "File not found."
100 | else:
101 | msg = f"Error.\n{err}"
102 | run_async_to_sync(self.listener.onUploadError, msg)
103 | return None, None, None, None, None, None
104 |
105 | def _cloneFolder(self, folder_name, folder_id, dest_id):
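106 |         # Recursively recreate the folder tree on the destination, copying files
107 |         # that pass the extension filter and tallying totals for the final report.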
106 | LOGGER.info(f"Syncing: {folder_name}")
107 | files = self.getFilesByFolderId(folder_id)
108 | if len(files) == 0:
109 | return dest_id
110 | for file in files:
111 | if file.get("mimeType") == self.G_DRIVE_DIR_MIME_TYPE:
112 | self.total_folders += 1
113 | file_path = ospath.join(folder_name, file.get("name"))
114 | current_dir_id = self.create_directory(file.get("name"), dest_id)
115 | self._cloneFolder(file_path, file.get("id"), current_dir_id)
116 | elif (
117 | not file.get("name")
118 | .lower()
119 | .endswith(tuple(self.listener.extension_filter))
120 | ):
121 | self.total_files += 1
122 | self._copyFile(file.get("id"), dest_id)
123 | self.proc_bytes += int(file.get("size", 0))
124 | self.total_time = int(time() - self._start_time)
125 | if self.is_cancelled:
126 | break
127 |
128 | @retry(
129 | wait=wait_exponential(multiplier=2, min=3, max=6),
130 | stop=stop_after_attempt(3),
131 | retry=retry_if_exception_type(Exception),
132 | )
133 | def _copyFile(self, file_id, dest_id):
134 | body = {"parents": [dest_id]}
135 | try:
136 | return (
137 | self.service.files()
138 | .copy(fileId=file_id, body=body, supportsAllDrives=True)
139 | .execute()
140 | )
141 | except HttpError as err:
142 | if err.resp.get("content-type", "").startswith("application/json"):
143 |             reason = loads(err.content).get("error").get("errors")[0].get("reason")
144 | if reason not in [
145 | "userRateLimitExceeded",
146 | "dailyLimitExceeded",
147 | "cannotCopyFile",
148 | ]:
149 | raise err
150 | if reason == "cannotCopyFile":
151 | LOGGER.error(err)
152 | elif self.use_sa:
153 | if self.sa_count >= self.sa_number:
154 | LOGGER.info(
155 |                         f"Reached maximum number of service account switches ({self.sa_count})"
156 | )
157 | raise err
158 | else:
159 | if self.is_cancelled:
160 | return
161 | self.switchServiceAccount()
162 | return self._copyFile(file_id, dest_id)
163 | else:
164 | LOGGER.error(f"Got: {reason}")
165 | raise err
166 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/gd_utils/count.py:
--------------------------------------------------------------------------------
1 | from logging import getLogger
2 | from bot.helper.mirror_leech_utils.gd_utils.helper import GoogleDriveHelper
3 | from tenacity import RetryError
4 |
5 | LOGGER = getLogger(__name__)
6 |
7 |
8 | class gdCount(GoogleDriveHelper):
9 | def __init__(self):
10 | super().__init__()
11 |
12 | def count(self, link, user_id):
13 | try:
14 | file_id = self.getIdFromUrl(link, user_id)
15 | except (KeyError, IndexError):
16 | return (
17 | "Google Drive ID could not be found in the provided link",
18 | None,
19 | None,
20 | None,
21 | None,
22 | )
23 | self.service = self.authorize()
24 | LOGGER.info(f"File ID: {file_id}")
25 | try:
26 | return self._proceed_count(file_id)
27 | except Exception as err:
28 | if isinstance(err, RetryError):
29 | LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
30 | err = err.last_attempt.exception()
31 | err = str(err).replace(">", "").replace("<", "")
32 | if "File not found" in err:
33 | if not self.alt_auth and self.use_sa:
34 | self.alt_auth = True
35 | self.use_sa = False
36 | LOGGER.error("File not found. Trying with token.pickle...")
37 | return self.count(link, user_id)
38 | msg = "File not found."
39 | else:
40 | msg = f"Error.\n{err}"
41 | return msg, None, None, None, None
42 |
43 | def _proceed_count(self, file_id):
44 | meta = self.getFileMetadata(file_id)
45 | name = meta["name"]
46 | LOGGER.info(f"Counting: {name}")
47 | mime_type = meta.get("mimeType")
48 | if mime_type == self.G_DRIVE_DIR_MIME_TYPE:
49 | self._gDrive_directory(meta)
50 | mime_type = "Folder"
51 | else:
52 | if mime_type is None:
53 | mime_type = "File"
54 | self.total_files += 1
55 | self._gDrive_file(meta)
56 | return name, mime_type, self.proc_bytes, self.total_files, self.total_folders
57 |
58 | def _gDrive_file(self, filee):
59 | size = int(filee.get("size", 0))
60 | self.proc_bytes += size
61 |
62 | def _gDrive_directory(self, drive_folder):
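63 |         # Walk the folder tree; shortcut entries are resolved to their target
64 |         # metadata so they are counted like regular files and folders.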
63 | files = self.getFilesByFolderId(drive_folder["id"])
64 | if len(files) == 0:
65 | return
66 | for filee in files:
67 | shortcut_details = filee.get("shortcutDetails")
68 | if shortcut_details is not None:
69 | mime_type = shortcut_details["targetMimeType"]
70 | file_id = shortcut_details["targetId"]
71 | filee = self.getFileMetadata(file_id)
72 | else:
73 | mime_type = filee.get("mimeType")
74 | if mime_type == self.G_DRIVE_DIR_MIME_TYPE:
75 | self.total_folders += 1
76 | self._gDrive_directory(filee)
77 | else:
78 | self.total_files += 1
79 | self._gDrive_file(filee)
80 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/gd_utils/download.py:
--------------------------------------------------------------------------------
1 | from logging import getLogger
2 | from json import loads
2 | from os import makedirs, path as ospath
3 | from io import FileIO
4 | from bot.helper.mirror_leech_utils.gd_utils.helper import GoogleDriveHelper
5 | from googleapiclient.errors import HttpError
6 | from googleapiclient.http import MediaIoBaseDownload
7 | from tenacity import (
8 | retry,
9 | wait_exponential,
10 | stop_after_attempt,
11 | retry_if_exception_type,
12 | RetryError,
13 | )
14 |
15 | from bot.helper.ext_utils.bot_utils import run_async_to_sync, setInterval
16 |
17 | LOGGER = getLogger(__name__)
18 |
19 |
20 | class gdDownload(GoogleDriveHelper):
21 | def __init__(self, listener, name, link, path):
22 | self.listener = listener
23 | self.link = link
24 | self.name = name
25 | self._updater = None
26 | self._path = path
27 | super().__init__()
28 | self.is_downloading = True
29 |
30 | def download(self):
31 | file_id = self.getIdFromUrl(self.link, self.listener.user_id)
32 | self.service = self.authorize()
33 | self._updater = setInterval(self.update_interval, self.progress)
34 | try:
35 | meta = self.getFileMetadata(file_id)
36 | if meta.get("mimeType") == self.G_DRIVE_DIR_MIME_TYPE:
37 | self._download_folder(file_id, self._path, self.name)
38 | else:
39 | makedirs(self._path, exist_ok=True)
40 | self._download_file(
41 | file_id, self._path, self.name, meta.get("mimeType")
42 | )
43 | except Exception as err:
44 | if isinstance(err, RetryError):
45 | LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
46 | err = err.last_attempt.exception()
47 | err = str(err).replace(">", "").replace("<", "")
48 | if "downloadQuotaExceeded" in err:
49 | err = "Download Quota Exceeded."
50 | elif "File not found" in err:
51 | if not self.alt_auth and self.use_sa:
52 | self.alt_auth = True
53 | self.use_sa = False
54 | LOGGER.error("File not found. Trying with token.pickle...")
55 | self._updater.cancel()
56 | return self.download()
57 | err = "File not found!"
58 | run_async_to_sync(self.listener.onDownloadError, err)
59 | self.is_cancelled = True
60 | finally:
61 | self._updater.cancel()
62 | if self.is_cancelled:
63 | return
64 | run_async_to_sync(self.listener.onDownloadComplete)
65 |
66 | def _download_folder(self, folder_id, path, folder_name):
67 | folder_name = folder_name.replace("/", "")
68 | if not ospath.exists(f"{path}/{folder_name}"):
69 | makedirs(f"{path}/{folder_name}")
70 | path += f"/{folder_name}"
71 | result = self.getFilesByFolderId(folder_id)
72 | if len(result) == 0:
73 | return
74 | result = sorted(result, key=lambda k: k["name"])
75 | for item in result:
76 | file_id = item["id"]
77 | filename = item["name"]
78 | shortcut_details = item.get("shortcutDetails")
79 | if shortcut_details is not None:
80 | file_id = shortcut_details["targetId"]
81 | mime_type = shortcut_details["targetMimeType"]
82 | else:
83 | mime_type = item.get("mimeType")
84 | if mime_type == self.G_DRIVE_DIR_MIME_TYPE:
85 | self._download_folder(file_id, path, filename)
86 | elif not ospath.isfile(
87 |                 f"{path}/{filename}"
88 | ) and not filename.lower().endswith(tuple(self.listener.extension_filter)):
89 | self._download_file(file_id, path, filename, mime_type)
90 | if self.is_cancelled:
91 | break
92 |
93 | @retry(
94 | wait=wait_exponential(multiplier=2, min=3, max=6),
95 | stop=stop_after_attempt(3),
96 | retry=(retry_if_exception_type(Exception)),
97 | )
98 | def _download_file(self, file_id, path, filename, mime_type):
99 | request = self.service.files().get_media(fileId=file_id, supportsAllDrives=True)
100 | filename = filename.replace("/", "")
101 | if len(filename.encode()) > 255:
102 | ext = ospath.splitext(filename)[1]
103 | filename = f"{filename[:245]}{ext}"
104 | if self.name.endswith(ext):
105 | self.name = filename
106 | if self.is_cancelled:
107 | return
108 | fh = FileIO(f"{path}/{filename}", "wb")
109 | downloader = MediaIoBaseDownload(fh, request, chunksize=100 * 1024 * 1024)
110 | done = False
111 | retries = 0
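112 |         # Pull the file in 100 MiB chunks; transient 5xx/429 errors are retried
113 |         # up to 10 times and quota errors trigger a service-account switch.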
112 | while not done:
113 | if self.is_cancelled:
114 | fh.close()
115 | break
116 | try:
117 | self.status, done = downloader.next_chunk()
118 | except HttpError as err:
119 | if err.resp.status in [500, 502, 503, 504, 429] and retries < 10:
120 | retries += 1
121 | continue
122 | if err.resp.get("content-type", "").startswith("application/json"):
123 | reason = (
124 |                         loads(err.content).get("error").get("errors")[0].get("reason")
125 | )
126 | if reason not in [
127 | "downloadQuotaExceeded",
128 | "dailyLimitExceeded",
129 | ]:
130 | raise err
131 | if self.use_sa:
132 | if self.sa_count >= self.sa_number:
133 | LOGGER.info(
134 |                             f"Reached maximum number of service account switches ({self.sa_count})"
135 | )
136 | raise err
137 | else:
138 | if self.is_cancelled:
139 | return
140 | self.switchServiceAccount()
141 | LOGGER.info(f"Got: {reason}, Trying Again...")
142 | return self._download_file(
143 | file_id, path, filename, mime_type
144 | )
145 | else:
146 | LOGGER.error(f"Got: {reason}")
147 | raise err
148 | self.file_processed_bytes = 0
149 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sam-Max/rcmltb/49571f29c02f948c2327f1bc5aebbb93b352ef44/bot/helper/mirror_leech_utils/status_utils/__init__.py
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/aria_status.py:
--------------------------------------------------------------------------------
1 | from time import time
2 | from bot import aria2, LOGGER
3 | from bot.helper.ext_utils.bot_utils import get_readable_time, run_sync_to_async
4 | from bot.helper.mirror_leech_utils.status_utils.status_utils import MirrorStatus
5 |
6 |
7 | def get_download(gid):
8 | try:
9 | return aria2.get_download(gid)
10 | except Exception as e:
11 | LOGGER.error(f"{e}: Aria2c, Error while getting torrent info")
12 | return None
13 |
14 |
15 | class AriaStatus:
16 | def __init__(self, gid, listener, seeding=False):
17 | self.__gid = gid
18 | self.__listener = listener
19 | self.__download = get_download(gid)
20 | self.start_time = 0
21 | self.seeding = seeding
22 | self.message = listener.message
23 |
24 | def __update(self):
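25 |         # aria2 replaces a metadata/magnet download with a follow-up GID once the
26 |         # real download starts; follow it so the status keeps tracking the task.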
25 | if self.__download is None:
26 | self.__download = get_download(self.__gid)
27 | else:
28 | self.__download = self.__download.live
29 | if self.__download.followed_by_ids:
30 | self.__gid = self.__download.followed_by_ids[0]
31 | self.__download = get_download(self.__gid)
32 |
33 | def progress(self):
34 | return self.__download.progress_string()
35 |
36 | def processed_bytes(self):
37 | return self.__download.completed_length_string()
38 |
39 | def speed(self):
40 | return self.__download.download_speed_string()
41 |
42 | def name(self):
43 | return self.__download.name
44 |
45 | def size(self):
46 | return self.__download.total_length_string()
47 |
48 | def eta(self):
49 | return self.__download.eta_string()
50 |
51 | def status(self):
52 | self.__update()
53 | download = self.__download
54 | if download.is_waiting:
55 | if self.seeding:
56 | return MirrorStatus.STATUS_QUEUEUP
57 | else:
58 | return MirrorStatus.STATUS_QUEUEDL
59 | elif download.is_paused:
60 | return MirrorStatus.STATUS_PAUSED
61 | elif download.seeder and self.seeding:
62 | return MirrorStatus.STATUS_SEEDING
63 | else:
64 | return MirrorStatus.STATUS_DOWNLOADING
65 |
66 | def seeders_num(self):
67 | return self.__download.num_seeders
68 |
69 | def leechers_num(self):
70 | return self.__download.connections
71 |
72 | def uploaded_bytes(self):
73 | return self.__download.upload_length_string()
74 |
75 | def upload_speed(self):
76 | self.__update()
77 | return self.__download.upload_speed_string()
78 |
79 | def ratio(self):
80 | return f"{round(self.__download.upload_length / self.__download.completed_length, 3)}"
81 |
82 | def seeding_time(self):
83 | return get_readable_time(time() - self.start_time)
84 |
85 | def listener(self):
86 | return self.__listener
87 |
88 | def task(self):
89 | return self
90 |
91 | def gid(self):
92 | self.__update()
93 | return self.__gid
94 |
95 | def type(self):
96 | return "Aria"
97 |
98 | async def cancel_task(self):
99 |         await run_sync_to_async(self.__update)
101 | if self.__download.seeder and self.seeding:
102 | LOGGER.info(f"Cancelling Seed: {self.name()}")
103 | await self.__listener.onUploadError(
104 | f"Seeding stopped with Ratio: {self.ratio()} and Time: {self.seeding_time()}"
105 | )
106 | await run_sync_to_async(aria2.remove, [self.__download], force=True, files=True)
107 | elif downloads := self.__download.followed_by:
108 | LOGGER.info(f"Cancelling Download: {self.name()}")
109 | await self.__listener.onDownloadError("Download cancelled by user!")
110 | downloads.append(self.__download)
111 | await run_sync_to_async(aria2.remove, downloads, force=True, files=True)
112 | else:
113 | LOGGER.info(f"Cancelling Download: {self.name()}")
114 | await self.__listener.onDownloadError("Download stopped by user!")
115 | await run_sync_to_async(aria2.remove, [self.__download], force=True, files=True)
116 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/clone_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import (
2 | MirrorStatus,
3 | get_readable_file_size,
4 | get_readable_time,
5 | )
6 |
7 |
8 | class CloneStatus:
9 | def __init__(self, obj, size, message, gid):
10 | self.__obj = obj
11 | self.__size = size
12 | self.__gid = gid
13 | self.message = message
14 |
15 | def processed_bytes(self):
16 | return self.__obj.transferred_size
17 |
18 | def size(self):
19 | return get_readable_file_size(self.__size)
20 |
21 | def status(self):
22 | return MirrorStatus.STATUS_CLONING
23 |
24 | def name(self):
25 | return self.__obj.name
26 |
27 | def gid(self) -> str:
28 | return self.__gid
29 |
30 | def progress_raw(self):
31 | try:
32 | return self.__obj.transferred_size / self.__size * 100
33 | except:
34 | return 0
35 |
36 | def progress(self):
37 | return f"{round(self.progress_raw(), 2)}%"
38 |
39 | def speed_raw(self):
40 | """
41 | :return: Download speed in Bytes/Seconds
42 | """
43 | return self.__obj.cspeed()
44 |
45 | def speed(self):
46 | return f"{get_readable_file_size(self.speed_raw())}/s"
47 |
48 | def eta(self):
49 | try:
50 | seconds = (self.__size - self.__obj.transferred_size) / self.speed_raw()
51 | return f"{get_readable_time(seconds)}"
52 | except:
53 | return "-"
54 |
55 | def task(self):
56 | return self.__obj
57 |
58 | def type(self):
59 | return "Clone"
60 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/extract_status.py:
--------------------------------------------------------------------------------
1 | from time import time
2 | from bot import LOGGER, config_dict
3 | from bot.helper.ext_utils.bot_utils import (
4 | get_readable_file_size,
5 | MirrorStatus,
6 | get_readable_time,
7 | run_async_to_sync,
8 | )
9 | from bot.helper.ext_utils.misc_utils import get_path_size
10 |
11 |
12 | class ExtractStatus:
13 | def __init__(self, name, size, gid, listener):
14 | self.__name = name
15 | self.__size = size
16 | self.__gid = gid
17 | self.__listener = listener
18 | self.__uid = listener.uid
19 | self.__start_time = time()
20 | self.message = listener.message
21 |
22 | def gid(self):
23 | return self.__gid
24 |
25 | def speed_raw(self):
26 | return self.processed_raw() / (time() - self.__start_time)
27 |
28 | def progress_raw(self):
29 | try:
30 | return self.processed_raw() / self.__size * 100
31 | except:
32 | return 0
33 |
34 | def progress(self):
35 | return f"{round(self.progress_raw(), 2)}%"
36 |
37 | def speed(self):
38 | return f"{get_readable_file_size(self.speed_raw())}/s"
39 |
40 | def name(self):
41 | return self.__name
42 |
43 | def size(self):
44 | return get_readable_file_size(self.__size)
45 |
46 | def eta(self):
47 | try:
48 | seconds = (self.__size - self.processed_raw()) / self.speed_raw()
49 | return get_readable_time(seconds)
50 | except:
51 | return "-"
52 |
53 | def status(self):
54 | return MirrorStatus.STATUS_EXTRACTING
55 |
56 | def processed_bytes(self):
57 | return get_readable_file_size(self.processed_raw())
58 |
59 | def processed_raw(self):
60 | if self.__listener.newDir:
61 | return run_async_to_sync(get_path_size, self.__listener.newDir)
62 | else:
63 | return run_async_to_sync(get_path_size, self.__listener.dir) - self.__size
64 |
65 | def task(self):
66 | return self
67 |
68 | def type(self):
69 | return "Extract"
70 |
71 | async def cancel_task(self):
72 | if not config_dict["NO_TASKS_LOGS"]:
73 | LOGGER.info(f"Cancelling Extract: {self.__name}")
74 | if self.__listener.suproc is not None:
75 | self.__listener.suproc.kill()
76 | else:
77 | self.__listener.suproc = "cancelled"
78 | await self.__listener.onUploadError("extracting stopped by user!")
79 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/gdrive_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import (
2 | MirrorStatus,
3 | get_readable_file_size,
4 | get_readable_time,
5 | )
6 |
7 |
8 | class GdriveStatus:
9 | def __init__(self, obj, size, message, gid):
10 | self.__obj = obj
11 | self.__size = size
12 | self.__gid = gid
13 | self.message = message
14 |
15 | def processed_bytes(self):
16 | return get_readable_file_size(self.__obj.processed_bytes)
17 |
18 | def size(self):
19 | return get_readable_file_size(self.__size)
20 |
21 | def status(self):
22 | return MirrorStatus.STATUS_DOWNLOADING
23 |
24 | def name(self):
25 | return self.__obj.name
26 |
27 | def gid(self) -> str:
28 | return self.__gid
29 |
30 | def progress_raw(self):
31 | try:
32 | return self.__obj.processed_bytes / self.__size * 100
33 | except:
34 | return 0
35 |
36 | def progress(self):
37 | return f"{round(self.progress_raw(), 2)}%"
38 |
39 | def speed(self):
40 | return f"{get_readable_file_size(self.__obj.speed)}/s"
41 |
42 | def eta(self):
43 | try:
44 | seconds = (self.__size - self.__obj.processed_bytes) / self.__obj.speed
45 | return get_readable_time(seconds)
46 | except:
47 | return "-"
48 |
49 | def type(self):
50 | return "Gdrive"
51 |
52 | def task(self):
53 | return self.__obj
54 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/mega_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import (
2 | MirrorStatus,
3 | get_readable_file_size,
4 | get_readable_time,
5 | )
6 |
7 |
8 | class MegaDownloadStatus:
9 | def __init__(self, name, size, gid, obj, listener):
10 | self.__obj = obj
11 | self.__name = name
12 | self.__size = size
13 | self.__gid = gid
14 | self.message = listener.message
15 |
16 | def name(self) -> str:
17 | return self.__name
18 |
19 | def progress_raw(self):
20 | try:
21 | return round(self.__obj.downloaded_bytes / self.__obj.size * 100, 2)
22 | except:
23 | return 0.0
24 |
25 | def progress(self):
26 | return f"{self.progress_raw()}%"
27 |
28 | def status(self) -> str:
29 | return MirrorStatus.STATUS_DOWNLOADING
30 |
31 | def processed_bytes(self):
32 | return get_readable_file_size(self.__obj.downloaded_bytes)
33 |
34 | def eta(self):
35 | try:
36 | seconds = (self.__size - self.__obj.downloaded_bytes) / self.__obj.speed
37 | return get_readable_time(seconds)
38 | except ZeroDivisionError:
39 | return "-"
40 |
41 | def size(self):
42 | return get_readable_file_size(self.__size)
43 |
44 | def speed(self):
45 | return f"{get_readable_file_size(self.__obj.speed)}/s"
46 |
47 | def gid(self) -> str:
48 | return self.__gid
49 |
50 | def task(self):
51 | return self.__obj
52 |
53 | def type(self):
54 | return "Mega.nz"
55 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/qbit_status.py:
--------------------------------------------------------------------------------
1 | from asyncio import sleep
2 | from bot import LOGGER, QbTorrents, qb_listener_lock, config_dict, get_client
3 | from bot.helper.ext_utils.bot_utils import (
4 | MirrorStatus,
5 | get_readable_file_size,
6 | get_readable_time,
7 | run_sync_to_async,
8 | )
9 |
10 |
11 | def get_download(client, tag):
12 | try:
13 | return client.torrents_info(tag=tag)[0]
14 | except Exception as e:
15 | LOGGER.error(f"{e}: Qbittorrent, Error while getting torrent info")
16 | return None
17 |
18 |
19 | class QbitTorrentStatus:
20 | def __init__(self, listener, seeding=False):
21 | self.__client = get_client()
22 | self.__listener = listener
23 | self.__info = get_download(self.__client, f"{listener.uid}")
24 | self.seeding = seeding
25 | self.message = listener.message
26 |
27 | def __update(self):
28 | new_info = get_download(self.__client, f"{self.__listener.uid}")
29 | if new_info is not None:
30 | self.__info = new_info
31 |
32 | def progress(self):
33 | return f"{round(self.__info.progress*100, 2)}%"
34 |
35 | def processed_bytes(self):
36 | return get_readable_file_size(self.__info.downloaded)
37 |
38 | def speed(self):
39 | return f"{get_readable_file_size(self.__info.dlspeed)}/s"
40 |
41 | def name(self):
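42 |         # While qBittorrent is still fetching metadata the torrent may not have
43 |         # its real name yet, so flag that phase in the displayed name.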
42 | if self.__info.state in ["metaDL", "checkingResumeData"]:
43 | return f"[METADATA]{self.__info.name}"
44 | else:
45 | return self.__info.name
46 |
47 | def size(self):
48 | return get_readable_file_size(self.__info.size)
49 |
50 | def eta(self):
51 | return get_readable_time(self.__info.eta)
52 |
53 | def status(self):
54 | self.__update()
55 | state = self.__info.state
56 | if state == "queuedDL":
57 | return MirrorStatus.STATUS_QUEUEDL
58 | elif state == "queuedUP":
59 | return MirrorStatus.STATUS_QUEUEUP
60 | elif state in ["pausedDL", "pausedUP"]:
61 | return MirrorStatus.STATUS_PAUSED
62 | elif state in ["checkingUP", "checkingDL"]:
63 | return MirrorStatus.STATUS_CHECKING
64 | elif state in ["stalledUP", "uploading"] and self.seeding:
65 | return MirrorStatus.STATUS_SEEDING
66 | else:
67 | return MirrorStatus.STATUS_DOWNLOADING
68 |
69 | def seeders_num(self):
70 | return self.__info.num_seeds
71 |
72 | def leechers_num(self):
73 | return self.__info.num_leechs
74 |
75 | def uploaded_bytes(self):
76 | return get_readable_file_size(self.__info.uploaded)
77 |
78 | def upload_speed(self):
79 | return f"{get_readable_file_size(self.__info.upspeed)}/s"
80 |
81 | def ratio(self):
82 | return f"{round(self.__info.ratio, 3)}"
83 |
84 | def seeding_time(self):
85 | return get_readable_time(self.__info.seeding_time)
86 |
87 | def task(self):
88 | return self
89 |
90 | def gid(self):
91 | return self.hash()[:12]
92 |
93 | def hash(self):
94 | self.__update()
95 | return self.__info.hash
96 |
97 | def client(self):
98 | return self.__client
99 |
100 | def listener(self):
101 | return self.__listener
102 |
103 | def type(self):
104 | return "Qbit"
105 |
106 | async def cancel_task(self):
107 | self.__update()
108 | await run_sync_to_async(self.__client.torrents_pause, torrent_hashes=self.__info.hash)
109 | if self.status() != MirrorStatus.STATUS_SEEDING:
110 | if not config_dict["NO_TASKS_LOGS"]:
111 | LOGGER.info(f"Cancelling Download: {self.__info.name}")
112 | await sleep(0.3)
113 | await self.__listener.onDownloadError("Download stopped by user!")
114 | await run_sync_to_async(
115 | self.__client.torrents_delete,
116 | torrent_hashes=self.__info.hash,
117 | delete_files=True,
118 | )
119 | await run_sync_to_async(self.__client.torrents_delete_tags, tags=self.__info.tags)
120 | async with qb_listener_lock:
121 | if self.__info.tags in QbTorrents:
122 | del QbTorrents[self.__info.tags]
123 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/rclone_status.py:
--------------------------------------------------------------------------------
1 | from asyncio import sleep
2 | from re import findall
3 | from bot.helper.ext_utils.human_format import get_readable_file_size
4 | from bot.helper.mirror_leech_utils.status_utils.status_utils import MirrorStatus
5 |
6 |
7 | class RcloneStatus:
8 | def __init__(self, obj, listener, gid):
9 | self.__obj = obj
10 | self.__gid = gid
11 | self.__percent = 0
12 | self.__speed = 0
13 |         self.__transferred_bytes = 0
14 | self.__blank = 0
15 | self.__eta = "-"
16 | self.message = listener.message
17 | self.is_rclone = True
18 |
19 | async def start(self):
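20 |         # Parse rclone's -P progress lines ("Transferred: <bytes>, <pct>%, <speed>,
21 |         # ETA <eta>") and stop after 15 consecutive non-matching reads, which means
22 |         # the process has stopped emitting progress.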
20 | while True:
21 | data = (await self.__obj.process.stdout.readline()).decode()
22 | if match := findall("Transferred:.*ETA.*", data):
23 | self.info = match[0].replace("Transferred:", "").strip().split(",")
24 |                 self.__transferred_bytes = self.info[0]
25 | try:
26 | self.__percent = int(self.info[1].strip("% "))
27 | except:
28 | pass
29 | self.__speed = self.info[2]
30 | self.__eta = self.info[3].replace("ETA", "")
31 | self.__blank = 0
32 | if not match:
33 | self.__blank += 1
34 | if self.__blank == 15:
35 | break
36 | await sleep(0)
37 |
38 | def gid(self):
39 | return self.__gid
40 |
41 | def processed_bytes(self):
42 |         return self.__transferred_bytes
43 |
44 | def size(self):
45 | return get_readable_file_size(self.__obj.size)
46 |
47 | def status(self):
48 | if self.__obj.status_type == MirrorStatus.STATUS_UPLOADING:
49 | return MirrorStatus.STATUS_UPLOADING
50 | elif self.__obj.status_type == MirrorStatus.STATUS_COPYING:
51 | return MirrorStatus.STATUS_COPYING
52 | else:
53 | return MirrorStatus.STATUS_DOWNLOADING
54 |
55 | def name(self):
56 | return self.__obj.name
57 |
58 | def progress(self):
59 | return self.__percent
60 |
61 | def speed(self):
62 | return f"{self.__speed}"
63 |
64 | def eta(self):
65 | return self.__eta
66 |
67 | def task(self):
68 | return self.__obj
69 |
70 | def type(self):
71 | return "Rclone"
72 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/split_status.py:
--------------------------------------------------------------------------------
1 | from bot import config_dict, LOGGER
2 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus
3 |
4 |
5 | class SplitStatus:
6 | def __init__(self, name, size, gid, listener):
7 | self.__name = name
8 | self.__gid = gid
9 | self.__size = size
10 | self.__listener = listener
11 | self.message = listener.message
12 |
13 | def gid(self):
14 | return self.__gid
15 |
16 | def progress(self):
17 | return "0"
18 |
19 | def speed(self):
20 | return "0"
21 |
22 | def name(self):
23 | return self.__name
24 |
25 | def size(self):
26 | return get_readable_file_size(self.__size)
27 |
28 | def eta(self):
29 | return "0s"
30 |
31 | def status(self):
32 | return MirrorStatus.STATUS_SPLITTING
33 |
34 | def processed_bytes(self):
35 | return 0
36 |
37 | def task(self):
38 | return self
39 |
40 | async def cancel_task(self):
41 | if not config_dict["NO_TASKS_LOGS"]:
42 | LOGGER.info(f"Cancelling Split: {self.__name}")
43 | if self.__listener.suproc is not None:
44 | self.__listener.suproc.kill()
45 | else:
46 | self.__listener.suproc = "cancelled"
47 | await self.__listener.onUploadError("splitting stopped by user!")
48 |
49 | def type(self):
50 | return "Split"
51 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/status_utils.py:
--------------------------------------------------------------------------------
1 | import time
2 | from math import floor
3 | from functools import partial
4 | from os import listdir, rmdir, walk, path as ospath, remove
5 | from psutil import cpu_percent, virtual_memory
6 | from bot import LOGGER, botUptime, bot_loop
7 | from shutil import disk_usage, rmtree
8 | from bot.helper.ext_utils.human_format import (
9 | human_readable_bytes,
10 | human_readable_timedelta,
11 | )
12 |
13 |
14 | def get_bottom_status():
15 | diff = time.time() - botUptime
16 | diff = human_readable_timedelta(diff)
17 | usage = disk_usage("/")
18 | free = human_readable_bytes(usage.free)
19 | msg = f"\n**CPU:** {cpu_percent()}% | **FREE:** {free}"
20 | msg += f"\n**RAM:** {virtual_memory().percent}% | **UPTIME:** {diff}"
21 | return msg
22 |
23 |
24 | class MirrorStatus:
25 | STATUS_UPLOADING = "Uploading"
26 | STATUS_CLONING = "Cloning"
27 | STATUS_DOWNLOADING = "Downloading"
28 | STATUS_COPYING = "Copying"
29 | STATUS_SYNCING = "Syncing"
30 | STATUS_ARCHIVING = "Archiving 🔐"
31 | STATUS_EXTRACTING = "Extracting 📂"
32 | STATUS_SPLITTING = "Splitting ✂️"
33 | STATUS_QUEUEDL = "QueueDl"
34 | STATUS_QUEUEUP = "QueueUp"
35 | STATUS_PAUSED = "Pause"
36 | STATUS_CHECKING = "CheckUp"
37 | STATUS_SEEDING = "Seed"
38 |
39 |
40 | class TaskType:
41 | RCLONE = "Rclone"
42 | RCLONE_SYNC = "RcloneSync"
43 | TELEGRAM = "Telegram"
44 |
45 |
46 | def get_progress_bar_string(pct):
47 | pct = float(pct.strip("%"))
48 | p = min(max(pct, 0), 100)
49 | cFull = int(p // 8)
50 | p_str = "■" * cFull
51 | p_str += "□" * (12 - cFull)
52 | return f"[{p_str}]"
53 |
54 |
55 | def get_progress_bar_rclone(percentage):
56 | return "{0}{1}".format(
57 | "".join(["■" for i in range(floor(percentage / 10))]),
58 | "".join(["□" for i in range(10 - floor(percentage / 10))]),
59 | )
60 |
61 |
62 | async def clean_unwanted(path: str):
63 | LOGGER.info(f"Cleaning unwanted files/folders: {path}")
64 | for dirpath, subdir, files in await bot_loop.run_in_executor(
65 | None, partial(walk, path, topdown=False)
66 | ):
67 | for filee in files:
68 |             if filee.endswith(".!qB") or (
69 |                 filee.endswith(".parts") and filee.startswith(".")
70 |             ):
73 | remove(ospath.join(dirpath, filee))
74 | if dirpath.endswith((".unwanted", "splited_files")):
75 | rmtree(dirpath)
76 | for dirpath, subdir, files in await bot_loop.run_in_executor(
77 | None, partial(walk, path, topdown=False)
78 | ):
79 | if not listdir(dirpath):
80 | rmdir(dirpath)
81 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/sync_status.py:
--------------------------------------------------------------------------------
1 | from asyncio import sleep
2 | from re import findall
3 | from bot.helper.mirror_leech_utils.status_utils.status_utils import MirrorStatus
4 |
5 |
6 | class SyncStatus:
7 | def __init__(self, process, gid, source, destination, listener):
8 | self.message = listener.message
9 | self.__source = source
10 | self.__destination = destination
11 | self.__process = process
12 | self.__gid = gid
13 | self.__percent = 0
14 | self.__speed = 0
15 |         self.__transferred_bytes = 0
16 | self.__eta = "-"
17 | self.is_rclone = True
18 |
19 | async def start(self):
20 | blank = 0
21 | while True:
22 | data = await self.__process.stdout.readline()
23 | match = findall("Transferred:.*ETA.*", data.decode().strip())
24 | if len(match) > 0:
25 | nstr = match[0].replace("Transferred:", "")
26 | self.info = nstr.strip().split(",")
27 |                 self.__transferred_bytes = self.info[0]
28 | try:
29 | self.__percent = int(self.info[1].strip("% "))
30 | except:
31 | pass
32 | self.__speed = self.info[2]
33 | self.__eta = self.info[3].replace("ETA", "")
34 | if len(match) == 0:
35 | blank += 1
36 | if blank == 15:
37 | break
38 | else:
39 | blank = 0
40 | await sleep(0)
41 |
42 | def gid(self):
43 | return self.__gid
44 |
45 | def processed_bytes(self):
46 |         return self.__transferred_bytes
47 |
48 | def status(self):
49 | return MirrorStatus.STATUS_SYNCING
50 |
51 | def name(self):
52 | return f"{self.__source} 🔄 {self.__destination}"
53 |
54 | def size_raw(self):
55 | return 0
56 |
57 | def progress(self):
58 | return self.__percent
59 |
60 | def speed(self):
61 | return self.__speed
62 |
63 | def eta(self):
64 | return self.__eta
65 |
66 | def task(self):
67 | return self.__process
68 |
69 | def type(self):
70 | return "RcloneSync"
71 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/tg_download_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import get_readable_time
2 | from bot.helper.ext_utils.human_format import get_readable_file_size
3 | from bot.helper.mirror_leech_utils.status_utils.status_utils import MirrorStatus
4 |
5 |
6 | class TelegramStatus:
7 | def __init__(self, obj, size, message, gid):
8 | self.message = message
9 | self.__obj = obj
10 | self.__size = size
11 | self.__gid = gid
12 |
13 | def gid(self):
14 | return self.__gid
15 |
16 | def processed_bytes(self):
17 | return get_readable_file_size(self.__obj.downloaded_bytes)
18 |
19 | def size(self):
20 | return get_readable_file_size(self.__obj.size)
21 |
22 | def status(self):
23 | return MirrorStatus.STATUS_DOWNLOADING
24 |
25 | def name(self):
26 | return self.__obj.name
27 |
28 | def progress_raw(self):
29 | return self.__obj.progress
30 |
31 | def progress(self):
32 | return f"{round(self.progress_raw(), 2)}%"
33 |
34 | def speed_raw(self):
35 | return self.__obj.download_speed
36 |
37 | def speed(self):
38 | return f"{get_readable_file_size(self.speed_raw())}/s"
39 |
40 | def eta(self):
41 | try:
42 |             seconds = (
43 |                 self.__size - self.__obj.downloaded_bytes
44 |             ) / self.__obj.download_speed
43 | return get_readable_time(seconds)
44 | except:
45 | return "-"
46 |
47 | def task(self):
48 | return self.__obj
49 |
50 | def type(self):
51 | return "Telegram"
52 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/tg_upload_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import (
2 | MirrorStatus,
3 | get_readable_file_size,
4 | get_readable_time,
5 | )
6 |
7 |
8 | class TgUploadStatus:
9 | def __init__(self, obj, size, gid, listener):
10 | self.__obj = obj
11 | self.__size = size
12 | self.__gid = gid
13 | self.message = listener.message
14 |
15 | def processed_bytes(self):
16 | return get_readable_file_size(self.__obj.processed_bytes)
17 |
18 | def size(self):
19 | return get_readable_file_size(self.__size)
20 |
21 | def status(self):
22 | return MirrorStatus.STATUS_UPLOADING
23 |
24 | def name(self):
25 | return self.__obj.name
26 |
27 | def progress_raw(self):
28 | try:
29 | return self.__obj.processed_bytes / self.__size * 100
30 | except ZeroDivisionError:
31 | return 0
32 |
33 | def progress(self):
34 | return f"{round(self.progress_raw(), 2)}%"
35 |
36 | def speed(self):
37 | return f"{get_readable_file_size(self.__obj.speed)}/s"
38 |
39 | def eta(self):
40 | try:
41 | seconds = (self.__size - self.__obj.processed_bytes) / self.__obj.speed
42 | return get_readable_time(seconds)
43 | except:
44 | return "-"
45 |
46 | def gid(self) -> str:
47 | return self.__gid
48 |
49 | def task(self):
50 | return self.__obj
51 |
52 | def type(self):
53 | return "Telegram"
54 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/yt_dlp_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import (
2 | MirrorStatus,
3 | get_readable_file_size,
4 | get_readable_time,
5 | run_async_to_sync,
6 | )
7 | from bot.helper.ext_utils.misc_utils import get_path_size
8 |
9 |
10 | class YtDlpDownloadStatus:
11 | def __init__(self, obj, listener, gid):
12 | self.__obj = obj
13 | self.__listener = listener
14 | self.__gid = gid
15 | self.message = listener.message
16 |
17 | def gid(self):
18 | return self.__gid
19 |
20 | def processed_bytes(self):
21 | return get_readable_file_size(self.processed_raw())
22 |
23 | def processed_raw(self):
24 | if self.__obj.downloaded_bytes != 0:
25 | return self.__obj.downloaded_bytes
26 | else:
27 | return run_async_to_sync(get_path_size, self.__listener.dir)
28 |
29 | def size(self):
30 | return get_readable_file_size(self.__obj.size)
31 |
32 | def status(self):
33 | return MirrorStatus.STATUS_DOWNLOADING
34 |
35 | def name(self):
36 | return self.__obj.name
37 |
38 | def progress(self):
39 | return f"{round(self.__obj.progress, 2)}%"
40 |
41 | def speed(self):
42 | return f"{get_readable_file_size(self.__obj.download_speed)}/s"
43 |
44 | def eta(self):
45 | if self.__obj.eta != "-":
46 | return get_readable_time(self.__obj.eta)
47 | try:
48 | seconds = (
49 | self.__obj.size - self.processed_raw()
50 | ) / self.__obj.download_speed
51 | return get_readable_time(seconds)
52 | except:
53 | return "-"
54 |
55 | def task(self):
56 | return self.__obj
57 |
58 | def type(self):
59 | return "Ytdl"
60 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/status_utils/zip_status.py:
--------------------------------------------------------------------------------
1 | from time import time
2 | from bot import LOGGER, config_dict
3 | from bot.helper.ext_utils.bot_utils import (
4 | get_readable_file_size,
5 | MirrorStatus,
6 | get_readable_time,
7 | run_async_to_sync,
8 | )
9 | from bot.helper.ext_utils.misc_utils import get_path_size
10 |
11 |
12 | class ZipStatus:
13 | def __init__(self, name, size, gid, listener):
14 | self.__name = name
15 | self.__size = size
16 | self.__gid = gid
17 | self.__listener = listener
18 | self.__start_time = time()
19 | self.message = listener.message
20 |
21 | def gid(self):
22 | return self.__gid
23 |
24 | def speed_raw(self):
25 | return self.processed_raw() / (time() - self.__start_time)
26 |
27 | def progress_raw(self):
28 | try:
29 | return self.processed_raw() / self.__size * 100
30 | except:
31 | return 0
32 |
33 | def progress(self):
34 | return f"{round(self.progress_raw(), 2)}%"
35 |
36 | def speed(self):
37 | return f"{get_readable_file_size(self.speed_raw())}/s"
38 |
39 | def name(self):
40 | return self.__name
41 |
42 | def size(self):
43 | return get_readable_file_size(self.__size)
44 |
45 | def eta(self):
46 | try:
47 | seconds = (self.__size - self.processed_raw()) / self.speed_raw()
48 | return get_readable_time(seconds)
49 | except:
50 | return "-"
51 |
52 | def status(self):
53 | return MirrorStatus.STATUS_ARCHIVING
54 |
55 | def processed_raw(self):
56 | if self.__listener.newDir:
57 | return run_async_to_sync(get_path_size, self.__listener.newDir)
58 | else:
59 | return run_async_to_sync(get_path_size, self.__listener.dir) - self.__size
60 |
61 | def processed_bytes(self):
62 | return get_readable_file_size(self.processed_raw())
63 |
64 | def task(self):
65 | return self
66 |
67 | async def cancel_task(self):
68 | if not config_dict["NO_TASKS_LOGS"]:
69 | LOGGER.info(f"Cancelling Archive: {self.__name}")
70 | if self.__listener.suproc is not None:
71 | self.__listener.suproc.kill()
72 | else:
73 | self.__listener.suproc = "cancelled"
74 | await self.__listener.onUploadError("archiving stopped by user!")
75 |
76 | def type(self):
77 | return "Zip"
78 |
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/upload_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sam-Max/rcmltb/49571f29c02f948c2327f1bc5aebbb93b352ef44/bot/helper/mirror_leech_utils/upload_utils/__init__.py
--------------------------------------------------------------------------------
/bot/helper/mirror_leech_utils/upload_utils/rclone_mirror.py:
--------------------------------------------------------------------------------
1 | from asyncio import create_subprocess_exec
2 | from asyncio.subprocess import PIPE
3 | from random import SystemRandom
4 | from os import path as ospath, remove as osremove, walk
5 | from string import ascii_letters, digits
6 | from bot import (
7 | GLOBAL_EXTENSION_FILTER,
8 | status_dict,
9 | status_dict_lock,
10 | remotes_multi,
11 | config_dict,
12 | )
13 | from bot.helper.telegram_helper.filters import CustomFilters
14 | from bot.helper.ext_utils.human_format import get_readable_file_size
15 | from bot.helper.telegram_helper.message_utils import sendStatusMessage
16 | from bot.helper.ext_utils.misc_utils import clean_download, count_files_and_folders
17 | from bot.helper.ext_utils.rclone_data_holder import get_rclone_data
18 | from bot.helper.ext_utils.rclone_utils import (
19 | get_rclone_path,
20 | is_gdrive_remote,
21 | setRcloneFlags,
22 | )
23 | from bot.helper.mirror_leech_utils.status_utils.rclone_status import RcloneStatus
24 | from bot.helper.mirror_leech_utils.status_utils.status_utils import MirrorStatus
25 |
26 |
27 | class RcloneMirror:
28 | def __init__(self, path, name, size, user_id, listener):
29 | self.__path = path
30 | self.__listener = listener
31 | self.message = self.__listener.message
32 | self.__user_id = user_id
33 | self.name = name
34 | self.size = size
35 | self.process = None
36 | self.__is_cancelled = False
37 | self.status_type = MirrorStatus.STATUS_UPLOADING
38 |
39 | async def mirror(self):
40 | await self.delete_files_with_extensions()
41 |
42 | if ospath.isdir(self.__path):
43 | mime_type = "Folder"
44 | else:
45 | mime_type = "File"
46 |
47 | conf_path = await get_rclone_path(self.__user_id, self.message)
48 | folder_name = self.name.replace(".", "")
49 | is_multi_remote_up = config_dict["MULTI_REMOTE_UP"]
50 |         is_sudo_filter = await CustomFilters.sudo_filter("", self.message)
51 |
52 | if config_dict["MULTI_RCLONE_CONFIG"] or is_sudo_filter:
53 | if is_multi_remote_up and len(remotes_multi) > 0:
54 | for rc_remote in remotes_multi:
55 |                     await self.upload(self.__path, conf_path, mime_type, rc_remote, folder_name)
56 | await clean_download(self.__path)
57 | return
58 |
59 | rc_remote = get_rclone_data("MIRROR_SELECT_REMOTE", self.__user_id)
60 | base_dir = get_rclone_data("MIRROR_SELECT_BASE_DIR", self.__user_id)
61 |
62 | await self.upload(
63 | self.__path, conf_path, mime_type, rc_remote, folder_name, base_dir
64 | )
65 | else:
66 | DEFAULT_GLOBAL_REMOTE = config_dict["DEFAULT_GLOBAL_REMOTE"]
67 | if DEFAULT_GLOBAL_REMOTE:
68 | await self.upload(
69 | self.__path,
70 | conf_path,
71 | mime_type,
72 | DEFAULT_GLOBAL_REMOTE,
73 | folder_name,
74 | )
75 | else:
76 | await self.__listener.onUploadError("DEFAULT_GLOBAL_REMOTE not found")
77 | return
78 |
79 | async def upload(
80 | self, path, conf_path, mime_type, remote, folder_name, base_dir=""
81 | ):
82 | if mime_type == "Folder":
83 | self.name = folder_name
84 | if base_dir:
85 | rclone_path = f"{remote}:{base_dir}{folder_name}"
86 | else:
87 | rclone_path = f"{remote}:/{folder_name}"
88 | else:
89 | if base_dir:
90 | rclone_path = f"{remote}:{base_dir}"
91 | else:
92 | rclone_path = f"{remote}:/"
93 |
94 | cmd = [
95 | "rclone",
96 | "copy",
97 | f"--config={conf_path}",
98 | f"{path}",
99 | rclone_path,
100 | "-P",
101 | ]
102 |
103 | is_gdrive = is_gdrive_remote(remote, conf_path)
104 | await setRcloneFlags(cmd, "upload")
105 |
106 | if ospath.isdir(path):
107 | folders, files = await count_files_and_folders(path)
108 | else:
109 | if path.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)):
110 | await self.__listener.onUploadError(
111 | "This file extension is excluded by extension filter!"
112 | )
113 | return
114 | folders = 0
115 | files = 1
116 |
117 | gid = "".join(SystemRandom().choices(ascii_letters + digits, k=10))
118 | async with status_dict_lock:
119 | status = RcloneStatus(self, self.__listener, gid)
120 | status_dict[self.__listener.uid] = status
121 | await sendStatusMessage(self.message)
122 |
123 | self.process = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
124 | await status.start()
125 | return_code = await self.process.wait()
126 | if self.__is_cancelled:
127 | return
128 | if return_code == 0:
129 | size = get_readable_file_size(self.size)
130 |
131 | await self.__listener.onUploadComplete(
132 | None,
133 | size,
134 | files,
135 | folders,
136 | mime_type,
137 | self.name,
138 | rclone_config=conf_path,
139 | rclone_path=rclone_path,
140 | is_gdrive=is_gdrive,
141 | )
142 | else:
143 | error = (await self.process.stderr.read()).decode().strip()
144 | await self.__listener.onUploadError(f"Error: {error}!")
145 |
146 | async def delete_files_with_extensions(self):
147 | for dirpath, _, files in walk(self.__path):
148 | for file in files:
149 | if file.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)):
150 | try:
151 | del_file = ospath.join(dirpath, file)
152 | osremove(del_file)
153 |                 except OSError:
154 |                     continue
155 |
156 | async def cancel_task(self):
157 | self.__is_cancelled = True
158 | if self.process is not None:
159 | try:
160 | self.process.kill()
161 | except:
162 | pass
163 | await self.__listener.onUploadError("Upload cancelled!")
164 |
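For a folder mirror, upload() above ends up spawning a command of this shape (all names and paths here are hypothetical):

# Hypothetical example of the argv that upload() assembles above.
conf_path = "/usr/src/app/rclone.conf"       # from get_rclone_path()
rclone_path = "gdrive:backups/MyFolder"      # f"{remote}:{base_dir}{folder_name}"
cmd = [
    "rclone",
    "copy",
    f"--config={conf_path}",
    "/usr/src/app/downloads/MyFolder",       # local source path
    rclone_path,
    "-P",                                    # progress on stdout for RcloneStatus
]
# setRcloneFlags(cmd, "upload") may append further flags before execution.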
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/bot_commands.py:
--------------------------------------------------------------------------------
1 | from bot import CMD_INDEX
2 |
3 |
4 | class _BotCommands:
5 | def __init__(self):
6 | self.StartCommand = f"start{CMD_INDEX}"
7 | self.MirrorCommand = [f"mirror{CMD_INDEX}", f"m{CMD_INDEX}"]
8 | self.MirrorBatchCommand = [f"mirror_batch{CMD_INDEX}", f"mb{CMD_INDEX}"]
9 | self.MirrorSelectCommand = [f"mirror_select{CMD_INDEX}", f"ms{CMD_INDEX}"]
10 | self.LeechCommand = [f"leech{CMD_INDEX}", f"l{CMD_INDEX}"]
11 | self.LeechBatchCommand = [f"leech_batch{CMD_INDEX}", f"lb{CMD_INDEX}"]
12 | self.YtdlMirrorCommand = [f"ytdl{CMD_INDEX}", f"y{CMD_INDEX}"]
13 | self.YtdlLeechCommand = [f"ytdl_leech{CMD_INDEX}", f"yl{CMD_INDEX}"]
14 | self.BotFilesCommand = [f"files{CMD_INDEX}", f"bf{CMD_INDEX}"]
15 | self.CopyCommand = f"copy{CMD_INDEX}"
16 | self.CloneCommand = f"clone{CMD_INDEX}"
17 | self.CountCommand = f"count{CMD_INDEX}"
18 | self.TorrentSearchCommand = f"torrsch{CMD_INDEX}"
19 | self.StatusCommand = f"status{CMD_INDEX}"
20 | self.StatsCommand = f"stats{CMD_INDEX}"
21 | self.ShellCommand = f"shell{CMD_INDEX}"
22 | self.ExecCommand = f"exec{CMD_INDEX}"
23 | self.ServeCommand = f"serve{CMD_INDEX}"
24 | self.SyncCommand = f"sync{CMD_INDEX}"
25 | self.RcfmCommand = f"rcfm{CMD_INDEX}"
26 | self.StorageCommand = f"storage{CMD_INDEX}"
27 | self.CleanupCommand = f"cleanup{CMD_INDEX}"
28 | self.BiSyncCommand = f"bisync{CMD_INDEX}"
29 | self.UserSetCommand = f"user_setting{CMD_INDEX}"
30 | self.OwnerSetCommand = f"own_setting{CMD_INDEX}"
31 | self.CancelAllCommand = f"cancel_all{CMD_INDEX}"
32 | self.CancelCommand = f"cancel{CMD_INDEX}"
33 | self.RssCommand = f"rss{CMD_INDEX}"
34 | self.LogsCommand = f"log{CMD_INDEX}"
35 | self.RestartCommand = f"restart{CMD_INDEX}"
36 | self.PingCommand = f"ping{CMD_INDEX}"
37 | self.IpCommand = f"ip{CMD_INDEX}"
38 | self.TMDB = f"tmdb{CMD_INDEX}"
39 | self.Debrid = f"debrid{CMD_INDEX}"
40 | self.DebridInfo = f"info{CMD_INDEX}"
41 |
42 |
43 | BotCommands = _BotCommands()
44 |
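CMD_INDEX is a per-instance suffix, so several bot instances can share one chat without command collisions. A short illustration, assuming a hypothetical CMD_INDEX of "2":

# Assuming CMD_INDEX = "2" (hypothetical), the commands resolve to:
#   BotCommands.StartCommand  -> "start2"
#   BotCommands.MirrorCommand -> ["mirror2", "m2"]
#   BotCommands.LeechCommand  -> ["leech2", "l2"]
# With CMD_INDEX = "", the plain /start, /mirror, /leech forms apply.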
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/button_build.py:
--------------------------------------------------------------------------------
1 | from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton
2 |
3 |
4 | class ButtonMaker:
5 | def __init__(self):
6 | self.first_button = []
7 | self.__header_button = []
8 | self.__footer_button = []
9 | self.__footer_second_button = []
10 | self.__footer_third_button = []
11 |
12 | def url_buildbutton(self, key, link):
13 | self.first_button.append(InlineKeyboardButton(text=key, url=link))
14 |
15 | def cb_buildbutton(self, key, data, position=None):
16 | if not position:
17 | self.first_button.append(InlineKeyboardButton(text=key, callback_data=data))
18 | elif position == "header":
19 | self.__header_button.append(
20 | InlineKeyboardButton(text=key, callback_data=data)
21 | )
22 | elif position == "footer":
23 | self.__footer_button.append(
24 | InlineKeyboardButton(text=key, callback_data=data)
25 | )
26 | elif position == "footer_second":
27 | self.__footer_second_button.append(
28 | InlineKeyboardButton(text=key, callback_data=data)
29 | )
30 | elif position == "footer_third":
31 | self.__footer_third_button.append(
32 | InlineKeyboardButton(text=key, callback_data=data)
33 | )
34 |
35 | def build_menu(self, n_cols=1):
36 | menu = [
37 | self.first_button[i : i + n_cols]
38 | for i in range(0, len(self.first_button), n_cols)
39 | ]
40 | if self.__header_button:
41 | menu.insert(0, self.__header_button)
42 | if self.__footer_button:
43 | if len(self.__footer_button) > 8:
44 |                 for i in range(0, len(self.__footer_button), 8):
45 |                     menu.append(
46 |                         self.__footer_button[i : i + 8]
47 |                     )
48 | else:
49 | menu.append(self.__footer_button)
50 | if self.__footer_second_button:
51 | menu.append(self.__footer_second_button)
52 | if self.__footer_third_button:
53 | menu.append(self.__footer_third_button)
54 | return InlineKeyboardMarkup(menu)
55 |
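A minimal usage sketch of ButtonMaker (the callback-data strings are hypothetical): build_menu() chunks first_button into n_cols columns and pins the header and footer rows around the body.

# Minimal usage sketch; the callback-data strings are hypothetical.
buttons = ButtonMaker()
buttons.cb_buildbutton("Downloading", "canall Downloading")
buttons.cb_buildbutton("Uploading", "canall Uploading")
buttons.cb_buildbutton("Close", "canall close", position="footer")
markup = buttons.build_menu(n_cols=2)
# -> InlineKeyboardMarkup: one 2-column body row plus a footer row,
#    ready to pass as reply_markup to sendMarkup()/editMarkup().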
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/filters.py:
--------------------------------------------------------------------------------
1 | from pyrogram.filters import create
2 | from bot import OWNER_ID, user_data
3 |
4 |
5 | class CustomFilters:
6 | async def custom_owner_filter(self, client, update):
7 |         uid = update.from_user.id if update.from_user else update.sender_chat.id
8 | return uid == OWNER_ID
9 |
10 | owner_filter = create(custom_owner_filter)
11 |
12 | async def custom_chat_filter(self, client, update):
13 | chat_id = update.chat.id
14 | return chat_id in user_data and user_data[chat_id].get("is_auth", False)
15 |
16 | chat_filter = create(custom_chat_filter)
17 |
18 | async def custom_user_filter(self, client, update):
19 |         uid = update.from_user.id if update.from_user else update.sender_chat.id
20 | return (
21 | uid == OWNER_ID
22 | or uid in user_data
23 | and (
24 | user_data[uid].get("is_auth", False)
25 | or user_data[uid].get("is_sudo", False)
26 | )
27 | )
28 |
29 | user_filter = create(custom_user_filter)
30 |
31 | async def custom_sudo_filter(self, client, update):
32 |         uid = update.from_user.id if update.from_user else update.sender_chat.id
33 |         return bool(uid == OWNER_ID or (uid in user_data and user_data[uid].get("is_sudo")))
34 |
35 | sudo_filter = create(custom_sudo_filter)
36 |
37 |
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/message_utils.py:
--------------------------------------------------------------------------------
1 | from asyncio import sleep
2 | from time import time
3 | from bot import (
4 | LOGGER,
5 | bot,
6 | Interval,
7 | app,
8 | config_dict,
9 | status_reply_dict_lock,
10 | status_reply_dict,
11 | )
12 | from pyrogram.errors.exceptions import FloodWait, MessageNotModified
13 | from pyrogram.enums.parse_mode import ParseMode
14 | from bot.helper.ext_utils.bot_utils import get_readable_message, run_sync_to_async, setInterval
15 |
16 |
17 | async def sendMessage(text: str, message, reply_markup=None):
18 | try:
19 | return await bot.send_message(
20 | message.chat.id,
21 | reply_to_message_id=message.id,
22 | text=text,
23 | reply_markup=reply_markup,
24 | disable_web_page_preview=True,
25 | )
26 | except FloodWait as fw:
27 | await sleep(fw.value * 1.2)
28 | return await sendMessage(text, message, reply_markup)
29 | except Exception as e:
30 | LOGGER.error(str(e))
31 |
32 |
33 | async def sendPhoto(text, message, path, reply_markup):
34 | try:
35 | return await bot.send_photo(
36 | chat_id=message.chat.id,
37 | reply_to_message_id=message.id,
38 | photo=path,
39 | caption=text,
40 | reply_markup=reply_markup,
41 | )
42 | except FloodWait as fw:
43 | await sleep(fw.value * 1.2)
44 | return await sendPhoto(text, message, path, reply_markup)
45 | except Exception as e:
46 | LOGGER.error(str(e))
47 |
48 |
49 | async def sendMarkup(text: str, message, reply_markup):
50 | try:
51 | return await bot.send_message(
52 | message.chat.id,
53 | reply_to_message_id=message.id,
54 | text=text,
55 | reply_markup=reply_markup,
56 | )
57 | except FloodWait as fw:
58 | await sleep(fw.value * 1.2)
59 | return await sendMarkup(text, message, reply_markup)
60 | except Exception as e:
61 | LOGGER.error(str(e))
62 |
63 |
64 | async def editMarkup(text: str, message, reply_markup):
65 | try:
66 | return await bot.edit_message_text(
67 | message.chat.id, message.id, text=text, reply_markup=reply_markup
68 | )
69 | except FloodWait as fw:
70 | await sleep(fw.value * 1.2)
71 | return await editMarkup(text, message, reply_markup)
72 | except MessageNotModified:
73 | await sleep(1)
74 | except Exception as e:
75 | LOGGER.error(str(e))
76 |
77 |
78 | async def editMessage(text: str, message, reply_markup=None):
79 | try:
80 | return await bot.edit_message_text(
81 | text=text,
82 | message_id=message.id,
83 | disable_web_page_preview=True,
84 | chat_id=message.chat.id,
85 | reply_markup=reply_markup,
86 | )
87 | except FloodWait as fw:
88 | await sleep(fw.value * 1.2)
89 | return await editMessage(text, message, reply_markup)
90 | except MessageNotModified:
91 | await sleep(1)
92 | except Exception as e:
93 | LOGGER.error(str(e))
94 | return str(e)
95 |
96 |
97 | async def sendRss(text):
98 | try:
99 | if app:
100 | return await app.send_message(
101 | chat_id=config_dict["RSS_CHAT_ID"],
102 | text=text,
103 | disable_web_page_preview=True,
104 | disable_notification=True,
105 | )
106 | else:
107 | return await bot.send_message(
108 | chat_id=config_dict["RSS_CHAT_ID"],
109 | text=text,
110 | disable_web_page_preview=True,
111 | disable_notification=True,
112 | )
113 | except FloodWait as f:
114 | LOGGER.warning(str(f))
115 | await sleep(f.value * 1.2)
116 | return await sendRss(text)
117 | except Exception as e:
118 | LOGGER.error(str(e))
119 | return str(e)
120 |
121 |
122 | async def deleteMessage(message):
123 | try:
124 | await bot.delete_messages(chat_id=message.chat.id, message_ids=message.id)
125 | except Exception as e:
126 | LOGGER.error(str(e))
127 |
128 |
129 | async def sendFile(message, file, caption=""):
130 | try:
131 | return await bot.send_document(
132 | document=file,
133 | reply_to_message_id=message.id,
134 | caption=caption,
135 | parse_mode=ParseMode.HTML,
136 | chat_id=message.chat.id,
137 | )
138 | except FloodWait as fw:
139 | await sleep(fw.value * 1.2)
140 | return await sendFile(message, file, caption)
141 | except Exception as e:
142 | LOGGER.error(str(e))
143 | return
144 |
145 |
146 | async def delete_all_messages():
147 | async with status_reply_dict_lock:
148 | for key, data in list(status_reply_dict.items()):
149 | try:
150 | del status_reply_dict[key]
151 | await deleteMessage(data[0])
152 | except Exception as e:
153 | LOGGER.error(str(e))
154 |
155 |
156 | async def update_all_messages(force=False):
157 | async with status_reply_dict_lock:
158 | if (
159 | not status_reply_dict
160 | or not Interval
161 | or (not force and time() - list(status_reply_dict.values())[0][1] < 3)
162 | ):
163 | return
164 | for chat_id in list(status_reply_dict.keys()):
165 | status_reply_dict[chat_id][1] = time()
166 | msg, buttons = await run_sync_to_async(get_readable_message)
167 | if msg is None:
168 | return
169 | async with status_reply_dict_lock:
170 | for chat_id in list(status_reply_dict.keys()):
171 | if status_reply_dict[chat_id] and msg != status_reply_dict[chat_id][0].text:
172 | rmsg = await editMessage(msg, status_reply_dict[chat_id][0], buttons)
173 | if isinstance(rmsg, str) and rmsg.startswith("Telegram says: [400"):
174 | del status_reply_dict[chat_id]
175 | continue
176 | status_reply_dict[chat_id][0].text = msg
177 | status_reply_dict[chat_id][1] = time()
178 |
179 |
180 | async def sendStatusMessage(msg):
181 | progress, buttons = await run_sync_to_async(get_readable_message)
182 | if progress is None:
183 | return
184 | async with status_reply_dict_lock:
185 | chat_id = msg.chat.id
186 | if chat_id in list(status_reply_dict.keys()):
187 | message = status_reply_dict[chat_id][0]
188 | await deleteMessage(message)
189 | del status_reply_dict[chat_id]
190 | message = await sendMarkup(progress, msg, buttons)
191 | message.text = progress
192 | status_reply_dict[chat_id] = [message, time()]
193 | if not Interval:
194 | Interval.append(
195 | setInterval(config_dict["STATUS_UPDATE_INTERVAL"], update_all_messages)
196 | )
197 |
198 |
199 | async def auto_delete_message(cmd_message=None, bot_message=None):
200 | if config_dict["AUTO_DELETE_MESSAGE_DURATION"] != -1:
201 | await sleep(config_dict["AUTO_DELETE_MESSAGE_DURATION"])
202 | if cmd_message is not None:
203 | await deleteMessage(cmd_message)
204 | if bot_message is not None:
205 | await deleteMessage(bot_message)
206 |
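Every sender above repeats the same FloodWait strategy: sleep 20% longer than Telegram's advised wait, then retry the call. A generic sketch of that pattern (the helper name is hypothetical, not part of this module):

# Hypothetical helper distilling the FloodWait retry pattern used above.
from asyncio import sleep
from pyrogram.errors.exceptions import FloodWait

async def retry_on_floodwait(make_call):
    """make_call: a zero-arg callable returning a fresh coroutine per attempt."""
    while True:
        try:
            return await make_call()
        except FloodWait as fw:
            await sleep(fw.value * 1.2)  # 20% safety margin over the advised wait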
--------------------------------------------------------------------------------
/bot/modules/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sam-Max/rcmltb/49571f29c02f948c2327f1bc5aebbb93b352ef44/bot/modules/__init__.py
--------------------------------------------------------------------------------
/bot/modules/bisync.py:
--------------------------------------------------------------------------------
1 | from pyrogram.filters import command, regex
2 | from pyrogram.handlers import MessageHandler, CallbackQueryHandler
3 | from asyncio.subprocess import PIPE, create_subprocess_exec as exec
4 | from bot import bot, config_dict
5 | from bot.helper.ext_utils.menu_utils import Menus
6 | from bot.helper.telegram_helper.bot_commands import BotCommands
7 | from bot.helper.telegram_helper.filters import CustomFilters
8 | from bot.helper.ext_utils.rclone_utils import (
9 | get_rclone_path,
10 | is_rclone_config,
11 | list_remotes,
12 | )
13 | from bot.helper.telegram_helper.message_utils import editMarkup, sendMessage
14 | from bot.helper.telegram_helper.button_build import ButtonMaker
15 |
16 |
17 | sync_dict = {}
18 |
19 |
20 | async def handle_bisync(client, message):
21 | user_id = message.from_user.id
22 | if await is_rclone_config(user_id, message):
23 | await list_remotes(message, menu_type=Menus.SYNC, remote_type="origin")
24 |     # msg = "Select origin cloud"
25 |     # msg += "\n\nNote: Bisync checks for changes on each side and propagates changes from Origin to Destination, and vice versa."
26 |
27 |
28 | async def bisync_cb(client, callback_query):
29 |     query = callback_query
30 | data = query.data
31 | data = data.split("^")
32 | message = query.message
33 | user_id = query.from_user.id
34 | path = await get_rclone_path(user_id, message)
35 |
36 | if data[1] == "origin":
37 | await query.answer()
38 | sync_dict["origin"] = data[2]
39 | await list_remotes(message, menu_type=Menus.SYNC, remote_type="destination")
40 | elif data[1] == "destination":
41 | await query.answer()
42 | sync_dict["destination"] = data[2]
43 | await start_bisync(message, path)
44 | else:
45 | await query.answer()
46 | await message.delete()
47 |
48 |
49 | async def start_bisync(message, path):
50 | origin = sync_dict["origin"]
51 | destination = sync_dict["destination"]
52 | if config_dict["SERVER_SIDE"]:
53 | cmd = [
54 | "rclone",
55 |         "bisync",
56 | "--server-side-across-configs",
57 | "--remove-empty-dirs",
58 | "--resync",
59 | f"--config={path}",
60 | f"{origin}:",
61 | f"{destination}:",
62 | ]
63 | else:
64 | cmd = [
65 | "rclone",
66 | "bisync",
67 | "--remove-empty-dirs",
68 | "--resync",
69 | f"--config={path}",
70 | f"{origin}:",
71 | f"{destination}:",
72 | ]
73 | process = await exec(*cmd, stdout=PIPE, stderr=PIPE)
74 | button = ButtonMaker()
75 | msg = f"Syncing: {origin} 🔄 {destination}"
76 |     button.cb_buildbutton("Stop", "bisyncmenu^stop")
77 | await editMarkup(msg, message, button.build_menu(1))
78 | return_code = await process.wait()
79 | if return_code != 0:
80 |         err = (await process.stderr.read()).decode().strip()
81 |         msg = f"Error: {err}"
82 | await sendMessage(msg, message)
83 | await message.delete()
84 |
85 |
86 | bot.add_handler(MessageHandler(
87 | handle_bisync,
88 | filters=command(BotCommands.BiSyncCommand)
89 | & (CustomFilters.user_filter | CustomFilters.chat_filter),
90 | ))
91 | bot.add_handler(CallbackQueryHandler(bisync_cb, filters=regex("bisyncmenu")))
92 |
--------------------------------------------------------------------------------
/bot/modules/cancel.py:
--------------------------------------------------------------------------------
1 | from asyncio import sleep
2 | from bot.modules.queue import queue
3 | from pyrogram.filters import regex
4 | from bot import (
5 | status_dict_lock,
6 | OWNER_ID,
7 | bot,
8 | status_dict,
9 | user_data,
10 | )
11 | from bot.helper.telegram_helper.bot_commands import BotCommands
12 | from bot.helper.telegram_helper.filters import CustomFilters
13 | from pyrogram.handlers import CallbackQueryHandler, MessageHandler
14 | from pyrogram import filters
15 | from bot.helper.telegram_helper.message_utils import sendMarkup, sendMessage
16 | from bot.helper.telegram_helper.button_build import ButtonMaker
17 | from bot.helper.ext_utils.misc_utils import getAllTasks, getTaskByGid
18 | from bot.helper.mirror_leech_utils.status_utils.status_utils import MirrorStatus
19 |
20 |
21 | async def cancel_task(client, message):
22 | user_id = message.from_user.id if message.from_user else message.sender_chat.id
23 | msg = message.text.split()
24 | if len(msg) > 1:
25 | gid = msg[1]
26 | task = await getTaskByGid(gid)
27 | if task is None:
28 |             await sendMessage(f"GID: {gid} Not Found.", message)
29 | return
30 | elif reply_to_id := message.reply_to_message_id:
31 | async with status_dict_lock:
32 | task = status_dict.get(reply_to_id, None)
33 | if task is None:
34 |                 await sendMessage("This is not an active task!", message)
35 | return
36 | elif len(msg) == 1:
37 | msg = (
38 | "Reply to an active Command message which was used to start the download"
39 |             f" or send /{BotCommands.CancelCommand} GID to cancel it!"
40 | )
41 |         await sendMessage(msg, message)
42 | return
43 |
44 | if (
45 | OWNER_ID != user_id
46 | and task.message.from_user.id != user_id
47 | and (user_id not in user_data or not user_data[user_id].get("is_sudo"))
48 | ):
49 | await sendMessage("This is not for you!", message)
50 | return
51 |
52 | obj = task.task()
53 | if task.type() == "RcloneSync":
54 |         obj.kill()
55 | else:
56 | await obj.cancel_task()
57 |
58 |
59 | async def cancel_all_buttons(client, message):
60 | async with status_dict_lock:
61 | count = len(status_dict)
62 | if count == 0:
63 | await sendMessage("No active tasks", message)
64 | return
65 | buttons = ButtonMaker()
66 | buttons.cb_buildbutton("Downloading", f"canall {MirrorStatus.STATUS_DOWNLOADING}")
67 | buttons.cb_buildbutton("Uploading", f"canall {MirrorStatus.STATUS_UPLOADING}")
68 | buttons.cb_buildbutton("Seeding", f"canall {MirrorStatus.STATUS_SEEDING}")
69 | buttons.cb_buildbutton("Cloning", f"canall {MirrorStatus.STATUS_CLONING}")
70 | buttons.cb_buildbutton("Splitting", f"canall {MirrorStatus.STATUS_SPLITTING}")
71 | buttons.cb_buildbutton("Extracting", f"canall {MirrorStatus.STATUS_EXTRACTING}")
72 | buttons.cb_buildbutton("Archiving", f"canall {MirrorStatus.STATUS_ARCHIVING}")
73 | buttons.cb_buildbutton("QueuedDl", f"canall {MirrorStatus.STATUS_QUEUEDL}")
74 | buttons.cb_buildbutton("QueuedUp", f"canall {MirrorStatus.STATUS_QUEUEUP}")
75 | buttons.cb_buildbutton("Paused", f"canall {MirrorStatus.STATUS_PAUSED}")
76 | buttons.cb_buildbutton("All", "canall all")
77 | buttons.cb_buildbutton("Close", "canall close")
78 | await sendMarkup("Choose tasks to cancel.", message, buttons.build_menu(2))
79 |
80 |
81 | async def cancel_all_update(client, query):
82 | message = query.message
83 | data = query.data.split()
84 | await query.answer()
85 | if data[1] == "close":
86 | await query.message.delete()
87 | else:
88 | res = await cancel_all_(data[1])
89 | if not res:
90 | await sendMessage(f"No matching tasks for {data[1]}!", message)
91 |
92 |
93 | async def cancel_all_(status):
94 | tasks = await getAllTasks(status)
95 | if not tasks:
96 | return False
97 | for task in tasks:
98 | obj = task.task()
99 | await obj.cancel_task()
100 | await sleep(2)
101 | return True
102 |
103 |
104 | bot.add_handler(
105 | MessageHandler(
106 | cancel_task,
107 | filters.command(BotCommands.CancelCommand)
108 | & (CustomFilters.user_filter | CustomFilters.chat_filter),
109 | )
110 | )
111 | bot.add_handler(
112 | MessageHandler(
113 |         cancel_all_buttons,
114 | filters=filters.command(BotCommands.CancelAllCommand)
115 | & (CustomFilters.owner_filter | CustomFilters.sudo_filter),
116 | )
117 | )
118 | bot.add_handler(
119 | CallbackQueryHandler(
120 | cancel_all_update,
121 | filters=regex("canall")
122 | & (CustomFilters.owner_filter | CustomFilters.sudo_filter),
123 | )
124 | )
125 |
--------------------------------------------------------------------------------
/bot/modules/cleanup.py:
--------------------------------------------------------------------------------
1 | from pyrogram.filters import command, regex
2 | from pyrogram.handlers import MessageHandler, CallbackQueryHandler
3 | from asyncio.subprocess import PIPE, create_subprocess_exec as exec
4 | from bot import bot
5 | from bot.helper.ext_utils.menu_utils import Menus
6 | from bot.helper.telegram_helper.bot_commands import BotCommands
7 | from bot.helper.telegram_helper.filters import CustomFilters
8 | from bot.helper.telegram_helper.message_utils import editMessage, sendMessage
9 | from bot.helper.ext_utils.rclone_utils import (
10 | get_rclone_path,
11 | is_rclone_config,
12 | list_remotes,
13 | )
14 |
15 |
16 | async def cleanup(client, message):
17 | if await is_rclone_config(message.from_user.id, message):
18 | await list_remotes(message, menu_type=Menus.CLEANUP)
19 |
20 |
21 | async def cleanup_callback(client, callback_query):
22 | query = callback_query
23 | data = query.data
24 | cmd = data.split("^")
25 | message = query.message
26 | tag = f"@{message.reply_to_message.from_user.username}"
27 | user_id = query.from_user.id
28 |
29 | if int(cmd[-1]) != user_id:
30 | await query.answer("This menu is not for you!", show_alert=True)
31 | return
32 | if cmd[1] == "remote":
33 | await rclone_cleanup(message, cmd[2], user_id, tag)
34 | elif cmd[1] == "back":
35 |         await list_remotes(message, menu_type=Menus.CLEANUP, edit=True)
36 | await query.answer()
37 | else:
38 | await query.answer()
39 | await message.delete()
40 |
41 |
42 | async def rclone_cleanup(message, remote_name, user_id, tag):
43 | conf_path = await get_rclone_path(user_id, message)
44 | msg = "**⏳Cleaning remote trash**\n"
45 | msg += "\nIt may take some time depending on number of files"
46 | edit_msg = await editMessage(msg, message)
47 | cmd = ["rclone", "cleanup", f"--config={conf_path}", f"{remote_name}:"]
48 | process = await exec(*cmd, stdout=PIPE, stderr=PIPE)
49 | stdout, stderr = await process.communicate()
50 |     return_code = process.returncode
51 | stdout = stdout.decode().strip()
52 | if return_code != 0:
53 | err = stderr.decode().strip()
54 | await sendMessage(f"Error: {err}", message)
55 | else:
56 | msg = "Trash successfully cleaned ✅\n"
57 | msg += f"cc: {tag}\n"
58 | await editMessage(msg, edit_msg)
59 |
60 |
61 | bot.add_handler(MessageHandler(
62 | cleanup,
63 | filters=command(BotCommands.CleanupCommand)
64 | & (CustomFilters.user_filter | CustomFilters.chat_filter),
65 | ))
66 | bot.add_handler(CallbackQueryHandler(cleanup_callback, filters=regex("cleanupmenu")))
67 |
--------------------------------------------------------------------------------
/bot/modules/clone.py:
--------------------------------------------------------------------------------
1 | from argparse import ArgumentParser
2 | from asyncio import sleep
3 | from random import SystemRandom
4 | from string import ascii_letters, digits
5 | from bot.helper.ext_utils.human_format import get_readable_file_size
6 | from bot.helper.mirror_leech_utils.gd_utils.clone import gdClone
7 | from bot.helper.mirror_leech_utils.gd_utils.count import gdCount
8 | from pyrogram import filters
9 | from pyrogram.handlers import MessageHandler
10 | from bot import bot, LOGGER, status_dict, status_dict_lock, config_dict
11 | from bot.helper.mirror_leech_utils.download_utils import direct_link_generator
12 | from bot.helper.ext_utils.exceptions import DirectDownloadLinkException
13 | from bot.helper.ext_utils.help_messages import CLONE_HELP_MESSAGE
14 | from bot.helper.telegram_helper.bot_commands import BotCommands
15 | from bot.helper.ext_utils.bot_utils import (
16 | is_gdrive_id,
17 | is_gdrive_link,
18 | is_share_link,
19 | new_task,
20 | run_sync_to_async,
21 | )
22 | from bot.helper.telegram_helper.filters import CustomFilters
23 | from bot.helper.telegram_helper.message_utils import (
24 | deleteMessage,
25 | sendMessage,
26 | sendStatusMessage,
27 | )
28 | from bot.helper.mirror_leech_utils.status_utils.clone_status import CloneStatus
29 | from bot.modules.tasks_listener import TaskListener
30 |
31 |
32 | async def clone(client, message):
33 | message_list = message.text.split()
34 | user = message.from_user or message.sender_chat
35 |
36 | try:
37 | args = parser.parse_args(message_list[1:])
38 | except Exception:
39 | await sendMessage(CLONE_HELP_MESSAGE, message)
40 | return
41 |
42 | multi = args.multi
43 | link = " ".join(args.link)
44 |
45 | if username := user.username:
46 | tag = f"@{username}"
47 | else:
48 | tag = message.from_user.mention
49 |
50 | if not link and (reply_to := message.reply_to_message):
51 | link = reply_to.text.split("\n", 1)[0].strip()
52 |
53 | @new_task
54 | async def __run_multi():
55 | if multi > 1:
56 | await sleep(5)
57 | msg = [s.strip() for s in message_list]
58 | index = msg.index("-i")
59 | msg[index + 1] = f"{multi - 1}"
60 | nextmsg = await client.get_messages(
61 | chat_id=message.chat.id, message_ids=message.reply_to_message_id + 1
62 | )
63 | nextmsg = await sendMessage(" ".join(msg), nextmsg)
64 | nextmsg = await client.get_messages(
65 | chat_id=message.chat.id, message_ids=nextmsg.id
66 | )
67 | nextmsg.from_user = message.from_user
68 | await sleep(5)
69 | await clone(client, nextmsg)
70 |
71 | __run_multi()
72 |
73 | if not link:
74 | await sendMessage(CLONE_HELP_MESSAGE, message)
75 | return
76 |
77 | if is_share_link(link):
78 | try:
79 | link = await run_sync_to_async(direct_link_generator, link)
80 | LOGGER.info(f"Generated link: {link}")
81 | except DirectDownloadLinkException as e:
82 | LOGGER.error(str(e))
83 | if str(e).startswith("ERROR:"):
84 | await sendMessage(str(e), message)
85 | return
86 |
87 | if is_gdrive_link(link) or is_gdrive_id(link):
88 | name, mime_type, size, files, _ = await run_sync_to_async(
89 | gdCount().count, link, user.id
90 | )
91 | if mime_type is None:
92 | await sendMessage(name, message)
93 | return
94 | user_id = message.from_user.id
95 | listener = TaskListener(message, tag, user_id)
96 | drive = gdClone(link, listener)
97 | if files <= 20:
98 |         msg = await sendMessage(f"Cloning: {link}", message)
99 | else:
100 | msg = ""
101 | gid = "".join(SystemRandom().choices(ascii_letters + digits, k=12))
102 | async with status_dict_lock:
103 | status_dict[message.id] = CloneStatus(drive, size, message, gid)
104 | await sendStatusMessage(message)
105 | link, size, mime_type, files, folders, dir_id = await run_sync_to_async(
106 | drive.clone
107 | )
108 | if msg:
109 | await deleteMessage(msg)
110 | if not link:
111 | return
112 | if not config_dict["NO_TASKS_LOGS"]:
113 | LOGGER.info(f"Cloning Done: {name}")
114 | size = get_readable_file_size(size)
115 | await listener.onUploadComplete(
116 | link, size, files, folders, mime_type, name, is_gdrive=True, dir_id=dir_id
117 | )
118 | else:
119 | await sendMessage(CLONE_HELP_MESSAGE, message)
120 |
121 |
122 | parser = ArgumentParser(description="Clone args usage:")
123 | parser.add_argument("link", nargs="*", default="")
124 | parser.add_argument("-i", nargs="?", default=0, dest="multi", type=int)
125 |
126 | bot.add_handler(
127 | MessageHandler(
128 | clone,
129 | filters=filters.command(BotCommands.CloneCommand)
130 | & (CustomFilters.user_filter | CustomFilters.chat_filter),
131 | )
132 | )
133 |
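For reference, a hypothetical parse of "/clone <link> -i 3" through the parser defined above:

# Hypothetical parse of "/clone https://drive.google.com/... -i 3":
#   args = parser.parse_args(["https://drive.google.com/...", "-i", "3"])
#   args.link  -> ["https://drive.google.com/..."]   (re-joined with spaces)
#   args.multi -> 3, so __run_multi() re-issues the command with -i 2
#   against the next replied message, counting down to 1.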
--------------------------------------------------------------------------------
/bot/modules/exec.py:
--------------------------------------------------------------------------------
1 | from asyncio import create_subprocess_shell, subprocess
2 | from bot import bot
3 | from bot.helper.telegram_helper.bot_commands import BotCommands
4 | from bot.helper.telegram_helper.filters import CustomFilters
5 | from pyrogram.filters import command
6 | from pyrogram.handlers import MessageHandler
7 |
8 |
9 | async def execute(client, message):
10 | cmd = message.text.split(maxsplit=1)
11 | if len(cmd) == 1:
12 | await message.reply_text("No command to execute was given.")
13 | return
14 | cmd = cmd[1]
15 | process = await create_subprocess_shell(
16 | cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
17 | )
18 | stdout, stderr = await process.communicate()
19 | e = stderr.decode()
20 | if not e:
21 | e = "No Error"
22 | o = stdout.decode()
23 | if not o:
24 | o = "No Output"
25 | else:
26 | _o = o.split("\n")
27 | o = "`\n".join(_o)
28 | OUTPUT = f"**QUERY:**\n__Command:__\n`{cmd}` \n__PID:__\n`{process.pid}`\n\n**stderr:** \n`{e}`\n**Output:**\n{o}"
29 | if len(OUTPUT) > 3900:
30 | with open("exec.txt", "w") as out_file:
31 | out_file.write(str(OUTPUT))
32 | with open("exec.txt", "rb") as doc:
33 | await client.send_document(
34 | chat_id=message.chat.id,
35 | document=doc,
36 | file_name=doc.name,
37 | reply_to_message_id=message.id,
38 | )
39 | elif len(OUTPUT) != 0:
40 | await message.reply_text(OUTPUT)
41 | else:
42 | await message.reply_text("No Reply")
43 |
44 |
45 | bot.add_handler(
46 | MessageHandler(
47 | execute, filters=command(BotCommands.ExecCommand) & (CustomFilters.owner_filter)
48 | )
49 | )
50 |
--------------------------------------------------------------------------------
/bot/modules/gd_count.py:
--------------------------------------------------------------------------------
1 | from bot.helper.mirror_leech_utils.gd_utils.count import gdCount
2 | from pyrogram.handlers import MessageHandler
3 | from pyrogram.filters import command
4 | from bot import bot
5 | from bot.helper.telegram_helper.message_utils import deleteMessage, sendMessage
6 | from bot.helper.telegram_helper.filters import CustomFilters
7 | from bot.helper.telegram_helper.bot_commands import BotCommands
8 | from bot.helper.ext_utils.bot_utils import (
9 | is_gdrive_link,
10 | get_readable_file_size,
11 | run_sync_to_async,
12 | )
13 |
14 |
15 | async def count(_, message):
16 | args = message.text.split()
17 | user = message.from_user or message.sender_chat
18 | if username := user.username:
19 | tag = f"@{username}"
20 | else:
21 | tag = message.from_user.mention
22 |
23 | link = args[1] if len(args) > 1 else ""
24 | if len(link) == 0 and (reply_to := message.reply_to_message):
25 | link = reply_to.text.split(maxsplit=1)[0].strip()
26 |
27 | if is_gdrive_link(link):
28 |         msg = await sendMessage(f"Counting: {link}", message)
29 | name, mime_type, size, files, folders = await run_sync_to_async(
30 | gdCount().count, link, user.id
31 | )
32 | if mime_type is None:
33 | await sendMessage(name, message)
34 | return
35 | await deleteMessage(msg)
36 |         msg = f"Name: {name}"
37 | msg += f"\n\nSize: {get_readable_file_size(size)}"
38 | msg += f"\n\nType: {mime_type}"
39 | if mime_type == "Folder":
40 | msg += f"\nSubFolders: {folders}"
41 | msg += f"\nFiles: {files}"
42 | msg += f"\n\ncc: {tag}"
43 | else:
44 | msg = (
45 | "Send Gdrive link along with command or by replying to the link by command"
46 | )
47 |
48 | await sendMessage(msg, message)
49 |
50 |
51 | bot.add_handler(
52 | MessageHandler(
53 | count, filters=command(BotCommands.CountCommand) & CustomFilters.user_filter
54 | )
55 | )
56 |
--------------------------------------------------------------------------------
/bot/modules/mirror_select.py:
--------------------------------------------------------------------------------
1 | from bot import OWNER_ID, bot, config_dict, remotes_multi
2 | from bot.helper.ext_utils.bot_utils import run_sync_to_async
3 | from bot.helper.telegram_helper.bot_commands import BotCommands
4 | from bot.helper.telegram_helper.filters import CustomFilters
5 | from pyrogram.handlers import CallbackQueryHandler, MessageHandler
6 | from pyrogram.filters import regex
7 | from pyrogram import filters
8 | from bot.helper.ext_utils.menu_utils import (
9 | Menus,
10 | rcloneListButtonMaker,
11 | rcloneListNextPage,
12 | )
13 | from bot.helper.telegram_helper.message_utils import editMessage, sendMessage
14 | from bot.helper.telegram_helper.button_build import ButtonMaker
15 | from bot.helper.ext_utils.rclone_utils import (
16 | create_next_buttons,
17 | is_rclone_config,
18 | is_valid_path,
19 | list_folder,
20 | list_remotes,
21 | )
22 | from bot.helper.ext_utils.rclone_data_holder import get_rclone_data, update_rclone_data
23 |
24 |
25 | async def handle_mirrorselect(_, message):
26 | user_id = message.from_user.id
27 | if await is_rclone_config(user_id, message):
28 | if DEFAULT_OWNER_REMOTE := config_dict["DEFAULT_OWNER_REMOTE"]:
29 | if user_id == OWNER_ID:
30 | update_rclone_data(
31 | "MIRROR_SELECT_REMOTE", DEFAULT_OWNER_REMOTE, user_id
32 | )
33 |         if config_dict["MULTI_RCLONE_CONFIG"] or await CustomFilters.sudo_filter("", message):
34 | await list_remotes(message, menu_type=Menus.MIRROR_SELECT)
35 | else:
36 | await sendMessage("Not allowed to use", message)
37 |
38 |
39 | async def mirrorselect_callback(_, query):
40 | data = query.data
41 | cmd = data.split("^")
42 | message = query.message
43 | user_id = query.from_user.id
44 | base_dir = get_rclone_data("MIRROR_SELECT_BASE_DIR", user_id)
45 | rclone_remote = get_rclone_data("MIRROR_SELECT_REMOTE", user_id)
46 |
47 | if int(cmd[-1]) != user_id:
48 | await query.answer("This menu is not for you!", show_alert=True)
49 | return
50 | if cmd[1] == "remote":
51 |         is_crypt = cmd[-2] != "False"
52 |         if await CustomFilters.sudo_filter("", message):
53 | if config_dict["MULTI_REMOTE_UP"]:
54 | remotes_multi.append(cmd[2])
55 | await list_remotes(message, menu_type=Menus.MIRROR_SELECT, edit=True)
56 | return
57 | config_dict.update({"DEFAULT_OWNER_REMOTE": cmd[2]})
58 | update_rclone_data("MIRROR_SELECT_BASE_DIR", "", user_id)
59 | update_rclone_data("MIRROR_SELECT_REMOTE", cmd[2], user_id)
60 | await list_folder(
61 | message,
62 | cmd[2],
63 | "",
64 | menu_type=Menus.MIRROR_SELECT,
65 | is_crypt=is_crypt,
66 | edit=True,
67 | )
68 | elif cmd[1] == "remote_dir":
69 | path = get_rclone_data(cmd[2], user_id)
70 | base_dir += path + "/"
71 | if await is_valid_path(rclone_remote, base_dir, message):
72 | update_rclone_data("MIRROR_SELECT_BASE_DIR", base_dir, user_id)
73 | await list_folder(
74 | message,
75 | rclone_remote,
76 | base_dir,
77 | menu_type=Menus.MIRROR_SELECT,
78 | edit=True,
79 | )
80 | elif cmd[1] == "back":
81 | if len(base_dir) == 0:
82 | await list_remotes(message, menu_type=Menus.MIRROR_SELECT, edit=True)
83 | return
84 | base_dir_split = base_dir.split("/")[:-2]
85 | base_dir_string = ""
86 |         for part in base_dir_split:
87 |             base_dir_string += part + "/"
88 | base_dir = base_dir_string
89 | update_rclone_data("MIRROR_SELECT_BASE_DIR", base_dir, user_id)
90 | await list_folder(
91 | message, rclone_remote, base_dir, menu_type=Menus.MIRROR_SELECT, edit=True
92 | )
93 | await query.answer()
94 | elif cmd[1] == "pages":
95 | await query.answer()
96 | elif cmd[1] == "reset":
97 | remotes_multi.clear()
98 | await list_remotes(message, menu_type=Menus.MIRROR_SELECT, edit=True)
99 | else:
100 | await query.answer()
101 | await message.delete()
102 |
103 |
104 | async def next_page_mirrorselect(_, callback_query):
105 | query = callback_query
106 | data = query.data
107 | message = query.message
108 | await query.answer()
109 | user_id = message.reply_to_message.from_user.id
110 | _, next_offset, _, data_back_cb = data.split()
111 |
112 | info = get_rclone_data("info", user_id)
113 | total = len(info)
114 | next_offset = int(next_offset)
115 | prev_offset = next_offset - 10
116 |
117 | buttons = ButtonMaker()
118 | buttons.cb_buildbutton(
119 | "✅ Select this folder", f"{Menus.MIRROR_SELECT}^close^{user_id}"
120 | )
121 |
122 | next_info, _next_offset = await run_sync_to_async(rcloneListNextPage, info, next_offset)
123 |
124 | await run_sync_to_async(
125 | rcloneListButtonMaker,
126 | info=next_info,
127 | button=buttons,
128 | menu_type=Menus.MIRROR_SELECT,
129 | dir_callback="remote_dir",
130 | file_callback="",
131 | user_id=user_id,
132 | )
133 |
134 | await create_next_buttons(
135 | next_offset,
136 | prev_offset,
137 | _next_offset,
138 | data_back_cb,
139 | total,
140 | user_id,
141 | buttons,
142 | filter="next_ms",
143 | menu_type=Menus.MIRROR_SELECT,
144 | )
145 |
146 | mirrorsel_remote = get_rclone_data("MIRROR_SELECT_REMOTE", user_id)
147 | base_dir = get_rclone_data("MIRROR_SELECT_BASE_DIR", user_id)
148 |     msg = f"Select folder where you want to store files\n\nPath: {mirrorsel_remote}:{base_dir}"
149 | await editMessage(msg, message, reply_markup=buttons.build_menu(1))
150 |
151 |
152 | bot.add_handler(
153 | MessageHandler(
154 | handle_mirrorselect,
155 | filters=filters.command(BotCommands.MirrorSelectCommand)
156 | & (CustomFilters.user_filter | CustomFilters.chat_filter),
157 | )
158 | )
159 | bot.add_handler(CallbackQueryHandler(next_page_mirrorselect, filters=regex("next_ms")))
160 | bot.add_handler(
161 | CallbackQueryHandler(mirrorselect_callback, filters=regex("mirrorselectmenu"))
162 | )
163 |
--------------------------------------------------------------------------------
/bot/modules/queue.py:
--------------------------------------------------------------------------------
1 | """An asynchronous priority task queue."""
2 |
3 | from asyncio import (
4 | sleep,
5 | CancelledError,
6 | AbstractEventLoop,
7 | PriorityQueue,
8 | Task,
9 | )
10 | import dataclasses
11 | import typing
12 | import inspect
13 | import sys
14 | import uuid
15 | from bot import LOGGER, PARALLEL_TASKS, bot_loop, bot
16 |
17 |
18 | queue = None
19 |
20 |
21 | async def queue_worker(name, queue):
22 | while not bot_loop.is_closed():
23 | while queue.empty():
24 | try:
25 | await sleep(0.1)
26 | except CancelledError:
27 | return
28 | try:
29 | queue_item = await queue.get()
30 | LOGGER.info(f"{name} is processing queue item: {queue_item}")
31 | resp = await safe_run(
32 | queue_item.task,
33 | queue_item.task_args,
34 | queue_item.exception_callback,
35 | queue_item.exception_callback_args,
36 | bot_loop,
37 | )
38 | if queue_item.done_callback:
39 | args = queue_item.done_callback_args or ()
40 | if resp and queue_item.pass_result_to_done_callback:
41 | args = (resp,) + args
42 | await safe_run(
43 | queue_item.done_callback,
44 | args,
45 | queue_item.exception_callback,
46 | queue_item.exception_callback_args,
47 | bot_loop,
48 | )
49 | queue.task_done()
50 | except RuntimeError as runtime_error:
51 | if bot_loop.is_closed():
52 | return
53 | raise runtime_error
54 | except CancelledError:
55 | return
56 | except GeneratorExit:
57 | return
58 | except KeyboardInterrupt:
59 | return
60 | except Exception as excp:
61 | LOGGER.error(excp, exc_info=True)
62 |
63 |
64 | async def safe_run(
65 | task: typing.Any,
66 | task_args: typing.Any,
67 | exception_callback: typing.Any,
68 | exception_callback_args: typing.Any,
69 | loop: AbstractEventLoop,
70 | ):
71 | resp = None
72 | try:
73 | if inspect.iscoroutine(task):
74 | resp = await task
75 | elif inspect.iscoroutinefunction(task):
76 |             resp = await task(*(task_args or ()))
77 |         elif inspect.isfunction(task) or inspect.ismethod(task):
78 |             resp = await loop.run_in_executor(None, task, *(task_args or ()))
79 | else:
80 | LOGGER.error("%s is not a coroutine or function", task, stack_info=True)
81 | # catching all exceptions is bad, but this is a background task
82 | except:
83 | _, exception, _ = sys.exc_info()
84 | if exception_callback:
85 | if inspect.iscoroutinefunction(exception_callback):
86 | await exception_callback(
87 | exception,
88 |                     *(exception_callback_args or ()),
89 | )
90 | elif inspect.isfunction(exception_callback) or inspect.ismethod(
91 | exception_callback
92 | ):
93 | await loop.run_in_executor(
94 | None,
95 | exception_callback,
96 | exception,
97 |                     *(exception_callback_args or ()),
98 | )
99 | else:
100 | LOGGER.error(exception, exc_info=True, stack_info=True)
101 | else:
102 | return resp
103 |
104 |
105 | @dataclasses.dataclass(frozen=True, order=False)
106 | class QueueItem:
107 |     """An item on an `asyncio.PriorityQueue`."""
108 |
109 | priority: int
110 | task: typing.Callable | typing.Coroutine | typing.Awaitable
111 | task_args: typing.Optional[tuple] = dataclasses.field(default_factory=tuple)
112 | pass_result_to_done_callback: bool = dataclasses.field(default=False)
113 |
114 | done_callback: typing.Optional[
115 | typing.Callable | typing.Coroutine
116 | ] = dataclasses.field(default=None)
117 |
118 | done_callback_args: typing.Optional[tuple] = dataclasses.field(
119 | default_factory=tuple
120 | )
121 |
122 | exception_callback: typing.Optional[
123 | typing.Callable | typing.Awaitable
124 | ] = dataclasses.field(default=None)
125 |
126 |     # The exception callback must accept the exception as its first argument.
127 | exception_callback_args: typing.Optional[tuple] = dataclasses.field(
128 | default_factory=tuple
129 | )
130 |
131 | # Make the QueueItem sortable (by priority)
132 | def __lt__(self, other: "QueueItem"):
133 | return self.priority < other.priority
134 |
135 | def __gt__(self, other: "QueueItem"):
136 | return self.priority > other.priority
137 |
138 | def __le__(self, other: "QueueItem"):
139 | return self.priority <= other.priority
140 |
141 | def __ge__(self, other: "QueueItem"):
142 | return self.priority >= other.priority
143 |
144 |
145 | class QueueManager:
146 | slug_name: str
147 | queue: PriorityQueue[QueueItem]
148 | tasks: list[Task]
149 | len_workers: int
150 | _configured: bool
151 |
152 | def __init__(
153 | self,
154 | slug_name,
155 | queue=None,
156 | max_queue_size=0,
157 | num_workers=1,
158 | create_queue=True,
159 | configure_on_init=True,
160 | ) -> None:
161 | if create_queue or not queue:
162 | self.queue = PriorityQueue(maxsize=max_queue_size)
163 | else:
164 | self.queue = queue
165 |
166 | self.slug_name = slug_name or f"unspecified-core-{uuid.uuid4()}"
167 | self._configured = False
168 | self.workers = [
169 | (
170 | queue_worker(f"{self.slug_name}-worker-{i}", self.queue),
171 | f"{self.slug_name}-worker-{i}",
172 | )
173 |             for i in range(1, num_workers + 1)
174 | ]
175 | self.worker_manager = None
176 | LOGGER.info(
177 | f"Initialized a new {self.slug_name} QueueManager",
178 | )
179 | if configure_on_init:
180 | self.configure()
181 |
182 | def configure(self):
183 | self.tasks = [
184 | bot_loop.create_task(coroutine, name=name)
185 | for coroutine, name in self.workers
186 | ]
187 | self._configured = True
188 | return True
189 |
190 | async def restart_worker(self, task: Task) -> None:
191 |         worker_location = self.tasks.index(task)
192 |         self.tasks[worker_location] = bot_loop.create_task(
193 |             queue_worker(
194 |                 task.get_name(),
195 |                 self.queue,
196 |             ),
197 |             name=task.get_name(),
198 | )
199 |
200 | async def auto_restart_workers(self):
201 | try:
202 | for task in self.tasks:
203 | if task.done() or task.cancelled():
204 | await self.restart_worker(task)
205 | await sleep(5)
206 | except CancelledError:
207 | return
208 |
209 | async def put(self, item):
210 | await self.queue.put(item)
211 |
212 | def close(self) -> None:
213 | if not self._configured:
214 | return
215 | for task in self.tasks:
216 | task.cancel()
217 |
218 |
219 | async def conditional_queue_add(message, func, *args, **kwargs):
220 | if PARALLEL_TASKS > 0:
221 | await add_to_queue(message, func, *args, **kwargs)
222 | else:
223 | await func(*args, **kwargs)
224 |
225 |
226 | async def add_to_queue(message, task, *args, **kwargs):
227 |     LOGGER.info(f"Adding {task} to the queue")
228 | if queue.queue.full():
229 | return await bot.send_message(
230 | message.chat.id,
231 | reply_to_message_id=message.id,
232 | text="Queue is full, wait for a slot to be available..",
233 | )
234 | await queue.put(
235 | QueueItem(priority=1, task=task(*args, **kwargs)),
236 | )
237 |
238 |
239 | if PARALLEL_TASKS > 0:
240 | queue = QueueManager(
241 | slug_name="all-queue",
242 | create_queue=True,
243 | max_queue_size=PARALLEL_TASKS,
244 | num_workers=PARALLEL_TASKS,
245 | configure_on_init=True,
246 | )
247 |
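A minimal sketch of pushing work through the manager above (the task body and URL are hypothetical):

# Minimal usage sketch; the task body and URL are hypothetical.
async def process_download(url: str) -> None:
    ...  # real work goes here

async def enqueue_example() -> None:
    # Wrap the coroutine in a QueueItem; workers consume in priority order
    # (lower number = served first). add_to_queue() above does the same,
    # plus a "queue is full" notice when no slot is free.
    await queue.put(
        QueueItem(priority=1, task=process_download("https://example.com/f"))
    )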
--------------------------------------------------------------------------------
/bot/modules/rcfm.py:
--------------------------------------------------------------------------------
1 | from pyrogram.filters import regex
2 | from pyrogram import filters
3 | from pyrogram.handlers import CallbackQueryHandler, MessageHandler
4 | from bot import bot, config_dict
5 | from bot.helper.ext_utils.bot_utils import run_sync_to_async
6 | from bot.helper.telegram_helper.bot_commands import BotCommands
7 | from bot.helper.telegram_helper.filters import CustomFilters
8 | from bot.helper.ext_utils.menu_utils import (
9 | Menus,
10 | rcloneListButtonMaker,
11 | rcloneListNextPage,
12 | )
13 | from bot.helper.telegram_helper.message_utils import editMessage, sendMessage
14 | from bot.helper.telegram_helper.button_build import ButtonMaker
15 | from bot.helper.ext_utils.rclone_utils import (
16 | create_next_buttons,
17 | is_rclone_config,
18 | is_valid_path,
19 | list_folder,
20 | list_remotes,
21 | )
22 | from bot.helper.ext_utils.rclone_data_holder import get_rclone_data, update_rclone_data
23 | from bot.modules.myfilesset import (
24 | calculate_size,
25 | delete_empty_dir,
26 | delete_selected,
27 | delete_selection,
28 | myfiles_settings,
29 | rclone_dedupe,
30 | rclone_mkdir,
31 | rclone_rename,
32 | search_action,
33 | )
34 |
35 |
36 | async def handle_myfiles(client, message):
37 | user_id = message.from_user.id
38 | if await is_rclone_config(user_id, message):
39 |         if config_dict["MULTI_RCLONE_CONFIG"] or await CustomFilters.sudo_filter("", message):
40 | await list_remotes(message, menu_type=Menus.MYFILES)
41 | else:
42 | await sendMessage("Not allowed to use", message)
43 |
44 |
45 | async def myfiles_callback(client, callback_query):
46 | query = callback_query
47 | data = query.data
48 | cmd = data.split("^")
49 | message = query.message
50 | tag = f"@{message.reply_to_message.from_user.username}"
51 | user_id = query.from_user.id
52 | base_dir = get_rclone_data("MYFILES_BASE_DIR", user_id)
53 | rclone_remote = get_rclone_data("MYFILES_REMOTE", user_id)
54 | is_folder = False
55 |
56 | if int(cmd[-1]) != user_id:
57 | await query.answer("This menu is not for you!", show_alert=True)
58 | return
59 | if cmd[1] == "remote":
60 | update_rclone_data("MYFILES_BASE_DIR", "", user_id) # Reset Dir
61 | update_rclone_data("MYFILES_REMOTE", cmd[2], user_id)
62 | await list_folder(message, cmd[2], "", menu_type=Menus.MYFILES, edit=True)
63 | elif cmd[1] == "remote_dir":
64 | path = get_rclone_data(cmd[2], user_id)
65 | base_dir += path + "/"
66 | if await is_valid_path(rclone_remote, base_dir, message):
67 | update_rclone_data("MYFILES_BASE_DIR", base_dir, user_id)
68 | await list_folder(
69 | message, rclone_remote, base_dir, menu_type=Menus.MYFILES, edit=True
70 | )
71 | # Handle back button
72 | elif cmd[1] == "back":
73 | if len(base_dir) == 0:
74 | await list_remotes(message, menu_type=Menus.MYFILES, edit=True)
75 | return
76 | base_dir_split = base_dir.split("/")[:-2]
77 | base_dir_string = ""
78 |         for part in base_dir_split:
79 |             base_dir_string += part + "/"
80 | base_dir = base_dir_string
81 | update_rclone_data("MYFILES_BASE_DIR", base_dir, user_id)
82 | await list_folder(
83 | message, rclone_remote, base_dir, menu_type=Menus.MYFILES, edit=True
84 | )
85 | elif cmd[1] == "back_remotes_menu":
86 | await list_remotes(message, menu_type=Menus.MYFILES, edit=True)
87 | # Handle actions
88 | elif cmd[1] == "file_action":
89 | path = get_rclone_data(cmd[2], user_id)
90 | base_dir += path
91 | update_rclone_data("MYFILES_BASE_DIR", base_dir, user_id)
92 | await myfiles_settings(
93 | message, rclone_remote, base_dir, edit=True, is_folder=False
94 | )
95 | await query.answer()
96 | elif cmd[1] == "folder_action":
97 | await myfiles_settings(
98 | message, rclone_remote, base_dir, edit=True, is_folder=True
99 | )
100 | await query.answer()
101 | elif cmd[1] == "search":
102 | await search_action(client, message, query, rclone_remote, user_id)
103 | elif cmd[1] == "delete":
104 | if cmd[2] == "folder":
105 | is_folder = True
106 | await delete_selection(message, user_id, is_folder=is_folder)
107 | await query.answer()
108 | elif cmd[1] == "size":
109 | await calculate_size(message, base_dir, rclone_remote, user_id)
110 | await query.answer()
111 | elif cmd[1] == "mkdir":
112 | await query.answer()
113 | await rclone_mkdir(client, message, rclone_remote, base_dir, tag)
114 | elif cmd[1] == "rmdir":
115 | await query.answer()
116 | await delete_empty_dir(message, user_id, rclone_remote, base_dir)
117 | elif cmd[1] == "dedupe":
118 | await query.answer()
119 | await rclone_dedupe(message, rclone_remote, base_dir, user_id, tag)
120 | elif cmd[1] == "rename":
121 | await query.answer()
122 | await rclone_rename(client, message, rclone_remote, base_dir, tag)
123 | elif cmd[1] == "yes":
124 | if cmd[2] == "folder":
125 | is_folder = True
126 | await delete_selected(
127 | message, user_id, base_dir, rclone_remote, is_folder=is_folder
128 | )
129 | await query.answer()
130 | elif cmd[1] == "no":
131 | await query.answer()
132 | await message.delete()
133 | elif cmd[1] == "pages":
134 | await query.answer()
135 | else:
136 | await query.answer()
137 | await message.delete()
138 |
139 |
140 | async def next_page_myfiles(client, callback_query):
141 | query = callback_query
142 | data = query.data
143 | message = query.message
144 | await query.answer()
145 | user_id = message.reply_to_message.from_user.id
146 | _, next_offset, _, data_back_cb = data.split()
147 |
148 | info = get_rclone_data("info", user_id)
149 | total = len(info)
150 | next_offset = int(next_offset)
151 | prev_offset = next_offset - 10
152 |
153 | buttons = ButtonMaker()
154 |     buttons.cb_buildbutton("⚙️ Folder Options", f"myfilesmenu^folder_action^{user_id}")
155 | buttons.cb_buildbutton("🔍 Search", f"myfilesmenu^search^{user_id}")
156 |
157 | next_info, _next_offset = await run_sync_to_async(
158 | rcloneListNextPage, info, next_offset
159 | )
160 |
161 | await run_sync_to_async(
162 | rcloneListButtonMaker,
163 | info=next_info,
164 | button=buttons,
165 | menu_type=Menus.MYFILES,
166 | dir_callback="remote_dir",
167 | file_callback="file_action",
168 | user_id=user_id,
169 | )
170 |
171 | await create_next_buttons(
172 | next_offset,
173 | prev_offset,
174 | _next_offset,
175 | data_back_cb,
176 | total,
177 | user_id,
178 | buttons,
179 | filter="next_myfiles",
180 | menu_type=Menus.MYFILES,
181 | )
182 |
183 | remote = get_rclone_data("MYFILES_REMOTE", user_id)
184 | base_dir = get_rclone_data("MYFILES_BASE_DIR", user_id)
185 |     msg = f"Your cloud files are listed below\n\nPath: {remote}:{base_dir}"
186 | await editMessage(msg, message, reply_markup=buttons.build_menu(1))
187 |
188 |
189 | bot.add_handler(CallbackQueryHandler(myfiles_callback, filters=regex("myfilesmenu")))
190 | bot.add_handler(CallbackQueryHandler(next_page_myfiles, filters=regex("next_myfiles")))
191 | bot.add_handler(
192 | MessageHandler(
193 | handle_myfiles,
194 | filters=filters.command(BotCommands.RcfmCommand)
195 | & (CustomFilters.user_filter | CustomFilters.chat_filter),
196 | )
197 | )
198 |
--------------------------------------------------------------------------------
/bot/modules/serve.py:
--------------------------------------------------------------------------------
1 | from asyncio import create_subprocess_exec
2 | from configparser import ConfigParser
3 | from bot import LOGGER, OWNER_ID, bot, config_dict
4 | from pyrogram.handlers import MessageHandler, CallbackQueryHandler
5 | from asyncio.subprocess import PIPE
6 | from pyrogram import filters
7 | from bot.helper.telegram_helper.bot_commands import BotCommands
8 | from bot.helper.ext_utils.bot_utils import cmd_exec
9 | from bot.helper.telegram_helper.filters import CustomFilters
10 | from bot.helper.telegram_helper.message_utils import editMarkup, sendMarkup
11 | from bot.helper.telegram_helper.button_build import ButtonMaker
12 | from bot.helper.ext_utils.rclone_utils import get_rclone_path, is_rclone_config
13 |
14 |
15 | SELECTED_REMOTE = []
16 | process_dict = {"status": "inactive", "pid": None}
17 |
18 |
19 | async def serve(client, message):
20 | if await is_rclone_config(message.from_user.id, message):
21 | if process_dict["status"] == "inactive":
22 | await list_remotes(message)
23 | else:
24 | button = ButtonMaker()
25 | url = f"{config_dict['RC_INDEX_URL']}:{config_dict['RC_INDEX_PORT']}"
26 | msg = f"Serving on {url}"
27 | button.cb_buildbutton("Stop", "servemenu^stop")
28 | await sendMarkup(msg, message, button.build_menu(1))
29 |
30 |
31 | async def serve_callback(client, query):
32 | message = query.message
33 | data = query.data.split("^")
34 | path = await get_rclone_path(OWNER_ID)
35 |
36 | RC_INDEX_USER = config_dict["RC_INDEX_USER"]
37 | RC_INDEX_PASS = config_dict["RC_INDEX_PASS"]
38 | RC_INDEX_PORT = config_dict["RC_INDEX_PORT"]
39 |
40 | if data[1] == "remote":
41 | SELECTED_REMOTE.append(data[2])
42 | button = ButtonMaker()
43 | button.cb_buildbutton("HTTP", "servemenu^http")
44 | button.cb_buildbutton("WEBDAV", "servemenu^webdav")
45 | await editMarkup(
46 | "Choose protocol to serve the remote", message, button.build_menu(2)
47 | )
48 | elif data[1] == "all":
49 | cmd = [
50 | "rclone",
51 | "rcd",
52 | "--rc-serve",
53 | f"--rc-addr=:{RC_INDEX_PORT}",
54 | f"--rc-user={RC_INDEX_USER}",
55 | f"--rc-pass={RC_INDEX_PASS}",
56 | f"--config={path}",
57 | ]
58 | await rclone_serve(cmd, message)
59 | elif data[1] == "http":
60 | cmd = [
61 | "rclone",
62 | "serve",
63 | "http",
64 | f"--addr=:{RC_INDEX_PORT}",
65 | f"--user={RC_INDEX_USER}",
66 | f"--pass={RC_INDEX_PASS}",
67 | f"--config={path}",
68 | f"{SELECTED_REMOTE[0]}:",
69 | ]
70 | await rclone_serve(cmd, message)
71 | elif data[1] == "webdav":
72 | cmd = [
73 | "rclone",
74 | "serve",
75 | "webdav",
76 | f"--addr=:{RC_INDEX_PORT}",
77 | f"--user={RC_INDEX_USER}",
78 | f"--pass={RC_INDEX_PASS}",
79 | f"--config={path}",
80 | f"{SELECTED_REMOTE[0]}:",
81 | ]
82 | await rclone_serve(cmd, message)
83 | elif data[1] == "stop":
84 | _, stderr, return_code = await cmd_exec(
85 | ["kill", "-9", f"{process_dict['pid']}"]
86 | )
87 | if return_code == 0:
88 | await query.answer(text="Server stopped", show_alert=True)
89 | process_dict["status"] = "inactive"
90 | await message.delete()
91 | else:
92 |             LOGGER.error(f"Error: {stderr}")
93 | process_dict["status"] = "active"
94 | else:
95 | await query.answer()
96 | await message.delete()
97 |
98 |
99 | async def rclone_serve(cmd, message):
100 | button = ButtonMaker()
101 | url = f"{config_dict['RC_INDEX_URL']}:{config_dict['RC_INDEX_PORT']}"
102 | msg = f"Serving on {url}"
103 |     msg += f"\nUser: {config_dict['RC_INDEX_USER']}"
104 |     msg += f"\nPass: {config_dict['RC_INDEX_PASS']}"
105 | button.cb_buildbutton("Stop", "servemenu^stop")
106 | await editMarkup(msg, message, button.build_menu(1))
107 |
108 | process = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
109 | process_dict["pid"] = process.pid
110 | process_dict["status"] = "active"
111 | _, stderr = await process.communicate()
112 | stderr = stderr.decode().strip()
113 |
114 | if process.returncode != 0:
115 |         LOGGER.error(f"Error: {stderr}")
116 | process_dict["status"] = "inactive"
117 |
118 |
119 | async def list_remotes(message):
120 | SELECTED_REMOTE.clear()
121 | button = ButtonMaker()
122 | path = await get_rclone_path(OWNER_ID)
123 | conf = ConfigParser()
124 | conf.read(path)
125 | for remote in conf.sections():
126 | button.cb_buildbutton(f"📁{remote}", f"servemenu^remote^{remote}")
127 | button.cb_buildbutton("🌐 All", "servemenu^all")
128 | button.cb_buildbutton("✘ Close Menu", "servemenu^close")
129 | await sendMarkup(
130 | "Select cloud to serve as index", message, reply_markup=button.build_menu(2)
131 | )
132 |
133 |
134 | bot.add_handler(
135 | MessageHandler(
136 | serve,
137 | filters=filters.command(BotCommands.ServeCommand)
138 | & (CustomFilters.owner_filter | CustomFilters.chat_filter),
139 | )
140 | )
141 | bot.add_handler(
142 | CallbackQueryHandler(serve_callback, filters=filters.regex("servemenu"))
143 | )
144 |
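
Like the other menus in this repo, serve.py packs its state into the callback data as ^-separated fields ("servemenu^remote^gdrive", "servemenu^stop", ...). A minimal sketch of that convention, with parse_cb as an illustrative name rather than a bot helper:

    def parse_cb(data):
        # "servemenu^remote^gdrive" -> ("servemenu", "remote", ["gdrive"])
        menu, action, *args = data.split("^")
        return menu, action, args
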
--------------------------------------------------------------------------------
/bot/modules/shell.py:
--------------------------------------------------------------------------------
1 | from asyncio import create_subprocess_shell
2 | from asyncio.subprocess import PIPE
3 | from bot import LOGGER, bot
4 | from bot.helper.telegram_helper.bot_commands import BotCommands
5 | from bot.helper.telegram_helper.filters import CustomFilters
6 | from pyrogram.filters import command
7 | from pyrogram.handlers import MessageHandler
8 |
9 |
10 | async def shell(client, message):
11 | cmd = message.text.split(maxsplit=1)
12 | if len(cmd) == 1:
13 | await message.reply_text("No command to execute was given.")
14 | return
15 | cmd = cmd[1]
16 | process = await create_subprocess_shell(cmd, stdout=PIPE, stderr=PIPE)
17 | stdout, stderr = await process.communicate()
18 | reply = ""
19 | stderr = stderr.decode()
20 | stdout = stdout.decode()
21 | if len(stdout) != 0:
22 | reply += f"*Stdout*\n`{stdout}`\n"
23 | LOGGER.info(f"Shell - {cmd} - {stdout}")
24 | if len(stderr) != 0:
25 | reply += f"*Stderr*\n`{stderr}`\n"
26 | LOGGER.error(f"Shell - {cmd} - {stderr}")
27 | if len(reply) > 3000:
28 | with open("shell_output.txt", "w") as file:
29 | file.write(reply)
30 | with open("shell_output.txt", "rb") as doc:
31 | await client.send_document(
32 | chat_id=message.chat.id,
33 | document=doc,
34 | file_name=doc.name,
35 | reply_to_message_id=message.id,
36 | )
37 | elif len(reply) != 0:
38 | await message.reply_text(reply)
39 | else:
40 | await message.reply_text("No Reply")
41 |
42 |
43 | bot.add_handler(MessageHandler(
44 | shell, filters=command(BotCommands.ShellCommand) & (CustomFilters.owner_filter)
45 | ))
46 |
--------------------------------------------------------------------------------
/bot/modules/stats.py:
--------------------------------------------------------------------------------
1 | from psutil import (
2 | disk_usage,
3 | cpu_percent,
4 | swap_memory,
5 | cpu_count,
6 | virtual_memory,
7 | net_io_counters,
8 | boot_time,
9 | )
10 | from pyrogram.handlers import MessageHandler
11 | from pyrogram.filters import command
12 | from time import time
13 | from os import path as ospath
14 | from bot.helper.telegram_helper.message_utils import sendMessage
15 | from bot import bot, botUptime
16 | from bot.helper.telegram_helper.bot_commands import BotCommands
17 | from bot.helper.ext_utils.bot_utils import cmd_exec, get_readable_time
18 | from bot.helper.ext_utils.human_format import get_readable_file_size
19 | from bot.helper.telegram_helper.filters import CustomFilters
20 |
21 |
22 | async def stats(client, message):
23 | if ospath.exists(".git"):
24 | last_commit = await cmd_exec(
25 | "git log -1 --date=short --pretty=format:'%cd From %cr'", True
26 | )
27 | last_commit = last_commit[0]
28 | else:
29 | last_commit = "No UPSTREAM_REPO"
30 | total, used, free, disk = disk_usage("/")
31 | swap = swap_memory()
32 | memory = virtual_memory()
33 | stats = (
34 | f"Commit Date: {last_commit}\n\n"
35 | f"Bot Uptime: {get_readable_time(time() - botUptime)}\n"
36 | f"OS Uptime: {get_readable_time(time() - boot_time())}\n\n"
37 | f"Total Disk Space: {get_readable_file_size(total)}\n"
38 | f"Used: {get_readable_file_size(used)} | Free: {get_readable_file_size(free)}\n\n"
39 | f"Upload: {get_readable_file_size(net_io_counters().bytes_sent)}\n"
40 | f"Download: {get_readable_file_size(net_io_counters().bytes_recv)}\n\n"
41 | f"CPU: {cpu_percent(interval=0.5)}%\n"
42 | f"RAM: {memory.percent}%\n"
43 | f"DISK: {disk}%\n\n"
44 | f"Physical Cores: {cpu_count(logical=False)}\n"
45 | f"Total Cores: {cpu_count(logical=True)}\n\n"
46 | f"SWAP: {get_readable_file_size(swap.total)} | Used: {swap.percent}%\n"
47 | f"Memory Total: {get_readable_file_size(memory.total)}\n"
48 | f"Memory Free: {get_readable_file_size(memory.available)}\n"
49 | f"Memory Used: {get_readable_file_size(memory.used)}\n"
50 | )
51 | await sendMessage(stats, message)
52 |
53 |
54 | bot.add_handler(
55 | MessageHandler(
56 | stats,
57 | filters=command(BotCommands.StatsCommand)
58 | & (CustomFilters.user_filter | CustomFilters.chat_filter),
59 | )
60 | )
61 |
--------------------------------------------------------------------------------
/bot/modules/status.py:
--------------------------------------------------------------------------------
1 | from time import time
2 | from psutil import cpu_percent, virtual_memory, disk_usage
3 | from bot import (
4 | DOWNLOAD_DIR,
5 | bot,
6 | Interval,
7 | status_dict,
8 | status_dict_lock,
9 | status_reply_dict_lock,
10 | config_dict,
11 | botUptime,
12 | )
13 | from pyrogram.handlers import MessageHandler, CallbackQueryHandler
14 | from pyrogram import filters
15 | from bot.helper.telegram_helper.bot_commands import BotCommands
16 | from bot.helper.ext_utils.bot_utils import get_readable_time, setInterval, turn
17 | from bot.helper.telegram_helper.filters import CustomFilters
18 | from bot.helper.ext_utils.human_format import get_readable_file_size
19 | from bot.helper.telegram_helper.message_utils import (
20 | auto_delete_message,
21 | sendMessage,
22 | sendStatusMessage,
23 | update_all_messages,
24 | )
25 |
26 |
27 | async def status_handler(client, message):
28 | async with status_dict_lock:
29 | count = len(status_dict)
30 | if count == 0:
31 | currentTime = get_readable_time(time() - botUptime)
32 | free = get_readable_file_size(disk_usage(DOWNLOAD_DIR).free)
33 |         msg = "No Active Downloads!\n___________________________"
34 | msg += (
35 | f"\nCPU: {cpu_percent()}% | FREE: {free}"
36 | f"\nRAM: {virtual_memory().percent}% | UPTIME: {currentTime}"
37 | )
38 | reply_message = await sendMessage(msg, message)
39 | await auto_delete_message(message, reply_message)
40 | else:
41 | await sendStatusMessage(message)
42 | async with status_reply_dict_lock:
43 | try:
44 | if Interval:
45 | Interval[0].cancel()
46 | Interval.clear()
47 | except:
48 | pass
49 | finally:
50 | Interval.append(
51 | setInterval(
52 | config_dict["STATUS_UPDATE_INTERVAL"], update_all_messages
53 | )
54 | )
55 |
56 |
57 | async def status_pages(client, callback_query):
58 | query = callback_query
59 | await query.answer()
60 | data = query.data.split()
61 | if data[1] == "ref":
62 | await update_all_messages(True)
63 | else:
64 | await turn(data)
65 |
66 |
67 | bot.add_handler(
68 | MessageHandler(
69 | status_handler,
70 | filters=filters.command(BotCommands.StatusCommand)
71 | & (CustomFilters.user_filter | CustomFilters.chat_filter),
72 | )
73 | )
74 | bot.add_handler(CallbackQueryHandler(status_pages, filters=filters.regex("status")))
75 |
--------------------------------------------------------------------------------
/bot/modules/storage.py:
--------------------------------------------------------------------------------
1 | from json import loads
2 | from math import floor
3 | from pyrogram.filters import command, regex
4 | from pyrogram.handlers import MessageHandler, CallbackQueryHandler
5 | from asyncio.subprocess import PIPE, create_subprocess_exec as exec
6 | from bot import bot
7 | from bot.helper.ext_utils.menu_utils import Menus
8 | from bot.helper.telegram_helper.bot_commands import BotCommands
9 | from bot.helper.telegram_helper.filters import CustomFilters
10 | from bot.helper.ext_utils.human_format import get_readable_file_size
11 | from bot.helper.ext_utils.rclone_utils import (
12 | get_rclone_path,
13 | is_rclone_config,
14 | list_remotes,
15 | )
16 | from bot.helper.telegram_helper.message_utils import editMarkup, sendMessage
17 | from bot.helper.telegram_helper.button_build import ButtonMaker
18 |
19 |
20 | async def handle_storage(client, message):
21 | if await is_rclone_config(message.from_user.id, message):
22 | await list_remotes(message, menu_type=Menus.STORAGE)
23 |
24 |
25 | async def storage_menu_cb(client, callback_query):
26 | query = callback_query
27 | data = query.data
28 | cmd = data.split("^")
29 | message = query.message
30 | user_id = query.from_user.id
31 |
32 | if int(cmd[-1]) != user_id:
33 | await query.answer("This menu is not for you!", show_alert=True)
34 | return
35 | if cmd[1] == "remote":
36 | await rclone_about(message, query, cmd[2], user_id)
37 | elif cmd[1] == "back":
38 | await list_remotes(message, menu_type=Menus.STORAGE, edit=True)
39 | await query.answer()
40 | elif cmd[1] == "close":
41 | await query.answer()
42 | await message.delete()
43 |
44 |
45 | async def rclone_about(message, query, remote_name, user_id):
46 | button = ButtonMaker()
47 | conf_path = await get_rclone_path(user_id, message)
48 | cmd = ["rclone", "about", "--json", f"--config={conf_path}", f"{remote_name}:"]
49 | process = await exec(*cmd, stdout=PIPE, stderr=PIPE)
50 | stdout, stderr = await process.communicate()
51 | return_code = await process.wait()
52 | stdout = stdout.decode().strip()
53 | if return_code != 0:
54 | err = stderr.decode().strip()
55 | await sendMessage(f"Error: {err}", message)
56 | return
57 | info = loads(stdout)
58 | if len(info) == 0:
59 | await query.answer("Team Drive with Unlimited Storage", show_alert=True)
60 | return
61 | result_msg = "🗂 Storage Details\n"
62 | try:
63 | used = get_readable_file_size(info["used"])
64 | total = get_readable_file_size(info["total"])
65 | free = get_readable_file_size(info["free"])
66 | used_percentage = 100 * float(info["used"]) / float(info["total"])
67 | used_bar = get_used_bar(used_percentage)
68 | used_percentage = f"{round(used_percentage, 2)}%"
69 | free_percentage = round((info["free"] * 100) / info["total"], 2)
70 | free_percentage = f"{free_percentage}%"
71 | result_msg += used_bar
72 | result_msg += f"\nUsed: {used} of {total}"
73 | result_msg += f"\nFree: {free} of {total}"
74 | result_msg += f"\nTrashed: {get_readable_file_size(info['trashed'])}"
75 | result_msg += f"\n\nStorage used: {used_percentage}"
76 | result_msg += f"\nStorage free: {free_percentage}"
77 | except KeyError:
78 |         result_msg += "\nN/A"
79 | button.cb_buildbutton("⬅️ Back", f"storagemenu^back^{user_id}", "footer")
80 | button.cb_buildbutton(
81 | "✘ Close Menu", f"storagemenu^close^{user_id}", "footer_second"
82 | )
83 | await editMarkup(result_msg, message, reply_markup=button.build_menu(1))
84 |
85 |
86 | def get_used_bar(percentage):
87 |     # Render a 10-segment usage bar: one filled block per 10% of storage used
88 |     filled = floor(percentage / 10)
89 |     empty = 10 - filled
90 |     return "■" * filled + "□" * empty
91 |
92 |
93 | bot.add_handler(
94 | MessageHandler(
95 | handle_storage,
96 | filters=command(BotCommands.StorageCommand)
97 | & (CustomFilters.user_filter | CustomFilters.chat_filter),
98 | )
99 | )
100 | bot.add_handler(CallbackQueryHandler(storage_menu_cb, filters=regex("storagemenu")))
101 |
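
A usage example for get_used_bar: at 37.5% usage, floor(37.5 / 10) = 3 of the 10 segments are filled:

    >>> get_used_bar(37.5)
    '■■■□□□□□□□'
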
--------------------------------------------------------------------------------
/bot/modules/sync.py:
--------------------------------------------------------------------------------
1 | from random import SystemRandom
2 | from string import ascii_letters, digits
3 | from pyrogram.filters import command, regex
4 | from pyrogram.handlers import MessageHandler, CallbackQueryHandler
5 | from asyncio.subprocess import PIPE, create_subprocess_exec as exec
6 | from bot import bot, config_dict, status_dict_lock, status_dict
7 | from bot.helper.ext_utils.menu_utils import Menus
8 | from bot.helper.telegram_helper.bot_commands import BotCommands
9 | from bot.helper.telegram_helper.filters import CustomFilters
10 | from bot.helper.ext_utils.rclone_utils import (
11 | get_rclone_path,
12 | is_rclone_config,
13 | list_remotes,
14 | )
15 | from bot.helper.telegram_helper.message_utils import sendStatusMessage
16 | from bot.modules.tasks_listener import TaskListener
17 | from bot.helper.mirror_leech_utils.status_utils.sync_status import SyncStatus
18 |
19 |
20 | SOURCE = None
21 | listener_dict = {}
22 |
23 |
24 | async def handle_sync(client, message):
25 | user_id = message.from_user.id
26 | tag = f"@{message.from_user.username}"
27 | if await is_rclone_config(user_id, message):
28 | await list_remotes(message, menu_type=Menus.SYNC, remote_type="source")
29 | listener_dict[message.id] = TaskListener(message, tag, user_id)
30 |
31 |
32 | async def sync_callback(client, query):
33 | data = query.data.split("^")
34 | message = query.message
35 | user_id = query.from_user.id
36 | msg_id = query.message.reply_to_message.id
37 |
38 | listener = listener_dict[msg_id]
39 | path = await get_rclone_path(user_id, message)
40 |
41 | if data[1] == "source":
42 | await query.answer()
43 | globals()["SOURCE"] = data[2]
44 | await list_remotes(
45 | message, menu_type=Menus.SYNC, remote_type="destination", edit=True
46 | )
47 | elif data[1] == "destination":
48 | await query.answer()
49 | destination = data[2]
50 | await start_sync(message, path, destination, listener)
51 | else:
52 | await query.answer()
53 | await message.delete()
54 |
55 |
56 | async def start_sync(message, path, destination, listener):
57 | cmd = [
58 | "rclone",
59 | "sync",
60 | "--delete-during",
61 | "-P",
62 | f"--config={path}",
63 | f"{SOURCE}:",
64 | f"{destination}:",
65 | ]
66 | if config_dict["SERVER_SIDE"]:
67 | cmd.append("--server-side-across-configs")
68 |
69 | process = await exec(*cmd, stdout=PIPE, stderr=PIPE)
70 |
71 | gid = "".join(SystemRandom().choices(ascii_letters + digits, k=10))
72 | async with status_dict_lock:
73 | status = SyncStatus(process, gid, SOURCE, destination, listener)
74 | status_dict[listener.uid] = status
75 | await sendStatusMessage(listener.message)
76 | await status.start()
77 |
78 | return_code = await process.wait()
79 |
80 | if return_code == 0:
81 |         msg = "Sync completed successfully ✅\n\n"
82 |         msg += "Note:"
83 |         msg += "\n1. Use the dedupe command to remove duplicate files/directories"
84 |         msg += "\n2. Use the rmdir command to remove empty directories"
85 | await listener.onRcloneSyncComplete(msg)
86 | else:
87 | err = await process.stderr.read()
88 | await listener.onDownloadError(str(err))
89 |
90 | await message.delete()
91 |
92 |
93 | bot.add_handler(
94 | MessageHandler(
95 | handle_sync,
96 | filters=command(BotCommands.SyncCommand)
97 | & (CustomFilters.user_filter | CustomFilters.chat_filter),
98 | )
99 | )
100 |
101 | bot.add_handler(CallbackQueryHandler(sync_callback, filters=regex("syncmenu")))
102 |
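
Note on the flags above: --delete-during removes files from the destination that no longer exist on the source while the transfer runs, so the destination ends up as an exact mirror, and --server-side-across-configs (added when SERVER_SIDE is set) asks rclone to copy between the two remotes directly instead of routing the data through the bot, which only works when both remotes support server-side operations.
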
--------------------------------------------------------------------------------
/bot/modules/torr_select.py:
--------------------------------------------------------------------------------
1 | from pyrogram.handlers import CallbackQueryHandler
2 | from pyrogram import filters
3 | from os import remove as osremove, path as ospath
4 | from bot import bot, aria2, LOGGER
5 | from bot.helper.ext_utils.bot_utils import run_sync_to_async
6 | from bot.helper.telegram_helper.message_utils import sendStatusMessage
7 | from bot.helper.ext_utils.misc_utils import getTaskByGid
8 |
9 |
10 | async def get_confirm(client, query):
11 | user_id = query.from_user.id
12 | data = query.data.split()
13 | message = query.message
14 | task = await getTaskByGid(data[2])
15 | if task is None:
16 | await query.answer("This task has been cancelled!", show_alert=True)
17 | await message.delete()
18 | return
19 | if not hasattr(task, "seeding"):
20 | await query.answer(
21 |             "Not in download state anymore! Keep this message to resume the seed if seeding is enabled!",
22 | show_alert=True,
23 | )
24 | return
25 | if hasattr(task, "listener"):
26 | listener = task.listener()
27 | else:
28 | return
29 | if user_id != listener.user_id:
30 | await query.answer("This task is not for you!", show_alert=True)
31 | elif data[1] == "pin":
32 | await query.answer(data[3], show_alert=True)
33 | elif data[1] == "done":
34 | await query.answer()
35 | id_ = data[3]
36 | if len(id_) > 20:
37 | client = task.client()
38 | tor_info = (await run_sync_to_async(client.torrents_info, torrent_hash=id_))[0]
39 | path = tor_info.content_path.rsplit("/", 1)[0]
40 | res = await run_sync_to_async(client.torrents_files, torrent_hash=id_)
41 | for f in res:
42 | if f.priority == 0:
43 | f_paths = [f"{path}/{f.name}", f"{path}/{f.name}.!qB"]
44 | for f_path in f_paths:
45 | if ospath.exists(f_path):
46 | try:
47 | osremove(f_path)
48 | except:
49 | pass
50 | await run_sync_to_async(client.torrents_resume, torrent_hashes=id_)
51 | else:
52 | res = await run_sync_to_async(aria2.client.get_files, id_)
53 | for f in res:
54 | if f["selected"] == "false" and ospath.exists(f["path"]):
55 | try:
56 | osremove(f["path"])
57 | except:
58 | pass
59 | try:
60 | await run_sync_to_async(aria2.client.unpause, id_)
61 | except Exception as e:
62 | LOGGER.error(
63 |                     f"{e} Error in resume; this mostly happens after abusing aria2. Try using the select cmd again!"
64 | )
65 | await sendStatusMessage(message)
66 | await message.delete()
67 | elif data[1] == "rm":
68 | await query.answer()
69 | obj = task.task()
70 | await obj.cancel_task()
71 | await message.delete()
72 |
73 |
74 | bot.add_handler(CallbackQueryHandler(get_confirm, filters=filters.regex("btsel")))
75 |
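
get_confirm above distinguishes qBittorrent torrents from aria2 downloads purely by id length: aria2 gids are 16 hex characters, while torrent infohashes are 40 (v1) or 64 (v2). A minimal sketch of that dispatch, with is_qbit_id as an illustrative name:

    def is_qbit_id(id_):
        # aria2 gid: 16 hex chars; infohash: 40 (v1) / 64 (v2)
        return len(id_) > 20
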
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.3"
2 |
3 | services:
4 | app:
5 | build: .
6 | container_name: rcmltb
7 | command: bash start.sh
8 | # volumes: # optional
9 | # - /usr/src/app/downloads:/usr/src/app/downloads:rw
10 | ports:
11 | - "80:80" # qbittorrent selection webserver
12 | - "8080:8080" # rclone serve index webserver
13 | restart: on-failure
14 |
15 |
16 |
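With this file in place, docker-compose up --build builds the image and starts the bot; ports 80 and 8080 must be free on the host for the two web servers noted in the comments above.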
--------------------------------------------------------------------------------
/generate_drive_token.py:
--------------------------------------------------------------------------------
1 | # Source: https://github.com/anasty17/mirror-leech-telegram-bot/blob/master/generate_drive_token.py
2 |
3 | import pickle
4 | import os
5 | from google_auth_oauthlib.flow import InstalledAppFlow
6 | from google.auth.transport.requests import Request
7 |
8 | credentials = None
9 | __G_DRIVE_TOKEN_FILE = "token.pickle"
10 | __OAUTH_SCOPE = ["https://www.googleapis.com/auth/drive"]
11 | if os.path.exists(__G_DRIVE_TOKEN_FILE):
12 | with open(__G_DRIVE_TOKEN_FILE, "rb") as f:
13 | credentials = pickle.load(f)
14 | if credentials is None or not credentials.valid:
15 |     # Refresh an expired token when a refresh token is available;
16 |     # otherwise run the OAuth flow to obtain fresh credentials.
17 |     if credentials and credentials.expired and credentials.refresh_token:
18 |         credentials.refresh(Request())
19 |     else:
20 |         flow = InstalledAppFlow.from_client_secrets_file(
21 |             "credentials.json", __OAUTH_SCOPE
22 |         )
23 |         credentials = flow.run_local_server(port=0, open_browser=False)
24 |
25 | # Save the credentials for the next run
26 | with open(__G_DRIVE_TOKEN_FILE, "wb") as token:
27 | pickle.dump(credentials, token)
28 |
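
A sketch of how the saved token is consumed on a later run (token.pickle as written above):

    import pickle

    with open("token.pickle", "rb") as f:
        credentials = pickle.load(f)
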
--------------------------------------------------------------------------------
/qBittorrent/config/qBittorrent.conf:
--------------------------------------------------------------------------------
1 | [Application]
2 | MemoryWorkingSetLimit=512
3 |
4 | [BitTorrent]
5 | Session\AddExtensionToIncompleteFiles=true
6 | Session\AddTrackersEnabled=false
7 | Session\AnnounceToAllTrackers=true
8 | Session\AnonymousModeEnabled=false
9 | Session\AsyncIOThreadsCount=16
10 | Session\ConnectionSpeed=-1
11 | Session\DHTEnabled=true
12 | Session\DiskCacheSize=-1
13 | Session\GlobalDLSpeedLimit=0
14 | Session\GlobalMaxRatio=-1
15 | Session\GlobalMaxSeedingMinutes=-1
16 | Session\GlobalUPSpeedLimit=0
17 | Session\HashingThreadsCount=1
18 | Session\IgnoreSlowTorrentsForQueueing=true
19 | Session\IncludeOverheadInLimits=false
20 | Session\LSDEnabled=true
21 | Session\MaxActiveCheckingTorrents=3
22 | Session\MaxActiveDownloads=1000
23 | Session\MaxActiveTorrents=1000
24 | Session\MaxActiveUploads=1000
25 | Session\MaxConnections=-1
26 | Session\MaxConnectionsPerTorrent=-1
27 | Session\MaxRatioAction=0
28 | Session\MaxUploads=-1
29 | Session\MaxUploadsPerTorrent=-1
30 | Session\MultiConnectionsPerIp=true
31 | Session\PexEnabled=true
32 | Session\PerformanceWarning=true
33 | Session\Preallocation=true
34 | Session\QueueingSystemEnabled=false
35 | Session\SlowTorrentsDownloadRate=2
36 | Session\SlowTorrentsInactivityTimer=600
37 | Session\SlowTorrentsUploadRate=2
38 | Session\StopTrackerTimeout=5
39 | TrackerEnabled=true
40 |
41 | [LegalNotice]
42 | Accepted=true
43 |
44 | [Meta]
45 | MigrationVersion=4
46 |
47 | [Preferences]
48 | Advanced\DisableRecursiveDownload=false
49 | Advanced\RecheckOnCompletion=false
50 | Advanced\trackerPortForwarding=true
51 | General\PreventFromSuspendWhenDownloading=true
52 | General\PreventFromSuspendWhenSeeding=true
53 | Search\SearchEnabled=true
54 | WebUI\BanDuration=3600
55 | WebUI\CSRFProtection=false
56 | WebUI\ClickjackingProtection=false
57 | WebUI\Enabled=true
58 | WebUI\HTTPS\Enabled=false
59 | WebUI\HostHeaderValidation=false
60 | WebUI\LocalHostAuth=false
61 | WebUI\MaxAuthenticationFailCount=1000
62 | WebUI\Port=8090
63 | WebUI\SecureCookie=false
64 | WebUI\SessionTimeout=3600
65 | WebUI\UseUPnP=false
66 |
--------------------------------------------------------------------------------
/qbitweb/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/qbitweb/nodes.py:
--------------------------------------------------------------------------------
1 | from anytree import NodeMixin
2 | from re import findall as re_findall
3 | from os import environ
4 |
5 | DOWNLOAD_DIR = environ.get("DOWNLOAD_DIR", "")
6 | if len(DOWNLOAD_DIR) == 0:
7 | DOWNLOAD_DIR = "/usr/src/app/downloads/"
8 | elif not DOWNLOAD_DIR.endswith("/"):
9 | DOWNLOAD_DIR += "/"
10 |
11 |
12 | class TorNode(NodeMixin):
13 | def __init__(
14 | self,
15 | name,
16 | is_folder=False,
17 | is_file=False,
18 | parent=None,
19 | size=None,
20 | priority=None,
21 | file_id=None,
22 | progress=None,
23 | ):
24 | super().__init__()
25 | self.name = name
26 | self.is_folder = is_folder
27 | self.is_file = is_file
28 |
29 | if parent is not None:
30 | self.parent = parent
31 | if size is not None:
32 | self.fsize = size
33 | if priority is not None:
34 | self.priority = priority
35 | if file_id is not None:
36 | self.file_id = file_id
37 | if progress is not None:
38 | self.progress = progress
39 |
40 |
41 | def qb_get_folders(path):
42 | return path.split("/")
43 |
44 |
45 | def get_folders(path):
46 | fs = re_findall(f"{DOWNLOAD_DIR}[0-9]+/(.+)", path)[0]
47 | return fs.split("/")
48 |
49 |
50 | def make_tree(res, aria2=False):
51 | parent = TorNode("Torrent")
52 | if not aria2:
53 | for i in res:
54 | folders = qb_get_folders(i.name)
55 | if len(folders) > 1:
56 | previous_node = parent
57 | for j in range(len(folders) - 1):
58 | current_node = next(
59 | (k for k in previous_node.children if k.name == folders[j]),
60 | None,
61 | )
62 | if current_node is None:
63 | previous_node = TorNode(
64 | folders[j], parent=previous_node, is_folder=True
65 | )
66 | else:
67 | previous_node = current_node
68 | TorNode(
69 | folders[-1],
70 | is_file=True,
71 | parent=previous_node,
72 | size=i.size,
73 | priority=i.priority,
74 | file_id=i.id,
75 | progress=round(i.progress * 100, 5),
76 | )
77 | else:
78 | TorNode(
79 | folders[-1],
80 | is_file=True,
81 | parent=parent,
82 | size=i.size,
83 | priority=i.priority,
84 | file_id=i.id,
85 | progress=round(i.progress * 100, 5),
86 | )
87 | else:
88 | for i in res:
89 | folders = get_folders(i["path"])
90 | priority = 1
91 | if i["selected"] == "false":
92 | priority = 0
93 | if len(folders) > 1:
94 | previous_node = parent
95 | for j in range(len(folders) - 1):
96 | current_node = next(
97 | (k for k in previous_node.children if k.name == folders[j]),
98 | None,
99 | )
100 | if current_node is None:
101 | previous_node = TorNode(
102 | folders[j], parent=previous_node, is_folder=True
103 | )
104 | else:
105 | previous_node = current_node
106 | TorNode(
107 | folders[-1],
108 | is_file=True,
109 | parent=previous_node,
110 | size=i["length"],
111 | priority=priority,
112 | file_id=i["index"],
113 | progress=round(
114 | (int(i["completedLength"]) / int(i["length"])) * 100, 5
115 | ),
116 | )
117 | else:
118 | TorNode(
119 | folders[-1],
120 | is_file=True,
121 | parent=parent,
122 | size=i["length"],
123 | priority=priority,
124 | file_id=i["index"],
125 | progress=round(
126 | (int(i["completedLength"]) / int(i["length"])) * 100, 5
127 | ),
128 | )
129 | return create_list(parent, ["", 0])
130 |
131 |
132 | """
133 | def print_tree(parent):
134 | for pre, _, node in RenderTree(parent):
135 | treestr = u"%s%s" % (pre, node.name)
136 | print(treestr.ljust(8), node.is_folder, node.is_file)
137 | """
138 |
139 |
140 | def create_list(par, msg):
141 | if par.name != ".unwanted":
142 | msg[0] += "