├── .github
│   └── FUNDING.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── README.md
├── add_to_team_drive.py
├── aria.sh
├── bot
│   ├── __init__.py
│   ├── __main__.py
│   ├── helper
│   │   ├── __init__.py
│   │   ├── ext_utils
│   │   │   ├── __init__.py
│   │   │   ├── bot_utils.py
│   │   │   ├── bulk_links.py
│   │   │   ├── db_handler.py
│   │   │   ├── exceptions.py
│   │   │   ├── fs_utils.py
│   │   │   ├── help_messages.py
│   │   │   ├── leech_utils.py
│   │   │   ├── shortener.py
│   │   │   ├── task_manager.py
│   │   │   └── telegraph_helper.py
│   │   ├── jmdkh_utils.py
│   │   ├── listeners
│   │   │   ├── aria2_listener.py
│   │   │   ├── qbit_listener.py
│   │   │   └── tasks_listener.py
│   │   ├── mirror_utils
│   │   │   ├── __init__.py
│   │   │   ├── download_utils
│   │   │   │   ├── __init__.py
│   │   │   │   ├── aria2_download.py
│   │   │   │   ├── direct_link_generator.py
│   │   │   │   ├── direct_link_generator_license.md
│   │   │   │   ├── gd_download.py
│   │   │   │   ├── mega_download.py
│   │   │   │   ├── qbit_download.py
│   │   │   │   ├── rclone_download.py
│   │   │   │   ├── telegram_download.py
│   │   │   │   └── yt_dlp_download.py
│   │   │   ├── rclone_utils
│   │   │   │   ├── __init__.py
│   │   │   │   ├── list.py
│   │   │   │   ├── serve.py
│   │   │   │   └── transfer.py
│   │   │   ├── status_utils
│   │   │   │   ├── __init__.py
│   │   │   │   ├── aria2_status.py
│   │   │   │   ├── extract_status.py
│   │   │   │   ├── gdrive_status.py
│   │   │   │   ├── mega_download_status.py
│   │   │   │   ├── qbit_status.py
│   │   │   │   ├── queue_status.py
│   │   │   │   ├── rclone_status.py
│   │   │   │   ├── split_status.py
│   │   │   │   ├── telegram_status.py
│   │   │   │   ├── yt_dlp_download_status.py
│   │   │   │   └── zip_status.py
│   │   │   └── upload_utils
│   │   │       ├── __init__.py
│   │   │       ├── gdriveTools.py
│   │   │       └── pyrogramEngine.py
│   │   └── telegram_helper
│   │       ├── __init__.py
│   │       ├── bot_commands.py
│   │       ├── button_build.py
│   │       ├── filters.py
│   │       └── message_utils.py
│   └── modules
│       ├── __init__.py
│       ├── anonymous.py
│       ├── authorize.py
│       ├── bot_settings.py
│       ├── cancel_mirror.py
│       ├── category_select.py
│       ├── clone.py
│       ├── eval.py
│       ├── gd_count.py
│       ├── gd_delete.py
│       ├── gd_list.py
│       ├── leech_del.py
│       ├── mirror_leech.py
│       ├── rmdb.py
│       ├── rss.py
│       ├── save_message.py
│       ├── shell.py
│       ├── status.py
│       ├── torrent_search.py
│       ├── torrent_select.py
│       ├── users_settings.py
│       └── ytdlp.py
├── config_sample.env
├── docker-compose.yml
├── driveid.py
├── gen_sa_accounts.py
├── generate_drive_token.py
├── generate_string_session.py
├── qBittorrent
│   └── config
│       └── qBittorrent.conf
├── requirements-cli.txt
├── requirements.txt
├── start.sh
├── update.py
└── web
    ├── __init__.py
    ├── nodes.py
    └── wserver.py
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | custom: ['https://ko-fi.com/anasty17']
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | config.env
2 | *.pyc
3 | data*
4 | .vscode
5 | .idea
6 | *.json
7 | *.pickle
8 | .netrc
9 | log.txt
10 | accounts/*
11 | Thumbnails/*
12 | rclone/*
13 | list_drives.txt
14 | cookies.txt
15 | downloads
16 | categories.txt
17 | shorteners.txt
18 | buttons.txt
19 | terabox.txt
20 | rclone.conf
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM anasty17/mltb:latest
2 |
3 | WORKDIR /usr/src/app
4 | RUN chmod 777 /usr/src/app
5 |
6 | COPY requirements.txt .
7 | RUN pip3 install --no-cache-dir -r requirements.txt
8 |
9 | COPY . .
10 |
11 | CMD ["bash", "start.sh"]
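12 | 
13 | # Illustrative build/run (image and container names are arbitrary; the bot
14 | # reads its settings from a config.env copied in with the sources):
15 | #   docker build -t jmdkh-mltb .
16 | #   docker run -d --name jmdkh-mltb jmdkh-mltb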
--------------------------------------------------------------------------------
/add_to_team_drive.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | from google.oauth2.service_account import Credentials
3 | import googleapiclient.discovery, json, progress.bar, glob, sys, argparse, time
4 | from google_auth_oauthlib.flow import InstalledAppFlow
5 | from google.auth.transport.requests import Request
6 | import os, pickle
7 |
8 | stt = time.time()
9 |
10 | parse = argparse.ArgumentParser(
11 | description='A tool to add service accounts to a shared drive from a folder containing credential files.')
12 | parse.add_argument('--path', '-p', default='accounts',
13 | help='Specify an alternative path to the service accounts folder.')
14 | parse.add_argument('--credentials', '-c', default='./credentials.json',
15 | help='Specify the relative path for the credentials file.')
16 | parse.add_argument('--yes', '-y', default=False, action='store_true', help='Skips the sanity prompt.')
17 | parsereq = parse.add_argument_group('required arguments')
18 | parsereq.add_argument('--drive-id', '-d', help='The ID of the Shared Drive.', required=True)
19 |
20 | args = parse.parse_args()
21 | acc_dir = args.path
22 | did = args.drive_id
23 | credentials = glob.glob(args.credentials)
24 |
25 | try:
26 | open(credentials[0], 'r')
27 | print('>> Found credentials.')
28 | except IndexError:
29 | print('>> No credentials found.')
30 | sys.exit(0)
31 |
32 | if not args.yes:
33 | # input('Make sure the following client id is added to the shared drive as Manager:\n' + json.loads((open(
34 | # credentials[0],'r').read()))['installed']['client_id'])
35 | input('>> Make sure the **Google account** that has generated credentials.json\n is added into your Team Drive '
36 | '(shared drive) as Manager\n>> (Press any key to continue)')
37 |
38 | creds = None
39 | if os.path.exists('token_sa.pickle'):
40 | with open('token_sa.pickle', 'rb') as token:
41 | creds = pickle.load(token)
42 | # If there are no (valid) credentials available, let the user log in.
43 | if not creds or not creds.valid:
44 | if creds and creds.expired and creds.refresh_token:
45 | creds.refresh(Request())
46 | else:
47 | flow = InstalledAppFlow.from_client_secrets_file(credentials[0], scopes=[
48 | 'https://www.googleapis.com/auth/admin.directory.group',
49 | 'https://www.googleapis.com/auth/admin.directory.group.member'
50 | ])
51 | # creds = flow.run_local_server(port=0)
52 | creds = flow.run_console()
53 | # Save the credentials for the next run
54 | with open('token_sa.pickle', 'wb') as token:
55 | pickle.dump(creds, token)
56 |
57 | drive = googleapiclient.discovery.build("drive", "v3", credentials=creds)
58 | batch = drive.new_batch_http_request()
59 |
60 | aa = glob.glob('%s/*.json' % acc_dir)
61 | pbar = progress.bar.Bar("Readying accounts", max=len(aa))
62 | for i in aa:
63 | ce = json.loads(open(i, 'r').read())['client_email']
64 | batch.add(drive.permissions().create(fileId=did, supportsAllDrives=True, body={
65 | "role": "organizer",
66 | "type": "user",
67 | "emailAddress": ce
68 | }))
69 | pbar.next()
70 | pbar.finish()
71 | print('Adding...')
72 | batch.execute()
73 |
74 | print('Complete.')
75 | hours, rem = divmod((time.time() - stt), 3600)
76 | minutes, sec = divmod(rem, 60)
77 | print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), sec))
78 |
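79 | # Illustrative invocation (the drive id is a placeholder; -p and -c show their
80 | # defaults from the argparse setup above):
81 | #   python3 add_to_team_drive.py -d 0AbCdEfGhIjKlMnOp -p accounts -c ./credentials.json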
--------------------------------------------------------------------------------
/aria.sh:
--------------------------------------------------------------------------------
1 | tracker_list=$(curl -Ns https://ngosang.github.io/trackerslist/trackers_all_http.txt | awk '$0' | tr '\n\n' ',')
2 | aria2c --allow-overwrite=true --auto-file-renaming=true --bt-enable-lpd=true --bt-detach-seed-only=true \
3 | --bt-remove-unselected-file=true --bt-tracker="[$tracker_list]" --bt-max-peers=0 --enable-rpc=true \
4 | --rpc-max-request-size=1024M --max-connection-per-server=10 --max-concurrent-downloads=10 --split=10 \
5 | --seed-ratio=0 --check-integrity=true --continue=true --daemon=true --disk-cache=40M --force-save=true \
6 | --min-split-size=10M --follow-torrent=mem --check-certificate=false --optimize-concurrent-downloads=true \
7 | --http-accept-gzip=true --max-file-not-found=0 --max-tries=20 --peer-id-prefix=-qB4520- --reuse-uri=true \
8 | --content-disposition-default-utf8=true --user-agent=Wget/1.12 --peer-agent=qBittorrent/4.5.2 --quiet=true \
9 | --summary-interval=0
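10 | 
11 | # Note: --enable-rpc without --rpc-listen-port leaves aria2c on its default RPC
12 | # port, 6800, which the bot's aria2 client is presumably configured to use.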
--------------------------------------------------------------------------------
/bot/helper/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/bulk_links.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from aiofiles import open as aiopen
3 | from aiofiles.os import remove
4 |
5 |
6 | async def get_links_from_message(text, bulk_start, bulk_end):
7 | links_list = text.split('\n')
8 | links_list = [item.strip() for item in links_list if len(item) != 0]
9 |
10 | if bulk_start != 0 and bulk_end != 0:
11 | links_list = links_list[bulk_start:bulk_end]
12 | elif bulk_start != 0:
13 | links_list = links_list[bulk_start:]
14 | elif bulk_end != 0:
15 | links_list = links_list[:bulk_end]
16 |
17 | return links_list
18 |
19 |
20 | async def get_links_from_file(message, bulk_start, bulk_end):
21 | links_list = []
22 | text_file_dir = await message.download()
23 |
24 | async with aiopen(text_file_dir, 'r+') as f:
25 | lines = await f.readlines()
26 | links_list.extend(line.strip() for line in lines if len(line) != 0)
27 |
28 | if bulk_start != 0 and bulk_end != 0:
29 | links_list = links_list[bulk_start:bulk_end]
30 | elif bulk_start != 0:
31 | links_list = links_list[bulk_start:]
32 | elif bulk_end != 0:
33 | links_list = links_list[:bulk_end]
34 |
35 | await remove(text_file_dir)
36 |
37 | return links_list
38 |
39 |
40 | async def extract_bulk_links(message, bulk_start, bulk_end):
41 | bulk_start = int(bulk_start)
42 | bulk_end = int(bulk_end)
43 | if (reply_to := message.reply_to_message) and (file_ := reply_to.document) and (file_.mime_type == 'text/plain'):
44 | return await get_links_from_file(message.reply_to_message, bulk_start, bulk_end)
45 |     elif reply_to and (text := reply_to.text):
46 | return await get_links_from_message(text, bulk_start, bulk_end)
47 | return []
48 |
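49 | # Illustrative usage (assumes a Pyrogram-style Message replying to a text
50 | # message or a text/plain document with one link per line):
51 | #   links = await extract_bulk_links(message, '2', '5')  # slice links_list[2:5]
52 | #   links = await extract_bulk_links(message, '0', '0')  # no slicing, all links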
--------------------------------------------------------------------------------
/bot/helper/ext_utils/db_handler.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from os import environ
3 |
4 | from aiofiles import open as aiopen
5 | from aiofiles.os import makedirs
6 | from aiofiles.os import path as aiopath
7 | from motor.motor_asyncio import AsyncIOMotorClient
8 | from pymongo.errors import PyMongoError
9 |
10 | from bot import (DATABASE_URL, LOGGER, aria2_options, bot_id, bot_loop,
11 | bot_name, config_dict, qbit_options, rss_dict, user_data)
12 |
13 |
14 | class DbManger:
15 | def __init__(self):
16 | self.__err = False
17 | self.__db = None
18 | self.__conn = None
19 | self.__connect()
20 |
21 | def __connect(self):
22 | try:
23 | self.__conn = AsyncIOMotorClient(DATABASE_URL)
24 | self.__db = self.__conn.mltb
25 | except PyMongoError as e:
26 | LOGGER.error(f"Error in DB connection: {e}")
27 | self.__err = True
28 |
29 | async def db_load(self):
30 | if self.__err:
31 | return
32 | # Save bot settings
33 | await self.__db.settings.config.update_one({'_id': bot_id}, {'$set': config_dict}, upsert=True)
34 | # Save Aria2c options
35 | if await self.__db.settings.aria2c.find_one({'_id': bot_id}) is None:
36 | await self.__db.settings.aria2c.update_one({'_id': bot_id}, {'$set': aria2_options}, upsert=True)
37 | # Save qbittorrent options
38 | if await self.__db.settings.qbittorrent.find_one({'_id': bot_id}) is None:
39 | await self.__db.settings.qbittorrent.update_one({'_id': bot_id}, {'$set': qbit_options}, upsert=True)
40 | # User Data
41 | if await self.__db.users[bot_id].find_one():
42 | rows = self.__db.users[bot_id].find({})
43 | # return a dict ==> {_id, is_sudo, is_auth, as_doc, thumb, yt_opt, media_group, equal_splits, split_size, rclone}
44 | async for row in rows:
45 | uid = row['_id']
46 | del row['_id']
47 | thumb_path = f'Thumbnails/{uid}.jpg'
48 | rclone_path = f'rclone/{uid}.conf'
49 | if row.get('thumb'):
50 | if not await aiopath.exists('Thumbnails'):
51 | await makedirs('Thumbnails')
52 | async with aiopen(thumb_path, 'wb+') as f:
53 | await f.write(row['thumb'])
54 | row['thumb'] = thumb_path
55 | if row.get('rclone'):
56 | if not await aiopath.exists('rclone'):
57 | await makedirs('rclone')
58 | async with aiopen(rclone_path, 'wb+') as f:
59 | await f.write(row['rclone'])
60 | row['rclone'] = rclone_path
61 | user_data[uid] = row
62 | LOGGER.info("Users data has been imported from Database")
63 | # Rss Data
64 | if await self.__db.rss[bot_id].find_one():
65 | rows = self.__db.rss[bot_id].find({}) # return a dict ==> {_id, title: {link, last_feed, last_name, inf, exf, command, paused}
66 | async for row in rows:
67 | user_id = row['_id']
68 | del row['_id']
69 | rss_dict[user_id] = row
70 | LOGGER.info("Rss data has been imported from Database.")
71 |         self.__conn.close()
72 |
73 | async def update_config(self, dict_):
74 | if self.__err:
75 | return
76 | await self.__db.settings.config.update_one({'_id': bot_id}, {'$set': dict_}, upsert=True)
77 |         self.__conn.close()
78 |
79 | async def load_configs(self):
80 | if self.__err:
81 | return
82 | if db_dict := await self.__db.settings.config.find_one({'_id': bot_id}):
83 | del db_dict['_id']
84 | for key, value in db_dict.items():
85 | environ[key] = str(value)
86 | if pf_dict := await self.__db.settings.files.find_one({'_id': bot_id}):
87 | del pf_dict['_id']
88 | for key, value in pf_dict.items():
89 | if value:
90 | file_ = key.replace('__', '.')
91 | with open(file_, 'wb+') as f:
92 | f.write(value)
93 |
94 | async def update_aria2(self, key, value):
95 | if self.__err:
96 | return
97 | await self.__db.settings.aria2c.update_one({'_id': bot_id}, {'$set': {key: value}}, upsert=True)
98 |         self.__conn.close()
99 |
100 | async def update_qbittorrent(self, key, value):
101 | if self.__err:
102 | return
103 | await self.__db.settings.qbittorrent.update_one({'_id': bot_id}, {'$set': {key: value}}, upsert=True)
104 |         self.__conn.close()
105 |
106 | async def update_private_file(self, path):
107 | if self.__err:
108 | return
109 | if await aiopath.exists(path):
110 | async with aiopen(path, 'rb+') as pf:
111 | pf_bin = await pf.read()
112 | else:
113 | pf_bin = ''
114 | path = path.replace('.', '__')
115 | await self.__db.settings.files.update_one({'_id': bot_id}, {'$set': {path: pf_bin}}, upsert=True)
116 |         self.__conn.close()
117 |
118 | async def update_user_data(self, user_id):
119 | if self.__err:
120 | return
121 | data = user_data[user_id]
122 | if data.get('thumb'):
123 | del data['thumb']
124 | if data.get('rclone'):
125 | del data['rclone']
126 | if data.get('token'):
127 | del data['token']
128 | if data.get('time'):
129 | del data['time']
130 | await self.__db.users[bot_id].replace_one({'_id': user_id}, data, upsert=True)
131 |         self.__conn.close()
132 |
133 | async def update_user_doc(self, user_id, key, path=''):
134 | if self.__err:
135 | return
136 | if path:
137 | async with aiopen(path, 'rb+') as doc:
138 | doc_bin = await doc.read()
139 | else:
140 | doc_bin = ''
141 | await self.__db.users[bot_id].update_one({'_id': user_id}, {'$set': {key: doc_bin}}, upsert=True)
142 |         self.__conn.close()
143 |
144 | async def rss_update_all(self):
145 | if self.__err:
146 | return
147 | for user_id in list(rss_dict.keys()):
148 | await self.__db.rss[bot_id].replace_one({'_id': user_id}, rss_dict[user_id], upsert=True)
149 |         self.__conn.close()
150 |
151 | async def rss_update(self, user_id):
152 | if self.__err:
153 | return
154 | await self.__db.rss[bot_id].replace_one({'_id': user_id}, rss_dict[user_id], upsert=True)
155 |         self.__conn.close()
156 |
157 | async def rss_delete(self, user_id):
158 | if self.__err:
159 | return
160 | await self.__db.rss[bot_id].delete_one({'_id': user_id})
161 |         self.__conn.close()
162 |
163 | async def add_incomplete_task(self, cid, link, tag):
164 | if self.__err:
165 | return
166 | await self.__db.tasks[bot_id].insert_one({'_id': link, 'cid': cid, 'tag': tag})
167 |         self.__conn.close()
168 |
169 | async def rm_complete_task(self, link):
170 | if self.__err:
171 | return
172 | await self.__db.tasks[bot_id].delete_one({'_id': link})
173 |         self.__conn.close()
174 |
175 | async def get_incomplete_tasks(self):
176 | notifier_dict = {}
177 | if self.__err:
178 | return notifier_dict
179 | if await self.__db.tasks[bot_id].find_one():
180 | # return a dict ==> {_id, cid, tag}
181 | rows = self.__db.tasks[bot_id].find({})
182 | async for row in rows:
183 | if row['cid'] in list(notifier_dict.keys()):
184 | if row['tag'] in list(notifier_dict[row['cid']]):
185 | notifier_dict[row['cid']][row['tag']].append(
186 | row['_id'])
187 | else:
188 | notifier_dict[row['cid']][row['tag']] = [row['_id']]
189 | else:
190 | notifier_dict[row['cid']] = {row['tag']: [row['_id']]}
191 | await self.__db.tasks[bot_id].drop()
192 |         self.__conn.close()
193 | return notifier_dict # return a dict ==> {cid: {tag: [_id, _id, ...]}}
194 |
195 | async def trunc_table(self, name):
196 | if self.__err:
197 | return
198 | await self.__db[name][bot_id].drop()
199 |         self.__conn.close()
200 |
201 | async def add_download_url(self, url: str, tag: str):
202 | if self.__err:
203 | return
204 | download = {'_id': url, 'tag': tag, 'botname': bot_name}
205 | await self.__db.download_links.update_one({'_id': url}, {'$set': download}, upsert=True)
206 |         self.__conn.close()
207 |
208 |     async def check_download(self, url: str):
209 | if self.__err:
210 | return
211 | exist = await self.__db.download_links.find_one({'_id': url})
212 |         self.__conn.close()
213 | return exist
214 |
215 | async def clear_download_links(self, botName=None):
216 | if self.__err:
217 | return
218 | if not botName:
219 | botName = bot_name
220 | await self.__db.download_links.delete_many({'botname': botName})
221 |         self.__conn.close()
222 |
223 | async def remove_download(self, url: str):
224 | if self.__err:
225 | return
226 | await self.__db.download_links.delete_one({'_id': url})
227 |         self.__conn.close()
228 |
229 | if DATABASE_URL:
230 | bot_loop.run_until_complete(DbManger().db_load())
231 |
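232 | # Illustrative usage (call sites across the bot create a fresh DbManger per
233 | # operation; every method returns early if the initial connection failed):
234 | #   await DbManger().update_user_data(user_id)
235 | #   if exist := await DbManger().check_download(url):
236 | #       LOGGER.info(f"already added by {exist['tag']} in @{exist['botname']}")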
--------------------------------------------------------------------------------
/bot/helper/ext_utils/exceptions.py:
--------------------------------------------------------------------------------
1 | class DirectDownloadLinkException(Exception):
2 |     """No method was found for extracting a direct download link from the http link"""
3 | pass
4 |
5 |
6 | class NotSupportedExtractionArchive(Exception):
7 |     """The archive format the user is trying to extract is not supported"""
8 | pass
9 |
10 |
11 | class RssShutdownException(Exception):
12 |     """This exception should be raised when shutdown is called to stop the monitor"""
13 | pass
14 |
15 |
16 | class TgLinkException(Exception):
17 | """No Access granted for this chat"""
18 | pass
19 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/fs_utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from os import walk, path as ospath
3 | from aiofiles.os import remove as aioremove, path as aiopath, listdir, rmdir, makedirs
4 | from aioshutil import rmtree as aiormtree
5 | from shutil import rmtree, disk_usage
6 | from magic import Magic
7 | from re import split as re_split, I, search as re_search
8 | from subprocess import run as srun
9 | from sys import exit as sexit
10 |
11 | from .exceptions import NotSupportedExtractionArchive
12 | from bot import aria2, LOGGER, DOWNLOAD_DIR, get_client, GLOBAL_EXTENSION_FILTER
13 | from bot.helper.ext_utils.bot_utils import sync_to_async, cmd_exec, async_to_sync
14 | from bot.helper.ext_utils.telegraph_helper import telegraph
15 |
16 |
17 | ARCH_EXT = [".tar.bz2", ".tar.gz", ".bz2", ".gz", ".tar.xz", ".tar", ".tbz2", ".tgz", ".lzma2",
18 | ".zip", ".7z", ".z", ".rar", ".iso", ".wim", ".cab", ".apm", ".arj", ".chm",
19 | ".cpio", ".cramfs", ".deb", ".dmg", ".fat", ".hfs", ".lzh", ".lzma", ".mbr",
20 | ".msi", ".mslz", ".nsis", ".ntfs", ".rpm", ".squashfs", ".udf", ".vhd", ".xar"]
21 |
22 | FIRST_SPLIT_REGEX = r'(\.|_)part0*1\.rar$|(\.|_)7z\.0*1$|(\.|_)zip\.0*1$|^(?!.*(\.|_)part\d+\.rar$).*\.rar$'
23 |
24 | SPLIT_REGEX = r'\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$'
25 |
26 |
27 | def is_first_archive_split(file):
28 | return bool(re_search(FIRST_SPLIT_REGEX, file))
29 |
30 |
31 | def is_archive(file):
32 | return file.endswith(tuple(ARCH_EXT))
33 |
34 |
35 | def is_archive_split(file):
36 | return bool(re_search(SPLIT_REGEX, file))
37 |
38 |
39 | async def clean_target(path):
40 | if await aiopath.exists(path):
41 | LOGGER.info(f"Cleaning Target: {path}")
42 | if await aiopath.isdir(path):
43 | try:
44 | await aiormtree(path)
45 | except:
46 | pass
47 | elif await aiopath.isfile(path):
48 | try:
49 | await aioremove(path)
50 | except:
51 | pass
52 |
53 |
54 | async def clean_download(path):
55 | if await aiopath.exists(path):
56 | LOGGER.info(f"Cleaning Download: {path}")
57 | try:
58 | await aiormtree(path)
59 | except:
60 | pass
61 |
62 |
63 | async def start_cleanup():
64 | get_client().torrents_delete(torrent_hashes="all")
65 | try:
66 | await aiormtree(DOWNLOAD_DIR)
67 | except:
68 | pass
69 | await makedirs(DOWNLOAD_DIR)
70 |
71 |
72 | def clean_all():
73 | aria2.remove_all(True)
74 | try:
75 | get_client().torrents_delete(torrent_hashes="all")
76 | except:
77 | pass
78 | async_to_sync(telegraph.revoke_access_token)
79 | try:
80 | rmtree(DOWNLOAD_DIR)
81 | except:
82 | pass
83 |
84 |
85 | def exit_clean_up(signal, frame):
86 | try:
87 | LOGGER.info(
88 | "Please wait, while we clean up and stop the running downloads")
89 | clean_all()
90 | srun(['pkill', '-9', '-f', '-e',
91 | 'gunicorn|aria2c|qbittorrent-nox|ffmpeg|rclone'])
92 | sexit(0)
93 | except KeyboardInterrupt:
94 | LOGGER.warning("Force Exiting before the cleanup finishes!")
95 | sexit(1)
96 |
97 |
98 | async def clean_unwanted(path):
99 | LOGGER.info(f"Cleaning unwanted files/folders: {path}")
100 | for dirpath, _, files in await sync_to_async(walk, path, topdown=False):
101 | for filee in files:
102 | if filee.endswith(".!qB") or filee.endswith('.parts') and filee.startswith('.'):
103 | await aioremove(ospath.join(dirpath, filee))
104 | if dirpath.endswith((".unwanted", "splited_files_mltb", "copied_mltb")):
105 | await aiormtree(dirpath)
106 | for dirpath, _, files in await sync_to_async(walk, path, topdown=False):
107 | if not await listdir(dirpath):
108 | await rmdir(dirpath)
109 |
110 |
111 | async def get_path_size(path):
112 | if await aiopath.isfile(path):
113 | return await aiopath.getsize(path)
114 | total_size = 0
115 | for root, dirs, files in await sync_to_async(walk, path):
116 | for f in files:
117 | abs_path = ospath.join(root, f)
118 | total_size += await aiopath.getsize(abs_path)
119 | return total_size
120 |
121 |
122 | async def count_files_and_folders(path):
123 | total_files = 0
124 | total_folders = 0
125 | for _, dirs, files in await sync_to_async(walk, path):
126 | total_files += len(files)
127 | for f in files:
128 | if f.endswith(tuple(GLOBAL_EXTENSION_FILTER)):
129 | total_files -= 1
130 | total_folders += len(dirs)
131 | return total_folders, total_files
132 |
133 |
134 | def get_base_name(orig_path):
135 | extension = next(
136 | (ext for ext in ARCH_EXT if orig_path.lower().endswith(ext)), ''
137 | )
138 | if extension != '':
139 | return re_split(f'{extension}$', orig_path, maxsplit=1, flags=I)[0]
140 | else:
141 | raise NotSupportedExtractionArchive(
142 | 'File format not supported for extraction')
143 |
144 |
145 | def get_mime_type(file_path):
146 | mime = Magic(mime=True)
147 | mime_type = mime.from_file(file_path)
148 | mime_type = mime_type or "text/plain"
149 | return mime_type
150 |
151 |
152 | async def join_files(path):
153 | files = await listdir(path)
154 | results = []
155 | for file_ in files:
156 | if re_search(r"\.0+2$", file_) and await sync_to_async(get_mime_type, f'{path}/{file_}') == 'application/octet-stream':
157 | final_name = file_.rsplit('.', 1)[0]
158 | cmd = f'cat {path}/{final_name}.* > {path}/{final_name}'
159 | _, stderr, code = await cmd_exec(cmd, True)
160 | if code != 0:
161 | LOGGER.error(f'Failed to join {final_name}, stderr: {stderr}')
162 | else:
163 | results.append(final_name)
164 | if results:
165 | for res in results:
166 | for file_ in files:
167 | if re_search(fr"{res}\.0[0-9]+$", file_):
168 | await aioremove(f'{path}/{file_}')
169 |
170 |
171 | def check_storage_threshold(size, threshold, arch=False, alloc=False):
172 | free = disk_usage(DOWNLOAD_DIR).free
173 | if not alloc:
174 | if (
175 | not arch
176 | and free - size < threshold
177 | or arch
178 | and free - (size * 2) < threshold
179 | ):
180 | return False
181 | elif not arch:
182 | if free < threshold:
183 | return False
184 | elif free - size < threshold:
185 | return False
186 | return True
187 |
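188 | # Worked example for check_storage_threshold: with free=100GiB and
189 | # threshold=20GiB, a 70GiB task passes (100 - 70 >= 20), but the same task
190 | # with arch=True fails, since archiving/extracting needs roughly 2x the size
191 | # (100 - 140 < 20).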
--------------------------------------------------------------------------------
/bot/helper/ext_utils/help_messages.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | YT_HELP_MESSAGE = """
4 | Send link along with command line:
5 | /{cmd} link -s -n new name -opt x:y|x1:y1
6 | 
7 | By replying to link:
8 | /{cmd} -n new name -z password -opt x:y|x1:y1
9 | 
10 | New Name: -n
11 | /{cmd} link -n new name
12 | Note: Don't add file extension
13 | 
14 | Upload Custom Drive: link -id -index
15 | -id drive_folder_link or drive_id -index https://anything.in/0:
16 | drive_id must be a folder id and index must be a url, else it will not be accepted
17 | 
18 | Quality Buttons: -s
19 | In case a default quality is added from yt-dlp options using the format option and you need to select quality for a specific link, or for links with the multi links feature.
20 | /{cmd} link -s
21 | 
22 | Zip: -z password
23 | /{cmd} link -z (zip)
24 | /{cmd} link -z password (zip password protected)
25 | 
26 | Options: -opt
27 | /{cmd} link -opt playliststart:^10|fragment_retries:^inf|matchtitle:S13|writesubtitles:true|live_from_start:true|postprocessor_args:{{"ffmpeg": ["-threads", "4"]}}|wait_for_video:(5, 100)
28 | Note: Add `^` before integer or float; some values must be numeric and some string.
29 | Like playlist_items:10 works with string, so no need to add `^` before the number, but playlistend works only with integer so you must add `^` before the number like the example above.
30 | You can add tuple and dict also. Use double quotes inside dict.
31 | 
32 | Multi links only by replying to first link: -i
33 | /{cmd} -i 10(number of links)
34 | 
35 | Multi links within same upload directory only by replying to first link: -m
36 | /{cmd} -i 10(number of links) -m folder name
37 | 
38 | Upload: -up
39 | /{cmd} link -up rcl (To select rclone config, remote and path)
40 | You can directly add the upload path: -up remote:dir/subdir
41 | If DEFAULT_UPLOAD is `rc` then you can pass up: `gd` to upload using gdrive tools to GDRIVE_ID.
42 | If DEFAULT_UPLOAD is `gd` then you can pass up: `rc` to upload to RCLONE_PATH.
43 | If you want to add a path manually from your config (uploaded from usetting) add mrcc: before the path without space
44 | /{cmd} link -up mrcc:main:dump
45 | 
46 | Rclone Flags: -rcf
47 | /{cmd} link -up path|rcl -rcf --buffer-size:8M|--drive-starred-only|key|key:value
48 | This will override all other flags except --exclude
49 | Check here all RcloneFlags.
50 | 
51 | Bulk Download: -b
52 | Bulk can be used by text message and by replying to a text file containing links separated by new lines.
53 | You can use it only by replying to a message (text/file).
54 | All options should be along with the link!
55 | Example:
56 | link1 -n new name -up remote1:path1 -rcf |key:value|key:value
57 | link2 -z -n new name -up remote2:path2
58 | link3 -e -n new name -opt ytdlpoptions
59 | Note: You can't add the -m arg for some links only; do it for all links or use multi without bulk!
60 | link pswd: pass(zip/unzip) opt: ytdlpoptions up: remote2:path2
61 | Reply to this example by this cmd /{cmd} b(bulk)
62 | You can set the start and end of the links from the bulk with -b start:end, or only end by -b :end, or only start by -b start. The default start is from zero (first link) to inf.
63 | 
64 | 
65 | Check all yt-dlp api options from this FILE or use this script to convert cli arguments to api options.
66 | """
67 |
68 | MIRROR_HELP_MESSAGE = """
69 | /{cmd} link -n new name
70 | 
71 | By replying to link/file:
72 | /{cmd} -n new name -z -e -up upload destination
73 | 
74 | New Name: -n
75 | /{cmd} link -n new name
76 | Note: Doesn't work with torrents.
77 | 
78 | Upload Custom Drive: link -id -index
79 | -id drive_folder_link or drive_id -index https://anything.in/0:
80 | drive_id must be a folder id and index must be a url, else it will not be accepted
81 | 
82 | Direct link authorization: -au -ap
83 | /{cmd} link -au username -ap password
84 | 
85 | Extract/Zip: -e -z
86 | /{cmd} link -e password (extract password protected)
87 | /{cmd} link -z password (zip password protected)
88 | /{cmd} link -z password -e (extract and zip password protected)
89 | /{cmd} link -e password -z password (extract password protected and zip password protected)
90 | Note: When both extract and zip are added with the cmd it will extract first and then zip, so always extract first
91 | 
92 | Bittorrent selection: -s
93 | /{cmd} link -s or by replying to file/link
94 | 
95 | Bittorrent seed: -d
96 | /{cmd} link -d ratio:seed_time or by replying to file/link
97 | To specify ratio and seed time add -d ratio:time. Ex: -d 0.7:10 (ratio and time) or -d 0.7 (only ratio) or -d :10 (only time) where time is in minutes.
98 | 
99 | Multi links only by replying to first link/file: -i
100 | /{cmd} -i 10(number of links/files)
101 | 
102 | Multi links within same upload directory only by replying to first link/file: -m
103 | /{cmd} -i 10(number of links/files) -m folder name (multi message)
104 | /{cmd} -b -m folder name (bulk-message/file)
105 | 
106 | Upload: -up
107 | /{cmd} link -up rcl (To select rclone config, remote and path)
108 | You can directly add the upload path: -up remote:dir/subdir
109 | If DEFAULT_UPLOAD is `rc` then you can pass up: `gd` to upload using gdrive tools to GDRIVE_ID.
110 | If DEFAULT_UPLOAD is `gd` then you can pass up: `rc` to upload to RCLONE_PATH.
111 | If you want to add a path manually from your config (uploaded from usetting) add mrcc: before the path without space
112 | /{cmd} link -up mrcc:main:dump
113 | 
114 | Rclone Flags: -rcf
115 | /{cmd} link|path|rcl -up path|rcl -rcf --buffer-size:8M|--drive-starred-only|key|key:value
116 | This will override all other flags except --exclude
117 | Check here all RcloneFlags.
118 | 
119 | Bulk Download: -b
120 | Bulk can be used by text message and by replying to a text file containing links separated by new lines.
121 | You can use it only by replying to a message (text/file).
122 | All options should be along with the link!
123 | Example:
124 | link1 -n new name -up remote1:path1 -rcf |key:value|key:value
125 | link2 -z -n new name -up remote2:path2
126 | link3 -e -n new name -up remote2:path2
127 | Note: You can't add the -m arg for some links only; do it for all links or use multi without bulk!
128 | Reply to this example by this cmd /{cmd} -b(bulk)
129 | You can set the start and end of the links from the bulk like seed, with -b start:end, or only end by -b :end, or only start by -b start. The default start is from zero (first link) to inf.
130 | 
131 | Join Split Files: -j
132 | This option will only work before extract and zip, so mostly it will be used with the -m argument (samedir)
133 | By Reply:
134 | /{cmd} -i 3 -j -m folder name
135 | /{cmd} -b -j -m folder name
136 | If you have a link containing split files:
137 | /{cmd} link -j
138 | 
139 | Rclone Download:
140 | Treat rclone paths exactly like links
141 | /{cmd} main:dump/ubuntu.iso or rcl (To select config, remote and path)
142 | Users can add their own rclone from user settings
143 | If you want to add a path manually from your config add mrcc: before the path without space
144 | /{cmd} mrcc:main:dump/ubuntu.iso
145 | 
146 | TG Links:
147 | Treat links like any direct link
148 | Some links need user access, so you must add USER_SESSION_STRING for them.
149 | Three types of links:
150 | Public: https://t.me/channel_name/message_id
151 | Private: tg://openmessage?user_id=xxxxxx&message_id=xxxxx
152 | Super: https://t.me/c/channel_id/message_id
153 | 
154 | NOTES:
155 | 1. Commands that start with qb are ONLY for torrents.
156 | """
157 |
158 | RSS_HELP_MESSAGE = """
159 | Use this format to add feed url:
160 | Title1 link (required)
161 | Title2 link -c cmd -inf xx -exf xx
162 | Title3 link -c cmd -d ratio:time -z password
163 | 
164 | -c command + any arg
165 | -inf For included words filter.
166 | -exf For excluded words filter.
167 | 
168 | Example: Title https://www.rss-url.com inf: 1080 or 720 or 144p|mkv or mp4|hevc exf: flv or web|xxx opt: up: mrcc:remote:path/subdir rcf: --buffer-size:8M|key|key:value
169 | This filter will parse links whose titles contain `(1080 or 720 or 144p) and (mkv or mp4) and hevc` and don't contain `(flv or web) and xxx` words. You can add whatever you want.
170 | 
171 | Another example: inf: 1080 or 720p|.web. or .webrip.|hevc or x264. This will parse titles that contain (1080 or 720p) and (.web. or .webrip.) and (hevc or x264). I have added a space before and after 1080 to avoid wrong matching. If the number `10805695` is in the title, it will match 1080 if 1080 is added without a space after it.
172 | 
173 | Filter Notes:
174 | 1. | means and.
175 | 2. Add `or` between similar keys; you can add it between qualities or between extensions, so don't add a filter like f: 1080|mp4 or 720|web because this will parse 1080 and (mp4 or 720) and web ... not (1080 and mp4) or (720 and web).
176 | 3. You can add `or` and `|` as much as you want.
177 | 4. Take a look at the title; if it has a static special character after or before the qualities, extensions or whatever, use it in the filter to avoid wrong matches.
178 | Timeout: 60 sec.
179 | """
180 |
181 | CLONE_HELP_MESSAGE = """
182 | Send Gdrive|Gdot|Filepress|Filebee|Appdrive|Gdflix link or rclone path along with the command, or by replying to the link/rc_path with the command.
183 | 
184 | Multi links only by replying to first gdlink or rclone_path:
185 | /{cmd} -i 10(number of links/paths)
186 | 
187 | Gdrive:
188 | /{cmd} gdrivelink
189 | 
190 | Upload Custom Drive: link -id -index
191 | -id drive_folder_link or drive_id -index https://anything.in/0:
192 | drive_id must be a folder id and index must be a url, else it will not be accepted
193 | 
194 | Rclone:
195 | /{cmd} (rcl or rclone_path) -up (rcl or rclone_path) -rcf flagkey:flagvalue|flagkey|flagkey:flagvalue
196 | 
197 | Note: If -up is not specified then the rclone destination will be the RCLONE_PATH from config.env
198 | """
199 |
200 | CATEGORY_HELP_MESSAGE = """
201 | Reply to an active /{cmd} which was used to start the download, or add the gid along with {cmd}
202 | This command is mainly for changing the category of an already added download.
203 | But you can always use /{mir} to select a category before the download starts.
204 | 
205 | Upload Custom Drive
206 | /{cmd} -id drive_folder_link or drive_id -index https://anything.in/0: gid or by replying to active download
207 | drive_id must be a folder id and index must be a url, else it will not be accepted
208 | """
209 |
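210 | # Note: these templates are presumably rendered with str.format (e.g.
211 | # MIRROR_HELP_MESSAGE.format(cmd='mirror')), which is why literal braces in the
212 | # -opt example are doubled as {{ }}.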
--------------------------------------------------------------------------------
/bot/helper/ext_utils/leech_utils.py:
--------------------------------------------------------------------------------
1 | from asyncio import create_subprocess_exec
2 | from asyncio.subprocess import PIPE
3 | from os import path as ospath
4 | from re import search as re_search
5 | from time import time
6 | from json import loads
7 | from aiofiles.os import mkdir
8 | from aiofiles.os import path as aiopath
9 | from aiofiles.os import remove as aioremove
10 |
11 | from bot import LOGGER, MAX_SPLIT_SIZE, config_dict, user_data
12 | from bot.helper.ext_utils.bot_utils import cmd_exec, sync_to_async
13 | from bot.helper.ext_utils.fs_utils import ARCH_EXT, get_mime_type
14 |
15 |
16 | async def is_multi_streams(path):
17 | try:
18 | result = await cmd_exec(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
19 | "json", "-show_streams", path])
20 | if res := result[1]:
21 | LOGGER.warning(f'Get Video Streams: {res}')
22 | except Exception as e:
23 | LOGGER.error(f'Get Video Streams: {e}. Mostly File not found!')
24 | return False
25 |     fields = loads(result[0]).get('streams')
26 | if fields is None:
27 | LOGGER.error(f"get_video_streams: {result}")
28 | return False
29 | videos = 0
30 | audios = 0
31 | for stream in fields:
32 | if stream.get('codec_type') == 'video':
33 | videos += 1
34 | elif stream.get('codec_type') == 'audio':
35 | audios += 1
36 | return videos > 1 or audios > 1
37 |
38 |
39 | async def get_media_info(path):
40 | try:
41 | result = await cmd_exec(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
42 | "json", "-show_format", path])
43 | if res := result[1]:
44 | LOGGER.warning(f'Get Media Info: {res}')
45 | except Exception as e:
46 | LOGGER.error(f'Get Media Info: {e}. Mostly File not found!')
47 | return 0, None, None
48 |     fields = loads(result[0]).get('format')
49 | if fields is None:
50 | LOGGER.error(f"get_media_info: {result}")
51 | return 0, None, None
52 | duration = round(float(fields.get('duration', 0)))
53 | tags = fields.get('tags', {})
54 | artist = tags.get('artist') or tags.get('ARTIST')
55 | title = tags.get('title') or tags.get('TITLE')
56 | return duration, artist, title
57 |
58 |
59 | async def get_document_type(path):
60 | is_video, is_audio, is_image = False, False, False
61 | if path.endswith(tuple(ARCH_EXT)) or re_search(r'.+(\.|_)(rar|7z|zip|bin)(\.0*\d+)?$', path):
62 | return is_video, is_audio, is_image
63 | mime_type = await sync_to_async(get_mime_type, path)
64 | if mime_type.startswith('audio'):
65 | return False, True, False
66 | if mime_type.startswith('image'):
67 | return False, False, True
68 | if not mime_type.startswith('video') and not mime_type.endswith('octet-stream'):
69 | return is_video, is_audio, is_image
70 | try:
71 | result = await cmd_exec(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
72 | "json", "-show_streams", path])
73 | if res := result[1]:
74 | LOGGER.warning(f'Get Document Type: {res}')
75 | except Exception as e:
76 | LOGGER.error(f'Get Document Type: {e}. Mostly File not found!')
77 | return is_video, is_audio, is_image
78 |     fields = loads(result[0]).get('streams')
79 | if fields is None:
80 | LOGGER.error(f"get_document_type: {result}")
81 | return is_video, is_audio, is_image
82 | for stream in fields:
83 | if stream.get('codec_type') == 'video':
84 | is_video = True
85 | elif stream.get('codec_type') == 'audio':
86 | is_audio = True
87 | return is_video, is_audio, is_image
88 |
89 |
90 | async def take_ss(video_file, duration):
91 | des_dir = 'Thumbnails'
92 | if not await aiopath.exists(des_dir):
93 | await mkdir(des_dir)
94 | des_dir = ospath.join(des_dir, f"{time()}.jpg")
95 | if duration is None:
96 | duration = (await get_media_info(video_file))[0]
97 | if duration == 0:
98 | duration = 3
99 | duration = duration // 2
100 | cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(duration),
101 | "-i", video_file, "-vf", "thumbnail", "-frames:v", "1", des_dir]
102 | status = await create_subprocess_exec(*cmd, stderr=PIPE)
103 | if await status.wait() != 0 or not await aiopath.exists(des_dir):
104 | err = (await status.stderr.read()).decode().strip()
105 | LOGGER.error(
106 | f'Error while extracting thumbnail. Name: {video_file} stderr: {err}')
107 | return None
108 | return des_dir
109 |
110 |
111 | async def split_file(path, size, file_, dirpath, split_size, listener, start_time=0, i=1, inLoop=False, multi_streams=True):
112 | if listener.suproc == 'cancelled' or listener.suproc is not None and listener.suproc.returncode == -9:
113 | return False
114 | if listener.seed and not listener.newDir:
115 | dirpath = f"{dirpath}/splited_files_mltb"
116 | if not await aiopath.exists(dirpath):
117 | await mkdir(dirpath)
118 | user_id = listener.message.from_user.id
119 | user_dict = user_data.get(user_id, {})
120 | leech_split_size = user_dict.get(
121 | 'split_size') or config_dict['LEECH_SPLIT_SIZE']
122 | leech_split_size = min(leech_split_size, MAX_SPLIT_SIZE)
123 | parts = -(-size // leech_split_size)
124 | if (user_dict.get('equal_splits') or config_dict['EQUAL_SPLITS']) and not inLoop:
125 | split_size = ((size + parts - 1) // parts) + 1000
126 | if (await get_document_type(path))[0]:
127 | if multi_streams:
128 | multi_streams = await is_multi_streams(path)
129 | duration = (await get_media_info(path))[0]
130 | base_name, extension = ospath.splitext(file_)
131 | split_size -= 5000000
132 | while i <= parts or start_time < duration - 4:
133 | parted_name = f"{base_name}.part{i:03}{extension}"
134 | out_path = ospath.join(dirpath, parted_name)
135 | cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(start_time), "-i", path,
136 | "-fs", str(split_size), "-map", "0", "-map_chapters", "-1", "-async", "1", "-strict",
137 | "-2", "-c", "copy", out_path]
138 | if not multi_streams:
139 | del cmd[10]
140 | del cmd[10]
141 | if listener.suproc == 'cancelled' or listener.suproc is not None and listener.suproc.returncode == -9:
142 | return False
143 | listener.suproc = await create_subprocess_exec(*cmd, stderr=PIPE)
144 | code = await listener.suproc.wait()
145 | if code == -9:
146 | return False
147 | elif code != 0:
148 | err = (await listener.suproc.stderr.read()).decode().strip()
149 | try:
150 | await aioremove(out_path)
151 | except:
152 | pass
153 | if multi_streams:
154 | LOGGER.warning(
155 | f"{err}. Retrying without map, -map 0 not working in all situations. Path: {path}")
156 | return await split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, False)
157 | else:
158 | LOGGER.warning(
159 |                 f"{err}. Unable to split this video; if its size is less than {MAX_SPLIT_SIZE} it will be uploaded as is. Path: {path}")
160 | return "errored"
161 | out_size = await aiopath.getsize(out_path)
162 | if out_size > MAX_SPLIT_SIZE:
163 | dif = out_size - MAX_SPLIT_SIZE
164 | split_size -= dif + 5000000
165 | await aioremove(out_path)
166 |                 return await split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True)
167 | lpd = (await get_media_info(out_path))[0]
168 | if lpd == 0:
169 | LOGGER.error(
170 | f'Something went wrong while splitting, mostly file is corrupted. Path: {path}')
171 | break
172 | elif duration == lpd:
173 | LOGGER.warning(
174 |                     f"This file has been split with the default video and audio streams only, so you will see one part smaller than the original because it doesn't include all streams and audio tracks. This happens mostly with MKV videos. Path: {path}")
175 | break
176 | elif lpd <= 3:
177 | await aioremove(out_path)
178 | break
179 | start_time += lpd - 3
180 | i += 1
181 | else:
182 | out_path = ospath.join(dirpath, f"{file_}.")
183 | listener.suproc = await create_subprocess_exec("split", "--numeric-suffixes=1", "--suffix-length=3",
184 | f"--bytes={split_size}", path, out_path, stderr=PIPE)
185 | code = await listener.suproc.wait()
186 | if code == -9:
187 | return False
188 | elif code != 0:
189 | err = (await listener.suproc.stderr.read()).decode().strip()
190 | LOGGER.error(err)
191 | return True
192 |
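193 | # Illustrative usage (paths are placeholders):
194 | #   duration, artist, title = await get_media_info('/downloads/video.mkv')
195 | #   thumb = await take_ss('/downloads/video.mkv', duration)  # jpg path or None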
--------------------------------------------------------------------------------
/bot/helper/ext_utils/shortener.py:
--------------------------------------------------------------------------------
1 | from base64 import b64encode
2 | from random import choice, random, randrange
3 | from time import sleep
4 | from urllib.parse import quote
5 |
6 | from cloudscraper import create_scraper
7 | from urllib3 import disable_warnings
8 |
9 | from bot import LOGGER, shorteneres_list
10 |
11 |
12 | def short_url(longurl, attempt=0):
13 | if not shorteneres_list:
14 | return longurl
15 | if attempt >= 4:
16 | return longurl
17 | i = 0 if len(shorteneres_list) == 1 else randrange(len(shorteneres_list))
18 | _shorten_dict = shorteneres_list[i]
19 | _shortener = _shorten_dict['domain']
20 | _shortener_api = _shorten_dict['api_key']
21 | cget = create_scraper().request
22 | disable_warnings()
23 | try:
24 | if "shorte.st" in _shortener:
25 | headers = {'public-api-token': _shortener_api}
26 | data = {'urlToShorten': quote(longurl)}
27 | return cget('PUT', 'https://api.shorte.st/v1/data/url', headers=headers, data=data).json()['shortenedUrl']
28 | elif "linkvertise" in _shortener:
29 | url = quote(b64encode(longurl.encode("utf-8")))
30 | linkvertise = [
31 | f"https://link-to.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
32 | f"https://up-to-down.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
33 | f"https://direct-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
34 | f"https://file-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}"]
35 | return choice(linkvertise)
36 | elif "bitly.com" in _shortener:
37 | headers = {"Authorization": f"Bearer {_shortener_api}"}
38 | return cget('POST', "https://api-ssl.bit.ly/v4/shorten", json={"long_url": longurl}, headers=headers).json()["link"]
39 | elif "ouo.io" in _shortener:
40 | return cget('GET', f'http://ouo.io/api/{_shortener_api}?s={longurl}', verify=False).text
41 | elif "cutt.ly" in _shortener:
42 | return cget('GET', f'http://cutt.ly/api/api.php?key={_shortener_api}&short={longurl}', verify=False).json()['url']['shortLink']
43 | else:
44 | res = cget('GET', f'https://{_shortener}/api?api={_shortener_api}&url={quote(longurl)}').json()
45 | shorted = res['shortenedUrl']
46 | if not shorted:
47 | shrtco_res = cget('GET', f'https://api.shrtco.de/v2/shorten?url={quote(longurl)}').json()
48 | shrtco_link = shrtco_res['result']['full_short_link']
49 | res = cget('GET', f'https://{_shortener}/api?api={_shortener_api}&url={shrtco_link}').json()
50 | shorted = res['shortenedUrl']
51 | if not shorted:
52 | shorted = longurl
53 | return shorted
54 | except Exception as e:
55 | LOGGER.error(e)
56 | sleep(1)
57 |         attempt += 1
58 | return short_url(longurl, attempt)
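59 | 
60 | # Illustrative usage (shorteneres_list entries are dicts with 'domain' and
61 | # 'api_key', presumably loaded from shorteners.txt; with an empty list the
62 | # url is returned unchanged):
63 | #   short = short_url('https://example.com/some/file')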
--------------------------------------------------------------------------------
/bot/helper/ext_utils/task_manager.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from asyncio import Event
3 |
4 | from bot import (LOGGER, config_dict, non_queued_dl, non_queued_up,
5 | queue_dict_lock, queued_dl, queued_up)
6 | from bot.helper.ext_utils.bot_utils import (get_readable_file_size, get_telegraph_list,
7 | sync_to_async)
8 | from bot.helper.ext_utils.fs_utils import (check_storage_threshold,
9 | get_base_name)
10 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
11 |
12 |
13 | async def stop_duplicate_check(name, listener):
14 | if (
15 | not config_dict['STOP_DUPLICATE']
16 | or listener.isLeech
17 | or listener.upPath != 'gd'
18 | or listener.select
19 | ):
20 | return False, None
21 | LOGGER.info(f'Checking File/Folder if already in Drive: {name}')
22 | if listener.compress:
23 | name = f"{name}.zip"
24 | elif listener.extract:
25 | try:
26 | name = get_base_name(name)
27 | except:
28 | name = None
29 | if name is not None:
30 | telegraph_content, contents_no = await sync_to_async(GoogleDriveHelper().drive_list, name, stopDup=True)
31 | if telegraph_content:
32 | msg = f"File/Folder is already available in Drive.\nHere are {contents_no} list results:"
33 | button = await get_telegraph_list(telegraph_content)
34 | return msg, button
35 | return False, None
36 |
37 |
38 | async def is_queued(uid):
39 | all_limit = config_dict['QUEUE_ALL']
40 | dl_limit = config_dict['QUEUE_DOWNLOAD']
41 | event = None
42 | added_to_queue = False
43 | if all_limit or dl_limit:
44 | async with queue_dict_lock:
45 | dl = len(non_queued_dl)
46 | up = len(non_queued_up)
47 | if (all_limit and dl + up >= all_limit and (not dl_limit or dl >= dl_limit)) or (dl_limit and dl >= dl_limit):
48 | added_to_queue = True
49 | event = Event()
50 | queued_dl[uid] = event
51 | return added_to_queue, event
52 |
53 |
54 | def start_dl_from_queued(uid):
55 | queued_dl[uid].set()
56 | del queued_dl[uid]
57 |
58 |
59 | def start_up_from_queued(uid):
60 | queued_up[uid].set()
61 | del queued_up[uid]
62 |
63 |
64 | async def start_from_queued():
65 | if all_limit := config_dict['QUEUE_ALL']:
66 | dl_limit = config_dict['QUEUE_DOWNLOAD']
67 | up_limit = config_dict['QUEUE_UPLOAD']
68 | async with queue_dict_lock:
69 | dl = len(non_queued_dl)
70 | up = len(non_queued_up)
71 | all_ = dl + up
72 | if all_ < all_limit:
73 | f_tasks = all_limit - all_
74 | if queued_up and (not up_limit or up < up_limit):
75 | for index, uid in enumerate(list(queued_up.keys()), start=1):
76 | f_tasks = all_limit - all_
77 | start_up_from_queued(uid)
78 | f_tasks -= 1
79 | if f_tasks == 0 or (up_limit and index >= up_limit - up):
80 | break
81 | if queued_dl and (not dl_limit or dl < dl_limit) and f_tasks != 0:
82 | for index, uid in enumerate(list(queued_dl.keys()), start=1):
83 | start_dl_from_queued(uid)
84 | if (dl_limit and index >= dl_limit - dl) or index == f_tasks:
85 | break
86 | return
87 |
88 | if up_limit := config_dict['QUEUE_UPLOAD']:
89 | async with queue_dict_lock:
90 | up = len(non_queued_up)
91 | if queued_up and up < up_limit:
92 | f_tasks = up_limit - up
93 | for index, uid in enumerate(list(queued_up.keys()), start=1):
94 | start_up_from_queued(uid)
95 | if index == f_tasks:
96 | break
97 | else:
98 | async with queue_dict_lock:
99 | if queued_up:
100 | for uid in list(queued_up.keys()):
101 | start_up_from_queued(uid)
102 |
103 | if dl_limit := config_dict['QUEUE_DOWNLOAD']:
104 | async with queue_dict_lock:
105 | dl = len(non_queued_dl)
106 | if queued_dl and dl < dl_limit:
107 | f_tasks = dl_limit - dl
108 | for index, uid in enumerate(list(queued_dl.keys()), start=1):
109 | start_dl_from_queued(uid)
110 | if index == f_tasks:
111 | break
112 | else:
113 | async with queue_dict_lock:
114 | if queued_dl:
115 | for uid in list(queued_dl.keys()):
116 | start_dl_from_queued(uid)
117 |
118 |
119 | async def limit_checker(size, listener, isTorrent=False, isMega=False, isDriveLink=False, isYtdlp=False):
120 | limit_exceeded = ''
121 | if listener.isClone:
122 | if CLONE_LIMIT := config_dict['CLONE_LIMIT']:
123 | limit = CLONE_LIMIT * 1024**3
124 | if size > limit:
125 |                 limit_exceeded = f'Clone limit is {get_readable_file_size(limit)}'
126 | elif isMega:
127 | if MEGA_LIMIT := config_dict['MEGA_LIMIT']:
128 | limit = MEGA_LIMIT * 1024**3
129 | if size > limit:
130 | limit_exceeded = f'Mega limit is {get_readable_file_size(limit)}'
131 | elif isDriveLink:
132 | if GDRIVE_LIMIT := config_dict['GDRIVE_LIMIT']:
133 | limit = GDRIVE_LIMIT * 1024**3
134 | if size > limit:
135 | limit_exceeded = f'Google drive limit is {get_readable_file_size(limit)}'
136 | elif isYtdlp:
137 | if YTDLP_LIMIT := config_dict['YTDLP_LIMIT']:
138 | limit = YTDLP_LIMIT * 1024**3
139 | if size > limit:
140 | limit_exceeded = f'Ytdlp limit is {get_readable_file_size(limit)}'
141 | elif isTorrent:
142 | if TORRENT_LIMIT := config_dict['TORRENT_LIMIT']:
143 | limit = TORRENT_LIMIT * 1024**3
144 | if size > limit:
145 | limit_exceeded = f'Torrent limit is {get_readable_file_size(limit)}'
146 | elif DIRECT_LIMIT := config_dict['DIRECT_LIMIT']:
147 | limit = DIRECT_LIMIT * 1024**3
148 | if size > limit:
149 | limit_exceeded = f'Direct limit is {get_readable_file_size(limit)}'
150 | if not limit_exceeded and (LEECH_LIMIT := config_dict['LEECH_LIMIT']) and listener.isLeech:
151 | limit = LEECH_LIMIT * 1024**3
152 | if size > limit:
153 | limit_exceeded = f'Leech limit is {get_readable_file_size(limit)}'
154 | if not limit_exceeded and (STORAGE_THRESHOLD := config_dict['STORAGE_THRESHOLD']) and not listener.isClone:
155 | arch = any([listener.compress, listener.extract])
156 | limit = STORAGE_THRESHOLD * 1024**3
157 | acpt = await sync_to_async(check_storage_threshold, size, limit, arch)
158 | if not acpt:
159 | limit_exceeded = f'You must leave {get_readable_file_size(limit)} free storage.'
160 | if limit_exceeded:
161 | return f"{limit_exceeded}.\nYour File/Folder size is {get_readable_file_size(size)}"
162 |
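163 | # Worked example for is_queued: with QUEUE_ALL=5 and QUEUE_DOWNLOAD unset,
164 | # 2 running downloads + 3 running uploads queue the next download
165 | # (dl + up >= all_limit); the returned Event is set later by
166 | # start_dl_from_queued() once a slot frees up.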
--------------------------------------------------------------------------------
/bot/helper/ext_utils/telegraph_helper.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from asyncio import sleep
3 | from random import SystemRandom
4 | from string import ascii_letters
5 |
6 | from telegraph.aio import Telegraph
7 | from telegraph.exceptions import RetryAfterError
8 |
9 | from bot import LOGGER, bot_loop
10 |
11 |
12 | class TelegraphHelper:
13 | def __init__(self, author_name=None, author_url=None):
14 | self.__error = False
15 | self.telegraph = Telegraph(domain='graph.org')
16 | self.short_name = ''.join(SystemRandom().choices(ascii_letters, k=8))
17 | self.access_token = None
18 | self.author_name = author_name
19 | self.author_url = author_url
20 |
21 | async def create_account(self):
22 | LOGGER.info("Creating Telegraph Account")
23 | try:
24 | await self.telegraph.create_account(
25 | short_name=self.short_name,
26 | author_name=self.author_name,
27 | author_url=self.author_url
28 | )
29 | self.access_token = self.telegraph.get_access_token()
30 | self.__error = False
31 | except Exception as e:
32 | self.__error = True
33 | LOGGER.error(e)
34 |
35 | async def create_page(self, title, content):
36 | if self.__error:
37 | LOGGER.info('Telegraph is not working')
38 | return
39 | try:
40 | return await self.telegraph.create_page(
41 | title=title,
42 | author_name=self.author_name,
43 | author_url=self.author_url,
44 | html_content=content
45 | )
46 | except RetryAfterError as st:
47 | LOGGER.warning(
48 | f'Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds.')
49 | await sleep(st.retry_after)
50 | return await self.create_page(title, content)
51 |
52 | async def edit_page(self, path, title, content):
53 | if self.__error:
54 | LOGGER.info('Telegraph is not working')
55 | return
56 | try:
57 | return await self.telegraph.edit_page(
58 | path=path,
59 | title=title,
60 | author_name=self.author_name,
61 | author_url=self.author_url,
62 | html_content=content
63 | )
64 | except RetryAfterError as st:
65 | LOGGER.warning(
66 | f'Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds.')
67 | await sleep(st.retry_after)
68 | return await self.edit_page(path, title, content)
69 |
70 | async def edit_telegraph(self, path, telegraph_content):
71 | if self.__error:
72 | LOGGER.info('Telegraph is not working')
73 | return
74 | nxt_page = 1
75 | prev_page = 0
76 | num_of_path = len(path)
77 | for content in telegraph_content:
78 | if nxt_page == 1:
79 |                 content += f'<b><a href="https://telegra.ph/{path[nxt_page]}">Next</a></b>'
80 | nxt_page += 1
81 | else:
82 | if prev_page <= num_of_path:
83 |                     content += f'<b><a href="https://telegra.ph/{path[prev_page]}">Prev</a></b>'
84 | prev_page += 1
85 | if nxt_page < num_of_path:
86 |                     content += f' | <b><a href="https://telegra.ph/{path[nxt_page]}">Next</a></b>'
87 | nxt_page += 1
88 | await self.edit_page(
89 | path=path[prev_page],
90 | title='Jmdkh-mltb Torrent Search',
91 | content=content
92 | )
93 | return
94 |
95 | async def revoke_access_token(self):
96 | if self.__error:
97 | LOGGER.info('Telegraph is not working')
98 | return
99 | LOGGER.info('Revoking telegraph access token...')
100 | try:
101 | return await self.telegraph.revoke_access_token()
102 | except Exception as e:
103 | LOGGER.error(
104 |                 f'Failed to revoke telegraph access token due to: {e}')
105 |
106 |
107 | telegraph = TelegraphHelper(
108 | 'Jmdkh-mltb', 'https://github.com/junedkh/jmdkh-mltb')
109 | bot_loop.run_until_complete(telegraph.create_account())
110 |
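111 | # Illustrative usage (create_page returns the page dict from the telegraph
112 | # library, whose 'path' builds the public url):
113 | #   page = await telegraph.create_page('Title', '<b>html content</b>')
114 | #   if page:
115 | #       LOGGER.info(f"https://graph.org/{page['path']}")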
--------------------------------------------------------------------------------
/bot/helper/jmdkh_utils.py:
--------------------------------------------------------------------------------
1 | from hashlib import sha1
2 | from os import path, remove
3 | from re import search
4 |
5 | from bencoding import bdecode, bencode
6 |
7 | from bot import DATABASE_URL, LOGGER, config_dict
8 | from bot.helper.ext_utils.bot_utils import (check_user_tasks, checking_access,
9 | is_gdrive_link, is_magnet)
10 | from bot.helper.ext_utils.db_handler import DbManger
11 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
12 | from bot.helper.telegram_helper.message_utils import (delete_links, forcesub,
13 | message_filter,
14 | sendMessage)
15 |
16 |
17 | async def extract_link(link, shouldDel=False):
18 | try:
19 | if link and is_magnet(link):
20 | raw_link = search(
21 | r'(?<=xt=urn:(btih|btmh):)[a-zA-Z0-9]+', link).group(0).lower()
22 | elif is_gdrive_link(link):
23 | raw_link = GoogleDriveHelper.getIdFromUrl(link)
24 | elif path.exists(link):
25 | with open(link, "rb") as f:
26 | decodedDict = bdecode(f.read())
27 | raw_link = str(sha1(bencode(decodedDict[b'info'])).hexdigest())
28 | if shouldDel:
29 | remove(link)
30 | else:
31 | raw_link = link
32 | except Exception as e:
33 | LOGGER.error(e)
34 | raw_link = link
35 | return raw_link
36 |
37 |
38 | async def stop_duplicate_tasks(message, link, file_=None):
39 | if DATABASE_URL and config_dict['STOP_DUPLICATE_TASKS']:
40 | raw_url = file_.file_unique_id if file_ else await extract_link(link)
41 | exist = await DbManger().check_download(raw_url)
42 | if exist:
43 |             _msg = f'Download is already added by {exist["tag"]}\n\nCheck the download status in @{exist["botname"]}\n\nLink: {exist["_id"]}'
44 | await delete_links(message)
45 | await sendMessage(message, _msg)
46 | return 'duplicate_tasks'
47 | return raw_url
48 |
49 |
50 | async def none_admin_utils(message, isLeech=False):
51 | msg = []
52 | if filtered := await message_filter(message):
53 | msg.append(filtered)
54 | button = None
55 | if message.chat.type != message.chat.type.PRIVATE:
56 | token_msg, button = checking_access(message.from_user.id, button)
57 | if token_msg is not None:
58 | msg.append(token_msg)
59 | if ids := config_dict['FSUB_IDS']:
60 | _msg, button = await forcesub(message, ids, button)
61 | if _msg:
62 | msg.append(_msg)
63 | if (maxtask := config_dict['USER_MAX_TASKS']) and await check_user_tasks(message.from_user.id, maxtask):
64 |         msg.append(f"Your task limit of {maxtask} tasks has been exceeded")
65 | if isLeech and config_dict['DISABLE_LEECH']:
66 | msg.append('Leech is disabled for users')
67 | return msg, button
68 |
--------------------------------------------------------------------------------
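An illustrative call of `extract_link` above (sample magnet URI and hash), showing how btih magnets normalize to a lowercase info-hash while plain URLs pass through unchanged:

    magnet = 'magnet:?xt=urn:btih:C12FE1C06BBA254A9DC9F519B335AA7C1367A88A'
    raw = await extract_link(magnet)
    # raw == 'c12fe1c06bba254a9dc9f519b335aa7c1367a88a'
    raw = await extract_link('https://example.com/file.iso')
    # non-magnet, non-Drive, non-file links are returned as-is
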
/bot/helper/listeners/aria2_listener.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from asyncio import sleep
3 | from time import time
4 |
5 | from aiofiles.os import path as aiopath
6 | from aiofiles.os import remove as aioremove
7 |
8 | from bot import LOGGER, aria2, config_dict, download_dict, download_dict_lock
9 | from bot.helper.ext_utils.bot_utils import (bt_selection_buttons,
10 | get_telegraph_list,
11 | getDownloadByGid, new_thread,
12 | sync_to_async)
13 | from bot.helper.ext_utils.fs_utils import clean_unwanted, get_base_name
14 | from bot.helper.ext_utils.task_manager import limit_checker
15 | from bot.helper.mirror_utils.status_utils.aria2_status import Aria2Status
16 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
17 | from bot.helper.telegram_helper.message_utils import (deleteMessage,
18 | sendMessage,
19 | update_all_messages)
20 |
21 |
22 | @new_thread
23 | async def __onDownloadStarted(api, gid):
24 | download = await sync_to_async(api.get_download, gid)
25 | if download.is_metadata:
26 | LOGGER.info(f'onDownloadStarted: {gid} METADATA')
27 | await sleep(1)
28 | if dl := await getDownloadByGid(gid):
29 | listener = dl.listener()
30 | if listener.select:
31 |                 metamsg = "Downloading metadata. Please wait, then you can select files. Use a torrent file to avoid this wait."
32 | meta = await sendMessage(listener.message, metamsg)
33 | while True:
34 | await sleep(0.5)
35 | if download.is_removed or download.followed_by_ids:
36 | await deleteMessage(meta)
37 | break
38 | download = download.live
39 | return
40 | else:
41 | LOGGER.info(f'onDownloadStarted: {download.name} - Gid: {gid}')
42 | dl = None
43 | if config_dict['STOP_DUPLICATE']:
44 | await sleep(1)
45 | if dl is None:
46 | dl = await getDownloadByGid(gid)
47 | if dl:
48 | if not hasattr(dl, 'listener'):
49 | LOGGER.warning(
50 |                     f"onDownloadStart: {gid}. STOP_DUPLICATE check skipped since download completed earlier!")
51 | return
52 | listener = dl.listener()
53 | if not listener.isLeech and not listener.select and listener.upPath == 'gd':
54 | download = await sync_to_async(api.get_download, gid)
55 | if not download.is_torrent:
56 | await sleep(3)
57 | download = download.live
58 |                 LOGGER.info('Checking if file/folder is already in Drive...')
59 | name = download.name
60 | if listener.compress:
61 | name = f"{name}.zip"
62 | elif listener.extract:
63 | try:
64 | name = get_base_name(name)
65 | except:
66 | name = None
67 | if name is not None:
68 | telegraph_content, contents_no = await sync_to_async(GoogleDriveHelper().drive_list, name, True)
69 | if telegraph_content:
70 | msg = f"File/Folder is already available in Drive.\nHere are {contents_no} list results:"
71 | button = await get_telegraph_list(telegraph_content)
72 | await listener.onDownloadError(msg, button)
73 | await sync_to_async(api.remove, [download], force=True, files=True)
74 | return
75 | if any([config_dict['DIRECT_LIMIT'],
76 | config_dict['TORRENT_LIMIT'],
77 | config_dict['LEECH_LIMIT'],
78 | config_dict['STORAGE_THRESHOLD']]):
79 | await sleep(1)
80 | if dl is None:
81 | dl = await getDownloadByGid(gid)
82 | if dl is not None:
83 | if not hasattr(dl, 'listener'):
84 | LOGGER.warning(
85 |                     f"onDownloadStart: {gid}. Download limit check skipped since download completed earlier!")
86 | return
87 | listener = dl.listener()
88 | download = await sync_to_async(api.get_download, gid)
89 | download = download.live
90 | if download.total_length == 0:
91 | start_time = time()
92 | while time() - start_time <= 15:
93 | await sleep(0.5)
94 | download = await sync_to_async(api.get_download, gid)
95 | download = download.live
96 | if download.followed_by_ids:
97 | download = await sync_to_async(api.get_download, download.followed_by_ids[0])
98 | if download.total_length > 0:
99 | break
100 | size = download.total_length
101 | if limit_exceeded := await limit_checker(size, listener, download.is_torrent):
102 | await listener.onDownloadError(limit_exceeded)
103 | await sync_to_async(api.remove, [download], force=True, files=True)
104 |
105 |
106 | @new_thread
107 | async def __onDownloadComplete(api, gid):
108 | try:
109 | download = await sync_to_async(api.get_download, gid)
110 | except:
111 | return
112 | if download.followed_by_ids:
113 | new_gid = download.followed_by_ids[0]
114 | LOGGER.info(f'Gid changed from {gid} to {new_gid}')
115 | if dl := await getDownloadByGid(new_gid):
116 | listener = dl.listener()
117 | if config_dict['BASE_URL'] and listener.select:
118 | if not dl.queued:
119 | await sync_to_async(api.client.force_pause, new_gid)
120 | SBUTTONS = bt_selection_buttons(new_gid)
121 |             msg = "Your download is paused. Choose files, then press the Done Selecting button to start downloading."
122 | await sendMessage(listener.message, msg, SBUTTONS)
123 | elif download.is_torrent:
124 | if dl := await getDownloadByGid(gid):
125 | if hasattr(dl, 'listener') and dl.seeding:
126 | LOGGER.info(
127 | f"Cancelling Seed: {download.name} onDownloadComplete")
128 | listener = dl.listener()
129 | await listener.onUploadError(f"Seeding stopped with Ratio: {dl.ratio()} and Time: {dl.seeding_time()}")
130 | await sync_to_async(api.remove, [download], force=True, files=True)
131 | else:
132 | LOGGER.info(f"onDownloadComplete: {download.name} - Gid: {gid}")
133 | if dl := await getDownloadByGid(gid):
134 | listener = dl.listener()
135 | await listener.onDownloadComplete()
136 | await sync_to_async(api.remove, [download], force=True, files=True)
137 |
138 |
139 | @new_thread
140 | async def __onBtDownloadComplete(api, gid):
141 | seed_start_time = time()
142 | await sleep(1)
143 | download = await sync_to_async(api.get_download, gid)
144 | LOGGER.info(f"onBtDownloadComplete: {download.name} - Gid: {gid}")
145 | if dl := await getDownloadByGid(gid):
146 | listener = dl.listener()
147 | if listener.select:
148 | res = download.files
149 | for file_o in res:
150 | f_path = file_o.path
151 | if not file_o.selected and await aiopath.exists(f_path):
152 | try:
153 | await aioremove(f_path)
154 | except:
155 | pass
156 | await clean_unwanted(download.dir)
157 | if listener.seed:
158 | try:
159 | await sync_to_async(api.set_options, {'max-upload-limit': '0'}, [download])
160 | except Exception as e:
161 | LOGGER.error(
162 | f'{e} You are not able to seed because you added global option seed-time=0 without adding specific seed_time for this torrent GID: {gid}')
163 | else:
164 | try:
165 | await sync_to_async(api.client.force_pause, gid)
166 | except Exception as e:
167 | LOGGER.error(f"{e} GID: {gid}")
168 | await listener.onDownloadComplete()
169 | download = download.live
170 | if listener.seed:
171 | if download.is_complete:
172 | if dl := await getDownloadByGid(gid):
173 | LOGGER.info(f"Cancelling Seed: {download.name}")
174 | await listener.onUploadError(f"Seeding stopped with Ratio: {dl.ratio()} and Time: {dl.seeding_time()}")
175 | await sync_to_async(api.remove, [download], force=True, files=True)
176 | else:
177 | async with download_dict_lock:
178 | if listener.uid not in download_dict:
179 | await sync_to_async(api.remove, [download], force=True, files=True)
180 | return
181 | download_dict[listener.uid] = Aria2Status(
182 | gid, listener, True)
183 | download_dict[listener.uid].start_time = seed_start_time
184 | LOGGER.info(f"Seeding started: {download.name} - Gid: {gid}")
185 | await update_all_messages()
186 | else:
187 | await sync_to_async(api.remove, [download], force=True, files=True)
188 |
189 |
190 | @new_thread
191 | async def __onDownloadStopped(api, gid):
192 | await sleep(6)
193 | if dl := await getDownloadByGid(gid):
194 | listener = dl.listener()
195 | await listener.onDownloadError('Dead torrent!')
196 |
197 |
198 | @new_thread
199 | async def __onDownloadError(api, gid):
200 | LOGGER.info(f"onDownloadError: {gid}")
201 | error = "None"
202 | try:
203 | download = await sync_to_async(api.get_download, gid)
204 | error = download.error_message
205 | LOGGER.info(f"Download Error: {error}")
206 | except:
207 | pass
208 | if dl := await getDownloadByGid(gid):
209 | listener = dl.listener()
210 | await listener.onDownloadError(error)
211 |
212 |
213 | def start_aria2_listener():
214 | aria2.listen_to_notifications(threaded=False,
215 | on_download_start=__onDownloadStarted,
216 | on_download_error=__onDownloadError,
217 | on_download_stop=__onDownloadStopped,
218 | on_download_complete=__onDownloadComplete,
219 | on_bt_download_complete=__onBtDownloadComplete,
220 | timeout=60)
221 |
--------------------------------------------------------------------------------
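For context, a minimal sketch of how aria2p event hooks like the ones above are wired up, independent of this bot; the host, port, and empty secret are assumptions for a local aria2c started with --enable-rpc:

    import aria2p

    aria2 = aria2p.API(aria2p.Client(host='http://localhost', port=6800, secret=''))

    def on_complete(api, gid):
        # aria2p passes the API instance and the download GID to every hook
        print(f'finished: {api.get_download(gid).name}')

    aria2.listen_to_notifications(threaded=True, on_download_complete=on_complete)
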
/bot/helper/listeners/qbit_listener.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from asyncio import sleep
3 | from time import time
4 |
5 | from bot import (LOGGER, QbInterval, QbTorrents, bot_loop, config_dict,
6 | download_dict, download_dict_lock, get_client,
7 | qb_listener_lock)
8 | from bot.helper.ext_utils.bot_utils import (get_readable_time,
9 | getDownloadByGid, new_task,
10 | sync_to_async)
11 | from bot.helper.ext_utils.fs_utils import clean_unwanted
12 | from bot.helper.ext_utils.task_manager import (limit_checker,
13 | stop_duplicate_check)
14 | from bot.helper.mirror_utils.status_utils.qbit_status import QbittorrentStatus
15 | from bot.helper.telegram_helper.message_utils import update_all_messages
16 |
17 |
18 | async def __remove_torrent(client, hash_, tag):
19 | await sync_to_async(client.torrents_delete, torrent_hashes=hash_, delete_files=True)
20 | async with qb_listener_lock:
21 | if tag in QbTorrents:
22 | del QbTorrents[tag]
23 | await sync_to_async(client.torrents_delete_tags, tags=tag)
24 |
25 |
26 | @new_task
27 | async def __onDownloadError(err, tor, button=None):
28 | LOGGER.info(f"Cancelling Download: {tor.name}")
29 | ext_hash = tor.hash
30 | download = await getDownloadByGid(ext_hash[:12])
31 | if not hasattr(download, 'client'):
32 | return
33 | listener = download.listener()
34 | client = download.client()
35 | await listener.onDownloadError(err, button)
36 | await sync_to_async(client.torrents_pause, torrent_hashes=ext_hash)
37 | await sleep(0.3)
38 | await __remove_torrent(client, ext_hash, tor.tags)
39 |
40 |
41 | @new_task
42 | async def __onSeedFinish(tor):
43 | ext_hash = tor.hash
44 | LOGGER.info(f"Cancelling Seed: {tor.name}")
45 | download = await getDownloadByGid(ext_hash[:12])
46 | if not hasattr(download, 'client'):
47 | return
48 | listener = download.listener()
49 | client = download.client()
50 | msg = f"Seeding stopped with Ratio: {round(tor.ratio, 3)} and Time: {get_readable_time(tor.seeding_time)}"
51 | await listener.onUploadError(msg)
52 | await __remove_torrent(client, ext_hash, tor.tags)
53 |
54 |
55 | @new_task
56 | async def __stop_duplicate(tor):
57 | download = await getDownloadByGid(tor.hash[:12])
58 | if not hasattr(download, 'listener'):
59 | return
60 | listener = download.listener()
61 | name = tor.content_path.rsplit('/', 1)[-1].rsplit('.!qB', 1)[0]
62 | msg, button = await stop_duplicate_check(name, listener)
63 | if msg:
64 | __onDownloadError(msg, tor, button)
65 |
66 |
67 | @new_task
68 | async def __size_checked(tor):
69 | download = await getDownloadByGid(tor.hash[:12])
70 | if hasattr(download, 'listener'):
71 | listener = download.listener()
72 | size = tor.size
73 | if limit_exceeded := await limit_checker(size, listener, True):
74 | await __onDownloadError(limit_exceeded, tor)
75 |
76 |
77 | @new_task
78 | async def __onDownloadComplete(tor):
79 | ext_hash = tor.hash
80 | tag = tor.tags
81 | await sleep(2)
82 | download = await getDownloadByGid(ext_hash[:12])
83 | if not hasattr(download, 'client'):
84 | return
85 | listener = download.listener()
86 | client = download.client()
87 | if not listener.seed:
88 | await sync_to_async(client.torrents_pause, torrent_hashes=ext_hash)
89 | if listener.select:
90 | await clean_unwanted(listener.dir)
91 | await listener.onDownloadComplete()
92 | client = await sync_to_async(get_client)
93 | if listener.seed:
94 | async with download_dict_lock:
95 | if listener.uid in download_dict:
96 | removed = False
97 | download_dict[listener.uid] = QbittorrentStatus(listener, True)
98 | else:
99 | removed = True
100 | if removed:
101 | await __remove_torrent(client, ext_hash, tag)
102 | return
103 | async with qb_listener_lock:
104 | if tag in QbTorrents:
105 | QbTorrents[tag]['seeding'] = True
106 | else:
107 | return
108 | await update_all_messages()
109 | LOGGER.info(f"Seeding started: {tor.name} - Hash: {ext_hash}")
110 | await sync_to_async(client.auth_log_out)
111 | else:
112 | await __remove_torrent(client, ext_hash, tag)
113 |
114 |
115 | async def __qb_listener():
116 | client = await sync_to_async(get_client)
117 | while True:
118 | async with qb_listener_lock:
119 | try:
120 | if len(await sync_to_async(client.torrents_info)) == 0:
121 | QbInterval.clear()
122 | await sync_to_async(client.auth_log_out)
123 | break
124 | for tor_info in await sync_to_async(client.torrents_info):
125 | tag = tor_info.tags
126 | if tag not in QbTorrents:
127 | continue
128 | state = tor_info.state
129 | if state == "metaDL":
130 | TORRENT_TIMEOUT = config_dict['TORRENT_TIMEOUT']
131 | QbTorrents[tag]['stalled_time'] = time()
132 | if TORRENT_TIMEOUT and time() - tor_info.added_on >= TORRENT_TIMEOUT:
133 | __onDownloadError("Dead Torrent!", tor_info)
134 | else:
135 | await sync_to_async(client.torrents_reannounce, torrent_hashes=tor_info.hash)
136 | elif state == "downloading":
137 | QbTorrents[tag]['stalled_time'] = time()
138 | if config_dict['STOP_DUPLICATE'] and not QbTorrents[tag]['stop_dup_check']:
139 | QbTorrents[tag]['stop_dup_check'] = True
140 | __stop_duplicate(tor_info)
141 | if any([config_dict['STORAGE_THRESHOLD'], config_dict['TORRENT_LIMIT'], config_dict['LEECH_LIMIT']]) and not QbTorrents[tag]['size_checked']:
142 | QbTorrents[tag]['size_checked'] = True
143 | __size_checked(tor_info)
144 | elif state == "stalledDL":
145 | TORRENT_TIMEOUT = config_dict['TORRENT_TIMEOUT']
146 |                     if not QbTorrents[tag]['rechecked'] and 0.9999 < tor_info.progress < 1:
147 | msg = f"Force recheck - Name: {tor_info.name} Hash: "
148 | msg += f"{tor_info.hash} Downloaded Bytes: {tor_info.downloaded} "
149 | msg += f"Size: {tor_info.size} Total Size: {tor_info.total_size}"
150 | LOGGER.warning(msg)
151 | await sync_to_async(client.torrents_recheck, torrent_hashes=tor_info.hash)
152 | QbTorrents[tag]['rechecked'] = True
153 | elif TORRENT_TIMEOUT and time() - QbTorrents[tag]['stalled_time'] >= TORRENT_TIMEOUT:
154 | __onDownloadError("Dead Torrent!", tor_info)
155 | else:
156 | await sync_to_async(client.torrents_reannounce, torrent_hashes=tor_info.hash)
157 | elif state == "missingFiles":
158 | await sync_to_async(client.torrents_recheck, torrent_hashes=tor_info.hash)
159 | elif state == "error":
160 | __onDownloadError(
161 |                         "Not enough space for this torrent on device", tor_info)
162 | elif tor_info.completion_on != 0 and not QbTorrents[tag]['uploaded'] and \
163 | state not in ['checkingUP', 'checkingDL', 'checkingResumeData']:
164 | QbTorrents[tag]['uploaded'] = True
165 | __onDownloadComplete(tor_info)
166 | elif state in ['pausedUP', 'pausedDL'] and QbTorrents[tag]['seeding']:
167 | QbTorrents[tag]['seeding'] = False
168 | __onSeedFinish(tor_info)
169 | except Exception as e:
170 | LOGGER.error(str(e))
171 | client = await sync_to_async(get_client)
172 | await sleep(3)
173 |
174 |
175 | async def onDownloadStart(tag):
176 | async with qb_listener_lock:
177 |         QbTorrents[tag] = {'stalled_time': time(), 'stop_dup_check': False,
178 |                            'rechecked': False, 'uploaded': False, 'seeding': False, 'size_checked': False}
179 | if not QbInterval:
180 | periodic = bot_loop.create_task(__qb_listener())
181 | QbInterval.append(periodic)
182 |
--------------------------------------------------------------------------------
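The `QbTorrents` registry above keeps one flag dict per torrent tag; an illustrative entry (values are examples) looks like:

    QbTorrents['12345'] = {
        'stalled_time': 1690000000.0,   # last time the torrent made progress
        'stop_dup_check': False,        # STOP_DUPLICATE check already run?
        'rechecked': False,             # force-recheck already issued?
        'uploaded': False,              # completion handler already fired?
        'seeding': False,               # switched to seeding state?
        'size_checked': False,          # size/limit check already run?
    }
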
/bot/helper/mirror_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/aria2_download.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from aiofiles.os import path as aiopath
3 | from aiofiles.os import remove as aioremove
4 |
5 | from bot import (LOGGER, aria2, aria2_options, aria2c_global, config_dict,
6 | download_dict, download_dict_lock, non_queued_dl,
7 | queue_dict_lock)
8 | from bot.helper.ext_utils.bot_utils import bt_selection_buttons, sync_to_async
9 | from bot.helper.ext_utils.task_manager import is_queued
10 | from bot.helper.mirror_utils.status_utils.aria2_status import Aria2Status
11 | from bot.helper.telegram_helper.message_utils import (delete_links,
12 | sendMessage,
13 | sendStatusMessage)
14 |
15 |
16 | async def add_aria2c_download(link, path, listener, filename, auth, ratio, seed_time):
17 | a2c_opt = {**aria2_options}
18 | [a2c_opt.pop(k) for k in aria2c_global if k in aria2_options]
19 | a2c_opt['dir'] = path
20 | if filename:
21 | a2c_opt['out'] = filename
22 | if auth:
23 | a2c_opt['header'] = auth
24 | if ratio:
25 | a2c_opt['seed-ratio'] = ratio
26 | if seed_time:
27 | a2c_opt['seed-time'] = seed_time
28 | if TORRENT_TIMEOUT := config_dict['TORRENT_TIMEOUT']:
29 | a2c_opt['bt-stop-timeout'] = f'{TORRENT_TIMEOUT}'
30 | added_to_queue, event = await is_queued(listener.uid)
31 | if added_to_queue:
32 | if link.startswith('magnet:'):
33 | a2c_opt['pause-metadata'] = 'true'
34 | else:
35 | a2c_opt['pause'] = 'true'
36 | try:
37 | download = (await sync_to_async(aria2.add, link, a2c_opt))[0]
38 | except Exception as e:
39 | LOGGER.info(f"Aria2c Download Error: {e}")
40 | await sendMessage(listener.message, f'{e}')
41 | await delete_links(listener.message)
42 | return
43 | if await aiopath.exists(link):
44 | await aioremove(link)
45 | if download.error_message:
46 | error = str(download.error_message).replace('<', ' ').replace('>', ' ')
47 | LOGGER.info(f"Aria2c Download Error: {error}")
48 | await sendMessage(listener.message, error)
49 | await delete_links(listener.message)
50 | return
51 |
52 | gid = download.gid
53 | name = download.name
54 | async with download_dict_lock:
55 | download_dict[listener.uid] = Aria2Status(
56 | gid, listener, queued=added_to_queue)
57 | if added_to_queue:
58 | LOGGER.info(f"Added to Queue/Download: {name}. Gid: {gid}")
59 | if not listener.select or not download.is_torrent:
60 | await sendStatusMessage(listener.message)
61 | else:
62 | async with queue_dict_lock:
63 | non_queued_dl.add(listener.uid)
64 | LOGGER.info(f"Aria2Download started: {name}. Gid: {gid}")
65 |
66 | await listener.onDownloadStart()
67 |
68 | if not added_to_queue and (not listener.select or not config_dict['BASE_URL']):
69 | await sendStatusMessage(listener.message)
70 | elif listener.select and download.is_torrent and not download.is_metadata:
71 | if not added_to_queue:
72 | await sync_to_async(aria2.client.force_pause, gid)
73 | SBUTTONS = bt_selection_buttons(gid)
74 |         msg = "Your download is paused. Choose files, then press the Done Selecting button to start downloading."
75 | await sendMessage(listener.message, msg, SBUTTONS)
76 |
77 | if added_to_queue:
78 | await event.wait()
79 |
80 | async with download_dict_lock:
81 | if listener.uid not in download_dict:
82 | return
83 | download = download_dict[listener.uid]
84 | download.queued = False
85 | new_gid = download.gid()
86 |
87 | await sync_to_async(aria2.client.unpause, new_gid)
88 |     LOGGER.info(f'Start Queued Download from Aria2c: {name}. Gid: {new_gid}')
89 |
90 | async with queue_dict_lock:
91 | non_queued_dl.add(listener.uid)
92 |
--------------------------------------------------------------------------------
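The options dict built in `add_aria2c_download` maps directly onto aria2c flags; a hypothetical fully-populated example (paths and values are illustrative):

    a2c_opt = {
        'dir': '/usr/src/app/downloads/1234',  # --dir
        'out': 'ubuntu.iso',                   # --out
        'header': 'Authorization: Basic ...',  # --header
        'seed-ratio': '1.0',                   # --seed-ratio
        'seed-time': '60',                     # --seed-time (minutes)
        'bt-stop-timeout': '600',              # --bt-stop-timeout (seconds)
    }
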
/bot/helper/mirror_utils/download_utils/direct_link_generator_license.md:
--------------------------------------------------------------------------------
1 | RAPHIELSCAPE PUBLIC LICENSE
2 | Version 1.c, June 2019
3 |
4 | Copyright (C) 2019 Raphielscape LLC.
5 | Copyright (C) 2019 Devscapes Open Source Holding GmbH.
6 |
7 | Everyone is permitted to copy and distribute verbatim or modified
8 | copies of this license document, and changing it is allowed as long
9 | as the name is changed.
10 |
11 | RAPHIELSCAPE PUBLIC LICENSE
12 | A-1. DEFINITIONS
13 |
14 | 0. “This License” refers to version 1.c of the Raphielscape Public License.
15 |
16 | 1. “Copyright” also means copyright-like laws that apply to other kinds of works.
17 |
18 | 2. “The Work" refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”.
19 | “Licensees” and “recipients” may be individuals or organizations.
20 |
21 | 3. To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission,
22 | other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work
23 | or a work “based on” the earlier work.
24 |
25 | 4. Source Form. The “source form” for a work means the preferred form of the work for making modifications to it.
26 | “Object code” means any non-source form of a work.
27 |
28 | The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and
29 | (for an executable work) run the object code and to modify the work, including scripts to control those activities.
30 |
31 | The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source.
32 | The Corresponding Source for a work in source code form is that same work.
33 |
34 | 5. "The author" refers to "author" of the code, which is the one that made the particular code which exists inside of
35 | the Corresponding Source.
36 |
37 | 6. "Owner" refers to any parties which is made the early form of the Corresponding Source.
38 |
39 | A-2. TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
40 |
41 | 0. You must give any other recipients of the Work or Derivative Works a copy of this License; and
42 |
43 | 1. You must cause any modified files to carry prominent notices stating that You changed the files; and
44 |
45 | 2. You must retain, in the Source form of any Derivative Works that You distribute,
46 | this license, all copyright, patent, trademark, authorships and attribution notices
47 | from the Source form of the Work; and
48 |
49 | 3. Respecting the author and owner of works that are distributed in any way.
50 |
51 | You may add Your own copyright statement to Your modifications and may provide
52 | additional or different license terms and conditions for use, reproduction,
53 | or distribution of Your modifications, or for any such Derivative Works as a whole,
54 | provided Your use, reproduction, and distribution of the Work otherwise complies
55 | with the conditions stated in this License.
56 |
57 | B. DISCLAIMER OF WARRANTY
58 |
59 | THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR
60 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
61 | FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS
62 | BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
63 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
64 | OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
65 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
66 | OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
67 |
68 |
69 | C. REVISED VERSION OF THIS LICENSE
70 |
71 | The Devscapes Open Source Holding GmbH. may publish revised and/or new versions of the
72 | Raphielscape Public License from time to time. Such new versions will be similar in spirit
73 | to the present version, but may differ in detail to address new problems or concerns.
74 |
75 | Each version is given a distinguishing version number. If the Program specifies that a
76 | certain numbered version of the Raphielscape Public License "or any later version" applies to it,
77 | you have the option of following the terms and conditions either of that numbered version or of
78 | any later version published by the Devscapes Open Source Holding GmbH. If the Program does not specify a
79 | version number of the Raphielscape Public License, you may choose any version ever published
80 | by the Devscapes Open Source Holding GmbH.
81 |
82 | END OF LICENSE
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/gd_download.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from random import SystemRandom
3 | from string import ascii_letters, digits
4 |
5 | from bot import (LOGGER, download_dict, download_dict_lock, non_queued_dl,
6 | queue_dict_lock)
7 | from bot.helper.ext_utils.bot_utils import sync_to_async
8 | from bot.helper.ext_utils.task_manager import (is_queued, limit_checker,
9 | stop_duplicate_check)
10 | from bot.helper.mirror_utils.status_utils.gdrive_status import GdriveStatus
11 | from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus
12 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
13 | from bot.helper.telegram_helper.message_utils import (delete_links,
14 | sendMessage,
15 | sendStatusMessage)
16 |
17 |
18 | async def add_gd_download(link, path, listener, newname):
19 | drive = GoogleDriveHelper()
20 | name, mime_type, size, _, _ = await sync_to_async(drive.count, link)
21 | if mime_type is None:
22 | await sendMessage(listener.message, name)
23 | return
24 | name = newname or name
25 | gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
26 |
27 | msg, button = await stop_duplicate_check(name, listener)
28 | if msg:
29 | await sendMessage(listener.message, msg, button)
30 | return
31 | if limit_exceeded := await limit_checker(size, listener, isDriveLink=True):
32 | await sendMessage(listener.message, limit_exceeded)
33 | await delete_links(listener.message)
34 | return
35 | added_to_queue, event = await is_queued(listener.uid)
36 | if added_to_queue:
37 | LOGGER.info(f"Added to Queue/Download: {name}")
38 | async with download_dict_lock:
39 | download_dict[listener.uid] = QueueStatus(
40 | name, size, gid, listener, 'dl')
41 | await listener.onDownloadStart()
42 | await sendStatusMessage(listener.message)
43 | await event.wait()
44 | async with download_dict_lock:
45 | if listener.uid not in download_dict:
46 | return
47 | from_queue = True
48 | else:
49 | from_queue = False
50 |
51 | drive = GoogleDriveHelper(name, path, listener)
52 | async with download_dict_lock:
53 | download_dict[listener.uid] = GdriveStatus(
54 | drive, size, listener.message, gid, 'dl', listener.extra_details)
55 |
56 | async with queue_dict_lock:
57 | non_queued_dl.add(listener.uid)
58 |
59 | if from_queue:
60 | LOGGER.info(f'Start Queued Download from GDrive: {name}')
61 | else:
62 | LOGGER.info(f"Download from GDrive: {name}")
63 | await listener.onDownloadStart()
64 | await sendStatusMessage(listener.message)
65 |
66 | await sync_to_async(drive.download, link)
67 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/mega_download.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from asyncio import Event
3 | from random import SystemRandom
4 | from string import ascii_letters, digits
5 |
6 | from aiofiles.os import makedirs
7 | from mega import MegaApi, MegaError, MegaListener, MegaRequest, MegaTransfer
8 |
9 | from bot import (LOGGER, config_dict, download_dict, download_dict_lock,
10 | non_queued_dl, queue_dict_lock)
11 | from bot.helper.ext_utils.bot_utils import (async_to_sync, get_mega_link_type,
12 | sync_to_async)
13 | from bot.helper.ext_utils.task_manager import (is_queued, limit_checker,
14 | stop_duplicate_check)
15 | from bot.helper.mirror_utils.status_utils.mega_download_status import MegaDownloadStatus
16 | from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus
17 | from bot.helper.telegram_helper.message_utils import (delete_links,
18 | sendMessage,
19 | sendStatusMessage)
20 |
21 | DumpSession = None
22 |
23 |
24 | class MegaAppListener(MegaListener):
25 | _NO_EVENT_ON = (MegaRequest.TYPE_LOGIN, MegaRequest.TYPE_FETCH_NODES)
26 | NO_ERROR = "no error"
27 |
28 | def __init__(self, continue_event: Event, listener):
29 | self.continue_event = continue_event
30 | self.node = None
31 | self.public_node = None
32 | self.listener = listener
33 | self.is_cancelled = False
34 | self.error = None
35 | self.__bytes_transferred = 0
36 | self.__speed = 0
37 | self.__name = ''
38 | super().__init__()
39 |
40 | @property
41 | def speed(self):
42 | return self.__speed
43 | @property
44 | def downloaded_bytes(self):
45 | return self.__bytes_transferred
46 |
47 | def onRequestFinish(self, api, request, error):
48 | if str(error).lower() != "no error":
49 | self.error = error.copy()
50 | if str(self.error).casefold() != "not found":
51 | LOGGER.error(f'Mega onRequestFinishError: {self.error}')
52 | self.continue_event.set()
53 | return
54 | request_type = request.getType()
55 | if request_type == MegaRequest.TYPE_LOGIN:
56 | api.fetchNodes()
57 | elif request_type == MegaRequest.TYPE_GET_PUBLIC_NODE:
58 | self.public_node = request.getPublicMegaNode()
59 | self.__name = self.public_node.getName()
60 | elif request_type == MegaRequest.TYPE_FETCH_NODES:
61 | LOGGER.info("Fetching Root Node.")
62 | self.node = api.getRootNode()
63 | self.__name = self.node.getName()
64 | LOGGER.info(f"Node Name: {self.node.getName()}")
65 | if request_type not in self._NO_EVENT_ON or self.node and "cloud drive" not in self.__name.lower():
66 | self.continue_event.set()
67 |
68 | def onRequestTemporaryError(self, api, request, error: MegaError):
69 |         LOGGER.error(f'Mega request error: {error}')
70 | if not self.is_cancelled:
71 | self.is_cancelled = True
72 | async_to_sync(self.listener.onDownloadError,
73 | f"RequestTempError: {error.toString()}")
74 | self.error = error.toString()
75 | self.continue_event.set()
76 |
77 | def onTransferUpdate(self, api: MegaApi, transfer: MegaTransfer):
78 | if self.is_cancelled:
79 | api.cancelTransfer(transfer, None)
80 | self.continue_event.set()
81 | return
82 | self.__speed = transfer.getSpeed()
83 | self.__bytes_transferred = transfer.getTransferredBytes()
84 |
85 | def onTransferFinish(self, api: MegaApi, transfer: MegaTransfer, error):
86 | try:
87 | if self.is_cancelled:
88 | self.continue_event.set()
89 | elif transfer.isFinished() and (transfer.isFolderTransfer() or transfer.getFileName() == self.__name):
90 | async_to_sync(self.listener.onDownloadComplete)
91 | self.continue_event.set()
92 | except Exception as e:
93 | LOGGER.error(e)
94 |
95 | def onTransferTemporaryError(self, api, transfer, error):
96 | filen = transfer.getFileName()
97 | state = transfer.getState()
98 | errStr = error.toString()
99 | LOGGER.error(
100 | f'Mega download error in file {transfer} {filen}: {error}')
101 | if state in [1, 4]:
102 | # Sometimes MEGA (official client) can't stream a node either and raises a temp failed error.
103 | # Don't break the transfer queue if transfer's in queued (1) or retrying (4) state [causes seg fault]
104 | return
105 |
106 | self.error = errStr
107 | if not self.is_cancelled:
108 | self.is_cancelled = True
109 | async_to_sync(self.listener.onDownloadError,
110 | f"TransferTempError: {errStr} ({filen})")
111 | self.continue_event.set()
112 |
113 | async def cancel_download(self):
114 | self.is_cancelled = True
115 |         await self.listener.onDownloadError("Download cancelled by user")
116 |
117 |
118 | class AsyncExecutor:
119 |
120 | def __init__(self):
121 | self.continue_event = Event()
122 |
123 | async def do(self, function, args):
124 | self.continue_event.clear()
125 | await sync_to_async(function, *args)
126 | await self.continue_event.wait()
127 |
128 |
129 | async def add_mega_download(mega_link, path, listener, name):
130 | MEGA_EMAIL = config_dict['MEGA_EMAIL']
131 | MEGA_PASSWORD = config_dict['MEGA_PASSWORD']
132 |
133 | executor = AsyncExecutor()
134 | api = MegaApi(None, None, None, 'mirror-leech-telegram-bot')
135 | folder_api = None
136 |
137 | mega_listener = MegaAppListener(executor.continue_event, listener)
138 | api.addListener(mega_listener)
139 |
140 | if MEGA_EMAIL and MEGA_PASSWORD:
141 | await executor.do(api.login, (MEGA_EMAIL, MEGA_PASSWORD))
142 |
143 | if get_mega_link_type(mega_link) == "file":
144 | await executor.do(api.getPublicNode, (mega_link,))
145 | node = mega_listener.public_node
146 | else:
147 | folder_api = MegaApi(None, None, None, 'mirror-leech-telegram-bot')
148 | folder_api.addListener(mega_listener)
149 | await executor.do(folder_api.loginToFolder, (mega_link,))
150 | node = await sync_to_async(folder_api.authorizeNode, mega_listener.node)
151 | if mega_listener.error is not None:
152 | await sendMessage(listener.message, str(mega_listener.error))
153 | await executor.do(api.logout, ())
154 | if folder_api is not None:
155 | await executor.do(folder_api.logout, ())
156 | await delete_links(listener.message)
157 | return
158 |
159 | name = name or node.getName()
160 | msg, button = await stop_duplicate_check(name, listener)
161 | if msg:
162 | await sendMessage(listener.message, msg, button)
163 | await executor.do(api.logout, ())
164 | if folder_api is not None:
165 | await executor.do(folder_api.logout, ())
166 | await delete_links(listener.message)
167 | return
168 |
169 | gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=8))
170 | size = api.getSize(node)
171 | if limit_exceeded := await limit_checker(size, listener, isMega=True):
172 | await sendMessage(listener.message, limit_exceeded)
173 | await delete_links(listener.message)
174 | return
175 | added_to_queue, event = await is_queued(listener.uid)
176 | if added_to_queue:
177 | LOGGER.info(f"Added to Queue/Download: {name}")
178 | async with download_dict_lock:
179 | download_dict[listener.uid] = QueueStatus(
180 | name, size, gid, listener, 'Dl')
181 | await listener.onDownloadStart()
182 | await sendStatusMessage(listener.message)
183 | await event.wait()
184 | async with download_dict_lock:
185 | if listener.uid not in download_dict:
186 | await executor.do(api.logout, ())
187 | if folder_api is not None:
188 | await executor.do(folder_api.logout, ())
189 | await delete_links(listener.message)
190 | return
191 | from_queue = True
192 | LOGGER.info(f'Start Queued Download from Mega: {name}')
193 | else:
194 | from_queue = False
195 |
196 | async with download_dict_lock:
197 | download_dict[listener.uid] = MegaDownloadStatus(
198 | name, size, gid, mega_listener, listener.message, listener.extra_details)
199 | async with queue_dict_lock:
200 | non_queued_dl.add(listener.uid)
201 |
202 | if from_queue:
203 | LOGGER.info(f'Start Queued Download from Mega: {name}')
204 | else:
205 | await listener.onDownloadStart()
206 | await sendStatusMessage(listener.message)
207 | LOGGER.info(f"Download from Mega: {name}")
208 |
209 | await makedirs(path, exist_ok=True)
210 | await executor.do(api.startDownload, (node, path, name, None, False, None))
211 | await executor.do(api.logout, ())
212 | if folder_api is not None:
213 | await executor.do(folder_api.logout, ())
214 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/qbit_download.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from time import time
3 |
4 | from aiofiles.os import path as aiopath
5 | from aiofiles.os import remove as aioremove
6 |
7 | from bot import (LOGGER, config_dict, download_dict, download_dict_lock,
8 | get_client, non_queued_dl, queue_dict_lock)
9 | from bot.helper.ext_utils.bot_utils import bt_selection_buttons, sync_to_async
10 | from bot.helper.ext_utils.task_manager import is_queued
11 | from bot.helper.listeners.qbit_listener import onDownloadStart
12 | from bot.helper.mirror_utils.status_utils.qbit_status import QbittorrentStatus
13 | from bot.helper.telegram_helper.message_utils import (delete_links,
14 | deleteMessage,
15 | sendMessage,
16 | sendStatusMessage)
17 |
18 | """
19 | Only v1 torrents
20 | #from hashlib import sha1
21 | #from base64 import b16encode, b32decode
22 | #from bencoding import bencode, bdecode
23 | #from re import search as re_search
24 | def __get_hash_magnet(mgt: str):
25 | hash_ = re_search(r'(?<=xt=urn:btih:)[a-zA-Z0-9]+', mgt).group(0)
26 | if len(hash_) == 32:
27 | hash_ = b16encode(b32decode(hash_.upper())).decode()
28 | return str(hash_)
29 |
30 | def __get_hash_file(path):
31 | with open(path, "rb") as f:
32 | decodedDict = bdecode(f.read())
33 | hash_ = sha1(bencode(decodedDict[b'info'])).hexdigest()
34 | return str(hash_)
35 | """
36 |
37 |
38 | async def add_qb_torrent(link, path, listener, ratio, seed_time):
39 | client = await sync_to_async(get_client)
40 | ADD_TIME = time()
41 | try:
42 | url = link
43 | tpath = None
44 | if await aiopath.exists(link):
45 | url = None
46 | tpath = link
47 | added_to_queue, event = await is_queued(listener.uid)
48 | op = await sync_to_async(client.torrents_add, url, tpath, path, is_paused=added_to_queue, tags=f'{listener.uid}',
49 | ratio_limit=ratio, seeding_time_limit=seed_time, headers={'user-agent': 'Wget/1.12'})
50 | if op.lower() == "ok.":
51 | tor_info = await sync_to_async(client.torrents_info, tag=f'{listener.uid}')
52 | if len(tor_info) == 0:
53 | while True:
54 | tor_info = await sync_to_async(client.torrents_info, tag=f'{listener.uid}')
55 | if len(tor_info) > 0:
56 | break
57 | elif time() - ADD_TIME >= 120:
58 |                         msg = "Not added! Check if the link is valid. If it's a torrent file, report this; it happens when the torrent file is larger than 10 MB."
59 | await sendMessage(listener.message, msg)
60 | await delete_links(listener.message)
61 | return
62 | tor_info = tor_info[0]
63 | ext_hash = tor_info.hash
64 | else:
65 |                 await sendMessage(listener.message, "This torrent is already added, or the link/file is unsupported/invalid.")
66 | await delete_links(listener.message)
67 | return
68 |
69 | async with download_dict_lock:
70 | download_dict[listener.uid] = QbittorrentStatus(
71 | listener, queued=added_to_queue)
72 | await onDownloadStart(f'{listener.uid}')
73 |
74 | if added_to_queue:
75 | LOGGER.info(
76 | f"Added to Queue/Download: {tor_info.name} - Hash: {ext_hash}")
77 | else:
78 | async with queue_dict_lock:
79 | non_queued_dl.add(listener.uid)
80 | LOGGER.info(
81 | f"QbitDownload started: {tor_info.name} - Hash: {ext_hash}")
82 |
83 | await listener.onDownloadStart()
84 |
85 | if config_dict['BASE_URL'] and listener.select:
86 | if link.startswith('magnet:'):
87 |                 metamsg = "Downloading metadata. Please wait, then you can select files. Use a torrent file to avoid this wait."
88 | meta = await sendMessage(listener.message, metamsg)
89 | while True:
90 | tor_info = await sync_to_async(client.torrents_info, tag=f'{listener.uid}')
91 | if len(tor_info) == 0:
92 | await deleteMessage(meta)
93 | return
94 | try:
95 | tor_info = tor_info[0]
96 | if tor_info.state not in ["metaDL", "checkingResumeData", "pausedDL"]:
97 | await deleteMessage(meta)
98 | break
99 | except:
100 | await deleteMessage(meta)
101 | return
102 |
103 | ext_hash = tor_info.hash
104 | if not added_to_queue:
105 | await sync_to_async(client.torrents_pause, torrent_hashes=ext_hash)
106 | SBUTTONS = bt_selection_buttons(ext_hash)
107 |                     msg = "Your download is paused. Choose files, then press the Done Selecting button to start downloading."
108 | await sendMessage(listener.message, msg, SBUTTONS)
109 | else:
110 | await sendStatusMessage(listener.message)
111 |
112 | if added_to_queue:
113 | await event.wait()
114 |
115 | async with download_dict_lock:
116 | if listener.uid not in download_dict:
117 | return
118 | download_dict[listener.uid].queued = False
119 |
120 | await sync_to_async(client.torrents_resume, torrent_hashes=ext_hash)
121 | LOGGER.info(
122 | f'Start Queued Download from Qbittorrent: {tor_info.name} - Hash: {ext_hash}')
123 |
124 | async with queue_dict_lock:
125 | non_queued_dl.add(listener.uid)
126 | except Exception as e:
127 | await sendMessage(listener.message, str(e))
128 | await delete_links(listener.message)
129 | finally:
130 | if await aiopath.exists(link):
131 | await aioremove(link)
--------------------------------------------------------------------------------
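A runnable sketch of the v1-only hash helpers kept commented out above, using the same approach and the bencoding package this repo already imports elsewhere; the function names here are illustrative:

    from base64 import b16encode, b32decode
    from hashlib import sha1
    from re import search as re_search

    from bencoding import bdecode, bencode

    def get_hash_magnet(mgt: str) -> str:
        hash_ = re_search(r'(?<=xt=urn:btih:)[a-zA-Z0-9]+', mgt).group(0)
        if len(hash_) == 32:
            # 32-char hashes are base32; convert to the usual 40-char hex form
            hash_ = b16encode(b32decode(hash_.upper())).decode()
        return hash_.lower()

    def get_hash_file(path: str) -> str:
        with open(path, 'rb') as f:
            decoded = bdecode(f.read())
        # the v1 info-hash is the SHA-1 of the bencoded 'info' dictionary
        return sha1(bencode(decoded[b'info'])).hexdigest()
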
/bot/helper/mirror_utils/download_utils/rclone_download.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from asyncio import gather
3 | from json import loads
4 | from random import SystemRandom
5 | from string import ascii_letters, digits
6 |
7 | from bot import (LOGGER, download_dict, download_dict_lock, non_queued_dl,
8 | queue_dict_lock)
9 | from bot.helper.ext_utils.bot_utils import cmd_exec
10 | from bot.helper.ext_utils.task_manager import is_queued, stop_duplicate_check
11 | from bot.helper.mirror_utils.rclone_utils.transfer import RcloneTransferHelper
12 | from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus
13 | from bot.helper.mirror_utils.status_utils.rclone_status import RcloneStatus
14 | from bot.helper.telegram_helper.message_utils import (sendMessage,
15 | sendStatusMessage)
16 |
17 |
18 | async def add_rclone_download(rc_path, config_path, path, name, listener):
19 | remote, rc_path = rc_path.split(':', 1)
20 | rc_path = rc_path.strip('/')
21 |
22 | cmd1 = ['rclone', 'lsjson', '--fast-list', '--stat', '--no-mimetype',
23 | '--no-modtime', '--config', config_path, f'{remote}:{rc_path}']
24 | cmd2 = ['rclone', 'size', '--fast-list', '--json',
25 | '--config', config_path, f'{remote}:{rc_path}']
26 | res1, res2 = await gather(cmd_exec(cmd1), cmd_exec(cmd2))
27 |     if res1[2] != 0 or res2[2] != 0:
28 | if res1[2] != -9:
29 | err = res1[1] or res2[1]
30 | msg = f'Error: While getting rclone stat/size. Path: {remote}:{rc_path}. Stderr: {err[:4000]}'
31 | await sendMessage(listener.message, msg)
32 | return
33 | rstat = loads(res1[0])
34 | rsize = loads(res2[0])
35 | if rstat['IsDir']:
36 | if not name:
37 | name = rc_path.rsplit('/', 1)[-1] if rc_path else remote
38 | path += name
39 | else:
40 | name = rc_path.rsplit('/', 1)[-1]
41 | size = rsize['bytes']
42 | gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
43 |
44 | msg, button = await stop_duplicate_check(name, listener)
45 | if msg:
46 | await sendMessage(listener.message, msg, button)
47 | return
48 |
49 | added_to_queue, event = await is_queued(listener.uid)
50 | if added_to_queue:
51 | LOGGER.info(f"Added to Queue/Download: {name}")
52 | async with download_dict_lock:
53 | download_dict[listener.uid] = QueueStatus(
54 | name, size, gid, listener, 'dl')
55 | await listener.onDownloadStart()
56 | await sendStatusMessage(listener.message)
57 | await event.wait()
58 | async with download_dict_lock:
59 | if listener.uid not in download_dict:
60 | return
61 | from_queue = True
62 | else:
63 | from_queue = False
64 |
65 | RCTransfer = RcloneTransferHelper(listener, name)
66 | async with download_dict_lock:
67 | download_dict[listener.uid] = RcloneStatus(
68 | RCTransfer, listener.message, gid, 'dl', listener.extra_details)
69 | async with queue_dict_lock:
70 | non_queued_dl.add(listener.uid)
71 |
72 | if from_queue:
73 | LOGGER.info(f'Start Queued Download with rclone: {rc_path}')
74 | else:
75 | await listener.onDownloadStart()
76 | await sendStatusMessage(listener.message)
77 | LOGGER.info(f"Download with rclone: {rc_path}")
78 |
79 | await RCTransfer.download(remote, rc_path, config_path, path)
80 |
--------------------------------------------------------------------------------
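The two rclone invocations above return JSON that the code parses with `loads`; illustrative output for a hypothetical directory on a remote named `gdrive`:

    # rclone lsjson --stat ... 'gdrive:movies' ->
    #   {"Path": "movies", "Name": "movies", "Size": -1, "IsDir": true}
    # rclone size --json ... 'gdrive:movies' ->
    #   {"count": 42, "bytes": 107374182400}
    rstat = {'IsDir': True}           # drives the name/path branch
    rsize = {'bytes': 107374182400}   # total size used by limit checks
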
/bot/helper/mirror_utils/download_utils/telegram_download.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from asyncio import Lock
3 | from logging import ERROR, getLogger
4 | from time import time
5 |
6 | from bot import (IS_PREMIUM_USER, LOGGER, bot, download_dict,
7 | download_dict_lock, non_queued_dl, queue_dict_lock, user)
8 | from bot.helper.ext_utils.task_manager import (is_queued, limit_checker,
9 | stop_duplicate_check)
10 | from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus
11 | from bot.helper.mirror_utils.status_utils.telegram_status import TelegramStatus
12 | from bot.helper.telegram_helper.message_utils import (delete_links,
13 | sendMessage,
14 | sendStatusMessage)
15 |
16 | global_lock = Lock()
17 | GLOBAL_GID = set()
18 | getLogger("pyrogram").setLevel(ERROR)
19 |
20 |
21 | class TelegramDownloadHelper:
22 |
23 | def __init__(self, listener):
24 | self.name = ""
25 | self.__processed_bytes = 0
26 | self.__start_time = time()
27 | self.__listener = listener
28 | self.__id = ""
29 | self.__is_cancelled = False
30 |
31 | @property
32 | def speed(self):
33 | return self.__processed_bytes / (time() - self.__start_time)
34 |
35 | @property
36 | def processed_bytes(self):
37 | return self.__processed_bytes
38 |
39 | async def __onDownloadStart(self, name, size, file_id, from_queue):
40 | async with global_lock:
41 | GLOBAL_GID.add(file_id)
42 | self.name = name
43 | self.__id = file_id
44 | async with download_dict_lock:
45 | download_dict[self.__listener.uid] = TelegramStatus(
46 | self, size, self.__listener.message, file_id[:12], 'dl', self.__listener.extra_details)
47 | async with queue_dict_lock:
48 | non_queued_dl.add(self.__listener.uid)
49 | if not from_queue:
50 | await self.__listener.onDownloadStart()
51 | await sendStatusMessage(self.__listener.message)
52 | LOGGER.info(f'Download from Telegram: {name}')
53 | else:
54 | LOGGER.info(f'Start Queued Download from Telegram: {name}')
55 |
56 | async def __onDownloadProgress(self, current, total):
57 | if self.__is_cancelled:
58 | if IS_PREMIUM_USER:
59 | user.stop_transmission()
60 | else:
61 | bot.stop_transmission()
62 | self.__processed_bytes = current
63 |
64 | async def __onDownloadError(self, error):
65 | async with global_lock:
66 | try:
67 | GLOBAL_GID.remove(self.__id)
68 | except:
69 | pass
70 | await self.__listener.onDownloadError(error)
71 |
72 | async def __onDownloadComplete(self):
73 | await self.__listener.onDownloadComplete()
74 | async with global_lock:
75 | GLOBAL_GID.remove(self.__id)
76 |
77 | async def __download(self, message, path):
78 | try:
79 | download = await message.download(file_name=path, progress=self.__onDownloadProgress)
80 | if self.__is_cancelled:
81 | await self.__onDownloadError('Cancelled by user!')
82 | return
83 | except Exception as e:
84 | LOGGER.error(str(e))
85 | await self.__onDownloadError(str(e))
86 | return
87 | if download is not None:
88 | await self.__onDownloadComplete()
89 | elif not self.__is_cancelled:
90 | await self.__onDownloadError('Internal error occurred')
91 |
92 | async def add_download(self, message, path, filename, session):
93 |         if (IS_PREMIUM_USER and session != 'bot') or session == 'user':
94 | if not self.__listener.isSuperGroup and session != 'user':
95 | await sendMessage(message, 'Use SuperGroup to download with User!')
96 | return
97 | message = await user.get_messages(chat_id=message.chat.id, message_ids=message.id)
98 |
99 | media = message.document or message.photo or message.video or message.audio or \
100 | message.voice or message.video_note or message.sticker or message.animation or None
101 | if media is not None:
102 |
103 | async with global_lock:
104 | download = media.file_unique_id not in GLOBAL_GID
105 |
106 | if download:
107 | if filename == "":
108 | name = media.file_name if hasattr(
109 | media, 'file_name') else 'None'
110 | else:
111 | name = filename
112 | path = path + name
113 | size = media.file_size
114 | gid = media.file_unique_id
115 |
116 | msg, button = await stop_duplicate_check(name, self.__listener)
117 | if msg:
118 | await sendMessage(self.__listener.message, msg, button)
119 | return
120 | if limit_exceeded := await limit_checker(size, self.__listener):
121 | await sendMessage(self.__listener.message, limit_exceeded)
122 | await delete_links(self.__listener.message)
123 | return
124 | added_to_queue, event = await is_queued(self.__listener.uid)
125 | if added_to_queue:
126 | LOGGER.info(f"Added to Queue/Download: {name}")
127 | async with download_dict_lock:
128 | download_dict[self.__listener.uid] = QueueStatus(
129 | name, size, gid, self.__listener, 'dl')
130 | await self.__listener.onDownloadStart()
131 | await sendStatusMessage(self.__listener.message)
132 | await event.wait()
133 | async with download_dict_lock:
134 | if self.__listener.uid not in download_dict:
135 | return
136 | from_queue = True
137 | else:
138 | from_queue = False
139 | await self.__onDownloadStart(name, size, gid, from_queue)
140 | await self.__download(message, path)
141 | else:
142 | await self.__onDownloadError('File already being downloaded!')
143 | else:
144 | await self.__onDownloadError('No document in the replied message')
145 |
146 | async def cancel_download(self):
147 | self.__is_cancelled = True
148 | LOGGER.info(
149 | f'Cancelling download on user request: name: {self.name} id: {self.__id}')
150 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/rclone_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/junedkh/jmdkh-mltb/e685f5b40d20907af3e917218a5e140cca0e9d2b/bot/helper/mirror_utils/rclone_utils/__init__.py
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/rclone_utils/serve.py:
--------------------------------------------------------------------------------
1 | from asyncio import create_subprocess_exec
2 | from aiofiles.os import path as aiopath
3 | from aiofiles import open as aiopen
4 | from configparser import ConfigParser
5 |
6 | from bot import config_dict, bot_loop
7 |
8 | RcloneServe = []
9 |
10 |
11 | async def rclone_serve_booter():
12 | if not config_dict['RCLONE_SERVE_URL'] or not await aiopath.exists('rclone.conf'):
13 | if RcloneServe:
14 | try:
15 | RcloneServe[0].kill()
16 | RcloneServe.clear()
17 | except:
18 | pass
19 | return
20 | config = ConfigParser()
21 | async with aiopen('rclone.conf', 'r') as f:
22 | contents = await f.read()
23 | config.read_string(contents)
24 | if not config.has_section('combine'):
25 | upstreams = ' '.join(
26 | f'{remote}={remote}:' for remote in config.sections())
27 | config.add_section('combine')
28 | config.set('combine', 'type', 'combine')
29 | config.set('combine', 'upstreams', upstreams)
30 | with open('rclone.conf', 'w') as f:
31 | config.write(f, space_around_delimiters=False)
32 | if RcloneServe:
33 | try:
34 | RcloneServe[0].kill()
35 | RcloneServe.clear()
36 | except:
37 | pass
38 | cmd = ["rclone", "serve", "http", "--config", "rclone.conf", "--no-modtime",
39 | "combine:", "--addr", f":{config_dict['RCLONE_SERVE_PORT']}",
40 | "--vfs-cache-mode", "full", "--vfs-cache-max-age", "1m0s",
41 | "--buffer-size", "64M"]
42 | if (user := config_dict['RCLONE_SERVE_USER']) and (pswd := config_dict['RCLONE_SERVE_PASS']):
43 | cmd.extend(("--user", user, "--pass", pswd))
44 | rcs = await create_subprocess_exec(*cmd)
45 | RcloneServe.append(rcs)
46 |
47 | bot_loop.run_until_complete(rclone_serve_booter())
--------------------------------------------------------------------------------
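A sketch of the [combine] section `rclone_serve_booter` appends to rclone.conf, assuming two existing remotes named gdrive and onedrive (names are illustrative):

    from configparser import ConfigParser

    config = ConfigParser()
    config.read_string('[gdrive]\ntype = drive\n\n[onedrive]\ntype = onedrive\n')
    upstreams = ' '.join(f'{r}={r}:' for r in config.sections())
    # upstreams == 'gdrive=gdrive: onedrive=onedrive:'
    # resulting section served over HTTP as a single "combine:" remote:
    #   [combine]
    #   type = combine
    #   upstreams = gdrive=gdrive: onedrive=onedrive:
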
/bot/helper/mirror_utils/status_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/aria2_status.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from time import time
3 |
4 | from bot import LOGGER, aria2
5 | from bot.helper.ext_utils.bot_utils import (MirrorStatus, get_readable_time,
6 | sync_to_async)
7 |
8 |
9 | def get_download(gid):
10 | try:
11 | return aria2.get_download(gid)
12 | except Exception as e:
13 |         LOGGER.error(f'{e}: Aria2c error while getting torrent info')
14 |
15 | engine_ = f"Aria2c v{aria2.client.get_version()['version']}"
16 |
17 | class Aria2Status:
18 |
19 | def __init__(self, gid, listener, seeding=False, queued=False):
20 | self.__gid = gid
21 | self.__download = get_download(gid)
22 | self.__listener = listener
23 | self.queued = queued
24 | self.start_time = 0
25 | self.seeding = seeding
26 | self.message = listener.message
27 | self.extra_details = self.__listener.extra_details
28 | self.engine = engine_
29 |
30 | def __update(self):
31 | if self.__download is None:
32 | self.__download = get_download(self.__gid)
33 | else:
34 | self.__download = self.__download.live
35 | if self.__download.followed_by_ids:
36 | self.__gid = self.__download.followed_by_ids[0]
37 | self.__download = get_download(self.__gid)
38 |
39 | def progress(self):
40 | return self.__download.progress_string()
41 |
42 | def processed_bytes(self):
43 | return self.__download.completed_length_string()
44 |
45 | def speed(self):
46 | return self.__download.download_speed_string()
47 |
48 | def name(self):
49 | return self.__download.name
50 |
51 | def size(self):
52 | return self.__download.total_length_string()
53 |
54 | def eta(self):
55 | return self.__download.eta_string()
56 |
57 | def status(self):
58 | self.__update()
59 | if self.__download.is_waiting or self.queued:
60 | if self.seeding:
61 | return MirrorStatus.STATUS_QUEUEUP
62 | else:
63 | return MirrorStatus.STATUS_QUEUEDL
64 | elif self.__download.is_paused:
65 | return MirrorStatus.STATUS_PAUSED
66 | elif self.__download.seeder and self.seeding:
67 | return MirrorStatus.STATUS_SEEDING
68 | else:
69 | return MirrorStatus.STATUS_DOWNLOADING
70 |
71 | def seeders_num(self):
72 | return self.__download.num_seeders
73 |
74 | def leechers_num(self):
75 | return self.__download.connections
76 |
77 | def uploaded_bytes(self):
78 | return self.__download.upload_length_string()
79 |
80 | def upload_speed(self):
81 | self.__update()
82 | return self.__download.upload_speed_string()
83 |
84 | def ratio(self):
85 | return f"{round(self.__download.upload_length / self.__download.completed_length, 3)}"
86 |
87 | def seeding_time(self):
88 | return get_readable_time(time() - self.start_time)
89 |
90 | def download(self):
91 | return self
92 |
93 | def listener(self):
94 | return self.__listener
95 |
96 | def gid(self):
97 | self.__update()
98 | return self.__gid
99 |
100 | async def cancel_download(self):
101 |         # run the blocking client update off the event loop
102 |         await sync_to_async(self.__update)
103 | if self.__download.seeder and self.seeding:
104 | LOGGER.info(f"Cancelling Seed: {self.name()}")
105 | await self.__listener.onUploadError(f"Seeding stopped with Ratio: {self.ratio()} and Time: {self.seeding_time()}")
106 | await sync_to_async(aria2.remove, [self.__download], force=True, files=True)
107 | elif downloads := self.__download.followed_by:
108 | LOGGER.info(f"Cancelling Download: {self.name()}")
109 | await self.__listener.onDownloadError('Download cancelled by user!')
110 | downloads.append(self.__download)
111 | await sync_to_async(aria2.remove, downloads, force=True, files=True)
112 | else:
113 | if self.queued:
114 | LOGGER.info(f'Cancelling QueueDl: {self.name()}')
115 |                 msg = 'Task has been removed from queue/download'
116 | else:
117 | LOGGER.info(f"Cancelling Download: {self.name()}")
118 | msg = 'Download stopped by user!'
119 | await self.__listener.onDownloadError(msg)
120 | await sync_to_async(aria2.remove, [self.__download], force=True, files=True)
--------------------------------------------------------------------------------
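All status classes in this package expose the same duck-typed interface (name(), size(), progress(), speed(), eta(), status(), gid(), download()), which is what lets the status message builder treat every tracked task uniformly. A minimal consumer sketch, assuming statuses is any iterable of these objects (the helper name is hypothetical):

    def render_status_lines(statuses):
        # Every status object returns pre-formatted strings, so rendering
        # is plain string assembly with no engine-specific branching.
        return '\n'.join(
            f'{st.name()} | {st.status()} | {st.progress()} '
            f'@ {st.speed()}, ETA: {st.eta()}'
            for st in statuses)
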
/bot/helper/mirror_utils/status_utils/extract_status.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from time import time
3 |
4 | from bot import LOGGER
5 | from bot.helper.ext_utils.bot_utils import (MirrorStatus, async_to_sync,
6 | get_readable_file_size,
7 | get_readable_time)
8 | from bot.helper.ext_utils.fs_utils import get_path_size
9 |
10 |
11 | class ExtractStatus:
12 | def __init__(self, name, size, gid, listener):
13 | self.__name = name
14 | self.__size = size
15 | self.__gid = gid
16 | self.__listener = listener
17 | self.__start_time = time()
18 | self.message = self.__listener.message
19 | self.extra_details = self.__listener.extra_details
20 | self.engine = '7z'
21 |
22 | def gid(self):
23 | return self.__gid
24 |
25 | def speed_raw(self):
26 | return self.processed_raw() / (time() - self.__start_time)
27 |
28 | def progress_raw(self):
29 | try:
30 | return self.processed_raw() / self.__size * 100
31 | except:
32 | return 0
33 |
34 | def progress(self):
35 | return f'{round(self.progress_raw(), 2)}%'
36 |
37 | def speed(self):
38 | return f'{get_readable_file_size(self.speed_raw())}/s'
39 |
40 | def name(self):
41 | return self.__name
42 |
43 | def size(self):
44 | return get_readable_file_size(self.__size)
45 |
46 | def eta(self):
47 | try:
48 | seconds = (self.__size - self.processed_raw()) / self.speed_raw()
49 | return get_readable_time(seconds)
50 | except:
51 | return '-'
52 |
53 | def status(self):
54 | return MirrorStatus.STATUS_EXTRACTING
55 |
56 | def processed_bytes(self):
57 | return get_readable_file_size(self.processed_raw())
58 |
59 | def processed_raw(self):
60 | if self.__listener.newDir:
61 | return async_to_sync(get_path_size, self.__listener.newDir)
62 | else:
63 | return async_to_sync(get_path_size, self.__listener.dir) - self.__size
64 |
65 | def download(self):
66 | return self
67 |
68 | async def cancel_download(self):
69 | LOGGER.info(f'Cancelling Extract: {self.__name}')
70 | if self.__listener.suproc is not None:
71 | self.__listener.suproc.kill()
72 | else:
73 | self.__listener.suproc = 'cancelled'
74 | await self.__listener.onUploadError('extracting stopped by user!')
75 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/gdrive_status.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from pkg_resources import get_distribution
3 |
4 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
5 | get_readable_file_size,
6 | get_readable_time)
7 |
8 | engine_ = f"Google Api v{get_distribution('google-api-python-client').version}"
9 |
10 |
11 | class GdriveStatus:
12 | def __init__(self, obj, size, message, gid, status, extra_details):
13 | self.__obj = obj
14 | self.__size = size
15 | self.__gid = gid
16 | self.__status = status
17 | self.message = message
18 | self.extra_details = extra_details
19 | self.engine = engine_
20 |
21 | def processed_bytes(self):
22 | return get_readable_file_size(self.__obj.processed_bytes)
23 |
24 | def size(self):
25 | return get_readable_file_size(self.__size)
26 |
27 | def status(self):
28 | if self.__status == 'up':
29 | return MirrorStatus.STATUS_UPLOADING
30 | elif self.__status == 'dl':
31 | return MirrorStatus.STATUS_DOWNLOADING
32 | else:
33 | return MirrorStatus.STATUS_CLONING
34 |
35 | def name(self):
36 | return self.__obj.name
37 |
38 | def gid(self) -> str:
39 | return self.__gid
40 |
41 | def progress_raw(self):
42 | try:
43 | return self.__obj.processed_bytes / self.__size * 100
44 | except:
45 | return 0
46 |
47 | def progress(self):
48 | return f'{round(self.progress_raw(), 2)}%'
49 |
50 | def speed(self):
51 | return f'{get_readable_file_size(self.__obj.speed)}/s'
52 |
53 | def eta(self):
54 | try:
55 | seconds = (self.__size - self.__obj.processed_bytes) / \
56 | self.__obj.speed
57 | return get_readable_time(seconds)
58 | except:
59 | return '-'
60 |
61 | def download(self):
62 | return self.__obj
63 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/mega_download_status.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from mega import MegaApi
3 |
4 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
5 | get_readable_file_size,
6 | get_readable_time)
7 |
8 | engine_ = f"MegaSDK v{MegaApi('test').getVersion()}"
9 |
10 |
11 | class MegaDownloadStatus:
12 |
13 | def __init__(self, name, size, gid, obj, message, extra_details):
14 | self.__obj = obj
15 | self.__name = name
16 | self.__size = size
17 | self.__gid = gid
18 | self.message = message
19 | self.extra_details = extra_details
20 | self.engine = engine_
21 |
22 | def name(self):
23 | return self.__name
24 |
25 | def progress_raw(self):
26 | try:
27 | return round(self.__obj.downloaded_bytes / self.__size * 100, 2)
28 | except:
29 | return 0.0
30 |
31 | def progress(self):
32 | return f"{self.progress_raw()}%"
33 |
34 | def status(self):
35 | return MirrorStatus.STATUS_DOWNLOADING
36 |
37 | def processed_bytes(self):
38 | return get_readable_file_size(self.__obj.downloaded_bytes)
39 |
40 | def eta(self):
41 | try:
42 | seconds = (self.__size - self.__obj.downloaded_bytes) / \
43 | self.__obj.speed
44 | return get_readable_time(seconds)
45 | except ZeroDivisionError:
46 | return '-'
47 |
48 | def size(self):
49 | return get_readable_file_size(self.__size)
50 |
51 | def speed(self):
52 | return f'{get_readable_file_size(self.__obj.speed)}/s'
53 |
54 | def gid(self):
55 | return self.__gid
56 |
57 | def download(self):
58 | return self.__obj
59 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/qbit_status.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from asyncio import sleep
3 |
4 | from bot import LOGGER, QbTorrents, get_client, qb_listener_lock
5 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
6 | get_readable_file_size,
7 | get_readable_time, sync_to_async)
8 |
9 |
10 | def get_download(client, tag):
11 | try:
12 | return client.torrents_info(tag=tag)[0]
13 | except Exception as e:
14 |         LOGGER.error(
15 |             f'{e}: qBittorrent, error while getting torrent info. Tag: {tag}')
16 |
17 |
18 | engine_ = f"qBittorrent {get_client().app.version}"
19 |
20 |
21 | class QbittorrentStatus:
22 |
23 | def __init__(self, listener, seeding=False, queued=False):
24 | self.__client = get_client()
25 | self.__listener = listener
26 | self.__info = get_download(self.__client, f'{self.__listener.uid}')
27 | self.queued = queued
28 | self.seeding = seeding
29 | self.message = listener.message
30 | self.extra_details = self.__listener.extra_details
31 | self.engine = engine_
32 |
33 | def __update(self):
34 | if new_info := get_download(self.__client, f'{self.__listener.uid}'):
35 | self.__info = new_info
36 |
37 | def progress(self):
38 | return f'{round(self.__info.progress*100, 2)}%'
39 |
40 | def processed_bytes(self):
41 | return get_readable_file_size(self.__info.downloaded)
42 |
43 | def speed(self):
44 | return f"{get_readable_file_size(self.__info.dlspeed)}/s"
45 |
46 | def name(self):
47 | if self.__info.state in ["metaDL", "checkingResumeData"]:
48 | return f"[METADATA]{self.__info.name}"
49 | else:
50 | return self.__info.name
51 |
52 | def size(self):
53 | return get_readable_file_size(self.__info.size)
54 |
55 | def eta(self):
56 | return get_readable_time(self.__info.eta)
57 |
58 | def status(self):
59 | self.__update()
60 | state = self.__info.state
61 | if state == "queuedDL" or self.queued:
62 | return MirrorStatus.STATUS_QUEUEDL
63 | elif state == "queuedUP":
64 | return MirrorStatus.STATUS_QUEUEUP
65 | elif state in ["pausedDL", "pausedUP"]:
66 | return MirrorStatus.STATUS_PAUSED
67 | elif state in ["checkingUP", "checkingDL"]:
68 | return MirrorStatus.STATUS_CHECKING
69 | elif state in ["stalledUP", "uploading"] and self.seeding:
70 | return MirrorStatus.STATUS_SEEDING
71 | else:
72 | return MirrorStatus.STATUS_DOWNLOADING
73 |
74 | def seeders_num(self):
75 | return self.__info.num_seeds
76 |
77 | def leechers_num(self):
78 | return self.__info.num_leechs
79 |
80 | def uploaded_bytes(self):
81 | return get_readable_file_size(self.__info.uploaded)
82 |
83 | def upload_speed(self):
84 | return f"{get_readable_file_size(self.__info.upspeed)}/s"
85 |
86 | def ratio(self):
87 | return f"{round(self.__info.ratio, 3)}"
88 |
89 | def seeding_time(self):
90 | return get_readable_time(self.__info.seeding_time)
91 |
92 | def download(self):
93 | return self
94 |
95 | def gid(self):
96 | return self.hash()[:12]
97 |
98 | def hash(self):
99 | self.__update()
100 | return self.__info.hash
101 |
102 | def client(self):
103 | return self.__client
104 |
105 | def listener(self):
106 | return self.__listener
107 |
108 | async def cancel_download(self):
109 | self.__update()
110 | await sync_to_async(self.__client.torrents_pause, torrent_hashes=self.__info.hash)
111 | if not self.seeding:
112 | if self.queued:
113 | LOGGER.info(f'Cancelling QueueDL: {self.name()}')
114 |                 msg = 'Task has been removed from queue/download'
115 | else:
116 | LOGGER.info(f"Cancelling Download: {self.__info.name}")
117 | msg = 'Download stopped by user!'
118 | await sleep(0.3)
119 | await self.__listener.onDownloadError(msg)
120 | await sync_to_async(self.__client.torrents_delete, torrent_hashes=self.__info.hash, delete_files=True)
121 | await sync_to_async(self.__client.torrents_delete_tags, tags=self.__info.tags)
122 | async with qb_listener_lock:
123 | if self.__info.tags in QbTorrents:
124 | del QbTorrents[self.__info.tags]
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/queue_status.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from bot import LOGGER
3 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size
4 |
5 |
6 | class QueueStatus:
7 | def __init__(self, name, size, gid, listener, status):
8 | self.__name = name
9 | self.__size = size
10 | self.__gid = gid
11 | self.__listener = listener
12 | self.__status = status
13 | self.message = self.__listener.message
14 | self.extra_details = self.__listener.extra_details
15 | self.engine = "Queue System v2.0"
16 |
17 | def gid(self):
18 | return self.__gid
19 |
20 | def name(self):
21 | return self.__name
22 |
23 | def size(self):
24 | return get_readable_file_size(self.__size)
25 |
26 | def status(self):
27 | if self.__status == 'dl':
28 | return MirrorStatus.STATUS_QUEUEDL
29 | return MirrorStatus.STATUS_QUEUEUP
30 |
31 | def processed_bytes(self):
32 | return 0
33 |
34 | def progress(self):
35 | return '0%'
36 |
37 | def speed(self):
38 | return '0B/s'
39 |
40 | def eta(self):
41 | return '-'
42 |
43 | def download(self):
44 | return self
45 |
46 | async def cancel_download(self):
47 | LOGGER.info(f'Cancelling Queue{self.__status}: {self.__name}')
48 | if self.__status == 'dl':
49 |             await self.__listener.onDownloadError('Task has been removed from queue/download')
50 |         else:
51 |             await self.__listener.onUploadError('Task has been removed from queue/upload')
52 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/rclone_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import MirrorStatus
2 |
3 |
4 | class RcloneStatus:
5 | def __init__(self, obj, message, gid, status, extra_details):
6 | self.__obj = obj
7 | self.__gid = gid
8 | self.__status = status
9 | self.message = message
10 | self.extra_details = extra_details
11 | self.engine = "rclone"
12 |
13 | def gid(self):
14 | return self.__gid
15 |
16 | def progress(self):
17 | return self.__obj.percentage
18 |
19 | def speed(self):
20 | return self.__obj.speed
21 |
22 | def name(self):
23 | return self.__obj.name
24 |
25 | def size(self):
26 | return self.__obj.size
27 |
28 | def eta(self):
29 | return self.__obj.eta
30 |
31 | def status(self):
32 | if self.__status == 'dl':
33 | return MirrorStatus.STATUS_DOWNLOADING
34 | elif self.__status == 'up':
35 | return MirrorStatus.STATUS_UPLOADING
36 | else:
37 | return MirrorStatus.STATUS_CLONING
38 |
39 | def processed_bytes(self):
40 | return self.__obj.transferred_size
41 |
42 | def download(self):
43 | return self.__obj
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/split_status.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from bot import LOGGER
3 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size
4 |
5 |
6 | class SplitStatus:
7 | def __init__(self, name, size, gid, listener):
8 | self.__name = name
9 | self.__gid = gid
10 | self.__size = size
11 | self.__listener = listener
12 | self.message = listener.message
13 | self.extra_details = self.__listener.extra_details
14 | self.engine = "ffmpeg/split"
15 |
16 | def gid(self):
17 | return self.__gid
18 |
19 | def progress(self):
20 | return '0'
21 |
22 | def speed(self):
23 | return '0'
24 |
25 | def name(self):
26 | return self.__name
27 |
28 | def size(self):
29 | return get_readable_file_size(self.__size)
30 |
31 | def eta(self):
32 | return '0s'
33 |
34 | def status(self):
35 | return MirrorStatus.STATUS_SPLITTING
36 |
37 | def processed_bytes(self):
38 | return 0
39 |
40 | def download(self):
41 | return self
42 |
43 | async def cancel_download(self):
44 | LOGGER.info(f'Cancelling Split: {self.__name}')
45 | if self.__listener.suproc:
46 | self.__listener.suproc.kill()
47 | else:
48 | self.__listener.suproc = 'cancelled'
49 | await self.__listener.onUploadError('splitting stopped by user!')
50 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/telegram_status.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from pkg_resources import get_distribution
3 |
4 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
5 | get_readable_file_size,
6 | get_readable_time)
7 |
8 | engine_ = f"pyrogram v{get_distribution('pyrogram').version}"
9 |
10 |
11 | class TelegramStatus:
12 | def __init__(self, obj, size, message, gid, status, extra_details):
13 | self.__obj = obj
14 | self.__size = size
15 | self.__gid = gid
16 | self.__status = status
17 | self.message = message
18 | self.extra_details = extra_details
19 | self.engine = engine_
20 |
21 | def processed_bytes(self):
22 | return get_readable_file_size(self.__obj.processed_bytes)
23 |
24 | def size(self):
25 | return get_readable_file_size(self.__size)
26 |
27 | def status(self):
28 | if self.__status == 'up':
29 | return MirrorStatus.STATUS_UPLOADING
30 | return MirrorStatus.STATUS_DOWNLOADING
31 |
32 | def name(self):
33 | return self.__obj.name
34 |
35 | def progress(self):
36 | try:
37 | progress_raw = self.__obj.processed_bytes / self.__size * 100
38 | except:
39 | progress_raw = 0
40 | return f'{round(progress_raw, 2)}%'
41 |
42 | def speed(self):
43 | return f'{get_readable_file_size(self.__obj.speed)}/s'
44 |
45 | def eta(self):
46 | try:
47 | seconds = (self.__size - self.__obj.processed_bytes) / \
48 | self.__obj.speed
49 | return get_readable_time(seconds)
50 | except:
51 | return '-'
52 |
53 | def gid(self) -> str:
54 | return self.__gid
55 |
56 | def download(self):
57 | return self.__obj
58 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/yt_dlp_download_status.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from pkg_resources import get_distribution
3 |
4 | from bot.helper.ext_utils.bot_utils import (MirrorStatus, async_to_sync,
5 | get_readable_file_size,
6 | get_readable_time)
7 | from bot.helper.ext_utils.fs_utils import get_path_size
8 |
9 | engine_ = f"yt-dlp v{get_distribution('yt-dlp').version}"
10 |
11 |
12 | class YtDlpDownloadStatus:
13 | def __init__(self, obj, listener, gid):
14 | self.__obj = obj
15 | self.__gid = gid
16 | self.__listener = listener
17 | self.message = self.__listener.message
18 | self.extra_details = self.__listener.extra_details
19 | self.engine = engine_
20 |
21 | def gid(self):
22 | return self.__gid
23 |
24 | def processed_bytes(self):
25 | return get_readable_file_size(self.processed_raw())
26 |
27 | def processed_raw(self):
28 | if self.__obj.downloaded_bytes != 0:
29 | return self.__obj.downloaded_bytes
30 | else:
31 | return async_to_sync(get_path_size, self.__listener.dir)
32 |
33 | def size(self):
34 | return get_readable_file_size(self.__obj.size)
35 |
36 | def status(self):
37 | return MirrorStatus.STATUS_DOWNLOADING
38 |
39 | def name(self):
40 | return self.__obj.name
41 |
42 | def progress(self):
43 | return f'{round(self.__obj.progress, 2)}%'
44 |
45 | def listener(self):
46 | return self.__listener
47 |
48 | def speed(self):
49 | return f'{get_readable_file_size(self.__obj.download_speed)}/s'
50 |
51 | def eta(self):
52 | if self.__obj.eta != '-':
53 | return get_readable_time(self.__obj.eta)
54 | try:
55 | seconds = (self.__obj.size - self.processed_raw()) / \
56 | self.__obj.download_speed
57 | return get_readable_time(seconds)
58 | except:
59 | return '-'
60 |
61 | def download(self):
62 | return self.__obj
63 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/zip_status.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from time import time
3 |
4 | from bot import LOGGER
5 | from bot.helper.ext_utils.bot_utils import (MirrorStatus, async_to_sync,
6 | get_readable_file_size,
7 | get_readable_time)
8 | from bot.helper.ext_utils.fs_utils import get_path_size
9 |
10 |
11 | class ZipStatus:
12 | def __init__(self, name, size, gid, listener):
13 | self.__name = name
14 | self.__size = size
15 | self.__gid = gid
16 | self.__listener = listener
17 | self.__start_time = time()
18 | self.message = self.__listener.message
19 | self.extra_details = self.__listener.extra_details
20 | self.engine = "7z"
21 |
22 | def gid(self):
23 | return self.__gid
24 |
25 | def speed_raw(self):
26 | return self.processed_raw() / (time() - self.__start_time)
27 |
28 | def progress_raw(self):
29 | try:
30 | return self.processed_raw() / self.__size * 100
31 | except:
32 | return 0
33 |
34 | def progress(self):
35 | return f'{round(self.progress_raw(), 2)}%'
36 |
37 | def speed(self):
38 | return f'{get_readable_file_size(self.speed_raw())}/s'
39 |
40 | def name(self):
41 | return self.__name
42 |
43 | def size(self):
44 | return get_readable_file_size(self.__size)
45 |
46 | def eta(self):
47 | try:
48 | seconds = (self.__size - self.processed_raw()) / self.speed_raw()
49 | return get_readable_time(seconds)
50 | except:
51 | return '-'
52 |
53 | def status(self):
54 | return MirrorStatus.STATUS_ARCHIVING
55 |
56 | def processed_raw(self):
57 | if self.__listener.newDir:
58 | return async_to_sync(get_path_size, self.__listener.newDir)
59 | else:
60 | return async_to_sync(get_path_size, self.__listener.dir) - self.__size
61 |
62 | def processed_bytes(self):
63 | return get_readable_file_size(self.processed_raw())
64 |
65 | def download(self):
66 | return self
67 |
68 | async def cancel_download(self):
69 | LOGGER.info(f'Cancelling Archive: {self.__name}')
70 | if self.__listener.suproc:
71 | self.__listener.suproc.kill()
72 | else:
73 | self.__listener.suproc = 'cancelled'
74 | await self.__listener.onUploadError('archiving stopped by user!')
75 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/upload_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/bot_commands.py:
--------------------------------------------------------------------------------
1 | from bot import CMD_SUFFIX
2 |
3 |
4 | class _BotCommands:
5 | def __init__(self):
6 | self.StartCommand = 'start'
7 | self.MirrorCommand = [f'mirror{CMD_SUFFIX}', f'm{CMD_SUFFIX}']
8 | self.QbMirrorCommand = [f'qbmirror{CMD_SUFFIX}', f'qm{CMD_SUFFIX}']
9 | self.YtdlCommand = [f'ytdl{CMD_SUFFIX}', f'y{CMD_SUFFIX}']
10 | self.LeechCommand = [f'leech{CMD_SUFFIX}', f'l{CMD_SUFFIX}']
11 | self.QbLeechCommand = [f'qbleech{CMD_SUFFIX}', f'ql{CMD_SUFFIX}']
12 | self.YtdlLeechCommand = [f'ytdlleech{CMD_SUFFIX}', f'yl{CMD_SUFFIX}']
13 | self.CloneCommand = f'clone{CMD_SUFFIX}'
14 | self.CountCommand = f'count{CMD_SUFFIX}'
15 | self.DeleteCommand = f'del{CMD_SUFFIX}'
16 | self.CancelMirror = f'cancel{CMD_SUFFIX}'
17 | self.CancelAllCommand = [f'cancelall{CMD_SUFFIX}', 'cancelallbot']
18 | self.ListCommand = f'list{CMD_SUFFIX}'
19 | self.SearchCommand = f'search{CMD_SUFFIX}'
20 | self.StatusCommand = [f'status{CMD_SUFFIX}', f's{CMD_SUFFIX}', 'sall']
21 | self.UsersCommand = f'users{CMD_SUFFIX}'
22 | self.AuthorizeCommand = f'authorize{CMD_SUFFIX}'
23 | self.UnAuthorizeCommand = f'unauthorize{CMD_SUFFIX}'
24 | self.AddSudoCommand = f'addsudo{CMD_SUFFIX}'
25 | self.RmSudoCommand = f'rmsudo{CMD_SUFFIX}'
26 | self.PingCommand = ['ping', 'p']
27 | self.RestartCommand = [f'restart{CMD_SUFFIX}', 'restartall']
28 | self.StatsCommand = f'stats{CMD_SUFFIX}'
29 | self.HelpCommand = f'help{CMD_SUFFIX}'
30 | self.LogCommand = f'log{CMD_SUFFIX}'
31 | self.ShellCommand = f'shell{CMD_SUFFIX}'
32 | self.EvalCommand = f'eval{CMD_SUFFIX}'
33 | self.ExecCommand = f'exec{CMD_SUFFIX}'
34 | self.ClearLocalsCommand = f'clearlocals{CMD_SUFFIX}'
35 | self.BotSetCommand = f'bsetting{CMD_SUFFIX}'
36 | self.UserSetCommand = f'usetting{CMD_SUFFIX}'
37 | self.BtSelectCommand = f'btsel{CMD_SUFFIX}'
38 | self.RssCommand = f'rss{CMD_SUFFIX}'
39 | self.CategorySelect = f'catsel{CMD_SUFFIX}'
40 | self.RmdbCommand = f'rmdb{CMD_SUFFIX}'
41 |
42 |
43 | BotCommands = _BotCommands()
44 |
--------------------------------------------------------------------------------
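Every command name above is built with CMD_SUFFIX appended, which lets multiple bot instances share a chat without answering each other's commands. An illustration, assuming CMD_SUFFIX = '2' (the suffix value is hypothetical):

    from bot.helper.telegram_helper.bot_commands import BotCommands

    # With CMD_SUFFIX = '2':
    #   BotCommands.MirrorCommand -> ['mirror2', 'm2']
    #   BotCommands.CloneCommand  -> 'clone2'
    #   BotCommands.PingCommand   -> ['ping', 'p']  # ping/start are never suffixed
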
/bot/helper/telegram_helper/button_build.py:
--------------------------------------------------------------------------------
1 | from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton
2 |
3 |
4 | class ButtonMaker:
5 | def __init__(self):
6 | self.__button = []
7 | self.__header_button = []
8 | self.__footer_button = []
9 |
10 | def ubutton(self, key, link, position=None):
11 | if not position:
12 | self.__button.append(InlineKeyboardButton(text=key, url=link))
13 | elif position == 'header':
14 | self.__header_button.append(
15 | InlineKeyboardButton(text=key, url=link))
16 | elif position == 'footer':
17 | self.__footer_button.append(
18 | InlineKeyboardButton(text=key, url=link))
19 |
20 | def ibutton(self, key, data, position=None):
21 | if not position:
22 | self.__button.append(InlineKeyboardButton(
23 | text=key, callback_data=data))
24 | elif position == 'header':
25 | self.__header_button.append(
26 | InlineKeyboardButton(text=key, callback_data=data))
27 | elif position == 'footer':
28 | self.__footer_button.append(
29 | InlineKeyboardButton(text=key, callback_data=data))
30 |
31 | def build_menu(self, b_cols=1, h_cols=8, f_cols=8):
32 | menu = [self.__button[i:i+b_cols]
33 | for i in range(0, len(self.__button), b_cols)]
34 | if self.__header_button:
35 | h_cnt = len(self.__header_button)
36 | if h_cnt > h_cols:
37 | header_buttons = [self.__header_button[i:i+h_cols]
38 | for i in range(0, len(self.__header_button), h_cols)]
39 | menu = header_buttons + menu
40 | else:
41 | menu.insert(0, self.__header_button)
42 | if self.__footer_button:
43 | if len(self.__footer_button) > f_cols:
44 | [menu.append(self.__footer_button[i:i+f_cols])
45 | for i in range(0, len(self.__footer_button), f_cols)]
46 | else:
47 | menu.append(self.__footer_button)
48 | return InlineKeyboardMarkup(menu)
49 |
--------------------------------------------------------------------------------
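A usage sketch of ButtonMaker, mirroring how the modules below build their menus (the callback data values are illustrative):

    buttons = ButtonMaker()
    buttons.ibutton('Downloading', 'cnall Downloading 123')
    buttons.ibutton('Uploading', 'cnall Uploading 123')
    buttons.ibutton('Close', 'cnall close 123', position='footer')
    # Two body columns; footer rows are appended after the body rows.
    markup = buttons.build_menu(b_cols=2)  # -> InlineKeyboardMarkup
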
/bot/helper/telegram_helper/filters.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from pyrogram.filters import create
3 |
4 | from bot import OWNER_ID, user_data
5 |
6 |
7 | class CustomFilters:
8 |
9 | async def owner_filter(self, client, update):
10 | user = update.from_user or update.sender_chat
11 | uid = user.id
12 | return uid == OWNER_ID
13 |
14 | owner = create(owner_filter)
15 |
16 | async def authorized_user(self, client, update):
17 | user = update.from_user or update.sender_chat
18 | uid = user.id
19 | chat_id = update.chat.id
20 | return bool(uid == OWNER_ID or (uid in user_data and (user_data[uid].get('is_auth', False) or
21 | user_data[uid].get('is_sudo', False))) or (chat_id in user_data and user_data[chat_id].get('is_auth', False)))
22 |
23 | authorized = create(authorized_user)
24 |
25 | async def sudo_user(self, client, update):
26 | user = update.from_user or update.sender_chat
27 | uid = user.id
28 | return bool(uid == OWNER_ID or uid in user_data and user_data[uid].get('is_sudo'))
29 |
30 | sudo = create(sudo_user)
31 |
--------------------------------------------------------------------------------
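These filters compose with pyrogram's built-in ones via &, which is exactly how the modules below register their handlers. A sketch (the handler name and command are hypothetical):

    from pyrogram.filters import command
    from pyrogram.handlers import MessageHandler

    async def my_handler(client, message):
        ...  # only reachable by sudo users

    bot.add_handler(MessageHandler(my_handler,
                    filters=command('example') & CustomFilters.sudo))
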
/bot/modules/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/modules/anonymous.py:
--------------------------------------------------------------------------------
1 | from pyrogram.filters import regex
2 | from pyrogram.handlers import CallbackQueryHandler
3 |
4 | from bot import LOGGER, bot, cached_dict
5 | from bot.helper.telegram_helper.message_utils import (deleteMessage,
6 | editMessage, isAdmin)
7 |
8 |
9 | async def verifyAnno(_, query):
10 | message = query.message
11 | data = query.data.split()
12 | msg_id = int(data[2])
13 | if msg_id not in cached_dict:
14 | return await editMessage(message, 'Old Verification Message')
15 | user = query.from_user
16 | is_admin = await isAdmin(message, user.id)
17 | if data[1] == 'admin' and is_admin:
18 | await query.answer(f'Username: {user.username}\nYour userid : {user.id}')
19 | cached_dict[msg_id] = user
20 | LOGGER.info(f'Verification Success by ({user.username}) {user.id}')
21 | await deleteMessage(message)
22 | elif data[1] == 'admin':
23 |         await query.answer('You are not an admin!')
24 | else:
25 | await query.answer()
26 | await editMessage(message, 'Cancel Verification')
27 |
28 | bot.add_handler(CallbackQueryHandler(verifyAnno, filters=regex("^verify")))
--------------------------------------------------------------------------------
/bot/modules/authorize.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from pyrogram.filters import command
3 | from pyrogram.handlers import MessageHandler
4 |
5 | from bot import DATABASE_URL, bot, user_data
6 | from bot.helper.ext_utils.bot_utils import update_user_ldata
7 | from bot.helper.ext_utils.db_handler import DbManger
8 | from bot.helper.telegram_helper.bot_commands import BotCommands
9 | from bot.helper.telegram_helper.filters import CustomFilters
10 | from bot.helper.telegram_helper.message_utils import sendMessage
11 |
12 |
13 | async def authorize(_, message):
14 | msg = message.text.split()
15 | if len(msg) > 1:
16 | id_ = int(msg[1].strip())
17 | elif reply_to := message.reply_to_message:
18 | id_ = reply_to.from_user.id
19 | else:
20 | id_ = message.chat.id
21 | if id_ in user_data and user_data[id_].get('is_auth'):
22 | msg = 'Already Authorized!'
23 | else:
24 | update_user_ldata(id_, 'is_auth', True)
25 | if DATABASE_URL:
26 | await DbManger().update_user_data(id_)
27 | msg = 'Authorized'
28 | await sendMessage(message, msg)
29 |
30 |
31 | async def unauthorize(_, message):
32 | msg = message.text.split()
33 | if len(msg) > 1:
34 | id_ = int(msg[1].strip())
35 | elif reply_to := message.reply_to_message:
36 | id_ = reply_to.from_user.id
37 | else:
38 | id_ = message.chat.id
39 | if id_ not in user_data or user_data[id_].get('is_auth'):
40 | update_user_ldata(id_, 'is_auth', False)
41 | if DATABASE_URL:
42 | await DbManger().update_user_data(id_)
43 | msg = 'Unauthorized'
44 | else:
45 | msg = 'Already Unauthorized!'
46 | await sendMessage(message, msg)
47 |
48 |
49 | async def addSudo(_, message):
50 | id_ = ""
51 | msg = message.text.split()
52 | if len(msg) > 1:
53 | id_ = int(msg[1].strip())
54 | elif reply_to := message.reply_to_message:
55 | id_ = reply_to.from_user.id
56 | if id_:
57 | if id_ in user_data and user_data[id_].get('is_sudo'):
58 | msg = 'Already Sudo!'
59 | else:
60 | update_user_ldata(id_, 'is_sudo', True)
61 | if DATABASE_URL:
62 | await DbManger().update_user_data(id_)
63 | msg = 'Promoted as Sudo'
64 | else:
65 |         msg = "Give an ID or reply to the message of the user you want to promote."
66 | await sendMessage(message, msg)
67 |
68 |
69 | async def removeSudo(_, message):
70 | id_ = ""
71 | msg = message.text.split()
72 | if len(msg) > 1:
73 | id_ = int(msg[1].strip())
74 | elif reply_to := message.reply_to_message:
75 | id_ = reply_to.from_user.id
76 |     if id_ and (id_ not in user_data or user_data[id_].get('is_sudo')):
77 | update_user_ldata(id_, 'is_sudo', False)
78 | if DATABASE_URL:
79 | await DbManger().update_user_data(id_)
80 | msg = 'Demoted'
81 | else:
82 |         msg = "Give an ID or reply to the message of the user you want to remove from sudo."
83 | await sendMessage(message, msg)
84 |
85 | bot.add_handler(MessageHandler(authorize, filters=command(
86 | BotCommands.AuthorizeCommand) & CustomFilters.sudo))
87 | bot.add_handler(MessageHandler(unauthorize, filters=command(
88 | BotCommands.UnAuthorizeCommand) & CustomFilters.sudo))
89 | bot.add_handler(MessageHandler(addSudo, filters=command(
90 | BotCommands.AddSudoCommand) & CustomFilters.sudo))
91 | bot.add_handler(MessageHandler(removeSudo, filters=command(
92 | BotCommands.RmSudoCommand) & CustomFilters.sudo))
93 |
--------------------------------------------------------------------------------
/bot/modules/cancel_mirror.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from asyncio import sleep
3 |
4 | from pyrogram.filters import command, regex
5 | from pyrogram.handlers import CallbackQueryHandler, MessageHandler
6 |
7 | from bot import bot, bot_loop, download_dict, download_dict_lock
8 | from bot.helper.ext_utils.bot_utils import (MirrorStatus, getAllDownload,
9 | getDownloadByGid, new_task)
10 | from bot.helper.telegram_helper.bot_commands import BotCommands
11 | from bot.helper.telegram_helper.button_build import ButtonMaker
12 | from bot.helper.telegram_helper.filters import CustomFilters
13 | from bot.helper.telegram_helper.message_utils import (anno_checker,
14 | editMessage, sendMessage)
15 |
16 |
17 | async def cancel_mirror(client, message):
18 | if not message.from_user:
19 | message.from_user = await anno_checker(message)
20 | if not message.from_user:
21 | return
22 | user_id = message.from_user.id
23 | msg = message.text.split()
24 | if len(msg) > 1:
25 | gid = msg[1]
26 | dl = await getDownloadByGid(gid)
27 | if not dl:
28 |             await sendMessage(message, f"GID: {gid} Not Found.")
29 | return
30 | elif reply_to_id := message.reply_to_message_id:
31 | async with download_dict_lock:
32 | dl = download_dict.get(reply_to_id, None)
33 | if not dl:
34 | await sendMessage(message, "This is not an active task!")
35 | return
36 | elif len(msg) == 1:
37 |         msg = f"Reply to an active command message which was used to start the download" \
38 |               f" or send /{BotCommands.CancelMirror} GID to cancel it!"
39 | await sendMessage(message, msg)
40 | return
41 |
42 | if not await CustomFilters.sudo(client, message) and dl.message.from_user.id != user_id:
43 | await sendMessage(message, "This task is not for you!")
44 | return
45 | obj = dl.download()
46 | await obj.cancel_download()
47 |
48 | cancel_listener = {}
49 |
50 |
51 | async def cancel_all(status, info, listOfTasks):
52 | user_id = info[0]
53 | msg = info[1]
54 | tag = info[3]
55 | success = 0
56 | failed = 0
57 | _msg = f"User id: {user_id}\n" if user_id else "Everyone\n"
58 | _msg += f"Status: {status}\n"
59 | _msg += f"Total: {len(listOfTasks)}\n"
60 | for dl in listOfTasks:
61 | try:
62 | obj = dl.download()
63 | await obj.cancel_download()
64 | success += 1
65 | await sleep(1)
66 | except:
67 | failed += 1
68 | new_msg = f"Success: {success}\n"
69 | new_msg += f"Failed: {failed}\n"
70 | new_msg += f"#cancel_all : {tag}"
71 | await editMessage(msg, _msg+new_msg)
72 |
73 |
74 | async def cancel_all_buttons(client, message):
75 | async with download_dict_lock:
76 | count = len(download_dict)
77 | if count == 0:
78 | await sendMessage(message, "No active tasks!")
79 | return
80 | if not message.from_user:
81 | tag = 'Anonymous'
82 | message.from_user = await anno_checker(message)
83 | elif username := message.from_user.username:
84 | tag = f"@{username}"
85 | else:
86 | tag = message.from_user.mention
87 | user_id = message.from_user.id
88 | if await CustomFilters.sudo(client, message):
89 | if reply_to := message.reply_to_message:
90 | user_id = reply_to.from_user.id
91 | elif len(message.command) == 2 and message.command[1].casefold() == 'all':
92 | user_id = None
93 | elif len(message.command) == 2 and message.command[1].isdigit():
94 | try:
95 | user_id = int(message.command[1])
96 | except:
97 | return await sendMessage(message, "Invalid Argument! Send Userid or reply")
98 | if user_id and not await getAllDownload('all', user_id):
99 |         return await sendMessage(message, f"{user_id} doesn't have any active task!")
100 | msg_id = message.id
101 | buttons = ButtonMaker()
102 | buttons.ibutton(
103 | "Downloading", f"cnall {MirrorStatus.STATUS_DOWNLOADING} {msg_id}")
104 | buttons.ibutton(
105 | "Uploading", f"cnall {MirrorStatus.STATUS_UPLOADING} {msg_id}")
106 | buttons.ibutton("Seeding", f"cnall {MirrorStatus.STATUS_SEEDING} {msg_id}")
107 | buttons.ibutton("Cloning", f"cnall {MirrorStatus.STATUS_CLONING} {msg_id}")
108 | buttons.ibutton(
109 | "Extracting", f"cnall {MirrorStatus.STATUS_EXTRACTING} {msg_id}")
110 | buttons.ibutton(
111 | "Archiving", f"cnall {MirrorStatus.STATUS_ARCHIVING} {msg_id}")
112 | buttons.ibutton(
113 | "QueuedDl", f"cnall {MirrorStatus.STATUS_QUEUEDL} {msg_id}")
114 | buttons.ibutton(
115 | "QueuedUp", f"cnall {MirrorStatus.STATUS_QUEUEUP} {msg_id}")
116 | buttons.ibutton(
117 | "Splitting", f"cnall {MirrorStatus.STATUS_SPLITTING} {msg_id}")
118 | buttons.ibutton("Paused", f"cnall {MirrorStatus.STATUS_PAUSED} {msg_id}")
119 | buttons.ibutton("All", f"cnall all {msg_id}")
120 | buttons.ibutton("Close", f"cnall close {msg_id}")
121 | button = buttons.build_menu(2)
122 |     can_msg = await sendMessage(message, 'Choose tasks to cancel. You only have 30 seconds.', button)
123 | cancel_listener[msg_id] = [user_id, can_msg, message.from_user.id, tag]
124 | bot_loop.create_task(_auto_cancel(can_msg, msg_id))
125 |
126 |
127 | @new_task
128 | async def cancel_all_update(_, query):
129 | data = query.data.split()
130 | user_id = query.from_user.id
131 |     # query.data format: "cnall <status|all|close> <msg_id>"
132 | message = query.message
133 | msg_id = int(data[2])
134 | if not (info := cancel_listener.get(msg_id)):
135 | return await editMessage(message, "This is an old message")
136 | if info[0] and info[2] != user_id:
137 | return await query.answer(text="You are not allowed to do this!", show_alert=True)
138 | elif data[1] == 'close':
139 | await query.answer()
140 | del cancel_listener[msg_id]
141 |         return await editMessage(message, "Cancellation listener closed.")
142 | if not (listOfTasks := await getAllDownload(data[1], info[0])):
143 | return await query.answer(text=f"You don't have any active task in {data[1]}", show_alert=True)
144 |     await query.answer(f"{len(listOfTasks)} task(s) in {data[1]} will be cancelled", show_alert=True)
145 | del cancel_listener[msg_id]
146 | await cancel_all(data[1], info, listOfTasks)
147 |
148 |
149 | async def _auto_cancel(msg, msg_id):
150 | await sleep(30)
151 | if cancel_listener.get(msg_id):
152 | del cancel_listener[msg_id]
153 | await editMessage(msg, 'Timed out!')
154 |
155 | bot.add_handler(MessageHandler(cancel_mirror, filters=command(
156 | BotCommands.CancelMirror) & CustomFilters.authorized))
157 | bot.add_handler(MessageHandler(cancel_all_buttons, filters=command(
158 | BotCommands.CancelAllCommand) & CustomFilters.authorized))
159 | bot.add_handler(CallbackQueryHandler(
160 | cancel_all_update, filters=regex("^cnall")))
161 |
--------------------------------------------------------------------------------
/bot/modules/category_select.py:
--------------------------------------------------------------------------------
1 | from pyrogram.filters import command, regex
2 | from pyrogram.handlers import CallbackQueryHandler, MessageHandler
3 |
4 | from bot import (bot, cached_dict, categories_dict, download_dict,
5 | download_dict_lock)
6 | from bot.helper.ext_utils.bot_utils import (MirrorStatus, arg_parser,
7 | getDownloadByGid, is_gdrive_link,
8 | new_task, sync_to_async)
9 | from bot.helper.ext_utils.help_messages import CATEGORY_HELP_MESSAGE
10 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
11 | from bot.helper.telegram_helper.bot_commands import BotCommands
12 | from bot.helper.telegram_helper.filters import CustomFilters
13 | from bot.helper.telegram_helper.message_utils import (anno_checker,
14 | editMessage, isAdmin,
15 | open_category_btns,
16 | request_limiter,
17 | sendMessage)
18 |
19 |
20 | async def change_category(client, message):
21 | if not message.from_user:
22 | message.from_user = await anno_checker(message)
23 | if not message.from_user:
24 | return
25 | user_id = message.from_user.id
26 | if not await isAdmin(message, user_id) and await request_limiter(message):
27 | return
28 |
29 | text = message.text.split('\n')
30 | input_list = text[0].split(' ')
31 |
32 | arg_base = {'link': '', '-id': '', '-index': ''}
33 |
34 | args = arg_parser(input_list[1:], arg_base)
35 |
36 | drive_id = args['-id']
37 | index_link = args['-index']
38 |
39 | if drive_id and is_gdrive_link(drive_id):
40 | drive_id = GoogleDriveHelper.getIdFromUrl(drive_id)
41 |
42 | dl = None
43 | if gid := args['link']:
44 | dl = await getDownloadByGid(gid)
45 | if not dl:
46 |             await sendMessage(message, f"GID: {gid} Not Found.")
47 | return
48 | if reply_to := message.reply_to_message:
49 | async with download_dict_lock:
50 | dl = download_dict.get(reply_to.id, None)
51 | if not dl:
52 | await sendMessage(message, "This is not an active task!")
53 | return
54 | if not dl:
55 | await sendMessage(message, CATEGORY_HELP_MESSAGE.format(cmd=BotCommands.CategorySelect, mir=BotCommands.MirrorCommand[0]))
56 | return
57 | if not await CustomFilters.sudo(client, message) and dl.message.from_user.id != user_id:
58 | await sendMessage(message, "This task is not for you!")
59 | return
60 | if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_QUEUEDL]:
61 | await sendMessage(message, f'Task should be on {MirrorStatus.STATUS_DOWNLOADING} or {MirrorStatus.STATUS_PAUSED} or {MirrorStatus.STATUS_QUEUEDL}')
62 | return
63 | listener = dl.listener() if dl and hasattr(dl, 'listener') else None
64 | if listener and not listener.isLeech:
65 | if not index_link and not drive_id and categories_dict:
66 | drive_id, index_link = await open_category_btns(message)
67 | if not index_link and not drive_id:
68 |                 return await sendMessage(message, "Timed out")
69 |         msg = 'Task has been updated successfully!'
70 | if drive_id:
71 | if not (folder_name := await sync_to_async(GoogleDriveHelper().getFolderData, drive_id)):
72 | return await sendMessage(message, "Google Drive id validation failed!!")
73 | if listener.drive_id and listener.drive_id == drive_id:
74 | msg += f'\n\nFolder name : {folder_name} Already selected'
75 | else:
76 | msg += f'\n\nFolder name : {folder_name}'
77 | listener.drive_id = drive_id
78 | if index_link:
79 | listener.index_link = index_link
80 |             msg += f'\n\nIndex Link : {index_link}'
81 | return await sendMessage(message, msg)
82 | else:
83 |         await sendMessage(message, "Cannot change category for this task!")
84 |
85 |
86 | @new_task
87 | async def confirm_category(client, query):
88 | user_id = query.from_user.id
89 | data = query.data.split(maxsplit=3)
90 | msg_id = int(data[2])
91 | if msg_id not in cached_dict:
92 | return await editMessage(query.message, 'Old Task')
93 | if user_id != int(data[1]) and not await CustomFilters.sudo(client, query):
94 | return await query.answer(text="This task is not for you!", show_alert=True)
95 | await query.answer()
96 | cached_dict[msg_id][0] = categories_dict[data[3]].get('drive_id')
97 | cached_dict[msg_id][1] = categories_dict[data[3]].get('index_link')
98 |
99 |
100 | bot.add_handler(MessageHandler(change_category, filters=command(
101 | BotCommands.CategorySelect) & CustomFilters.authorized))
102 | bot.add_handler(CallbackQueryHandler(confirm_category, filters=regex("^scat")))
103 |
--------------------------------------------------------------------------------
/bot/modules/eval.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from pyrogram.handlers import MessageHandler
3 | from pyrogram.filters import command
4 | from os import path as ospath, getcwd, chdir
5 | from traceback import format_exc
6 | from textwrap import indent
7 | from io import StringIO, BytesIO
8 | from contextlib import redirect_stdout
9 |
10 | from bot import LOGGER, bot
11 | from bot.helper.telegram_helper.filters import CustomFilters
12 | from bot.helper.telegram_helper.bot_commands import BotCommands
13 | from bot.helper.telegram_helper.message_utils import sendFile, sendMessage
14 | from bot.helper.ext_utils.bot_utils import sync_to_async, new_task
15 |
16 | namespaces = {}
17 |
18 |
19 | def namespace_of(message):
20 | if message.chat.id not in namespaces:
21 | namespaces[message.chat.id] = {
22 | '__builtins__': globals()['__builtins__'],
23 | 'bot': bot,
24 | 'message': message,
25 | 'user': message.from_user or message.sender_chat,
26 | 'chat': message.chat}
27 |
28 | return namespaces[message.chat.id]
29 |
30 |
31 | def log_input(message):
32 | LOGGER.info(
33 | f"IN: {message.text} (user={message.from_user.id}, chat={message.chat.id})")
34 |
35 |
36 | async def send(msg, message):
37 | if len(str(msg)) > 2000:
38 | with BytesIO(str.encode(msg)) as out_file:
39 | out_file.name = "output.txt"
40 | await sendFile(message, out_file)
41 | else:
42 | LOGGER.info(f"OUT: '{msg}'")
43 |         await sendMessage(message, f"{msg}")
44 |
45 |
46 | @new_task
47 | async def evaluate(_, message):
48 | await send(await sync_to_async(do, eval, message), message)
49 |
50 |
51 | @new_task
52 | async def execute(_, message):
53 | await send(await sync_to_async(do, exec, message), message)
54 |
55 |
56 | def cleanup_code(code):
57 | if code.startswith('```') and code.endswith('```'):
58 | return '\n'.join(code.split('\n')[1:-1])
59 | return code.strip('` \n')
60 |
61 |
62 | def do(func, message):
63 | log_input(message)
64 | content = message.text.split(maxsplit=1)[-1]
65 | body = cleanup_code(content)
66 | env = namespace_of(message)
67 |
68 | chdir(getcwd())
69 | with open(ospath.join(getcwd(), 'bot/modules/temp.txt'), 'w') as temp:
70 | temp.write(body)
71 |
72 | stdout = StringIO()
73 |
74 | to_compile = f'def func():\n{indent(body, " ")}'
75 |
76 | try:
77 | exec(to_compile, env)
78 | except Exception as e:
79 | return f'{e.__class__.__name__}: {e}'
80 |
81 | func = env['func']
82 |
83 | try:
84 | with redirect_stdout(stdout):
85 | func_return = func()
86 | except Exception as e:
87 | value = stdout.getvalue()
88 | return f'{value}{format_exc()}'
89 | else:
90 | value = stdout.getvalue()
91 | result = None
92 | if func_return is None:
93 | if value:
94 | result = f'{value}'
95 | else:
96 | try:
97 | result = f'{repr(eval(body, env))}'
98 | except:
99 | pass
100 | else:
101 | result = f'{value}{func_return}'
102 | if result:
103 | return result
104 |
105 |
106 | async def clear(_, message):
107 | log_input(message)
108 | global namespaces
109 | if message.chat.id in namespaces:
110 | del namespaces[message.chat.id]
111 | await send("Locals Cleared.", message)
112 |
113 |
114 | bot.add_handler(MessageHandler(evaluate, filters=command(
115 | BotCommands.EvalCommand) & CustomFilters.owner))
116 | bot.add_handler(MessageHandler(execute, filters=command(
117 | BotCommands.ExecCommand) & CustomFilters.owner))
118 | bot.add_handler(MessageHandler(clear, filters=command(
119 | BotCommands.ClearLocalsCommand) & CustomFilters.owner))
120 |
--------------------------------------------------------------------------------
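cleanup_code() strips a surrounding Markdown fence (or stray backticks) before the body is wrapped and compiled, so both of these inputs produce the same executable source:

    cleanup_code('```py\nprint(1 + 1)\n```')  # -> 'print(1 + 1)'
    cleanup_code('`print(1 + 1)`')            # -> 'print(1 + 1)'
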
/bot/modules/gd_count.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from time import time
3 |
4 | from pyrogram.filters import command
5 | from pyrogram.handlers import MessageHandler
6 |
7 | from bot import bot
8 | from bot.helper.ext_utils.bot_utils import (get_readable_file_size,
9 | get_readable_time, is_gdrive_link,
10 | new_task, sync_to_async)
11 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
12 | from bot.helper.telegram_helper.bot_commands import BotCommands
13 | from bot.helper.telegram_helper.filters import CustomFilters
14 | from bot.helper.telegram_helper.message_utils import deleteMessage, sendMessage
15 |
16 |
17 | @new_task
18 | async def countNode(_, message):
19 | args = message.text.split()
20 | if sender_chat := message.sender_chat:
21 | tag = sender_chat.title
22 | elif username := message.from_user.username:
23 | tag = f"@{username}"
24 | else:
25 | tag = message.from_user.mention
26 |
27 | link = args[1] if len(args) > 1 else ''
28 | if len(link) == 0 and (reply_to := message.reply_to_message):
29 | link = reply_to.text.split(maxsplit=1)[0].strip()
30 |
31 | if is_gdrive_link(link):
32 |         msg = await sendMessage(message, f"Counting: {link}")
33 | gd = GoogleDriveHelper()
34 | start_time = time()
35 | name, mime_type, size, files, folders = await sync_to_async(gd.count, link)
36 | elapsed = time() - start_time
37 | if mime_type is None:
38 | await sendMessage(message, name)
39 | return
40 | await deleteMessage(msg)
41 |         msg = f'Name: {name}'
42 | msg += f'\n\nSize: {get_readable_file_size(size)}'
43 | msg += f'\n\nType: {mime_type}'
44 | if mime_type == 'Folder':
45 | msg += f'\nSubFolders: {folders}'
46 | msg += f'\nFiles: {files}'
47 | msg += f'\n\ncc: {tag} | Elapsed: {get_readable_time(elapsed)}'
48 | else:
49 |         msg = 'Send a Gdrive link along with the command or reply to a link with the command'
50 |
51 | await sendMessage(message, msg)
52 |
53 |
54 | bot.add_handler(MessageHandler(countNode, filters=command(
55 | BotCommands.CountCommand) & CustomFilters.authorized))
--------------------------------------------------------------------------------
/bot/modules/gd_delete.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from pyrogram.filters import command
3 | from pyrogram.handlers import MessageHandler
4 |
5 | from bot import LOGGER, bot
6 | from bot.helper.ext_utils.bot_utils import (is_gdrive_link, new_task,
7 | sync_to_async)
8 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
9 | from bot.helper.telegram_helper.bot_commands import BotCommands
10 | from bot.helper.telegram_helper.filters import CustomFilters
11 | from bot.helper.telegram_helper.message_utils import (auto_delete_message,
12 | sendMessage)
13 |
14 |
15 | @new_task
16 | async def deletefile(_, message):
17 | args = message.text.split()
18 | if len(args) > 1:
19 | link = args[1]
20 | elif reply_to := message.reply_to_message:
21 | link = reply_to.text.split(maxsplit=1)[0].strip()
22 | else:
23 | link = ''
24 | if is_gdrive_link(link):
25 | LOGGER.info(link)
26 | drive = GoogleDriveHelper()
27 | msg = await sync_to_async(drive.deletefile, link)
28 | else:
29 |         msg = 'Send a Gdrive link along with the command or reply to a link with the command'
30 | reply_message = await sendMessage(message, msg)
31 | await auto_delete_message(message, reply_message)
32 |
33 |
34 | bot.add_handler(MessageHandler(deletefile, filters=command(
35 | BotCommands.DeleteCommand) & CustomFilters.authorized))
36 |
--------------------------------------------------------------------------------
/bot/modules/gd_list.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from time import time
3 |
4 | from pyrogram.filters import command, regex
5 | from pyrogram.handlers import CallbackQueryHandler, MessageHandler
6 |
7 | from bot import LOGGER, bot
8 | from bot.helper.ext_utils.bot_utils import (checking_access, get_readable_time,
9 | get_telegraph_list, new_task,
10 | sync_to_async)
11 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
12 | from bot.helper.telegram_helper.bot_commands import BotCommands
13 | from bot.helper.telegram_helper.button_build import ButtonMaker
14 | from bot.helper.telegram_helper.filters import CustomFilters
15 | from bot.helper.telegram_helper.message_utils import (anno_checker,
16 | editMessage, isAdmin,
17 | request_limiter,
18 | sendMessage)
19 |
20 |
21 | async def list_buttons(user_id, isRecursive=True):
22 | buttons = ButtonMaker()
23 | buttons.ibutton("Folders", f"list_types {user_id} folders {isRecursive}")
24 | buttons.ibutton("Files", f"list_types {user_id} files {isRecursive}")
25 | buttons.ibutton("Both", f"list_types {user_id} both {isRecursive}")
26 | buttons.ibutton(f"Recursive: {isRecursive}",
27 | f"list_types {user_id} rec {isRecursive}")
28 | buttons.ibutton("Cancel", f"list_types {user_id} cancel")
29 | return buttons.build_menu(2)
30 |
31 |
32 | async def _list_drive(key, message, item_type, isRecursive):
33 | LOGGER.info(f"listing: {key}")
34 | start_time = time()
35 | gdrive = GoogleDriveHelper()
36 | telegraph_content, contents_no = await sync_to_async(gdrive.drive_list, key, isRecursive=isRecursive, itemType=item_type)
37 | Elapsed = get_readable_time(time() - start_time)
38 | if telegraph_content:
39 | try:
40 | button = await get_telegraph_list(telegraph_content)
41 | except Exception as e:
42 | await editMessage(message, e)
43 | return
44 | msg = f'Found {contents_no} result for {key}\n\nType: {item_type} | Recursive list: {isRecursive}\nElapsed: {Elapsed}'
45 | await editMessage(message, msg, button)
46 | else:
47 | msg = f'No result found for {key}\n\nType: {item_type} | Recursive list: {isRecursive}\nElapsed: {Elapsed}'
48 | await editMessage(message, msg)
49 |
50 |
51 | @new_task
52 | async def select_type(_, query):
53 | user_id = query.from_user.id
54 | message = query.message
55 | key = message.reply_to_message.text.split(maxsplit=1)[1].strip()
56 | data = query.data.split()
57 | if user_id != int(data[1]):
58 | return await query.answer(text="Not Yours!", show_alert=True)
59 | elif data[2] == 'rec':
60 | await query.answer()
61 | isRecursive = not bool(eval(data[3]))
62 | buttons = await list_buttons(user_id, isRecursive)
63 | return await editMessage(message, 'Choose list options:', buttons)
64 | elif data[2] == 'cancel':
65 | await query.answer()
66 |         return await editMessage(message, "List has been cancelled!")
67 | await query.answer()
68 | item_type = data[2]
69 | isRecursive = eval(data[3])
70 | await editMessage(message, f"Searching for {key}")
71 | await _list_drive(key, message, item_type, isRecursive)
72 |
73 |
74 | async def drive_list(_, message):
75 | if len(message.text.split()) == 1:
76 | return await sendMessage(message, 'Send a search key along with command')
77 | if not message.from_user:
78 | message.from_user = await anno_checker(message)
79 | if not message.from_user:
80 | return
81 | user_id = message.from_user.id
82 | if not await isAdmin(message, user_id):
83 | if await request_limiter(message):
84 | return
85 | if message.chat.type != message.chat.type.PRIVATE:
86 | msg, btn = checking_access(user_id)
87 | if msg is not None:
88 | await sendMessage(message, msg, btn.build_menu(1))
89 | return
90 | buttons = await list_buttons(user_id)
91 | await sendMessage(message, 'Choose list options:', buttons)
92 |
93 | bot.add_handler(MessageHandler(drive_list, filters=command(
94 | BotCommands.ListCommand) & CustomFilters.authorized))
95 | bot.add_handler(CallbackQueryHandler(
96 | select_type, filters=regex("^list_types")))
97 |
--------------------------------------------------------------------------------
/bot/modules/leech_del.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from asyncio import sleep
3 |
4 | from pyrogram.filters import command
5 | from pyrogram.handlers import MessageHandler
6 |
7 | from bot import bot
8 | from bot.helper.ext_utils.bot_utils import new_task
9 | from bot.helper.telegram_helper.bot_commands import BotCommands
10 | from bot.helper.telegram_helper.filters import CustomFilters
11 | from bot.helper.telegram_helper.message_utils import editMessage, sendMessage
12 |
13 | delete = set()
14 |
15 | @new_task
16 | async def delete_leech(client, message):
17 | args = message.text.split()
18 | if len(args) > 1:
19 | link = args[1]
20 | elif reply_to := message.reply_to_message:
21 | link = reply_to.text.strip()
22 | else:
23 | link = ''
24 | if not link.startswith('https://t.me/'):
25 |         msg = 'Send a Telegram message link along with the command or reply to a link with the command'
26 | return await sendMessage(message, msg)
27 | msg = f'Okay deleting all replies with {link}'
28 | link = link.split('/')
29 | message_id = int(link[-1])
30 | if message_id in delete:
31 |         msg = 'Deletion is already in progress'
32 | return await sendMessage(message, msg)
33 | chat_id = link[-2]
34 | if chat_id.isdigit():
35 | chat_id = f'-100{chat_id}'
36 | chat_id = int(chat_id)
37 | reply_message = await sendMessage(message, msg)
38 | await deleting(client, chat_id, message_id, reply_message)
39 |
40 | @new_task
41 | async def deleting(client, chat_id, message_id, message):
42 | delete.add(message_id)
43 | try:
44 | msg = await client.get_messages(chat_id, message_id, replies=-1)
45 | replies_ids = []
46 | while msg:
47 | await sleep(0)
48 | replies_ids.append(msg.id)
49 | if msg.media_group_id:
50 | media_group = await msg.get_media_group()
51 | media_ids = []
52 | for media in media_group:
53 | media_ids.append(media.id)
54 | msg = media.reply_to_message
55 | if not msg:
56 | msg = await client.get_messages(chat_id, media.reply_to_message_id, replies=-1)
57 | replies_ids.extend(media_ids)
58 | else:
59 | msg = msg.reply_to_message
60 | replies_ids = list(set(replies_ids))
61 | total_ids = len(replies_ids)
62 |         replies_ids = [replies_ids[i * 100:(i + 1) * 100] for i in range((total_ids + 99) // 100)]
63 | deleted = 0
64 | for each100 in replies_ids:
65 | deleted += await client.delete_messages(chat_id, each100)
66 |             if len(each100) >= 100:  # throttle after each full batch to avoid flood waits
67 | await sleep(1)
68 |         await editMessage(message, f'{deleted}/{total_ids} messages deleted')
69 | except Exception as e:
70 | await editMessage(message, str(e))
71 | delete.remove(message_id)
72 |
73 | bot.add_handler(MessageHandler(delete_leech, filters=command(f'leech{BotCommands.DeleteCommand}') & CustomFilters.sudo))
--------------------------------------------------------------------------------
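
Bulk deletion in `deleting()` above works in slices of 100 because Telegram caps how many IDs one `delete_messages` call may carry. A stripped-down sketch of that pattern, assuming (as the module does) that pyrogram's `delete_messages` returns the count of deleted messages:

```python
from asyncio import sleep

async def delete_in_batches(client, chat_id, message_ids, batch_size=100):
    # Telegram accepts at most 100 IDs per delete_messages call,
    # so walk the full list in fixed-size slices.
    deleted = 0
    for i in range(0, len(message_ids), batch_size):
        batch = message_ids[i:i + batch_size]
        deleted += await client.delete_messages(chat_id, batch)
        await sleep(1)  # brief pause between batches to stay clear of flood limits
    return deleted
```
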
/bot/modules/rmdb.py:
--------------------------------------------------------------------------------
1 | from pyrogram.filters import command
2 | from pyrogram.handlers import MessageHandler
3 |
4 | from bot import DATABASE_URL, bot, config_dict
5 | from bot.helper.ext_utils.bot_utils import is_magnet, is_url, new_task
6 | from bot.helper.ext_utils.db_handler import DbManger
7 | from bot.helper.jmdkh_utils import extract_link
8 | from bot.helper.telegram_helper.bot_commands import BotCommands
9 | from bot.helper.telegram_helper.filters import CustomFilters
10 | from bot.helper.telegram_helper.message_utils import sendMessage
11 |
12 |
13 | @new_task
14 | async def rmdbNode(_, message):
15 | if DATABASE_URL and not config_dict['STOP_DUPLICATE_TASKS']:
16 | return await sendMessage(message, 'STOP_DUPLICATE_TASKS feature is not enabled')
17 | mesg = message.text.split('\n')
18 | message_args = mesg[0].split(' ', maxsplit=1)
19 | file = None
20 | shouldDel = False
21 | try:
22 | link = message_args[1]
23 | except IndexError:
24 | link = ''
25 | if reply_to := message.reply_to_message:
26 | media_array = [reply_to.document, reply_to.photo, reply_to.video, reply_to.audio, reply_to.voice, reply_to.video_note, reply_to.sticker, reply_to.animation]
27 | file = next((i for i in media_array if i), None)
28 | if not is_url(link) and not is_magnet(link) and not link:
29 | if not file:
30 | if is_url(reply_to.text) or is_magnet(reply_to.text):
31 | link = reply_to.text.strip()
32 | else:
33 | mesg = message.text.split('\n')
34 | message_args = mesg[0].split(' ', maxsplit=1)
35 | try:
36 | link = message_args[1]
37 | except IndexError:
38 | pass
39 | elif file.mime_type == 'application/x-bittorrent':
40 | link = await reply_to.download()
41 | shouldDel = True
42 | else:
43 | link = file.file_unique_id
44 | if not link:
45 | msg = 'Something went wrong!!'
46 | return await sendMessage(message, msg)
47 | raw_url = await extract_link(link, shouldDel)
48 | if exist := await DbManger().check_download(raw_url):
49 | await DbManger().remove_download(exist['_id'])
50 |         msg = 'Download removed from the database successfully'
51 |         msg += f'\n{exist["tag"]} Your download has been removed.'
52 |     else:
53 |         msg = 'This download does not exist in the database'
54 | return await sendMessage(message, msg)
55 |
56 |
57 | if DATABASE_URL:
58 | bot.add_handler(MessageHandler(rmdbNode, filters=command(BotCommands.RmdbCommand) & CustomFilters.authorized))
59 |
--------------------------------------------------------------------------------
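
The duplicate-removal core of `rmdbNode` reduces to three calls: normalize the link, look up the stored record, delete it. A hedged sketch reusing the same helper names from this repo (`extract_link` produces the raw-URL key that STOP_DUPLICATE_TASKS stores; the internals live in jmdkh_utils.py and db_handler.py):

```python
from bot.helper.ext_utils.db_handler import DbManger
from bot.helper.jmdkh_utils import extract_link

async def remove_tracked_download(link):
    # Normalize the user-supplied link to the canonical form stored in Mongo.
    raw_url = await extract_link(link, False)
    # check_download returns the stored document (or None) keyed by raw URL.
    if exist := await DbManger().check_download(raw_url):
        await DbManger().remove_download(exist['_id'])
        return True
    return False
```
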
/bot/modules/save_message.py:
--------------------------------------------------------------------------------
1 | from pyrogram.errors import UserIsBlocked
2 | from pyrogram.filters import regex
3 | from pyrogram.handlers import CallbackQueryHandler
4 |
5 | from bot import LOGGER, bot
6 | from bot.helper.ext_utils.bot_utils import new_task
7 | from bot.helper.telegram_helper.button_build import ButtonMaker
8 | from bot.helper.telegram_helper.filters import CustomFilters
9 | from bot.helper.telegram_helper.message_utils import request_limiter
10 |
11 |
12 | @new_task
13 | async def save_message(client, query):
14 | if not await CustomFilters.sudo(client, query) and await request_limiter(query=query):
15 | return
16 | try:
17 | button = ButtonMaker()
18 | button_exist = False
19 | for _markup in query.message.reply_markup.inline_keyboard:
20 | if isinstance(_markup, list):
21 | for another_markup in _markup:
22 | if isinstance(another_markup, list):
23 | for one_more_markup in another_markup:
24 | if one_more_markup and not one_more_markup.callback_data:
25 | button_exist = True
26 | button.ubutton(one_more_markup.text, one_more_markup.url)
27 | elif another_markup and not another_markup.callback_data:
28 | button_exist = True
29 | button.ubutton(another_markup.text, another_markup.url)
30 | elif _markup and not _markup.callback_data:
31 | button_exist = True
32 | button.ubutton(_markup.text, _markup.url)
33 | reply_markup = button.build_menu(2) if button_exist else None
34 | await query.message.copy(query.from_user.id, reply_markup=reply_markup, disable_notification=False)
35 | await query.answer('Saved Successfully', show_alert=True)
36 | except UserIsBlocked:
37 | await query.answer(f'Start @{client.me.username} in private and try again', show_alert=True)
38 | except Exception as e:
39 | LOGGER.error(e)
40 | await query.answer("Something went wrong!", show_alert=True)
41 |
42 |
43 | bot.add_handler(CallbackQueryHandler(save_message, filters=regex("^save")))
--------------------------------------------------------------------------------
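
In pyrogram, `reply_markup.inline_keyboard` is a list of button rows, so the deeply nested `isinstance` checks above are defensive; the same walk can be sketched flat. Only URL buttons are kept because callback buttons would be dead once the message is copied to the user's private chat:

```python
def collect_url_buttons(reply_markup):
    # inline_keyboard: list of rows, each row a list of InlineKeyboardButton.
    urls = []
    for row in reply_markup.inline_keyboard:
        for btn in row:
            if btn.url and not btn.callback_data:
                urls.append((btn.text, btn.url))
    return urls
```
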
/bot/modules/shell.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from io import BytesIO
3 |
4 | from pyrogram.filters import command
5 | from pyrogram.handlers import EditedMessageHandler, MessageHandler
6 |
7 | from bot import LOGGER, bot
8 | from bot.helper.ext_utils.bot_utils import cmd_exec, new_task
9 | from bot.helper.telegram_helper.bot_commands import BotCommands
10 | from bot.helper.telegram_helper.filters import CustomFilters
11 | from bot.helper.telegram_helper.message_utils import sendFile, sendMessage
12 |
13 |
14 | @new_task
15 | async def shell(_, message):
16 | cmd = message.text.split(maxsplit=1)
17 | if len(cmd) == 1:
18 | await sendMessage(message, 'No command to execute was given.')
19 | return
20 | cmd = cmd[1]
21 | stdout, stderr, _ = await cmd_exec(cmd, shell=True)
22 | reply = ''
23 | if len(stdout) != 0:
24 | reply += f"*Stdout*\n{stdout}
\n"
25 | LOGGER.info(f"Shell - {cmd} - {stdout}")
26 | if len(stderr) != 0:
27 | reply += f"*Stderr*\n{stderr}
"
28 | LOGGER.error(f"Shell - {cmd} - {stderr}")
29 | if len(reply) > 3000:
30 | with BytesIO(str.encode(reply)) as out_file:
31 | out_file.name = "shell_output.txt"
32 | await sendFile(message, out_file)
33 | elif len(reply) != 0:
34 | await sendMessage(message, reply)
35 | else:
36 | await sendMessage(message, 'No Reply')
37 |
38 |
39 | bot.add_handler(MessageHandler(shell, filters=command(
40 | BotCommands.ShellCommand) & CustomFilters.owner))
41 | bot.add_handler(EditedMessageHandler(shell, filters=command(
42 | BotCommands.ShellCommand) & CustomFilters.owner))
43 |
--------------------------------------------------------------------------------
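
`shell()` unpacks a `(stdout, stderr, returncode)` triple from `cmd_exec`. The real helper lives in bot_utils.py and also supports non-shell execution; a minimal sketch of the shell path using asyncio subprocesses:

```python
from asyncio import create_subprocess_shell
from asyncio.subprocess import PIPE

async def cmd_exec_sketch(cmd):
    # Run the command through the shell and capture both streams
    # without blocking the event loop.
    proc = await create_subprocess_shell(cmd, stdout=PIPE, stderr=PIPE)
    stdout, stderr = await proc.communicate()
    return stdout.decode().strip(), stderr.decode().strip(), proc.returncode
```
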
/bot/modules/status.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from time import time
3 |
4 | from psutil import cpu_percent, disk_usage, virtual_memory
5 | from pyrogram.filters import command, regex
6 | from pyrogram.handlers import CallbackQueryHandler, MessageHandler
7 |
8 | from bot import (Interval, bot, botStartTime, config_dict, download_dict,
9 | download_dict_lock, status_reply_dict_lock)
10 | from bot.helper.ext_utils.bot_utils import (get_readable_file_size,
11 | get_readable_time, new_task,
12 | setInterval, turn_page)
13 | from bot.helper.telegram_helper.bot_commands import BotCommands
14 | from bot.helper.telegram_helper.filters import CustomFilters
15 | from bot.helper.telegram_helper.message_utils import (auto_delete_message,
16 | deleteMessage, isAdmin,
17 | request_limiter,
18 | sendMessage,
19 | sendStatusMessage,
20 | update_all_messages)
21 |
22 |
23 | @new_task
24 | async def mirror_status(_, message):
25 | async with download_dict_lock:
26 | count = len(download_dict)
27 | if count == 0:
28 | currentTime = get_readable_time(time() - botStartTime)
29 | free = get_readable_file_size(
30 | disk_usage(config_dict['DOWNLOAD_DIR']).free)
31 |         msg = 'No Active Downloads!\n___________________________'
32 | msg += f"\nCPU: {cpu_percent()}% | FREE: {free}" \
33 | f"\nRAM: {virtual_memory().percent}% | UPTIME: {currentTime}"
34 | reply_message = await sendMessage(message, msg)
35 | await auto_delete_message(message, reply_message)
36 | else:
37 | await sendStatusMessage(message)
38 | await deleteMessage(message)
39 | async with status_reply_dict_lock:
40 | if Interval:
41 | Interval[0].cancel()
42 | Interval.clear()
43 | Interval.append(setInterval(
44 | config_dict['STATUS_UPDATE_INTERVAL'], update_all_messages))
45 |
46 |
47 | @new_task
48 | async def status_pages(_, query):
49 | if not await isAdmin(query.message, query.from_user.id) and await request_limiter(query=query):
50 | return
51 | await query.answer()
52 | data = query.data.split()
53 | if data[1] == "ref":
54 | await update_all_messages(True)
55 | else:
56 | await turn_page(data)
57 |
58 |
59 | bot.add_handler(MessageHandler(mirror_status, filters=command(
60 | BotCommands.StatusCommand) & CustomFilters.authorized))
61 | bot.add_handler(CallbackQueryHandler(status_pages, filters=regex("^status")))
62 |
--------------------------------------------------------------------------------
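
`mirror_status` re-arms a periodic refresher with `setInterval(...)` and cancels the old one first. A sketch of what such a helper can look like (the repo's version is in bot_utils.py and may differ in detail), assuming the callback is awaitable and an event loop is already running:

```python
from asyncio import create_task, sleep

class SetIntervalSketch:
    # Awaits `action` every `interval` seconds until cancel() is called,
    # matching the Interval[0].cancel() usage above.
    def __init__(self, interval, action):
        self.interval = interval
        self.action = action
        self.task = create_task(self._loop())  # requires a running event loop

    async def _loop(self):
        while True:
            await sleep(self.interval)
            await self.action()

    def cancel(self):
        self.task.cancel()
```
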
/bot/modules/torrent_select.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from aiofiles.os import path as aiopath
3 | from aiofiles.os import remove as aioremove
4 | from pyrogram.filters import command, regex
5 | from pyrogram.handlers import CallbackQueryHandler, MessageHandler
6 |
7 | from bot import LOGGER, aria2, bot, download_dict, download_dict_lock
8 | from bot.helper.ext_utils.bot_utils import (MirrorStatus, bt_selection_buttons,
9 | getDownloadByGid, sync_to_async)
10 | from bot.helper.telegram_helper.bot_commands import BotCommands
11 | from bot.helper.telegram_helper.filters import CustomFilters
12 | from bot.helper.telegram_helper.message_utils import (anno_checker, isAdmin,
13 | request_limiter,
14 | sendMessage,
15 | sendStatusMessage)
16 |
17 |
18 | async def select(client, message):
19 | if not message.from_user:
20 | message.from_user = await anno_checker(message)
21 | if not message.from_user:
22 | return
23 | user_id = message.from_user.id
24 | if not await isAdmin(message, user_id) and await request_limiter(message):
25 | return
26 | msg = message.text.split()
27 | if len(msg) > 1:
28 | gid = msg[1]
29 | dl = await getDownloadByGid(gid)
30 | if dl is None:
31 | await sendMessage(message, f"GID: {gid}
Not Found.")
32 | return
33 | elif reply_to_id := message.reply_to_message_id:
34 | async with download_dict_lock:
35 | dl = download_dict.get(reply_to_id, None)
36 | if dl is None:
37 | await sendMessage(message, "This is not an active task!")
38 | return
39 | elif len(msg) == 1:
40 | msg = ("Reply to an active /{cmd} which was used to start the qb-download or add gid along with cmd\n\n"
41 | + "This command mainly for selection incase you decided to select files from already added torrent. "
42 | + "But you can always use /{mir} with arg `s` to select files before download start."
43 | .format_map({'cmd': BotCommands.BtSelectCommand, 'mir': BotCommands.MirrorCommand[0]}))
44 | await sendMessage(message, msg)
45 | return
46 |
47 | if not await CustomFilters.sudo(client, message) and dl.message.from_user.id != user_id:
48 | await sendMessage(message, "This task is not for you!")
49 | return
50 | if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_QUEUEDL]:
51 |         await sendMessage(message, 'Task should be in a downloading, paused (in case the message was deleted by mistake), or queued state (in case you used a torrent file)!')
52 | return
53 | if dl.name().startswith('[METADATA]'):
54 |         await sendMessage(message, 'Try again after the metadata download has finished!')
55 | return
56 |
57 | try:
58 | listener = dl.listener()
59 | if listener.isQbit:
60 | id_ = dl.hash()
61 | client = dl.client()
62 | if not dl.queued:
63 | await sync_to_async(client.torrents_pause, torrent_hashes=id_)
64 | else:
65 | id_ = dl.gid()
66 | if not dl.queued:
67 | try:
68 | await sync_to_async(aria2.client.force_pause, id_)
69 | except Exception as e:
70 |                 LOGGER.error(
71 |                     f"{e} Error in pause; this mostly happens after abusing aria2")
72 | listener.select = True
73 | except:
74 | await sendMessage(message, "This is not a bittorrent task!")
75 | return
76 |
77 | SBUTTONS = bt_selection_buttons(id_, False)
78 | msg = f"Name: {dl.name()}
\n\nYour download paused. Choose files then press Done Selecting button to resume downloading." \
79 | "\nYour download will not start automatically"
80 | await sendMessage(message, msg, SBUTTONS)
81 |
82 |
83 | async def get_confirm(client, query):
84 | user_id = query.from_user.id
85 | data = query.data.split()
86 | message = query.message
87 | dl = await getDownloadByGid(data[2])
88 | if dl is None:
89 | await query.answer("This task has been cancelled!", show_alert=True)
90 | await message.delete()
91 | return
92 | if hasattr(dl, 'listener'):
93 | listener = dl.listener()
94 | else:
95 | await query.answer("Not in download state anymore! Keep this message to resume the seed if seed enabled!", show_alert=True)
96 | return
97 | if user_id != listener.message.from_user.id and not await CustomFilters.sudo(client, query):
98 | await query.answer("This task is not for you!", show_alert=True)
99 | elif data[1] == "pin":
100 | await query.answer(data[3], show_alert=True)
101 | elif data[1] == "done":
102 | await query.answer()
103 | id_ = data[3]
104 | if len(id_) > 20:
105 | client = dl.client()
106 | tor_info = (await sync_to_async(client.torrents_info, torrent_hash=id_))[0]
107 | path = tor_info.content_path.rsplit('/', 1)[0]
108 | res = await sync_to_async(client.torrents_files, torrent_hash=id_)
109 | for f in res:
110 | if f.priority == 0:
111 | f_paths = [f"{path}/{f.name}", f"{path}/{f.name}.!qB"]
112 | for f_path in f_paths:
113 | if await aiopath.exists(f_path):
114 | try:
115 | await aioremove(f_path)
116 | except:
117 | pass
118 | if not dl.queued:
119 | await sync_to_async(client.torrents_resume, torrent_hashes=id_)
120 | else:
121 | res = await sync_to_async(aria2.client.get_files, id_)
122 | for f in res:
123 | if f['selected'] == 'false' and await aiopath.exists(f['path']):
124 | try:
125 | await aioremove(f['path'])
126 | except:
127 | pass
128 | if not dl.queued:
129 | try:
130 | await sync_to_async(aria2.client.unpause, id_)
131 | except Exception as e:
132 |                     LOGGER.error(
133 |                         f"{e} Error in resume; this mostly happens after abusing aria2. Try the select cmd again!")
134 | await sendStatusMessage(message)
135 | await message.delete()
136 | elif data[1] == "rm":
137 | await query.answer()
138 | obj = dl.download()
139 | await obj.cancel_download()
140 | await message.delete()
141 |
142 |
143 | bot.add_handler(MessageHandler(select, filters=command(
144 | BotCommands.BtSelectCommand) & CustomFilters.authorized))
145 | bot.add_handler(CallbackQueryHandler(get_confirm, filters=regex("^btsel")))
--------------------------------------------------------------------------------
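
torrent_select.py wraps every blocking qbittorrent-api/aria2p call in `sync_to_async` so the event loop never stalls on client I/O. A minimal sketch of that helper (the repo's implementation in bot_utils.py may differ, e.g. by using a dedicated executor):

```python
from asyncio import get_running_loop
from functools import partial

async def sync_to_async_sketch(func, *args, **kwargs):
    # Run a blocking callable (e.g. client.torrents_pause) in the default
    # thread-pool executor and await its result.
    loop = get_running_loop()
    return await loop.run_in_executor(None, partial(func, *args, **kwargs))
```
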
/config_sample.env:
--------------------------------------------------------------------------------
1 | # Remove this line before deploying
2 | _____REMOVE_THIS_LINE_____=True
3 |
4 | # REQUIRED CONFIG
5 | BOT_TOKEN = "" # Require restart after changing it while bot running
6 | OWNER_ID = "" # Require restart after changing it while bot running
7 | TELEGRAM_API = "" # Require restart after changing it while bot running
8 | TELEGRAM_HASH = "" # Require restart after changing it while bot running
9 |
10 | # OPTIONAL CONFIG
11 | USER_SESSION_STRING = "" # Require restart after changing it while bot running
12 | DATABASE_URL = "" # Require restart after changing it while bot running
13 | DOWNLOAD_DIR = "/usr/src/app/downloads/" # Require restart after changing it while bot running
14 | CMD_SUFFIX = "" # Require restart after changing it while bot running
15 | AUTHORIZED_CHATS = "" # Require restart after changing it while bot running
16 | SUDO_USERS = "" # Require restart after changing it while bot running
17 | STATUS_LIMIT = "8"
18 | DEFAULT_UPLOAD = "gd"
19 | STATUS_UPDATE_INTERVAL = "10"
20 | AUTO_DELETE_MESSAGE_DURATION = "30"
21 | UPTOBOX_TOKEN = ""
22 | EXTENSION_FILTER = ""
23 | INCOMPLETE_TASK_NOTIFIER = "False"
24 | YT_DLP_OPTIONS = ""
25 |
26 | # GDrive Tools
27 | GDRIVE_ID = ""
28 | IS_TEAM_DRIVE = "False"
29 | USE_SERVICE_ACCOUNTS = "False"
30 | STOP_DUPLICATE = "False"
31 | INDEX_URL = ""
32 |
33 | # Rclone
34 | RCLONE_PATH = ""
35 | RCLONE_FLAGS = ""
36 | RCLONE_SERVE_URL = ""
37 | RCLONE_SERVE_PORT = ""
38 | RCLONE_SERVE_USER = ""
39 | RCLONE_SERVE_PASS = ""
40 |
41 | # Update
42 | UPSTREAM_REPO = ""
43 | UPSTREAM_BRANCH = ""
44 |
45 | # Leech
46 | LEECH_SPLIT_SIZE = ""
47 | AS_DOCUMENT = "False"
48 | EQUAL_SPLITS = "False"
49 | MEDIA_GROUP = "False"
50 | LEECH_FILENAME_PREFIX = ""
51 | DUMP_CHAT_ID = ""
52 |
53 | # qBittorrent/Aria2c
54 | TORRENT_TIMEOUT = ""
55 | BASE_URL = ""
56 | BASE_URL_PORT = ""
57 | WEB_PINCODE = "False"
58 |
59 | # Queueing system
60 | QUEUE_ALL = ""
61 | QUEUE_DOWNLOAD = ""
62 | QUEUE_UPLOAD = ""
63 |
64 | # RSS
65 | RSS_DELAY = "900"
66 | RSS_CHAT_ID = ""
67 |
68 | # Mega
69 | MEGA_EMAIL = ""
70 | MEGA_PASSWORD = ""
71 |
72 | # Torrent Search
73 | SEARCH_API_LINK = ""
74 | SEARCH_LIMIT = "0"
75 | SEARCH_PLUGINS = '["https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/piratebay.py",
76 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/limetorrents.py",
77 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torlock.py",
78 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentscsv.py",
79 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/eztv.py",
80 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentproject.py",
81 | "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/kickass_torrent.py",
82 | "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/yts_am.py",
83 | "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/linuxtracker.py",
84 | "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/nyaasi.py",
85 | "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/ettv.py",
86 | "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/glotorrents.py",
87 | "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/thepiratebay.py",
88 | "https://raw.githubusercontent.com/v1k45/1337x-qBittorrent-search-plugin/master/leetx.py",
89 | "https://raw.githubusercontent.com/nindogo/qbtSearchScripts/master/magnetdl.py",
90 | "https://raw.githubusercontent.com/msagca/qbittorrent_plugins/main/uniondht.py",
91 | "https://raw.githubusercontent.com/khensolomon/leyts/master/yts.py"]'
92 |
93 | # Limits
94 | STORAGE_THRESHOLD = ""
95 | TORRENT_LIMIT = ""
96 | DIRECT_LIMIT = ""
97 | YTDLP_LIMIT = ""
98 | GDRIVE_LIMIT = ""
99 | MEGA_LIMIT = ""
100 | LEECH_LIMIT = ""
101 | CLONE_LIMIT = ""
102 |
103 | # Group Features
104 | FSUB_IDS = ""
105 | USER_MAX_TASKS = ""
106 | REQUEST_LIMITS = ""
107 | ENABLE_MESSAGE_FILTER = "False"
108 | STOP_DUPLICATE_TASKS = "False"
109 | TOKEN_TIMEOUT = ""
110 |
111 | # Extra
112 | SET_COMMANDS = "False"
113 | DISABLE_LEECH = "False"
114 | DM_MODE = ""
115 | DELETE_LINKS = "False"
116 | LOG_CHAT_ID = ""
--------------------------------------------------------------------------------
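
Everything in config_sample.env is read back as a plain string, so booleans and numbers need explicit parsing after the dotenv load (update.py below uses the same `load_dotenv('config.env', override=True)` pattern). A short sketch with two variables taken from this file:

```python
from os import environ
from dotenv import load_dotenv

load_dotenv('config.env', override=True)

# Values arrive as strings; parse numbers and booleans explicitly.
STATUS_LIMIT = int(environ.get('STATUS_LIMIT', '8'))
AS_DOCUMENT = environ.get('AS_DOCUMENT', 'False').lower() == 'true'
```
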
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.3"
2 |
3 | services:
4 | app:
5 | container_name: jmdkh-mltb
6 | build:
7 | context: .
8 | dockerfile: Dockerfile
9 | command: bash start.sh
10 | restart: on-failure
11 | ports:
12 | - "80:80"
13 | - "8080:8080"
--------------------------------------------------------------------------------
/driveid.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | print("\n\n"
4 | " Bot can search files recursively, but you have to add the list of drives you want to search.\n"
5 | " Use the following format: (You can use 'root' in the ID in case you wan to use main drive.)\n"
6 | " teamdrive NAME --> anything that you likes\n"
7 | " teamdrive ID --> id of teamdrives in which you likes to search ('root' for main drive)\n"
8 | " teamdrive INDEX URL --> enter index url for this drive.\n"
9 | " go to the respective drive and copy the url from address bar\n")
10 | msg = ''
11 | if os.path.exists('drive_folder'):
12 | with open('drive_folder', 'r+') as f:
13 | lines = f.read()
14 | if not re.match(r'^\s*$', lines):
15 | print(lines)
16 | print("\n\n"
17 | " DO YOU WISH TO KEEP THE ABOVE DETAILS THAT YOU PREVIOUSLY ADDED???? ENTER (y/n)\n"
18 | " IF NOTHING SHOWS ENTER n")
19 | while 1:
20 | choice = input()
21 | if choice in ['y', 'Y']:
22 | msg = f'{lines}'
23 | break
24 | elif choice in ['n', 'N']:
25 | break
26 | else:
27 |                 print(
28 |                     "\n\n Invalid input. DO YOU WISH TO KEEP THE ABOVE DETAILS? ENTER y OR n")
29 | num = int(input(" How many drives/folders would you like to add : "))
30 | for count in range(1, num + 1):
31 | print(f"\n > DRIVE - {count}\n")
32 |     name = input(" Enter Drive NAME (anything) : ")
33 |     drive_id = input(" Enter Drive ID : ")
34 |     index = input(" Enter Drive INDEX URL (optional) : ")
35 |     if not name or not drive_id:
36 |         print("\n\n ERROR: Don't leave the NAME/ID empty.")
37 |         exit(1)
38 |     name = name.replace(" ", "_")
39 |     if index:
40 |         if index[-1] == "/":
41 |             index = index[:-1]
42 |     else:
43 |         index = ''
44 |     msg += f"{name} {drive_id} {index}\n"
45 | with open('drive_folder', 'w') as file:
46 | file.truncate(0)
47 | file.write(msg)
48 | print("\n\n Done!")
49 |
--------------------------------------------------------------------------------
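
driveid.py writes one `NAME ID INDEX` triple per line of `drive_folder`, with the index URL optional. A hedged sketch of reading those lines back, roughly what the bot's drive-list loader has to do:

```python
def parse_drive_folder(path='drive_folder'):
    # Each non-empty line is "NAME ID [INDEX_URL]"; the index defaults to ''.
    drives = []
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) < 2:
                continue
            name, drive_id = parts[0], parts[1]
            index = parts[2] if len(parts) > 2 else ''
            drives.append((name, drive_id, index))
    return drives
```
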
/generate_drive_token.py:
--------------------------------------------------------------------------------
1 | import pickle
2 | import os
3 | from google_auth_oauthlib.flow import InstalledAppFlow
4 | from google.auth.transport.requests import Request
5 |
6 | credentials = None
7 | __G_DRIVE_TOKEN_FILE = "token.pickle"
8 | __OAUTH_SCOPE = ["https://www.googleapis.com/auth/drive"]
9 | if os.path.exists(__G_DRIVE_TOKEN_FILE):
10 | with open(__G_DRIVE_TOKEN_FILE, 'rb') as f:
11 | credentials = pickle.load(f)
12 | if credentials is None or not credentials.valid:
13 |     # Refresh silently when an expired token with a refresh token exists;
14 |     # otherwise run the interactive OAuth flow for fresh credentials.
15 |     if credentials and credentials.expired and credentials.refresh_token:
16 |         credentials.refresh(Request())
17 |     else:
18 |         flow = InstalledAppFlow.from_client_secrets_file(
19 |             'credentials.json', __OAUTH_SCOPE)
20 |         credentials = flow.run_local_server(port=0, open_browser=False)
21 |
22 | # Save the credentials for the next run
23 | with open(__G_DRIVE_TOKEN_FILE, 'wb') as token:
24 |     pickle.dump(credentials, token)
25 |
--------------------------------------------------------------------------------
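
Once token.pickle exists, Drive requests elsewhere in the repo (e.g. gdriveTools.py) can be authorized with it. A minimal usage sketch with google-api-python-client; the `about()` call is just a hypothetical smoke test:

```python
import pickle
from googleapiclient.discovery import build

# Load the credentials produced by generate_drive_token.py.
with open('token.pickle', 'rb') as f:
    credentials = pickle.load(f)

# Build a Drive v3 service and verify the token with a trivial request.
service = build('drive', 'v3', credentials=credentials, cache_discovery=False)
print(service.about().get(fields='user').execute())
```
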
/generate_string_session.py:
--------------------------------------------------------------------------------
1 | from pyrogram import Client
2 |
3 | print('Requires pyrogram v2 or greater.')
4 | API_KEY = int(input("Enter API KEY: "))
5 | API_HASH = input("Enter API HASH: ")
6 | with Client(name='USS', api_id=API_KEY, api_hash=API_HASH, in_memory=True) as app:
7 | print(app.export_session_string())
8 |
--------------------------------------------------------------------------------
/qBittorrent/config/qBittorrent.conf:
--------------------------------------------------------------------------------
1 | [Application]
2 | MemoryWorkingSetLimit=512
3 |
4 | [BitTorrent]
5 | Session\AddExtensionToIncompleteFiles=true
6 | Session\AddTrackersEnabled=false
7 | Session\AnnounceToAllTrackers=true
8 | Session\AnonymousModeEnabled=false
9 | Session\AsyncIOThreadsCount=16
10 | Session\ConnectionSpeed=-1
11 | Session\DHTEnabled=true
12 | Session\DiskCacheSize=-1
13 | Session\GlobalDLSpeedLimit=0
14 | Session\GlobalMaxRatio=-1
15 | Session\GlobalMaxSeedingMinutes=-1
16 | Session\GlobalUPSpeedLimit=0
17 | Session\HashingThreadsCount=1
18 | Session\IgnoreSlowTorrentsForQueueing=true
19 | Session\IncludeOverheadInLimits=false
20 | Session\LSDEnabled=true
21 | Session\MaxActiveCheckingTorrents=3
22 | Session\MaxActiveDownloads=100
23 | Session\MaxActiveTorrents=50
24 | Session\MaxActiveUploads=50
25 | Session\MaxConnections=-1
26 | Session\MaxConnectionsPerTorrent=-1
27 | Session\MaxRatioAction=0
28 | Session\MaxUploads=-1
29 | Session\MaxUploadsPerTorrent=-1
30 | Session\MultiConnectionsPerIp=true
31 | Session\PexEnabled=true
32 | Session\PerformanceWarning=true
33 | Session\Preallocation=true
34 | Session\QueueingSystemEnabled=false
35 | Session\SlowTorrentsDownloadRate=2
36 | Session\SlowTorrentsInactivityTimer=600
37 | Session\SlowTorrentsUploadRate=2
38 | Session\StopTrackerTimeout=5
39 | TrackerEnabled=true
40 |
41 | [LegalNotice]
42 | Accepted=true
43 |
44 | [Meta]
45 | MigrationVersion=4
46 |
47 | [Preferences]
48 | Advanced\DisableRecursiveDownload=false
49 | Advanced\RecheckOnCompletion=false
50 | Advanced\trackerPortForwarding=true
51 | General\PreventFromSuspendWhenDownloading=true
52 | General\PreventFromSuspendWhenSeeding=true
53 | Search\SearchEnabled=true
54 | WebUI\BanDuration=3600
55 | WebUI\CSRFProtection=false
56 | WebUI\ClickjackingProtection=false
57 | WebUI\Enabled=true
58 | WebUI\HTTPS\Enabled=false
59 | WebUI\HostHeaderValidation=false
60 | WebUI\LocalHostAuth=false
61 | WebUI\MaxAuthenticationFailCount=10
62 | WebUI\Port=8090
63 | WebUI\SecureCookie=false
64 | WebUI\SessionTimeout=3600
65 | WebUI\UseUPnP=false
66 |
--------------------------------------------------------------------------------
/requirements-cli.txt:
--------------------------------------------------------------------------------
1 | oauth2client
2 | google-api-python-client
3 | progress
4 | progressbar2
5 | httplib2shim
6 | google_auth_oauthlib
7 | pyrogram>=2
8 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiohttp
2 | aiofiles
3 | aioshutil
4 | anytree
5 | apscheduler
6 | aria2p
7 | asyncio
8 | beautifulsoup4
9 | bencoding
10 | cloudscraper
11 | dnspython
12 | feedparser
13 | flask
14 | gevent
15 | google-api-python-client
16 | google-auth-httplib2
17 | google-auth-oauthlib
18 | gunicorn
19 | git+https://github.com/zevtyardt/lk21.git
20 | httpx
21 | lxml
22 | motor
23 | mutagen
24 | natsort
25 | pillow
26 | psutil
27 | pybase64
28 | pymongo
29 | pyrogram
30 | python-dotenv
31 | python-magic
32 | qbittorrent-api
33 | requests
34 | telegraph
35 | tenacity
36 | tgcrypto
37 | uvloop
38 | xattr
39 | yt-dlp
--------------------------------------------------------------------------------
/start.sh:
--------------------------------------------------------------------------------
1 | python3 update.py && python3 -m bot
--------------------------------------------------------------------------------
/update.py:
--------------------------------------------------------------------------------
1 | from logging import FileHandler, StreamHandler, INFO, basicConfig, error as log_error, info as log_info
2 | from os import path as ospath, environ
3 | from subprocess import run as srun
4 | from dotenv import load_dotenv
5 | from pymongo import MongoClient
6 |
7 | if ospath.exists('log.txt'):
8 | with open('log.txt', 'r+') as f:
9 | f.truncate(0)
10 |
11 | basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
12 | handlers=[FileHandler('log.txt'), StreamHandler()],
13 | level=INFO)
14 |
15 | load_dotenv('config.env', override=True)
16 |
17 | try:
18 | if bool(environ.get('_____REMOVE_THIS_LINE_____')):
19 |         log_error('The README.md file is there to be read! Exiting now!')
20 | exit()
21 | except:
22 | pass
23 |
24 | BOT_TOKEN = environ.get('BOT_TOKEN', '')
25 | if len(BOT_TOKEN) == 0:
26 | log_error("BOT_TOKEN variable is missing! Exiting now")
27 | exit(1)
28 |
29 | bot_id = BOT_TOKEN.split(':', 1)[0]
30 |
31 | DATABASE_URL = environ.get('DATABASE_URL', '')
32 | if len(DATABASE_URL) == 0:
33 | DATABASE_URL = None
34 |
35 | if DATABASE_URL:
36 | conn = MongoClient(DATABASE_URL)
37 | db = conn.mltb
38 |     # fetch the stored config dict (all env vars) from the database
39 | if config_dict := db.settings.config.find_one({'_id': bot_id}):
40 | environ['UPSTREAM_REPO'] = config_dict['UPSTREAM_REPO']
41 | environ['UPSTREAM_BRANCH'] = config_dict['UPSTREAM_BRANCH']
42 | conn.close()
43 |
44 | UPSTREAM_REPO = environ.get('UPSTREAM_REPO', '')
45 | if len(UPSTREAM_REPO) == 0:
46 | UPSTREAM_REPO = None
47 |
48 | UPSTREAM_BRANCH = environ.get('UPSTREAM_BRANCH', '')
49 | if len(UPSTREAM_BRANCH) == 0:
50 | UPSTREAM_BRANCH = 'master'
51 |
52 | if UPSTREAM_REPO:
53 | if ospath.exists('.git'):
54 | srun(["rm", "-rf", ".git"])
55 |
56 | update = srun([f"git init -q \
57 | && git config --global user.email jmdkh007@gmail.com \
58 | && git config --global user.name jmdkh \
59 | && git add . \
60 | && git commit -sm update -q \
61 | && git remote add origin {UPSTREAM_REPO} \
62 | && git fetch origin -q \
63 | && git reset --hard origin/{UPSTREAM_BRANCH} -q"], shell=True)
64 |
65 | if update.returncode == 0:
66 | log_info('Successfully updated with latest commit from UPSTREAM_REPO')
67 | else:
68 | log_error(
69 | 'Something went wrong while updating, check UPSTREAM_REPO if valid or not!')
70 |
--------------------------------------------------------------------------------
/web/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/web/nodes.py:
--------------------------------------------------------------------------------
1 | from os import environ
2 | from re import findall
3 |
4 | from anytree import NodeMixin
5 |
6 | DOWNLOAD_DIR = environ.get('DOWNLOAD_DIR', '')
7 | if len(DOWNLOAD_DIR) == 0:
8 | DOWNLOAD_DIR = '/usr/src/app/downloads/'
9 | elif not DOWNLOAD_DIR.endswith("/"):
10 | DOWNLOAD_DIR = f'{DOWNLOAD_DIR}/'
11 |
12 |
13 | class TorNode(NodeMixin):
14 | def __init__(self, name, is_folder=False, is_file=False, parent=None, size=None, priority=None, file_id=None, progress=None):
15 | super().__init__()
16 | self.name = name
17 | self.is_folder = is_folder
18 | self.is_file = is_file
19 |
20 | if parent is not None:
21 | self.parent = parent
22 | if size is not None:
23 | self.size = size
24 | if priority is not None:
25 | self.priority = priority
26 | if file_id is not None:
27 | self.file_id = file_id
28 | if progress is not None:
29 | self.progress = progress
30 |
31 |
32 | def qb_get_folders(path):
33 | return path.split("/")
34 |
35 | def get_folders(path):
36 | fs = findall(f'{DOWNLOAD_DIR}[0-9]+/(.+)', path)[0]
37 | return fs.split('/')
38 |
39 | def make_tree(res, aria2=False):
40 | parent = TorNode("Torrent")
41 | if not aria2:
42 | for i in res:
43 | folders = qb_get_folders(i.name)
44 | if len(folders) > 1:
45 | previous_node = parent
46 | for j in range(len(folders)-1):
47 | current_node = next((k for k in previous_node.children if k.name == folders[j]), None)
48 | if current_node is None:
49 | previous_node = TorNode(folders[j], parent=previous_node, is_folder=True)
50 | else:
51 | previous_node = current_node
52 | TorNode(folders[-1], is_file=True, parent=previous_node, size=i.size, priority=i.priority, \
53 | file_id=i.id, progress=round(i.progress*100, 5))
54 | else:
55 | TorNode(folders[-1], is_file=True, parent=parent, size=i.size, priority=i.priority, \
56 | file_id=i.id, progress=round(i.progress*100, 5))
57 | else:
58 | for i in res:
59 | folders = get_folders(i['path'])
60 | priority = 1
61 | if i['selected'] == 'false':
62 | priority = 0
63 | if len(folders) > 1:
64 | previous_node = parent
65 | for j in range(len(folders)-1):
66 | current_node = next((k for k in previous_node.children if k.name == folders[j]), None)
67 | if current_node is None:
68 | previous_node = TorNode(folders[j], parent=previous_node, is_folder=True)
69 | else:
70 | previous_node = current_node
71 | TorNode(folders[-1], is_file=True, parent=previous_node, size=i['length'], priority=priority, \
72 | file_id=i['index'], progress=round((int(i['completedLength'])/int(i['length']))*100, 5))
73 | else:
74 | TorNode(folders[-1], is_file=True, parent=parent, size=i['length'], priority=priority, \
75 | file_id=i['index'], progress=round((int(i['completedLength'])/int(i['length']))*100, 5))
76 | return create_list(parent, ["", 0])
77 |
78 | """
79 | def print_tree(parent):
80 | for pre, _, node in RenderTree(parent):
81 | treestr = u"%s%s" % (pre, node.name)
82 | print(treestr.ljust(8), node.is_folder, node.is_file)
83 | """
84 |
85 | def create_list(par, msg):
86 | if par.name != ".unwanted":
87 | msg[0] += '