├── web
│ ├── __init__.py
│ └── nodes.py
├── bot
│ ├── helper
│ │ ├── __init__.py
│ │ ├── ext_utils
│ │ │ ├── __init__.py
│ │ │ ├── exceptions.py
│ │ │ ├── jmdkh_utils.py
│ │ │ ├── z_utils.py
│ │ │ ├── rate_limiter.py
│ │ │ ├── shortenurl.py
│ │ │ ├── shortener.py
│ │ │ ├── telegraph_helper.py
│ │ │ ├── queued_starter.py
│ │ │ ├── html_helper.py
│ │ │ ├── db_handler.py
│ │ │ └── fs_utils.py
│ │ ├── mirror_utils
│ │ │ ├── __init__.py
│ │ │ ├── status_utils
│ │ │ │ ├── __init__.py
│ │ │ │ ├── convert_status.py
│ │ │ │ ├── queue_status.py
│ │ │ │ ├── telegram_download_status.py
│ │ │ │ ├── clone_status.py
│ │ │ │ ├── tg_upload_status.py
│ │ │ │ ├── upload_status.py
│ │ │ │ ├── gd_download_status.py
│ │ │ │ ├── mega_download_status.py
│ │ │ │ ├── split_status.py
│ │ │ │ ├── zip_status.py
│ │ │ │ ├── extract_status.py
│ │ │ │ ├── yt_dlp_download_status.py
│ │ │ │ ├── qbit_download_status.py
│ │ │ │ └── aria_download_status.py
│ │ │ ├── upload_utils
│ │ │ │ └── __init__.py
│ │ │ └── download_utils
│ │ │   ├── __init__.py
│ │ │   ├── clonner.py
│ │ │   ├── gd_downloader.py
│ │ │   ├── direct_link_generator_license.md
│ │ │   ├── telegram_downloader.py
│ │ │   ├── mega_downloader.py
│ │ │   └── aria2_download.py
│ │ └── telegram_helper
│ │   ├── __init__.py
│ │   ├── filters.py
│ │   ├── button_build.py
│ │   ├── bot_commands.py
│ │   └── message_utils.py
│ └── modules
│   ├── __init__.py
│   ├── save_message.py
│   ├── anonymous.py
│   ├── shell.py
│   ├── list.py
│   ├── count.py
│   ├── rmdb.py
│   ├── mirror_status.py
│   ├── delete.py
│   ├── authorize.py
│   ├── eval.py
│   ├── drive_list.py
│   ├── leech_settings.py
│   ├── category_select.py
│   ├── bt_select.py
│   └── cancel_mirror.py
├── _config.yml
├── heroku.yml
├── captain-definition
├── requirements-cli.txt
├── README.md
├── start.sh
├── docker-compose.yml
├── .gitignore
├── railway.json
├── generate_string_session.py
├── Dockerfile
├── requirements.txt
├── aria.sh
├── generate_drive_token.py
├── qBittorrent
│ └── config
│   └── qBittorrent.conf
├── driveid.py
├── update.py
├── add_to_team_drive.py
└── config_sample.env
/web/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bot/helper/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bot/modules/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-time-machine
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/upload_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/heroku.yml:
--------------------------------------------------------------------------------
1 | build:
2 | docker:
3 | web: Dockerfile
4 | run:
5 | web: bash start.sh
6 |
--------------------------------------------------------------------------------
/captain-definition:
--------------------------------------------------------------------------------
1 | {
2 | "schemaVersion": 2,
3 | "dockerfilePath": "./Dockerfile"
4 | }
5 |
--------------------------------------------------------------------------------
/requirements-cli.txt:
--------------------------------------------------------------------------------
1 | oauth2client
2 | google-api-python-client
3 | progress
4 | progressbar2
5 | httplib2shim
6 | google_auth_oauthlib
7 | pyrogram>=2
8 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## How To Deploy
2 |
3 |
4 |
--------------------------------------------------------------------------------
/start.sh:
--------------------------------------------------------------------------------
1 | pip3 install aria2p
2 |
3 | pip3 install yt-dlp
4 |
5 | pip3 install speedtest-cli
6 |
7 | pip3 install qbittorrent-api
8 |
9 |
10 |
11 |
12 | python3 update.py && python3 -m bot
13 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.3"
2 |
3 | services:
4 | app:
5 | container_name: z-mirror
6 | build:
7 | context: .
8 | dockerfile: Dockerfile
9 | command: bash start.sh
10 | restart: on-failure
11 | ports:
12 | - "80:80"
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | config.env
2 | *auth_token.txt
3 | *.pyc
4 | data*
5 | .vscode
6 | .idea
7 | *.json
8 | *.pickle
9 | .netrc
10 | log.txt
11 | accounts/*
12 | Thumbnails/*
13 | list_drives.txt
14 | cookies.txt
15 | downloads
16 | categories.txt
17 | shorteners.txt
18 | buttons.txt
19 | terabox.txt
--------------------------------------------------------------------------------
/railway.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://railway.app/railway.schema.json",
3 | "build": {
4 | "builder": "DOCKERFILE",
5 | "dockerfilePath": "Dockerfile"
6 | },
7 | "deploy": {
8 | "restartPolicyType": "ON_FAILURE",
9 | "restartPolicyMaxRetries": 10
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/exceptions.py:
--------------------------------------------------------------------------------
1 | class DirectDownloadLinkException(Exception):
2 | """Not method found for extracting direct download link from the http link"""
3 | pass
4 |
5 | class NotSupportedExtractionArchive(Exception):
6 | """The archive format use is trying to extract is not supported"""
7 | pass
8 |
--------------------------------------------------------------------------------
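
These exceptions are raised by the link-resolving and extraction helpers and caught by the callers. A minimal sketch of the intended raise/catch pattern, assuming the bot package is importable; the get_direct_link resolver below is hypothetical and only illustrates the flow:

from bot.helper.ext_utils.exceptions import DirectDownloadLinkException

def get_direct_link(url):
    # Hypothetical resolver: raise when no extractor matches the host.
    if "supported-host.example" not in url:
        raise DirectDownloadLinkException(f"No direct link extractor found for {url}")
    return url

try:
    link = get_direct_link("https://unknown-host.example/file")
except DirectDownloadLinkException as e:
    print(f"ERROR: {e}")  # reported back to the user instead of crashing the task
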
/generate_string_session.py:
--------------------------------------------------------------------------------
1 | from pyrogram import Client
2 |
3 | print('Requires pyrogram v2 or greater.')
4 | API_KEY = int(input("Enter API KEY: "))
5 | API_HASH = input("Enter API HASH: ")
6 | with Client(name='USS', api_id=API_KEY, api_hash=API_HASH, in_memory=True) as app:
7 | print(app.export_session_string())
8 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM dawn001/z_mirror:latest
2 | # FROM dawn001/z_mirror:arm64
3 | # FROM dawn001/z_mirror:armv7
4 | # FROM dawn001/z_mirror:s390x
5 | # Select based on your device's arch. Default is amd64(latest)
6 |
7 | WORKDIR /usr/src/app
8 | RUN chmod 777 /usr/src/app
9 |
10 | COPY requirements.txt .
11 | RUN pip3 install --no-cache-dir -r requirements.txt
12 |
13 | COPY . .
14 |
15 | CMD ["bash", "start.sh"]
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | anytree
2 |
3 | asyncio
4 | beautifulsoup4
5 | bencoding
6 | cfscrape
7 | dnspython
8 | feedparser
9 | flask
10 | google-api-python-client
11 | google-auth-httplib2
12 | google-auth-oauthlib
13 | gunicorn
14 | git+https://github.com/zevtyardt/lk21.git
15 | lxml
16 | pillow
17 | playwright
18 | psutil
19 | pybase64
20 | pymongo
21 | pyrate_limiter
22 | Pyrogram==2.0.66
23 | python-dotenv
24 | python-magic
25 | python-telegram-bot==13.15
26 |
27 | requests
28 | telegraph
29 | tenacity
30 | tgCrypto
31 | xattr
32 |
--------------------------------------------------------------------------------
/bot/modules/save_message.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CallbackQueryHandler
2 |
3 | from bot import dispatcher
4 | from bot.helper.ext_utils.rate_limiter import ratelimiter
5 |
6 |
7 | @ratelimiter
8 | def save_message(update, context):
9 | query = update.callback_query
10 | if query.data == "save":
11 | try:
12 | del query.message.reply_markup['inline_keyboard'][-1]
13 | query.message.copy(query.from_user.id, reply_markup=query.message.reply_markup)
14 | query.answer('Message Saved Successfully', show_alert=True)
15 | except:
16 | query.answer('Start the bot in private and try again', show_alert=True)
17 |
18 |
19 | msgsave_handler = CallbackQueryHandler(save_message, pattern="save")
20 |
21 | dispatcher.add_handler(msgsave_handler)
22 |
--------------------------------------------------------------------------------
/aria.sh:
--------------------------------------------------------------------------------
1 | tracker_list=$(curl -Ns https://ngosang.github.io/trackerslist/trackers_all_http.txt | awk '$0' | tr '\n\n' ',')
2 | aria2c --allow-overwrite=true --auto-file-renaming=true --bt-enable-lpd=true --bt-detach-seed-only=true \
3 | --bt-remove-unselected-file=true --bt-tracker="[$tracker_list]" --check-certificate=false \
4 | --check-integrity=true --continue=true --content-disposition-default-utf8=true --daemon=true \
5 | --disk-cache=40M --enable-rpc=true --follow-torrent=mem --force-save=true --http-accept-gzip=true \
6 | --max-connection-per-server=10 --max-concurrent-downloads=10 --max-file-not-found=0 --max-tries=20 \
7 | --min-split-size=10M --optimize-concurrent-downloads=true --peer-id-prefix=-qB4390- --reuse-uri=true \
8 | --peer-agent=qBittorrent/4.3.9 --quiet=true --rpc-max-request-size=1024M --seed-ratio=0 --split=10 \
9 | --summary-interval=0 --user-agent=Wget/1.12
10 |
--------------------------------------------------------------------------------
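
start.sh installs aria2p, which talks to the aria2c daemon this script starts with --enable-rpc=true. A minimal sketch of adding a download over RPC, assuming the default RPC port 6800 and no RPC secret (neither is overridden above):

import aria2p

# Connect to the daemon started by aria.sh.
aria2 = aria2p.API(aria2p.Client(host="http://localhost", port=6800, secret=""))

# Queue a download and print a one-off progress snapshot.
download = aria2.add_uris(["https://example.com/file.iso"])
print(download.name, download.progress_string())
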
/generate_drive_token.py:
--------------------------------------------------------------------------------
1 | import pickle
2 | import os
3 | from google_auth_oauthlib.flow import InstalledAppFlow
4 | from google.auth.transport.requests import Request
5 |
6 | credentials = None
7 | __G_DRIVE_TOKEN_FILE = "token.pickle"
8 | __OAUTH_SCOPE = ["https://www.googleapis.com/auth/drive"]
9 | if os.path.exists(__G_DRIVE_TOKEN_FILE):
10 | with open(__G_DRIVE_TOKEN_FILE, 'rb') as f:
11 | credentials = pickle.load(f)
12 | if credentials is None or not credentials.valid:
13 |     if (
14 |         credentials
15 |         and credentials.expired
16 |         and credentials.refresh_token
17 |     ):
18 |         credentials.refresh(Request())
19 |     else:
20 |         flow = InstalledAppFlow.from_client_secrets_file(
21 |             'credentials.json', __OAUTH_SCOPE)
22 |         credentials = flow.run_local_server(port=0, open_browser=False)
23 |
24 | # Save the credentials for the next run
25 | with open(__G_DRIVE_TOKEN_FILE, 'wb') as token:
26 | pickle.dump(credentials, token)
27 |
--------------------------------------------------------------------------------
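
A quick way to confirm the generated token works before handing it to the bot, assuming token.pickle sits in the current directory (google-api-python-client is already listed in requirements.txt):

import pickle
from googleapiclient.discovery import build

with open("token.pickle", "rb") as f:
    creds = pickle.load(f)

# Build a Drive v3 client and ask which account the token belongs to.
service = build("drive", "v3", credentials=creds, cache_discovery=False)
about = service.about().get(fields="user(emailAddress)").execute()
print("token.pickle works for:", about["user"]["emailAddress"])
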
/qBittorrent/config/qBittorrent.conf:
--------------------------------------------------------------------------------
1 | [LegalNotice]
2 | Accepted=true
3 |
4 | [BitTorrent]
5 | Session\AsyncIOThreadsCount=16
6 | Session\MultiConnectionsPerIp=true
7 | Session\SlowTorrentsDownloadRate=50
8 | Session\SlowTorrentsInactivityTimer=600
9 | Session\GlobalMaxSeedingMinutes=0
10 |
11 | [Preferences]
12 | Advanced\AnnounceToAllTrackers=true
13 | Advanced\AnonymousMode=false
14 | Advanced\IgnoreLimitsLAN=false
15 | Advanced\LtTrackerExchange=true
16 | Advanced\RecheckOnCompletion=false
17 | Bittorrent\AddTrackers=false
18 | Bittorrent\MaxRatio=0
19 | Bittorrent\MaxRatioAction=0
20 | Bittorrent\MaxConnecs=-1
21 | Bittorrent\MaxConnecsPerTorrent=-1
22 | Bittorrent\MaxUploads=-1
23 | Bittorrent\MaxUploadsPerTorrent=-1
24 | Bittorrent\DHT=true
25 | Bittorrent\PeX=true
26 | Bittorrent\LSD=true
27 | Downloads\PreAllocation=true
28 | Downloads\UseIncompleteExtension=true
29 | Downloads\DiskWriteCacheSize=-1
30 | General\PreventFromSuspendWhenDownloading=true
31 | Queueing\IgnoreSlowTorrents=true
32 | Queueing\MaxActiveDownloads=100
33 | Queueing\MaxActiveTorrents=50
34 | Queueing\MaxActiveUploads=50
35 | Queueing\QueueingEnabled=false
36 | Search\SearchEnabled=true
37 | WebUI\Enabled=true
38 | WebUI\Port=8090
39 | WebUI\LocalHostAuth=false
40 |
--------------------------------------------------------------------------------
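
With this config the WebUI listens on port 8090 and skips authentication for localhost (WebUI\LocalHostAuth=false), which is how the bot's qbittorrent-api client (installed in start.sh) reaches it. A minimal sketch of that connection, assuming qBittorrent is already running with the config above:

import qbittorrentapi

client = qbittorrentapi.Client(host="localhost", port=8090)
print("qBittorrent", client.app.version)

# List current torrents with their progress.
for torrent in client.torrents_info():
    print(torrent.hash, torrent.name, f"{torrent.progress * 100:.1f}%")
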
/bot/helper/telegram_helper/filters.py:
--------------------------------------------------------------------------------
1 | from telegram import Message
2 | from telegram.ext import MessageFilter
3 |
4 | from bot import OWNER_ID, user_data
5 |
6 |
7 | class CustomFilters:
8 | class __OwnerFilter(MessageFilter):
9 | def filter(self, message: Message):
10 | return message.from_user.id == OWNER_ID
11 |
12 | owner_filter = __OwnerFilter()
13 |
14 | class __AuthorizedUserFilter(MessageFilter):
15 | def filter(self, message: Message):
16 | uid = message.from_user.id
17 | return uid == OWNER_ID or uid in user_data and (user_data[uid].get('is_auth') or user_data[uid].get('is_sudo'))
18 |
19 | authorized_user = __AuthorizedUserFilter()
20 |
21 | class __AuthorizedChat(MessageFilter):
22 | def filter(self, message: Message):
23 | uid = message.chat.id
24 | return uid in user_data and user_data[uid].get('is_auth')
25 |
26 | authorized_chat = __AuthorizedChat()
27 |
28 | class __SudoUser(MessageFilter):
29 | def filter(self, message: Message):
30 | uid = message.from_user.id
31 | return uid in user_data and user_data[uid].get('is_sudo')
32 |
33 | sudo_user = __SudoUser()
34 |
35 | @staticmethod
36 | def owner_query(uid):
37 | return uid == OWNER_ID or uid in user_data and user_data[uid].get('is_sudo')
38 |
--------------------------------------------------------------------------------
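
These filters compose with the usual python-telegram-bot operators, which is how the modules further down register their commands. A hedged sketch of gating a handler; the /ping command is hypothetical and not part of BotCommands, and user_data is assumed to be populated elsewhere (e.g. by the authorize module):

from telegram.ext import CommandHandler

from bot import dispatcher
from bot.helper.telegram_helper.filters import CustomFilters

def ping(update, context):
    update.effective_message.reply_text("pong")

# Owner, sudo users, authorized users and authorized chats all pass this filter.
ping_handler = CommandHandler("ping", ping,
                              filters=CustomFilters.owner_filter | CustomFilters.authorized_user | CustomFilters.authorized_chat)
dispatcher.add_handler(ping_handler)
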
/bot/modules/anonymous.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CallbackQueryHandler
2 |
3 | from bot import LOGGER, btn_listener, dispatcher
4 | from bot.helper.telegram_helper.message_utils import (deleteMessage,
5 | editMessage, isAdmin)
6 |
7 |
8 | def verifyAnno(update, context):
9 | query = update.callback_query
10 | message = query.message
11 | data = query.data.split()
12 | msg_id = int(data[2])
13 | if msg_id not in btn_listener:
14 | return editMessage('Old Verification Message', message)
15 | user = query.from_user
16 | if (
17 | data[1] == 'admin'
18 | and isAdmin(message, user.id)
19 | or data[1] != 'admin'
20 | and data[1] == 'channel'
21 | ):
22 | query.answer(f'Username: {user.username}\nYour userid : {user.id}')
23 | btn_listener[msg_id][1] = user.id
24 | btn_listener[msg_id][0] = False
25 | LOGGER.info(f'Verification Success by ({user.username}){user.id}')
26 | deleteMessage(message.bot, message)
27 | elif data[1] == 'admin' and not isAdmin(message, user.id):
28 | query.answer('You are not really admin')
29 | else:
30 | query.answer()
31 | btn_listener[msg_id][0] = False
32 | editMessage('Cancel Verification', message)
33 |
34 | anno_handler = CallbackQueryHandler(verifyAnno, pattern="verify")
35 | dispatcher.add_handler(anno_handler)
36 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/convert_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size
2 |
3 |
4 | class ConvertStatus:
5 | def __init__(self, name, size, gid, listener):
6 | self.__name = name
7 | self.__gid = gid
8 | self.__size = size
9 | self.__listener = listener
10 | self.message = self.__listener.message
11 | self.startTime = self.__listener.startTime
12 | self.mode = self.__listener.mode
13 | self.source = self.__source()
14 | self.engine = "ffmpeg"
15 |
16 | def gid(self):
17 | return self.__gid
18 |
19 | def progress(self):
20 | return '0'
21 |
22 | def speed(self):
23 | return '0'
24 |
25 | def name(self):
26 | return self.__name
27 |
28 | def size(self):
29 | return get_readable_file_size(self.__size)
30 |
31 | def eta(self):
32 | return '0s'
33 |
34 | def status(self):
35 | return MirrorStatus.STATUS_CONVERTING
36 |
37 | def processed_bytes(self):
38 | return 0
39 |
40 | def download(self):
41 | return self
42 |
43 | def __source(self):
44 | reply_to = self.message.reply_to_message
45 | source = reply_to.from_user.username or reply_to.from_user.id if reply_to and \
46 | not reply_to.from_user.is_bot else self.message.from_user.username \
47 | or self.message.from_user.id
48 | return f"{source}"
49 |
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/button_build.py:
--------------------------------------------------------------------------------
1 | from telegram import InlineKeyboardButton, InlineKeyboardMarkup
2 |
3 |
4 | class ButtonMaker:
5 | def __init__(self):
6 | self.__button = []
7 | self.__header_button = []
8 | self.__footer_button = []
9 |
10 | def buildbutton(self, key, link, position=None):
11 | if not position:
12 | self.__button.append(InlineKeyboardButton(text = key, url = link))
13 | elif position == 'header':
14 | self.__header_button.append(InlineKeyboardButton(text = key, url = link))
15 | elif position == 'footer':
16 | self.__footer_button.append(InlineKeyboardButton(text = key, url = link))
17 |
18 | def sbutton(self, key, data, position=None):
19 | if not position:
20 | self.__button.append(InlineKeyboardButton(text = key, callback_data = data))
21 | elif position == 'header':
22 | self.__header_button.append(InlineKeyboardButton(text = key, callback_data = data))
23 | elif position == 'footer':
24 | self.__footer_button.append(InlineKeyboardButton(text = key, callback_data = data))
25 |
26 | def build_menu(self, n_cols):
27 | menu = [self.__button[i:i + n_cols] for i in range(0, len(self.__button), n_cols)]
28 | if self.__header_button:
29 | menu.insert(0, self.__header_button)
30 | if self.__footer_button:
31 | if len(self.__footer_button) > 8:
32 | [menu.append(self.__footer_button[i:i+8]) for i in range(0, len(self.__footer_button), 8)]
33 | else:
34 | menu.append(self.__footer_button)
35 | return InlineKeyboardMarkup(menu)
36 |
--------------------------------------------------------------------------------
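
A small usage sketch of ButtonMaker; the URL and callback-data strings are illustrative only:

from bot.helper.telegram_helper.button_build import ButtonMaker

buttons = ButtonMaker()
buttons.buildbutton("Drive Link", "https://drive.google.com/drive/folders/example", position='header')
buttons.sbutton("Yes", "confirm yes")
buttons.sbutton("No", "confirm no")
buttons.sbutton("Cancel", "confirm cancel", position='footer')

# Two callback buttons per row, header row on top, footer row at the bottom.
reply_markup = buttons.build_menu(2)
# reply_markup can now be passed when sending or editing a message.
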
/bot/helper/ext_utils/jmdkh_utils.py:
--------------------------------------------------------------------------------
1 | from hashlib import sha1
2 | from os import path, remove
3 | from re import search
4 | from time import time
5 | from urllib.parse import parse_qs, urlparse
6 |
7 | from bencoding import bdecode, bencode
8 | from requests import get
9 |
10 | from bot import LOGGER
11 |
12 |
13 | def extract_link(link, tfile=False):
14 | try:
15 | if link and link.startswith('magnet:'):
16 | raw_link = search(r'(?<=xt=urn:btih:)[a-zA-Z0-9]+', link).group(0).lower()
17 | elif "drive.google.com" in urlparse(link).netloc:
18 | if "folders" in link or "file" in link:
19 | regex = r"https:\/\/drive\.google\.com\/(?:drive(.*?)\/folders\/|file(.*?)?\/d\/)([-\w]+)"
20 | res = search(regex, link)
21 | raw_link = link if res is None else res.group(3)
22 | raw_link = parse_qs(urlparse(link).query)['id'][0]
23 | elif tfile:
24 | if not path.exists(link):
25 | resp = get(link)
26 | if resp.status_code == 200:
27 | file_name = f'{time()}.torrent'
28 | with open(file_name, "wb") as t:
29 | t.write(resp.content)
30 | with open(file_name, "rb") as f:
31 | decodedDict = bdecode(f.read())
32 | remove(file_name)
33 | else:
34 | with open(link, "rb") as f:
35 | decodedDict = bdecode(f.read())
36 | raw_link = str(sha1(bencode(decodedDict[b'info'])).hexdigest())
37 | else:
38 | raw_link = link
39 | except Exception as e:
40 | LOGGER.error(e)
41 | raw_link = link
42 | return raw_link
--------------------------------------------------------------------------------
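
extract_link normalises whatever the user sent into a comparable identifier: the btih info-hash for magnets, the file/folder id for Drive links, the sha1 of the bencoded info dict for .torrent files (path or URL, with tfile=True), and the link itself for everything else. A short sketch on illustrative inputs, assuming the bot package is importable:

from bot.helper.ext_utils.jmdkh_utils import extract_link

print(extract_link("magnet:?xt=urn:btih:ABCDEF0123456789ABCDEF0123456789ABCDEF01"))
# -> abcdef0123456789abcdef0123456789abcdef01

print(extract_link("https://drive.google.com/file/d/1A2b3C4d5E6f/view"))
# -> 1A2b3C4d5E6f
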
/bot/helper/ext_utils/z_utils.py:
--------------------------------------------------------------------------------
1 | from hashlib import sha1
2 | from os import path, remove
3 | from re import search
4 | from time import time
5 | from urllib.parse import parse_qs, urlparse
6 |
7 | from bencoding import bdecode, bencode
8 | from requests import get
9 |
10 | from bot import LOGGER
11 |
12 |
13 | def extract_link(link, tfile=False):
14 | try:
15 | if link and link.startswith('magnet:'):
16 | raw_link = search(r'(?<=xt=urn:btih:)[a-zA-Z0-9]+', link).group(0).lower()
17 | elif "drive.google.com" in urlparse(link).netloc:
18 | if "folders" in link or "file" in link:
19 | regex = r"https:\/\/drive\.google\.com\/(?:drive(.*?)\/folders\/|file(.*?)?\/d\/)([-\w]+)"
20 | res = search(regex, link)
21 | raw_link = link if res is None else res.group(3)
22 | raw_link = parse_qs(urlparse(link).query)['id'][0]
23 | elif tfile:
24 | if not path.exists(link):
25 | resp = get(link)
26 | if resp.status_code == 200:
27 | file_name = f'{time()}.torrent'
28 | with open(file_name, "wb") as t:
29 | t.write(resp.content)
30 | with open(file_name, "rb") as f:
31 | decodedDict = bdecode(f.read())
32 | remove(file_name)
33 | else:
34 | with open(link, "rb") as f:
35 | decodedDict = bdecode(f.read())
36 | raw_link = str(sha1(bencode(decodedDict[b'info'])).hexdigest())
37 | else:
38 | raw_link = link
39 | except Exception as e:
40 | LOGGER.error(e)
41 | raw_link = link
42 | return raw_link
--------------------------------------------------------------------------------
/bot/modules/shell.py:
--------------------------------------------------------------------------------
1 | from subprocess import PIPE, Popen
2 |
3 | from telegram.ext import CommandHandler
4 |
5 | from bot import LOGGER, dispatcher
6 | from bot.helper.telegram_helper.bot_commands import BotCommands
7 | from bot.helper.telegram_helper.filters import CustomFilters
8 |
9 |
10 | def shell(update, context):
11 | message = update.effective_message
12 | cmd = message.text.split(maxsplit=1)
13 | if len(cmd) == 1:
14 | return message.reply_text('No command to execute was given.')
15 | cmd = cmd[1]
16 | process = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
17 | stdout, stderr = process.communicate()
18 | reply = ''
19 | stderr = stderr.decode()
20 | stdout = stdout.decode()
21 | if len(stdout) != 0:
22 | reply += f"`{stdout}`\n"
23 | LOGGER.info(f"Shell - {cmd} - {stdout}")
24 | if len(stderr) != 0:
25 | reply += f"*Stderr*\n`{stderr}`\n"
26 | LOGGER.error(f"Shell - {cmd} - {stderr}")
27 | if len(reply) > 3000:
28 | with open('shell_output.txt', 'w') as file:
29 | file.write(reply)
30 | with open('shell_output.txt', 'rb') as doc:
31 | context.bot.send_document(
32 | document=doc,
33 | filename=doc.name,
34 | reply_to_message_id=message.message_id,
35 | chat_id=message.chat_id)
36 | elif len(reply) != 0:
37 | message.reply_text(reply, parse_mode='Markdown')
38 | else:
39 | message.reply_text('No Reply', parse_mode='Markdown')
40 |
41 |
42 | SHELL_HANDLER = CommandHandler(BotCommands.ShellCommand, shell,
43 | filters=CustomFilters.owner_filter)
44 | dispatcher.add_handler(SHELL_HANDLER)
45 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/queue_status.py:
--------------------------------------------------------------------------------
1 | from bot import LOGGER
2 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size
3 |
4 |
5 | class QueueStatus:
6 | def __init__(self, name, size, gid, listener, state):
7 | self.__name = name
8 | self.__size = size
9 | self.__gid = gid
10 | self.__listener = listener
11 | self.__state = state
12 | self.message = self.__listener.message
13 | self.startTime = self.__listener.startTime
14 | self.mode = self.__listener.mode
15 | self.source = self.__source()
16 | self.engine = "Queue System v1.0"
17 |
18 | def gid(self):
19 | return self.__gid
20 |
21 | def name(self):
22 | return self.__name
23 |
24 | def size_raw(self):
25 | return self.__size
26 |
27 | def size(self):
28 | return get_readable_file_size(self.__size)
29 |
30 | def status(self):
31 | if self.__state == 'Dl':
32 | return MirrorStatus.STATUS_QUEUEDL
33 | else:
34 | return MirrorStatus.STATUS_QUEUEUP
35 |
36 | def processed_bytes(self):
37 | return 0
38 |
39 | def progress(self):
40 | return '0%'
41 |
42 | def speed(self):
43 | return '0B/s'
44 |
45 | def eta(self):
46 | return '-'
47 |
48 | def download(self):
49 | return self
50 |
51 | def cancel_download(self):
52 | LOGGER.info(f'Cancelling Queue{self.__state}: {self.__name}')
53 | if self.__state == 'Dl':
54 |             self.__listener.onDownloadError('task has been removed from queue/download')
55 |         else:
56 |             self.__listener.onUploadError('task has been removed from queue/upload')
57 |
58 | def __source(self):
59 | reply_to = self.message.reply_to_message
60 | source = reply_to.from_user.username or reply_to.from_user.id if reply_to and \
61 | not reply_to.from_user.is_bot else self.message.from_user.username \
62 | or self.message.from_user.id
63 | return f"{source}"
64 |
--------------------------------------------------------------------------------
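
All of the status_utils classes expose the same duck-typed interface (gid/name/size/progress/speed/eta/status/download), so the status message renderer can treat them interchangeably. A hedged sketch of how such an object is registered, mirroring what clonner.py does further down; the listener argument is whatever listener instance the calling module already holds:

from bot import download_dict, download_dict_lock
from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus
from bot.helper.telegram_helper.message_utils import sendStatusMessage

def enqueue_download(name, size, gid, listener):
    # Register a queued entry so the status command can render it.
    with download_dict_lock:
        download_dict[listener.uid] = QueueStatus(name, size, gid, listener, 'Dl')
    sendStatusMessage(listener.message, listener.bot)
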
/driveid.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | print("\n\n"\
4 | " Bot can search files recursively, but you have to add the list of drives you want to search.\n"\
5 | " Use the following format: (You can use 'root' in the ID in case you wan to use main drive.)\n"\
6 | " teamdrive NAME --> anything that you likes\n"\
7 | " teamdrive ID --> id of teamdrives in which you likes to search ('root' for main drive)\n"\
8 | " teamdrive INDEX URL --> enter index url for this drive.\n" \
9 | " go to the respective drive and copy the url from address bar\n")
10 | msg = ''
11 | if os.path.exists('drive_folder'):
12 | with open('drive_folder', 'r+') as f:
13 | lines = f.read()
14 | if not re.match(r'^\s*$', lines):
15 | print(lines)
16 | print("\n\n"\
17 | " DO YOU WISH TO KEEP THE ABOVE DETAILS THAT YOU PREVIOUSLY ADDED???? ENTER (y/n)\n"\
18 | " IF NOTHING SHOWS ENTER n")
19 | while 1:
20 | choice = input()
21 | if choice in ['y', 'Y']:
22 | msg = f'{lines}'
23 | break
24 | elif choice in ['n', 'N']:
25 | break
26 | else:
27 | print("\n\n DO YOU WISH TO KEEP THE ABOVE DETAILS ???? y/n <=== this is option ..... OPEN YOUR EYES & READ...")
28 | num = int(input(" How Many Drives/Folders Would You Like To Add : "))
29 | for count in range(1, num + 1):
30 | print(f"\n > DRIVE - {count}\n")
31 | name = input(" Enter Drive NAME (anything) : ")
32 | id = input(" Enter Drive ID : ")
33 | index = input(" Enter Drive INDEX URL (optional) : ")
34 | if not name or not id:
35 | print("\n\n ERROR : Dont leave the name/id without filling.")
36 | exit(1)
37 | name=name.replace(" ", "_")
38 | if index:
39 | if index[-1] == "/":
40 | index = index[:-1]
41 | else:
42 | index = ''
43 | msg += f"{name} {id} {index}\n"
44 | with open('drive_folder', 'w') as file:
45 | file.truncate(0)
46 | file.write(msg)
47 | print("\n\n Done!")
48 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/clonner.py:
--------------------------------------------------------------------------------
1 | from random import SystemRandom
2 | from string import ascii_letters, digits
3 |
4 | from bot import LOGGER, config_dict, download_dict, download_dict_lock
5 | from bot.helper.ext_utils.bot_utils import get_readable_file_size
6 | from bot.helper.mirror_utils.status_utils.clone_status import CloneStatus
7 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
8 | from bot.helper.telegram_helper.message_utils import (deleteMessage, delete_links,
9 | sendMessage,
10 | sendStatusMessage)
11 |
12 |
13 | def start_clone(link, listener):
14 | gd = GoogleDriveHelper(listener=listener)
15 | res, size, name, files = gd.helper(link)
16 | if res != "":
17 | return listener.onDownloadError(res)
18 | if config_dict['STOP_DUPLICATE'] and not listener.select:
19 | LOGGER.info('Checking File/Folder if already in Drive...')
20 | smsg, button = gd.drive_list(name, True)
21 | if smsg:
22 | delete_links(listener.bot, listener.message)
23 | msg = "File/Folder is already available in Drive.\nHere are the search results:"
24 | return listener.onDownloadError(msg, button)
25 | if CLONE_LIMIT := config_dict['CLONE_LIMIT']:
26 | limit = CLONE_LIMIT * 1024**3
27 | if size > limit:
28 | msg2 = f'Failed, Clone limit is {get_readable_file_size(limit)}.\nYour File/Folder size is {get_readable_file_size(size)}.'
29 | return listener.onDownloadError(msg2)
30 | listener.onDownloadStart()
31 | if files <= 20:
32 | msg = sendMessage(f"Cloning:
{link}", listener.bot, listener.message)
33 | gd.clone(link, listener.drive_id or config_dict['GDRIVE_ID'])
34 | deleteMessage(listener.bot, msg)
35 | else:
36 | gd.name = name
37 | gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
38 | clone_status = CloneStatus(gd, size, listener, gid)
39 | with download_dict_lock:
40 | download_dict[listener.uid] = clone_status
41 | sendStatusMessage(listener.message, listener.bot)
42 | gd.clone(link, listener.drive_id or config_dict['GDRIVE_ID'])
43 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/rate_limiter.py:
--------------------------------------------------------------------------------
1 | # Reference https://github.com/sanjit-sinha/TelegramBot-Boilerplate/blob/main/TelegramBot/helpers/ratelimiter.py
2 |
3 | from functools import wraps
4 |
5 | from cachetools import TTLCache
6 | from pyrate_limiter import (BucketFullException, Limiter, MemoryListBucket,
7 | RequestRate)
8 |
9 | from bot import config_dict
10 | from bot.helper.telegram_helper.filters import CustomFilters
11 |
12 |
13 | class RateLimiter:
14 | def __init__(self) -> None:
15 |
16 |         # 1 request per second
17 | self.second_rate = RequestRate(1, 1)
18 |
19 | self.limiter = Limiter(self.second_rate, bucket_class=MemoryListBucket)
20 |
21 | def acquire(self, userid):
22 | try:
23 | self.limiter.try_acquire(userid)
24 | return False
25 | except BucketFullException:
26 | return True
27 |
28 | ratelimit = RateLimiter()
29 | warned_users = TTLCache(maxsize=128, ttl=60)
30 |
31 | def ratelimiter(func):
32 | @wraps(func)
33 | def decorator(update, context):
34 | if not config_dict['ENABLE_RATE_LIMITER']:
35 | return func(update, context)
36 | if query := update.callback_query:
37 | userid = query.from_user.id
38 | elif message := update.message:
39 | userid = message.from_user.id
40 | else:
41 | return func(update, context)
42 | if CustomFilters.owner_query(userid) or userid == 1087968824:
43 | return func(update, context)
44 | is_limited = ratelimit.acquire(userid)
45 | if is_limited and userid not in warned_users:
46 | if query := update.callback_query:
47 |                 query.answer("Spam detected! Ignoring all your requests for a few minutes.", show_alert=True)
48 | warned_users[userid] = 1
49 | return
50 | elif message := update.message:
51 |                 message.reply_text("Spam detected! Ignoring all your requests for a few minutes.")
52 | warned_users[userid] = 1
53 | return
54 | else:
55 | return func(update, context)
56 | elif is_limited and userid in warned_users:
57 | pass
58 | else:
59 | return func(update, context)
60 | return decorator
--------------------------------------------------------------------------------
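
The decorator is applied to handlers elsewhere in the repo (see save_message.py above and count.py below). The bucket itself allows one request per user per second; a standalone sketch of that behaviour, assuming the bot package is importable since this module reads config_dict at import time:

from time import sleep

from bot.helper.ext_utils.rate_limiter import RateLimiter

rl = RateLimiter()
print(rl.acquire(12345))   # False -> request allowed
print(rl.acquire(12345))   # True  -> bucket full, request rate-limited
sleep(1)
print(rl.acquire(12345))   # False -> allowed again after the one-second window
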
/bot/helper/mirror_utils/status_utils/telegram_download_status.py:
--------------------------------------------------------------------------------
1 | from pkg_resources import get_distribution
2 |
3 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
4 | get_readable_file_size,
5 | get_readable_time)
6 |
7 | engine_ = f"pyrogram v{get_distribution('pyrogram').version}"
8 | class TelegramDownloadStatus:
9 | def __init__(self, obj, listener, gid):
10 | self.__obj = obj
11 | self.__gid = gid
12 | self.__listener = listener
13 | self.message = self.__listener.message
14 | self.startTime = self.__listener.startTime
15 | self.mode = self.__listener.mode
16 | self.source = self.__source()
17 | self.engine = engine_
18 |
19 | def gid(self):
20 | return self.__gid
21 |
22 | def processed_bytes(self):
23 | return self.__obj.downloaded_bytes
24 |
25 | def size_raw(self):
26 | return self.__obj.size
27 |
28 | def size(self):
29 | return get_readable_file_size(self.size_raw())
30 |
31 | def status(self):
32 | return MirrorStatus.STATUS_DOWNLOADING
33 |
34 | def name(self):
35 | return self.__obj.name
36 |
37 | def progress_raw(self):
38 | return self.__obj.progress
39 |
40 | def progress(self):
41 | return f'{round(self.progress_raw(), 2)}%'
42 |
43 | def speed_raw(self):
44 | """
45 | :return: Download speed in Bytes/Seconds
46 | """
47 | return self.__obj.download_speed
48 |
49 | def speed(self):
50 | return f'{get_readable_file_size(self.speed_raw())}/s'
51 |
52 | def eta(self):
53 | try:
54 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
55 | return f'{get_readable_time(seconds)}'
56 | except:
57 | return '-'
58 |
59 | def download(self):
60 | return self.__obj
61 |
62 | def __source(self):
63 | reply_to = self.message.reply_to_message
64 | source = reply_to.from_user.username or reply_to.from_user.id if reply_to and \
65 | not reply_to.from_user.is_bot else self.message.from_user.username \
66 | or self.message.from_user.id
67 | return f"{source}"
68 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/clone_status.py:
--------------------------------------------------------------------------------
1 | from pkg_resources import get_distribution
2 |
3 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
4 | get_readable_file_size,
5 | get_readable_time)
6 |
7 | engine_ = f"Google Api v{get_distribution('google-api-python-client').version}"
8 |
9 | class CloneStatus:
10 | def __init__(self, obj, size, listener, gid):
11 | self.__obj = obj
12 | self.__size = size
13 | self.__gid = gid
14 | self.__listener = listener
15 | self.message = listener.message
16 | self.startTime = self.__listener.startTime
17 | self.mode = self.__listener.mode
18 | self.source = self.__source()
19 | self.engine = engine_
20 |
21 | def processed_bytes(self):
22 | return self.__obj.transferred_size
23 |
24 | def size_raw(self):
25 | return self.__size
26 |
27 | def size(self):
28 | return get_readable_file_size(self.__size)
29 |
30 | def status(self):
31 | return MirrorStatus.STATUS_CLONING
32 |
33 | def name(self):
34 | return self.__obj.name
35 |
36 | def gid(self) -> str:
37 | return self.__gid
38 |
39 | def progress_raw(self):
40 | try:
41 | return self.__obj.transferred_size / self.__size * 100
42 | except:
43 | return 0
44 |
45 | def progress(self):
46 | return f'{round(self.progress_raw(), 2)}%'
47 |
48 | def speed_raw(self):
49 | """
50 | :return: Download speed in Bytes/Seconds
51 | """
52 | return self.__obj.cspeed()
53 |
54 | def speed(self):
55 | return f'{get_readable_file_size(self.speed_raw())}/s'
56 |
57 | def eta(self):
58 | try:
59 | seconds = (self.__size - self.__obj.transferred_size) / self.speed_raw()
60 | return f'{get_readable_time(seconds)}'
61 | except:
62 | return '-'
63 |
64 | def download(self):
65 | return self.__obj
66 |
67 | def __source(self):
68 | reply_to = self.message.reply_to_message
69 | source = reply_to.from_user.username or reply_to.from_user.id if reply_to and \
70 | not reply_to.from_user.is_bot else self.message.from_user.username \
71 | or self.message.from_user.id
72 | return f"{source}"
73 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/tg_upload_status.py:
--------------------------------------------------------------------------------
1 | from pkg_resources import get_distribution
2 |
3 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
4 | get_readable_file_size,
5 | get_readable_time)
6 |
7 | engine_ = f"pyrogram v{get_distribution('pyrogram').version}"
8 |
9 | class TgUploadStatus:
10 | def __init__(self, obj, size, gid, listener):
11 | self.__obj = obj
12 | self.__size = size
13 | self.__gid = gid
14 | self.__listener = listener
15 | self.message = self.__listener.message
16 | self.startTime = self.__listener.startTime
17 | self.mode = self.__listener.mode
18 | self.source = self.__source()
19 | self.engine = engine_
20 |
21 | def processed_bytes(self):
22 | return self.__obj.uploaded_bytes
23 |
24 | def size_raw(self):
25 | return self.__size
26 |
27 | def size(self):
28 | return get_readable_file_size(self.__size)
29 |
30 | def status(self):
31 | return MirrorStatus.STATUS_UPLOADING
32 |
33 | def name(self):
34 | return self.__obj.name
35 |
36 | def progress_raw(self):
37 | try:
38 | return self.__obj.uploaded_bytes / self.__size * 100
39 | except ZeroDivisionError:
40 | return 0
41 |
42 | def progress(self):
43 | return f'{round(self.progress_raw(), 2)}%'
44 |
45 | def speed_raw(self):
46 | """
47 | :return: Upload speed in Bytes/Seconds
48 | """
49 | return self.__obj.speed
50 |
51 | def speed(self):
52 | return f'{get_readable_file_size(self.speed_raw())}/s'
53 |
54 | def eta(self):
55 | try:
56 | seconds = (self.__size - self.__obj.uploaded_bytes) / self.speed_raw()
57 | return f'{get_readable_time(seconds)}'
58 | except ZeroDivisionError:
59 | return '-'
60 |
61 | def gid(self) -> str:
62 | return self.__gid
63 |
64 | def download(self):
65 | return self.__obj
66 |
67 | def __source(self):
68 | reply_to = self.message.reply_to_message
69 | source = reply_to.from_user.username or reply_to.from_user.id if reply_to and \
70 | not reply_to.from_user.is_bot else self.message.from_user.username \
71 | or self.message.from_user.id
72 | return f"{source}"
73 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/upload_status.py:
--------------------------------------------------------------------------------
1 | from pkg_resources import get_distribution
2 |
3 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
4 | get_readable_file_size,
5 | get_readable_time)
6 |
7 | engine_ = f"Google Api v{get_distribution('google-api-python-client').version}"
8 |
9 | class UploadStatus:
10 | def __init__(self, obj, size, gid, listener):
11 | self.__obj = obj
12 | self.__size = size
13 | self.__gid = gid
14 | self.__listener = listener
15 | self.message = self.__listener.message
16 | self.startTime = self.__listener.startTime
17 | self.mode = self.__listener.mode
18 | self.source = self.__source()
19 | self.engine = engine_
20 |
21 | def processed_bytes(self):
22 | return self.__obj.processed_bytes
23 |
24 | def size_raw(self):
25 | return self.__size
26 |
27 | def size(self):
28 | return get_readable_file_size(self.__size)
29 |
30 | def status(self):
31 | return MirrorStatus.STATUS_UPLOADING
32 |
33 | def name(self):
34 | return self.__obj.name
35 |
36 | def progress_raw(self):
37 | try:
38 | return self.__obj.processed_bytes / self.__size * 100
39 | except ZeroDivisionError:
40 | return 0
41 |
42 | def progress(self):
43 | return f'{round(self.progress_raw(), 2)}%'
44 |
45 | def speed_raw(self):
46 | """
47 | :return: Upload speed in Bytes/Seconds
48 | """
49 | return self.__obj.speed()
50 |
51 | def speed(self):
52 | return f'{get_readable_file_size(self.speed_raw())}/s'
53 |
54 | def eta(self):
55 | try:
56 | seconds = (self.__size - self.__obj.processed_bytes) / self.speed_raw()
57 | return f'{get_readable_time(seconds)}'
58 | except ZeroDivisionError:
59 | return '-'
60 |
61 | def gid(self) -> str:
62 | return self.__gid
63 |
64 | def download(self):
65 | return self.__obj
66 |
67 | def __source(self):
68 | reply_to = self.message.reply_to_message
69 | source = reply_to.from_user.username or reply_to.from_user.id if reply_to and \
70 | not reply_to.from_user.is_bot else self.message.from_user.username \
71 | or self.message.from_user.id
72 | return f"{source}"
73 |
--------------------------------------------------------------------------------
/bot/modules/list.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 | from bot import LOGGER, dispatcher
3 | from bot.helper.telegram_helper.filters import CustomFilters
4 | from telegram.ext import CommandHandler, CallbackQueryHandler
5 | from bot.helper.telegram_helper.button_build import ButtonMaker
6 | from bot.helper.telegram_helper.bot_commands import BotCommands
7 | from bot.helper.telegram_helper.message_utils import sendMessage, editMessage
8 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
9 |
10 | def list_buttons(update, context):
11 | user_id = update.message.from_user.id
12 | if len(context.args) == 0:
13 | return sendMessage('Send a search key along with command', context.bot, update.message)
14 | buttons = ButtonMaker()
15 | buttons.sbutton("Folders", f"types {user_id} folders")
16 | buttons.sbutton("Files", f"types {user_id} files")
17 | buttons.sbutton("Both", f"types {user_id} both")
18 | buttons.sbutton("Cancel", f"types {user_id} cancel")
19 | button = buttons.build_menu(2)
20 | sendMessage('Choose option to list.', context.bot, update.message, button)
21 |
22 | def select_type(update, context):
23 | query = update.callback_query
24 | user_id = query.from_user.id
25 | msg = query.message
26 | key = msg.reply_to_message.text.split(" ", maxsplit=1)[1]
27 | data = query.data
28 | data = data.split()
29 | if user_id != int(data[1]):
30 | return query.answer(text="Not Yours!", show_alert=True)
31 | elif data[2] == 'cancel':
32 | query.answer()
33 | return editMessage("list has been canceled!", msg)
34 | query.answer()
35 | item_type = data[2]
36 | editMessage(f"Searching for {key}", msg)
37 | Thread(target=_list_drive, args=(key, msg, item_type)).start()
38 |
39 | def _list_drive(key, bmsg, item_type):
40 | LOGGER.info(f"listing: {key}")
41 | gdrive = GoogleDriveHelper()
42 | msg, button = gdrive.drive_list(key, isRecursive=True, itemType=item_type)
43 | if button:
44 | editMessage(msg, bmsg, button)
45 | else:
46 | editMessage(f'No result found for {key}', bmsg)
47 |
48 |
49 | list_handler = CommandHandler(BotCommands.ListCommand, list_buttons,
50 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
51 | list_type_handler = CallbackQueryHandler(select_type, pattern="types")
52 |
53 | dispatcher.add_handler(list_handler)
54 | dispatcher.add_handler(list_type_handler)
55 |
--------------------------------------------------------------------------------
/bot/modules/count.py:
--------------------------------------------------------------------------------
1 | from time import time
2 |
3 | from telegram.ext import CommandHandler
4 |
5 | from bot import dispatcher
6 | from bot.helper.ext_utils.bot_utils import (get_readable_time, is_gdrive_link,
7 | new_thread)
8 | from bot.helper.ext_utils.rate_limiter import ratelimiter
9 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
10 | from bot.helper.telegram_helper.bot_commands import BotCommands
11 | from bot.helper.telegram_helper.filters import CustomFilters
12 | from bot.helper.telegram_helper.message_utils import (anno_checker,
13 | deleteMessage,
14 | sendMessage)
15 |
16 |
17 | @ratelimiter
18 | @new_thread
19 | def countNode(update, context):
20 | message = update.message
21 | if message.from_user.id in [1087968824, 136817688]:
22 | message.from_user.id = anno_checker(message)
23 | if not message.from_user.id:
24 | return
25 | reply_to = message.reply_to_message
26 | link = ''
27 | if len(context.args) == 1:
28 | link = context.args[0].strip()
29 | if message.from_user.username:
30 | tag = f"@{message.from_user.username}"
31 | else:
32 | tag = message.from_user.mention_html(message.from_user.first_name)
33 | elif reply_to:
34 | if len(context.args) == 0:
35 | link = reply_to.text.split(maxsplit=1)[0].strip()
36 | if reply_to.from_user.username:
37 | tag = f"@{reply_to.from_user.username}"
38 | else:
39 | tag = reply_to.from_user.mention_html(reply_to.from_user.first_name)
40 | if is_gdrive_link(link):
41 | msg = sendMessage(f"Counting: {link}", context.bot, message)
42 | gd = GoogleDriveHelper()
43 | result = gd.count(link)
44 | deleteMessage(context.bot, msg)
45 | cc = f'\n\n#cc: {tag} | Elapsed: {get_readable_time(time() - message.date.timestamp())}'
46 | sendMessage(result + cc, context.bot, message)
47 | else:
48 | msg = 'Send Gdrive link along with command or by replying to the link by command'
49 | sendMessage(msg, context.bot, message)
50 |
51 |
52 | count_handler = CommandHandler(BotCommands.CountCommand, countNode,
53 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
54 | dispatcher.add_handler(count_handler)
55 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/gd_download_status.py:
--------------------------------------------------------------------------------
1 | from pkg_resources import get_distribution
2 |
3 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
4 | get_readable_file_size,
5 | get_readable_time)
6 |
7 | engine_ = f"Google Api v{get_distribution('google-api-python-client').version}"
8 |
9 | class GdDownloadStatus:
10 | def __init__(self, obj, size, listener, gid):
11 | self.__obj = obj
12 | self.__size = size
13 | self.__gid = gid
14 | self.__listener = listener
15 | self.message = self.__listener.message
16 | self.startTime = self.__listener.startTime
17 | self.mode = self.__listener.mode
18 | self.source = self.__source()
19 | self.engine = engine_
20 |
21 | def processed_bytes(self):
22 | return self.__obj.processed_bytes
23 |
24 | def size_raw(self):
25 | return self.__size
26 |
27 | def size(self):
28 | return get_readable_file_size(self.__size)
29 |
30 | def status(self):
31 | return MirrorStatus.STATUS_DOWNLOADING
32 |
33 | def name(self):
34 | return self.__obj.name
35 |
36 | def gid(self) -> str:
37 | return self.__gid
38 |
39 | def progress_raw(self):
40 | try:
41 | return self.__obj.processed_bytes / self.__size * 100
42 | except:
43 | return 0
44 |
45 | def progress(self):
46 | return f'{round(self.progress_raw(), 2)}%'
47 |
48 | def listener(self):
49 | return self.__listener
50 |
51 | def speed_raw(self):
52 | """
53 | :return: Download speed in Bytes/Seconds
54 | """
55 | return self.__obj.speed()
56 |
57 | def speed(self):
58 | return f'{get_readable_file_size(self.speed_raw())}/s'
59 |
60 | def eta(self):
61 | try:
62 | seconds = (self.__size - self.__obj.processed_bytes) / self.speed_raw()
63 | return f'{get_readable_time(seconds)}'
64 | except:
65 | return '-'
66 |
67 | def download(self):
68 | return self.__obj
69 |
70 | def __source(self):
71 | reply_to = self.message.reply_to_message
72 | source = reply_to.from_user.username or reply_to.from_user.id if reply_to and \
73 | not reply_to.from_user.is_bot else self.message.from_user.username \
74 | or self.message.from_user.id
75 | return f"{source}"
76 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/mega_download_status.py:
--------------------------------------------------------------------------------
1 | from mega import MegaApi
2 |
3 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
4 | get_readable_file_size,
5 | get_readable_time)
6 |
7 | engine_ = f"MegaSDK v{MegaApi('test').getVersion()}"
8 |
9 | class MegaDownloadStatus:
10 |
11 | def __init__(self, obj, listener):
12 | self.__listener = listener
13 | self.__obj = obj
14 | self.message = self.__listener.message
15 | self.startTime = self.__listener.startTime
16 | self.mode = self.__listener.mode
17 | self.source = self.__source()
18 | self.engine = engine_
19 |
20 | def name(self) -> str:
21 | return self.__obj.name
22 |
23 | def progress_raw(self):
24 | try:
25 | return round(self.processed_bytes() / self.__obj.size * 100,2)
26 | except:
27 | return 0.0
28 |
29 | def progress(self):
30 | """Progress of download in percentage"""
31 | return f"{self.progress_raw()}%"
32 |
33 | def status(self) -> str:
34 | return MirrorStatus.STATUS_DOWNLOADING
35 |
36 | def processed_bytes(self):
37 | return self.__obj.downloaded_bytes
38 |
39 | def eta(self):
40 | try:
41 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
42 | return f'{get_readable_time(seconds)}'
43 | except ZeroDivisionError:
44 | return '-'
45 |
46 | def size_raw(self):
47 | return self.__obj.size
48 |
49 | def size(self) -> str:
50 | return get_readable_file_size(self.size_raw())
51 |
52 | def downloaded(self) -> str:
53 | return get_readable_file_size(self.__obj.downloadedBytes)
54 |
55 | def speed_raw(self):
56 | return self.__obj.speed
57 |
58 | def listener(self):
59 | return self.__listener
60 |
61 | def speed(self) -> str:
62 | return f'{get_readable_file_size(self.speed_raw())}/s'
63 |
64 | def gid(self) -> str:
65 | return self.__obj.gid
66 |
67 | def download(self):
68 | return self.__obj
69 |
70 | def __source(self):
71 | reply_to = self.message.reply_to_message
72 | source = reply_to.from_user.username or reply_to.from_user.id if reply_to and \
73 | not reply_to.from_user.is_bot else self.message.from_user.username \
74 | or self.message.from_user.id
75 | return f"{source}"
76 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/split_status.py:
--------------------------------------------------------------------------------
1 | from time import time
2 | from bot import DOWNLOAD_DIR, LOGGER
3 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus, get_readable_time
4 | from bot.helper.ext_utils.fs_utils import get_path_size
5 |
6 | class SplitStatus:
7 | def __init__(self, name, size, gid, listener):
8 | self.__name = name
9 | self.__gid = gid
10 | self.__size = size
11 | self.__listener = listener
12 | self.__uid = listener.uid
13 | self.__start_time = time()
14 | self.message = self.__listener.message
15 | self.startTime = self.__listener.startTime
16 | self.mode = self.__listener.mode
17 | self.source = self.__source()
18 | self.engine = "ffmpeg"
19 |
20 | def gid(self):
21 | return self.__gid
22 |
23 | def speed_raw(self):
24 | return self.processed_bytes() / (time() - self.__start_time)
25 |
26 | def progress_raw(self):
27 | try:
28 | return self.processed_bytes() / self.__size * 100
29 | except:
30 | return 0
31 |
32 | def progress(self):
33 | return f'{round(self.progress_raw(), 2)}%'
34 |
35 | def speed(self):
36 | return f'{get_readable_file_size(self.speed_raw())}/s'
37 |
38 | def name(self):
39 | return self.__name
40 |
41 | def size_raw(self):
42 | return self.__size
43 |
44 | def size(self):
45 | return get_readable_file_size(self.__size)
46 |
47 | def eta(self):
48 | try:
49 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
50 | return f'{get_readable_time(seconds)}'
51 | except:
52 | return '-'
53 |
54 | def status(self):
55 | return MirrorStatus.STATUS_SPLITTING
56 |
57 | def processed_bytes(self):
58 | return get_path_size(f"{DOWNLOAD_DIR}{self.__uid}") - self.__size
59 |
60 | def download(self):
61 | return self
62 |
63 | def cancel_download(self):
64 | LOGGER.info(f'Cancelling Split: {self.__name}')
65 | if self.__listener.suproc:
66 | self.__listener.suproc.kill()
67 | self.__listener.onUploadError('Splitting stopped by user!')
68 |
69 | def __source(self):
70 | reply_to = self.message.reply_to_message
71 | source = reply_to.from_user.username or reply_to.from_user.id if reply_to and \
72 | not reply_to.from_user.is_bot else self.message.from_user.username \
73 | or self.message.from_user.id
74 | return f"{source}"
75 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/shortenurl.py:
--------------------------------------------------------------------------------
1 | # Implemented by https://github.com/junedkh
2 |
3 | from base64 import b64encode
4 | from random import random, choice
5 | from cfscrape import create_scraper
6 | from urllib3 import disable_warnings
7 | from urllib.parse import quote, unquote
8 |
9 | from bot import LOGGER, SHORTENER, SHORTENER_API
10 |
11 | def short_url(longurl):
12 |     if SHORTENER is None or SHORTENER_API is None:
13 | return longurl
14 | try:
15 | cget = create_scraper().get
16 | try:
17 | unquote(longurl).encode('ascii')
18 | if "{" in unquote(longurl) or "}" in unquote(longurl):
19 | raise TypeError
20 | except (UnicodeEncodeError, TypeError):
21 | longurl = cget('http://tinyurl.com/api-create.php', params=dict(url=longurl)).text
22 | if "shorte.st" in SHORTENER:
23 | disable_warnings()
24 | return cget(f'http://api.shorte.st/stxt/{SHORTENER_API}/{longurl}', verify=False).text
25 | elif "linkvertise" in SHORTENER:
26 | url = quote(b64encode(longurl.encode("utf-8")))
27 | linkvertise = [
28 | f"https://link-to.net/{SHORTENER_API}/{random() * 1000}/dynamic?r={url}",
29 | f"https://up-to-down.net/{SHORTENER_API}/{random() * 1000}/dynamic?r={url}",
30 | f"https://direct-link.net/{SHORTENER_API}/{random() * 1000}/dynamic?r={url}",
31 | f"https://file-link.net/{SHORTENER_API}/{random() * 1000}/dynamic?r={url}"]
32 | return choice(linkvertise)
33 | elif "bitly.com" in SHORTENER:
34 | shorten_url = "https://api-ssl.bit.ly/v4/shorten"
35 | headers = {"Authorization": f"Bearer {SHORTENER_API}"}
36 | response = create_scraper().post(shorten_url, json={"long_url": longurl}, headers=headers).json()
37 | return response["link"]
38 | elif "ouo.io" in SHORTENER:
39 | disable_warnings()
40 | return cget(f'http://ouo.io/api/{SHORTENER_API}?s={longurl}', verify=False).text
41 | elif "adfoc.us" in SHORTENER:
42 | disable_warnings()
43 | return cget(f'http://adfoc.us/api/?key={SHORTENER_API}&url={longurl}', verify=False).text
44 | elif "cutt.ly" in SHORTENER:
45 | disable_warnings()
46 | return cget(f'http://cutt.ly/api/api.php?key={SHORTENER_API}&short={longurl}', verify=False).json()['url']['shortLink']
47 | else:
48 | return cget(f'https://{SHORTENER}/api?api={SHORTENER_API}&url={quote(longurl)}&format=text').text
49 | except Exception as e:
50 | LOGGER.error(e)
51 | return longurl
52 |
--------------------------------------------------------------------------------
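
Usage is a single call; when SHORTENER and SHORTENER_API are unset the URL comes back untouched, and any shortener error also falls back to the original URL. A small sketch with an illustrative link:

from bot.helper.ext_utils.shortenurl import short_url

index_link = "https://example.com/0:/My%20File.mkv"
print(short_url(index_link))
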
/update.py:
--------------------------------------------------------------------------------
1 | from logging import FileHandler, StreamHandler, INFO, basicConfig, error as log_error, info as log_info
2 | from os import path as ospath, environ
3 | from subprocess import run as srun
4 | from dotenv import load_dotenv
5 | from pymongo import MongoClient
6 |
7 | if ospath.exists('log.txt'):
8 | with open('log.txt', 'r+') as f:
9 | f.truncate(0)
10 |
11 | basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
12 | handlers=[FileHandler('log.txt'), StreamHandler()],
13 | level=INFO)
14 |
15 | load_dotenv('config.env', override=True)
16 |
17 | try:
18 | if bool(environ.get('_____REMOVE_THIS_LINE_____')):
19 |         log_error('The README.md file is there to be read! Exiting now!')
20 | exit()
21 | except:
22 | pass
23 |
24 | BOT_TOKEN = environ.get('BOT_TOKEN', '')
25 | if len(BOT_TOKEN) == 0:
26 | log_error("BOT_TOKEN variable is missing! Exiting now")
27 | exit(1)
28 |
29 | bot_id = int(BOT_TOKEN.split(':', 1)[0])
30 |
31 | DATABASE_URL = environ.get('DATABASE_URL', '')
32 | if len(DATABASE_URL) == 0:
33 | DATABASE_URL = None
34 |
35 | if DATABASE_URL:
36 | conn = MongoClient(DATABASE_URL)
37 | db = conn.z
38 |     if config_dict := db.settings.config.find_one({'_id': bot_id}):  # returns the config dict (all env vars)
39 | environ['UPSTREAM_REPO'] = config_dict['UPSTREAM_REPO']
40 | environ['UPSTREAM_BRANCH'] = config_dict['UPSTREAM_BRANCH']
41 | conn.close()
42 |
43 | UPSTREAM_REPO = environ.get('UPSTREAM_REPO', '')
44 | if len(UPSTREAM_REPO) == 0:
45 | UPSTREAM_REPO = ''
46 |
47 | UPSTREAM_BRANCH = environ.get('UPSTREAM_BRANCH', '')
48 | if len(UPSTREAM_BRANCH) == 0:
49 | UPSTREAM_BRANCH = ''
50 |
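# If an upstream repo is configured, the block below wipes the local git history and hard-resets the working tree to the configured repo/branch.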
51 | if UPSTREAM_REPO:
52 | if ospath.exists('.git'):
53 | srun(["rm", "-rf", ".git"])
54 |
55 | update = srun([f"git init -q \
56 | && git config --global user.email shuvam.dawn12345@gmail.com \
57 | && git config --global user.name Dawn-India \
58 | && git add . \
59 | && git commit -sm update -q \
60 | && git remote add origin {UPSTREAM_REPO} \
61 | && git fetch origin -q \
62 | && git reset --hard origin/{UPSTREAM_BRANCH} -q"], shell=True)
63 |
64 | if update.returncode == 0:
65 | log_info('Successfully updated with latest commit.')
66 | log_info(f'Repo in use: {UPSTREAM_REPO}')
67 | log_info(f'Branch in use: {UPSTREAM_BRANCH}')
68 | log_info('Thanks For Using Z_Mirror')
69 | else:
70 | log_error('Something went wrong while updating.')
71 | log_info('Check if entered UPSTREAM_REPO is valid or not!')
72 | log_info(f'Entered upstream repo: {UPSTREAM_REPO}')
73 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/shortener.py:
--------------------------------------------------------------------------------
1 | from base64 import b64encode
2 | from random import choice, random, randrange
3 | from urllib.parse import quote
4 | from time import sleep
5 | from cfscrape import create_scraper
6 | from urllib3 import disable_warnings
7 |
8 | from bot import LOGGER, SHORTENER_APIS, SHORTENERES
9 |
10 |
11 | def short_url(longurl, attempt=0):
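# Picks a random shortener/API pair from the configured lists; on failure it sleeps a second and retries, giving up after 4 attempts and returning the original URL.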
12 | if not SHORTENERES and not SHORTENER_APIS:
13 | return longurl
14 | if attempt >= 4:
15 | return longurl
16 | i = 0 if len(SHORTENERES) == 1 else randrange(len(SHORTENERES))
17 | _shortener = SHORTENERES[i].strip()
18 | _shortener_api = SHORTENER_APIS[i].strip()
19 | cget = create_scraper().request
20 | disable_warnings()
21 | try:
22 | if "shorte.st" in _shortener:
23 | headers = {'public-api-token': _shortener_api}
24 | data = {'urlToShorten': quote(longurl)}
25 | return cget('PUT', 'https://api.shorte.st/v1/data/url', headers=headers, data=data).json()['shortenedUrl']
26 | elif "linkvertise" in _shortener:
27 | url = quote(b64encode(longurl.encode("utf-8")))
28 | linkvertise = [
29 | f"https://link-to.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
30 | f"https://up-to-down.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
31 | f"https://direct-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
32 | f"https://file-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}"]
33 | return choice(linkvertise)
34 | elif "bitly.com" in _shortener:
35 | headers = {"Authorization": f"Bearer {_shortener_api}"}
36 | return cget('POST', "https://api-ssl.bit.ly/v4/shorten", json={"long_url": longurl}, headers=headers).json()["link"]
37 | elif "ouo.io" in _shortener:
38 | return cget('GET', f'http://ouo.io/api/{_shortener_api}?s={longurl}', verify=False).text
39 | elif "cutt.ly" in _shortener:
40 | return cget('GET', f'http://cutt.ly/api/api.php?key={_shortener_api}&short={longurl}', verify=False).json()['url']['shortLink']
41 | else:
42 | res = cget('GET', f'https://{_shortener}/api?api={_shortener_api}&url={quote(longurl)}').json()
43 | shorted = res['shortenedUrl']
44 | if not shorted:
45 | longurl = cget('GET','http://tinyurl.com/api-create.php', params=dict(url=longurl)).text
46 | res = cget('GET', f'https://{_shortener}/api?api={_shortener_api}&url={quote(longurl)}').json()
47 | shorted = res['shortenedUrl']
48 | return shorted
49 | except Exception as e:
50 | LOGGER.error(e)
51 | sleep(1)
52 | attempt += 1
53 | return short_url(longurl, attempt)
54 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/zip_status.py:
--------------------------------------------------------------------------------
1 | from time import time
2 |
3 | from bot import DOWNLOAD_DIR, LOGGER
4 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
5 | get_readable_file_size,
6 | get_readable_time)
7 | from bot.helper.ext_utils.fs_utils import get_path_size
8 |
9 |
10 | class ZipStatus:
11 | def __init__(self, name, size, gid, listener):
12 | self.__name = name
13 | self.__size = size
14 | self.__gid = gid
15 | self.__listener = listener
16 | self.__uid = self.__listener.uid
17 | self.__start_time = time()
18 | self.message = self.__listener.message
19 | self.startTime = self.__listener.startTime
20 | self.mode = self.__listener.mode
21 | self.source = self.__source()
22 | self.engine = "p7zip"
23 |
24 | def gid(self):
25 | return self.__gid
26 |
27 | def speed_raw(self):
28 | return self.processed_bytes() / (time() - self.__start_time)
29 |
30 | def progress_raw(self):
31 | try:
32 | return self.processed_bytes() / self.__size * 100
33 | except:
34 | return 0
35 |
36 | def progress(self):
37 | return f'{round(self.progress_raw(), 2)}%'
38 |
39 | def speed(self):
40 | return f'{get_readable_file_size(self.speed_raw())}/s'
41 |
42 | def name(self):
43 | return self.__name
44 |
45 | def size_raw(self):
46 | return self.__size
47 |
48 | def size(self):
49 | return get_readable_file_size(self.__size)
50 |
51 | def eta(self):
52 | try:
53 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
54 | return f'{get_readable_time(seconds)}'
55 | except:
56 | return '-'
57 |
58 | def status(self):
59 | return MirrorStatus.STATUS_ARCHIVING
60 |
61 | def processed_bytes(self):
62 | if self.__listener.newDir:
63 | return get_path_size(f"{DOWNLOAD_DIR}{self.__uid}10000")
64 | else:
65 | return get_path_size(f"{DOWNLOAD_DIR}{self.__uid}") - self.__size
66 |
67 | def download(self):
68 | return self
69 |
70 | def cancel_download(self):
71 | LOGGER.info(f'Cancelling Archive: {self.__name}')
72 | if self.__listener.suproc:
73 | self.__listener.suproc.kill()
74 | self.__listener.onUploadError('Archiving stopped by user!')
75 |
76 | def __source(self):
77 | reply_to = self.message.reply_to_message
78 | source = reply_to.from_user.username or reply_to.from_user.id if reply_to and \
79 | not reply_to.from_user.is_bot else self.message.from_user.username \
80 | or self.message.from_user.id
81 | return f"{source}"
82 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/extract_status.py:
--------------------------------------------------------------------------------
1 | from time import time
2 |
3 | from bot import DOWNLOAD_DIR, LOGGER
4 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
5 | get_readable_file_size,
6 | get_readable_time)
7 | from bot.helper.ext_utils.fs_utils import get_path_size
8 |
9 |
10 | class ExtractStatus:
11 | def __init__(self, name, size, gid, listener):
12 | self.__name = name
13 | self.__size = size
14 | self.__gid = gid
15 | self.__listener = listener
16 | self.__uid = self.__listener.uid
17 | self.__start_time = time()
18 | self.message = self.__listener.message
19 | self.startTime = self.__listener.startTime
20 | self.mode = self.__listener.mode
21 | self.source = self.__source()
22 | self.engine = '7z'
23 |
24 | def gid(self):
25 | return self.__gid
26 |
27 | def speed_raw(self):
28 | return self.processed_bytes() / (time() - self.__start_time)
29 |
30 | def progress_raw(self):
31 | try:
32 | return self.processed_bytes() / self.__size * 100
33 | except:
34 | return 0
35 |
36 | def progress(self):
37 | return f'{round(self.progress_raw(), 2)}%'
38 |
39 | def speed(self):
40 | return f'{get_readable_file_size(self.speed_raw())}/s'
41 |
42 | def name(self):
43 | return self.__name
44 |
45 | def size_raw(self):
46 | return self.__size
47 |
48 | def size(self):
49 | return get_readable_file_size(self.__size)
50 |
51 | def eta(self):
52 | try:
53 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
54 | return f'{get_readable_time(seconds)}'
55 | except:
56 | return '-'
57 |
58 | def status(self):
59 | return MirrorStatus.STATUS_EXTRACTING
60 |
61 | def processed_bytes(self):
62 | if self.__listener.newDir:
63 | return get_path_size(f"{DOWNLOAD_DIR}{self.__uid}10000")
64 | else:
65 | return get_path_size(f"{DOWNLOAD_DIR}{self.__uid}") - self.__size
66 |
67 | def download(self):
68 | return self
69 |
70 | def cancel_download(self):
71 | LOGGER.info(f'Cancelling Extract: {self.__name}')
72 | if self.__listener.suproc:
73 | self.__listener.suproc.kill()
74 | self.__listener.onUploadError('Extracting stopped by user!')
75 |
76 | def __source(self):
77 | reply_to = self.message.reply_to_message
78 | source = reply_to.from_user.username or reply_to.from_user.id if reply_to and \
79 | not reply_to.from_user.is_bot else self.message.from_user.username \
80 | or self.message.from_user.id
81 | return f"{source}"
82 |
--------------------------------------------------------------------------------
/bot/modules/rmdb.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CommandHandler
2 |
3 | from bot import DATABASE_URL, config_dict, dispatcher
4 | from bot.helper.ext_utils.bot_utils import is_magnet, is_url, new_thread
5 | from bot.helper.ext_utils.db_handler import DbManger
6 | from bot.helper.ext_utils.z_utils import extract_link
7 | from bot.helper.telegram_helper.bot_commands import BotCommands
8 | from bot.helper.telegram_helper.filters import CustomFilters
9 | from bot.helper.telegram_helper.message_utils import sendMessage
10 |
11 |
12 | def _rmdb(message, bot):
13 | mesg = message.text.split('\n')
14 | message_args = mesg[0].split(' ', maxsplit=1)
15 | tfile = False
16 | file = None
17 | try:
18 | link = message_args[1]
19 | except IndexError:
20 | link = ''
21 | tag = None
22 | reply_to = message.reply_to_message
23 | if reply_to:
24 | media_array = [reply_to.document, reply_to.video, reply_to.audio]
25 | file = next((i for i in media_array if i), None)
26 | if not reply_to.from_user.is_bot:
27 | if reply_to.from_user.username:
28 | tag = f"@{reply_to.from_user.username}"
29 | else:
30 | tag = reply_to.from_user.mention_html(reply_to.from_user.first_name)
31 |
32 | if not is_url(link) and not is_magnet(link) and not link:
33 | if file is None:
34 | if is_url(reply_to.text) or is_magnet(reply_to.text):
35 | link = reply_to.text.strip()
36 | else:
37 | mesg = message.text.split('\n')
38 | message_args = mesg[0].split(' ', maxsplit=1)
39 | try:
40 | link = message_args[1]
41 | except IndexError:
42 | pass
43 | elif file.mime_type != "application/x-bittorrent":
44 | link = file.file_unique_id
45 | else:
46 | link = file.get_file().download_url
47 | tfile = True
48 |
49 | if DATABASE_URL and config_dict['STOP_DUPLICATE_TASKS']:
50 | raw_url = extract_link(link, tfile)
51 | exist = DbManger().check_download(raw_url)
52 | if exist:
53 | DbManger().remove_download(exist['_id'])
54 | msg = 'Download removed from the database successfully'
55 | msg += f'\n{exist["tag"]} Your download has been removed.'
56 | else:
57 | msg = 'This download does not exist in the database'
58 | else:
59 | msg = 'STOP_DUPLICATE_TASKS feature is not enabled'
60 | return sendMessage(msg, bot, message)
61 |
62 | @new_thread
63 | def rmdbNode(update, context):
64 | _rmdb(update.message, context.bot)
65 |
66 | if DATABASE_URL:
67 | rmdb_handler = CommandHandler(command=BotCommands.RmdbCommand, callback=rmdbNode, filters=CustomFilters.owner_filter | CustomFilters.sudo_user)
68 | dispatcher.add_handler(rmdb_handler)
69 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/yt_dlp_download_status.py:
--------------------------------------------------------------------------------
1 | from pkg_resources import get_distribution
2 |
3 | from bot import DOWNLOAD_DIR
4 | from bot.helper.ext_utils.bot_utils import (MirrorStatus,
5 | get_readable_file_size,
6 | get_readable_time)
7 | from bot.helper.ext_utils.fs_utils import get_path_size
8 |
9 | engine_ = f"yt-dlp v{get_distribution('yt-dlp').version}"
10 |
11 | class YtDlpDownloadStatus:
12 | def __init__(self, obj, listener, gid):
13 | self.__obj = obj
14 | self.__uid = listener.uid
15 | self.__gid = gid
16 | self.__listener = listener
17 | self.message = self.__listener.message
18 | self.__isPlayList = self.__obj.is_playlist
19 | self.startTime = self.__listener.startTime
20 | self.mode = self.__listener.mode
21 | self.source = self.__source()
22 | self.engine = engine_
23 |
24 | def playList(self):
25 | if self.__isPlayList:
26 | return f"{self.__obj.playlist_index} of {self.__obj.playlist_count}"
27 | else:
28 | return None
29 |
30 | def gid(self):
31 | return self.__gid
32 |
33 | def processed_bytes(self):
34 | if self.__obj.downloaded_bytes != 0:
35 | return self.__obj.downloaded_bytes
36 | else:
37 | return get_path_size(f"{DOWNLOAD_DIR}{self.__uid}")
38 |
39 | def size_raw(self):
40 | return self.__obj.size
41 |
42 | def size(self):
43 | return get_readable_file_size(self.size_raw())
44 |
45 | def status(self):
46 | return MirrorStatus.STATUS_DOWNLOADING
47 |
48 | def name(self):
49 | return self.__obj.name
50 |
51 | def progress_raw(self):
52 | return self.__obj.progress
53 |
54 | def progress(self):
55 | return f'{round(self.progress_raw(), 2)}%'
56 |
57 | def speed_raw(self):
58 | """
59 | :return: Download speed in Bytes/Seconds
60 | """
61 | return self.__obj.download_speed
62 |
63 | def listener(self):
64 | return self.__listener
65 |
66 | def speed(self):
67 | return f'{get_readable_file_size(self.speed_raw())}/s'
68 |
69 | def eta(self):
70 | if self.__obj.eta != '-':
71 | return f'{get_readable_time(self.__obj.eta)}'
72 | try:
73 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
74 | return f'{get_readable_time(seconds)}'
75 | except:
76 | return '-'
77 |
78 | def download(self):
79 | return self.__obj
80 |
81 | def __source(self):
82 | reply_to = self.message.reply_to_message
83 | source = reply_to.from_user.username or reply_to.from_user.id if reply_to and \
84 | not reply_to.from_user.is_bot else self.message.from_user.username \
85 | or self.message.from_user.id
86 | return f"{source}"
87 |
--------------------------------------------------------------------------------
/bot/modules/mirror_status.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 | from time import time
3 |
4 | from psutil import cpu_percent, disk_usage, virtual_memory
5 | from telegram.ext import CallbackQueryHandler, CommandHandler
6 |
7 | from bot import (DOWNLOAD_DIR, Interval, botStartTime, config_dict, dispatcher,
8 | download_dict, download_dict_lock, status_reply_dict_lock)
9 | from bot.helper.ext_utils.bot_utils import (get_readable_file_size,
10 | get_readable_time, new_thread,
11 | setInterval, turn)
12 | from bot.helper.ext_utils.rate_limiter import ratelimiter
13 | from bot.helper.telegram_helper.bot_commands import BotCommands
14 | from bot.helper.telegram_helper.filters import CustomFilters
15 | from bot.helper.telegram_helper.message_utils import (auto_delete_message,
16 | deleteMessage,
17 | sendMessage,
18 | sendStatusMessage,
19 | update_all_messages)
20 |
21 |
22 | @ratelimiter
23 | def mirror_status(update, context):
24 | with download_dict_lock:
25 | count = len(download_dict)
26 | if count == 0:
27 | currentTime = get_readable_time(time() - botStartTime)
28 | free = get_readable_file_size(disk_usage(DOWNLOAD_DIR).free)
29 | message = 'No Active Downloads !\n___________________________'
30 | message += f"\nCPU: {cpu_percent()}% | FREE: {free}" \
31 | f"\nRAM: {virtual_memory().percent}% | UPTIME: {currentTime}"
32 | reply_message = sendMessage(message, context.bot, update.message)
33 | Thread(target=auto_delete_message, args=(context.bot, update.message, reply_message)).start()
34 | else:
35 | sendStatusMessage(update.message, context.bot)
36 | deleteMessage(context.bot, update.message)
37 | with status_reply_dict_lock:
38 | if Interval:
39 | Interval[0].cancel()
40 | Interval.clear()
41 | Interval.append(setInterval(config_dict['DOWNLOAD_STATUS_UPDATE_INTERVAL'], update_all_messages))
42 |
43 | @new_thread
44 | @ratelimiter
45 | def status_pages(update, context):
46 | query = update.callback_query
47 | query.answer()
48 | data = query.data
49 | data = data.split()
50 | if data[1] == "ref":
51 | update_all_messages(True)
52 | return
53 | done = turn(data)
54 | if not done:
55 | query.message.delete()
56 |
57 |
58 | mirror_status_handler = CommandHandler(BotCommands.StatusCommand, mirror_status,
59 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
60 | status_pages_handler = CallbackQueryHandler(status_pages, pattern="status")
61 |
62 | dispatcher.add_handler(mirror_status_handler)
63 | dispatcher.add_handler(status_pages_handler)
64 |
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/bot_commands.py:
--------------------------------------------------------------------------------
1 | from bot import CMD_SUFFIX
2 |
3 |
4 | class _BotCommands:
5 | def __init__(self):
6 | self.StartCommand = f'start{CMD_SUFFIX}'
7 | self.MirrorCommand = (f'mirror{CMD_SUFFIX}', f'm{CMD_SUFFIX}')
8 | self.UnzipMirrorCommand = (f'unzipmirror{CMD_SUFFIX}', f'uzm{CMD_SUFFIX}')
9 | self.ZipMirrorCommand = (f'zipmirror{CMD_SUFFIX}', f'zm{CMD_SUFFIX}')
10 | self.QbMirrorCommand = (f'qbmirror{CMD_SUFFIX}', f'qm{CMD_SUFFIX}')
11 | self.QbUnzipMirrorCommand = (f'qbunzipmirror{CMD_SUFFIX}', f'quzm{CMD_SUFFIX}')
12 | self.QbZipMirrorCommand = (f'qbzipmirror{CMD_SUFFIX}', f'qzm{CMD_SUFFIX}')
13 | self.YtdlCommand = (f'ytdl{CMD_SUFFIX}', f'y{CMD_SUFFIX}')
14 | self.YtdlZipCommand = (f'ytdlzip{CMD_SUFFIX}', f'yz{CMD_SUFFIX}')
15 | self.LeechCommand = (f'leech{CMD_SUFFIX}', f'l{CMD_SUFFIX}')
16 | self.UnzipLeechCommand = (f'unzipleech{CMD_SUFFIX}', f'uzl{CMD_SUFFIX}')
17 | self.ZipLeechCommand = (f'zipleech{CMD_SUFFIX}', f'zl{CMD_SUFFIX}')
18 | self.QbLeechCommand = (f'qbleech{CMD_SUFFIX}', f'ql{CMD_SUFFIX}')
19 | self.QbUnzipLeechCommand = (f'qbunzipleech{CMD_SUFFIX}', f'quzl{CMD_SUFFIX}')
20 | self.QbZipLeechCommand = (f'qbzipleech{CMD_SUFFIX}', f'qzl{CMD_SUFFIX}')
21 | self.YtdlLeechCommand = (f'ytdlleech{CMD_SUFFIX}', f'yl{CMD_SUFFIX}')
22 | self.YtdlZipLeechCommand = (f'ytdlzipleech{CMD_SUFFIX}', f'yzl{CMD_SUFFIX}')
23 | self.CloneCommand = f'clone{CMD_SUFFIX}'
24 | self.CountCommand = f'count{CMD_SUFFIX}'
25 | self.DeleteCommand = f'del{CMD_SUFFIX}'
26 | self.CancelMirror = f'cancel{CMD_SUFFIX}'
27 | self.CancelAllCommand = (f'cancelall{CMD_SUFFIX}', 'cancelallbot')
28 | self.ListCommand = f'list{CMD_SUFFIX}'
29 | self.SearchCommand = f'search{CMD_SUFFIX}'
30 | self.StatusCommand = (f'status{CMD_SUFFIX}', f's{CMD_SUFFIX}')
31 | self.UsersCommand = f'users{CMD_SUFFIX}'
32 | self.AuthorizeCommand = f'authorize{CMD_SUFFIX}'
33 | self.UnAuthorizeCommand = f'unauthorize{CMD_SUFFIX}'
34 | self.AddSudoCommand = f'addsudo{CMD_SUFFIX}'
35 | self.RmSudoCommand = f'rmsudo{CMD_SUFFIX}'
36 | self.PingCommand = ('ping','p')
37 | self.RestartCommand = (f'restart{CMD_SUFFIX}', 'restartall')
38 | self.StatsCommand = f'stats{CMD_SUFFIX}'
39 | self.HelpCommand = f'help{CMD_SUFFIX}'
40 | self.LogCommand = f'log{CMD_SUFFIX}'
41 | self.ShellCommand = f'shell{CMD_SUFFIX}'
42 | self.EvalCommand = f'eval{CMD_SUFFIX}'
43 | self.ExecCommand = f'exec{CMD_SUFFIX}'
44 | self.ClearLocalsCommand = f'clearlocals{CMD_SUFFIX}'
45 | self.BotSetCommand = f'bsetting{CMD_SUFFIX}'
46 | self.UserSetCommand = f'usetting{CMD_SUFFIX}'
47 | self.BtSelectCommand = f'btsel{CMD_SUFFIX}'
48 | self.CategorySelect = f'catsel{CMD_SUFFIX}'
49 | self.RssListCommand = (f'rsslist{CMD_SUFFIX}', f'rl{CMD_SUFFIX}')
50 | self.RssGetCommand = (f'rssget{CMD_SUFFIX}', f'rg{CMD_SUFFIX}')
51 | self.RssSubCommand = (f'rsssub{CMD_SUFFIX}', f'rs{CMD_SUFFIX}')
52 | self.RssUnSubCommand = (f'rssunsub{CMD_SUFFIX}', f'rus{CMD_SUFFIX}')
53 | self.RssSettingsCommand = (f'rssset{CMD_SUFFIX}', f'rst{CMD_SUFFIX}')
54 | self.RmdbCommand = f'rmdb{CMD_SUFFIX}'
55 |
56 | BotCommands = _BotCommands()
57 |
--------------------------------------------------------------------------------
/add_to_team_drive.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | from google.oauth2.service_account import Credentials
3 | import googleapiclient.discovery, json, progress.bar, glob, sys, argparse, time
4 | from google_auth_oauthlib.flow import InstalledAppFlow
5 | from google.auth.transport.requests import Request
6 | import os, pickle
7 |
8 | stt = time.time()
9 |
10 | parse = argparse.ArgumentParser(
11 | description='A tool to add service accounts to a shared drive from a folder containing credential files.')
12 | parse.add_argument('--path', '-p', default='accounts',
13 | help='Specify an alternative path to the service accounts folder.')
14 | parse.add_argument('--credentials', '-c', default='./credentials.json',
15 | help='Specify the relative path for the credentials file.')
16 | parse.add_argument('--yes', '-y', default=False, action='store_true', help='Skips the sanity prompt.')
17 | parsereq = parse.add_argument_group('required arguments')
18 | parsereq.add_argument('--drive-id', '-d', help='The ID of the Shared Drive.', required=True)
19 |
20 | args = parse.parse_args()
21 | acc_dir = args.path
22 | did = args.drive_id
23 | credentials = glob.glob(args.credentials)
24 |
25 | try:
26 | open(credentials[0], 'r')
27 | print('>> Found credentials.')
28 | except IndexError:
29 | print('>> No credentials found.')
30 | sys.exit(0)
31 |
32 | if not args.yes:
33 | # input('Make sure the following client id is added to the shared drive as Manager:\n' + json.loads((open(
34 | # credentials[0],'r').read()))['installed']['client_id'])
35 | input('>> Make sure the **Google account** that generated credentials.json\n is added to your Team Drive '
36 | '(shared drive) as Manager\n>> (Press Enter to continue)')
37 |
38 | creds = None
39 | if os.path.exists('token_sa.pickle'):
40 | with open('token_sa.pickle', 'rb') as token:
41 | creds = pickle.load(token)
42 | # If there are no (valid) credentials available, let the user log in.
43 | if not creds or not creds.valid:
44 | if creds and creds.expired and creds.refresh_token:
45 | creds.refresh(Request())
46 | else:
47 | flow = InstalledAppFlow.from_client_secrets_file(credentials[0], scopes=[
48 | 'https://www.googleapis.com/auth/admin.directory.group',
49 | 'https://www.googleapis.com/auth/admin.directory.group.member'
50 | ])
51 | # creds = flow.run_local_server(port=0)
52 | creds = flow.run_console()
53 | # Save the credentials for the next run
54 | with open('token_sa.pickle', 'wb') as token:
55 | pickle.dump(creds, token)
56 |
57 | drive = googleapiclient.discovery.build("drive", "v3", credentials=creds)
58 | batch = drive.new_batch_http_request()
59 |
60 | aa = glob.glob('%s/*.json' % acc_dir)
61 | pbar = progress.bar.Bar("Readying accounts", max=len(aa))
62 | for i in aa:
63 | ce = json.loads(open(i, 'r').read())['client_email']
64 | batch.add(drive.permissions().create(fileId=did, supportsAllDrives=True, body={
65 | "role": "organizer",
66 | "type": "user",
67 | "emailAddress": ce
68 | }))
69 | pbar.next()
70 | pbar.finish()
71 | print('Adding...')
72 | batch.execute()
73 |
74 | print('Complete.')
75 | hours, rem = divmod((time.time() - stt), 3600)
76 | minutes, sec = divmod(rem, 60)
77 | print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), sec))
78 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/telegraph_helper.py:
--------------------------------------------------------------------------------
1 | from string import ascii_letters
2 | from random import SystemRandom
3 | from time import sleep
4 | from telegraph import Telegraph
5 | from telegraph.exceptions import RetryAfterError
6 |
7 | from bot import LOGGER
8 |
9 |
10 | class TelegraphHelper:
11 | def __init__(self, author_name=None, author_url=None):
12 | self.telegraph = Telegraph(domain='graph.org')
13 | self.short_name = ''.join(SystemRandom().choices(ascii_letters, k=8))
14 | self.access_token = None
15 | self.author_name = author_name
16 | self.author_url = author_url
17 | self.create_account()
18 |
19 | def create_account(self):
20 | self.telegraph.create_account(
21 | short_name=self.short_name,
22 | author_name=self.author_name,
23 | author_url=self.author_url
24 | )
25 | self.access_token = self.telegraph.get_access_token()
26 | LOGGER.info("Creating Telegraph Account")
27 |
28 | def create_page(self, title, content):
29 | try:
30 | return self.telegraph.create_page(
31 | title = title,
32 | author_name=self.author_name,
33 | author_url=self.author_url,
34 | html_content=content
35 | )
36 | except RetryAfterError as st:
37 | LOGGER.warning(f'Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds.')
38 | sleep(st.retry_after)
39 | return self.create_page(title, content)
40 |
41 | def edit_page(self, path, title, content):
42 | try:
43 | return self.telegraph.edit_page(
44 | path = path,
45 | title = title,
46 | author_name=self.author_name,
47 | author_url=self.author_url,
48 | html_content=content
49 | )
50 | except RetryAfterError as st:
51 | LOGGER.warning(f'Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds.')
52 | sleep(st.retry_after)
53 | return self.edit_page(path, title, content)
54 |
55 | def edit_telegraph(self, path, telegraph_content):
56 | nxt_page = 1
57 | prev_page = 0
58 | num_of_path = len(path)
59 | for content in telegraph_content :
60 | if nxt_page == 1 :
61 | content += f'Next'
62 | nxt_page += 1
63 | else :
64 | if prev_page <= num_of_path:
65 | content += f'Prev'
66 | prev_page += 1
67 | if nxt_page < num_of_path:
68 | content += f' | Next'
69 | nxt_page += 1
70 | self.edit_page(
71 | path = path[prev_page],
72 | title = 'Z Torrent Search',
73 | content=content
74 | )
75 | return
76 |
77 | def revoke_access_token(self):
78 | LOGGER.info('Revoking telegraph access token...')
79 | try:
80 | return self.telegraph.revoke_access_token()
81 | except Exception as e:
82 | LOGGER.error(f'Failed to revoke telegraph access token due to: {e}')
83 |
84 | try:
85 | telegraph = TelegraphHelper('Z', 'https://github.com/Dawn-India/Z-Mirror')
86 | except Exception as err:
87 | LOGGER.warning(f"Can't Create Telegraph Account: {err}")
88 | telegraph = None
89 | pass
90 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/queued_starter.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 |
3 | from bot import (config_dict, non_queued_dl, non_queued_up, queue_dict_lock,
4 | queued_dl, queued_up)
5 | from bot.helper.mirror_utils.download_utils.gd_downloader import add_gd_download
6 | from bot.helper.mirror_utils.download_utils.mega_downloader import add_mega_download
7 | from bot.helper.mirror_utils.download_utils.telegram_downloader import TelegramDownloadHelper
8 | from bot.helper.mirror_utils.download_utils.yt_dlp_download_helper import YoutubeDLHelper
9 |
10 |
11 | def start_dl_from_queued(uid):
12 | dl = queued_dl[uid]
13 | if dl[0] == 'gd':
14 | Thread(target=add_gd_download, args=(dl[1], dl[2], dl[3], dl[4], True)).start()
15 | elif dl[0] == 'mega':
16 | Thread(target=add_mega_download, args=(dl[1], dl[2], dl[3], dl[4], True)).start()
17 | elif dl[0] == 'yt':
18 | ydl = YoutubeDLHelper(dl[7])
19 | Thread(target=ydl.add_download, args=(dl[1], dl[2], dl[3], dl[4], dl[5], dl[6], True)).start()
20 | elif dl[0] == 'tg':
21 | tg = TelegramDownloadHelper(dl[4])
22 | Thread(target=tg.add_download, args=(dl[1], dl[2], dl[3], True)).start()
23 | del queued_dl[uid]
24 |
25 | def start_up_from_queued(uid):
26 | up = queued_up[uid]
27 | up[0].queuedUp = False
28 | del queued_up[uid]
29 |
30 | def start_from_queued():
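# QUEUE_ALL caps the total number of concurrent tasks, while QUEUE_UPLOAD/QUEUE_DOWNLOAD cap each direction; when QUEUE_ALL is set, free slots are given to queued uploads first, then queued downloads.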
31 | if all_limit := config_dict['QUEUE_ALL']:
32 | dl_limit = config_dict['QUEUE_DOWNLOAD']
33 | up_limit = config_dict['QUEUE_UPLOAD']
34 | with queue_dict_lock:
35 | dl = len(non_queued_dl)
36 | up = len(non_queued_up)
37 | all_ = dl + up
38 | if all_ < all_limit:
39 | f_tasks = all_limit - all_
40 | if queued_up and (not up_limit or up < up_limit):
41 | for index, uid in enumerate(list(queued_up.keys()), start=1):
42 | f_tasks = all_limit - all_
43 | start_up_from_queued(uid)
44 | f_tasks -= 1
45 | if f_tasks == 0 or (up_limit and index >= up_limit - up):
46 | break
47 | if queued_dl and (not dl_limit or dl < dl_limit) and f_tasks != 0:
48 | for index, uid in enumerate(list(queued_dl.keys()), start=1):
49 | start_dl_from_queued(uid)
50 | if (dl_limit and index >= dl_limit - dl) or index == f_tasks:
51 | break
52 | return
53 |
54 | if up_limit := config_dict['QUEUE_UPLOAD']:
55 | with queue_dict_lock:
56 | up = len(non_queued_up)
57 | if queued_up and up < up_limit:
58 | f_tasks = up_limit - up
59 | for index, uid in enumerate(list(queued_up.keys()), start=1):
60 | start_up_from_queued(uid)
61 | if index == f_tasks:
62 | break
63 | else:
64 | with queue_dict_lock:
65 | if queued_up:
66 | for uid in list(queued_up.keys()):
67 | start_up_from_queued(uid)
68 |
69 | if dl_limit := config_dict['QUEUE_DOWNLOAD']:
70 | with queue_dict_lock:
71 | dl = len(non_queued_dl)
72 | if queued_dl and dl < dl_limit:
73 | f_tasks = dl_limit - dl
74 | for index, uid in enumerate(list(queued_dl.keys()), start=1):
75 | start_dl_from_queued(uid)
76 | if index == f_tasks:
77 | break
78 | else:
79 | with queue_dict_lock:
80 | if queued_dl:
81 | for uid in list(queued_dl.keys()):
82 | start_dl_from_queued(uid)
83 |
--------------------------------------------------------------------------------
/bot/modules/delete.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 |
3 | from telegram.ext import CommandHandler
4 |
5 | from bot import LOGGER, app, dispatcher
6 | from bot.helper.ext_utils.bot_utils import is_gdrive_link
7 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
8 | from bot.helper.telegram_helper.bot_commands import BotCommands
9 | from bot.helper.telegram_helper.filters import CustomFilters
10 | from bot.helper.telegram_helper.message_utils import (auto_delete_message,
11 | editMessage, sendMessage)
12 |
13 |
14 | def deletefile(update, context):
15 | reply_to = update.message.reply_to_message
16 | if len(context.args) == 1:
17 | link = context.args[0].strip()
18 | elif reply_to:
19 | link = reply_to.text.split(maxsplit=1)[0].strip()
20 | else:
21 | link = ''
22 | if is_gdrive_link(link):
23 | LOGGER.info(link)
24 | drive = GoogleDriveHelper()
25 | msg = drive.deletefile(link)
26 | else:
27 | msg = 'Send a Gdrive link along with the command or reply to the link with the command'
28 | reply_message = sendMessage(msg, context.bot, update.message)
29 | Thread(target=auto_delete_message, args=(context.bot, update.message, reply_message)).start()
30 |
31 | delete = set()
32 |
33 | def delete_leech(update, context):
34 | reply_to = update.message.reply_to_message
35 | if len(context.args) == 1:
36 | link = context.args[0].strip()
37 | elif reply_to:
38 | link = reply_to.text.split(maxsplit=1)[0].strip()
39 | else:
40 | link = ''
41 | if not link.startswith('https://t.me/'):
42 | msg = 'Send a Telegram message link along with the command or reply to the link with the command'
43 | return sendMessage(msg, context.bot, update.message)
44 | if len(delete) != 0:
45 | msg = 'A delete operation is already in progress'
46 | return sendMessage(msg, context.bot, update.message)
47 | msg = f'Okay, deleting all replies to {link}'
48 | link = link.split('/')
49 | message_id = int(link[-1])
50 | chat_id = link[-2]
51 | if chat_id.isdigit():
52 | chat_id = f'-100{chat_id}'
53 | chat_id = int(chat_id)
54 | reply_message = sendMessage(msg, context.bot, update.message)
55 | Thread(target=deleting, args=(chat_id, message_id, reply_message)).start()
56 |
57 |
58 | def deleting(chat_id, message_id, message):
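# Walks the reply chain starting from message_id, collecting every reply (and whole media groups) along the way, then bulk-deletes the collected messages.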
59 | delete.add(message_id)
60 | try:
61 | msg = app.get_messages(chat_id, message_id, replies=-1)
62 | replies_ids = []
63 | while msg:
64 | replies_ids.append(msg.id)
65 | if msg.media_group_id:
66 | media_group = msg.get_media_group()
67 | media_ids = []
68 | for media in media_group:
69 | media_ids.append(media.id)
70 | msg = media.reply_to_message
71 | if not msg:
72 | msg = app.get_messages(chat_id, media.reply_to_message_id, replies=-1)
73 | replies_ids.extend(media_ids)
74 | else:
75 | msg = msg.reply_to_message
76 | replies_ids = list(set(replies_ids))
77 | deleted = app.delete_messages(chat_id, replies_ids)
78 | editMessage(f'{deleted} messages deleted', message)
79 | except Exception as e:
80 | editMessage(str(e), message)
81 | delete.remove(message_id)
82 |
83 | delete_handler = CommandHandler(BotCommands.DeleteCommand, deletefile,
84 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user)
85 | dispatcher.add_handler(delete_handler)
86 |
87 | leech_delete_handler = CommandHandler(f'leech{BotCommands.DeleteCommand}', delete_leech,
88 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user)
89 | dispatcher.add_handler(leech_delete_handler)
--------------------------------------------------------------------------------
/bot/modules/authorize.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CommandHandler
2 |
3 | from bot import DATABASE_URL, dispatcher, user_data
4 | from bot.helper.ext_utils.bot_utils import update_user_ldata
5 | from bot.helper.ext_utils.db_handler import DbManger
6 | from bot.helper.telegram_helper.bot_commands import BotCommands
7 | from bot.helper.telegram_helper.filters import CustomFilters
8 | from bot.helper.telegram_helper.message_utils import sendMessage
9 |
10 |
11 | def authorize(update, context):
12 | reply_message = update.message.reply_to_message
13 | if len(context.args) == 1:
14 | id_ = int(context.args[0])
15 | elif reply_message:
16 | id_ = reply_message.from_user.id
17 | else:
18 | id_ = update.effective_chat.id
19 | if id_ in user_data and user_data[id_].get('is_auth'):
20 | msg = 'Already Authorized!'
21 | else:
22 | update_user_ldata(id_, 'is_auth', True)
23 | if DATABASE_URL:
24 | DbManger().update_user_data(id_)
25 | msg = 'Authorized'
26 | sendMessage(msg, context.bot, update.message)
27 |
28 | def unauthorize(update, context):
29 | reply_message = update.message.reply_to_message
30 | if len(context.args) == 1:
31 | id_ = int(context.args[0])
32 | elif reply_message:
33 | id_ = reply_message.from_user.id
34 | else:
35 | id_ = update.effective_chat.id
36 | if id_ not in user_data or user_data[id_].get('is_auth'):
37 | update_user_ldata(id_, 'is_auth', False)
38 | if DATABASE_URL:
39 | DbManger().update_user_data(id_)
40 | msg = 'Unauthorized'
41 | else:
42 | msg = 'Already Unauthorized!'
43 | sendMessage(msg, context.bot, update.message)
44 |
45 | def addSudo(update, context):
46 | id_ = ""
47 | reply_message = update.message.reply_to_message
48 | if len(context.args) == 1:
49 | id_ = int(context.args[0])
50 | elif reply_message:
51 | id_ = reply_message.from_user.id
52 | if id_:
53 | if id_ in user_data and user_data[id_].get('is_sudo'):
54 | msg = 'Already Sudo!'
55 | else:
56 | update_user_ldata(id_, 'is_sudo', True)
57 | if DATABASE_URL:
58 | DbManger().update_user_data(id_)
59 | msg = 'Promoted as Sudo'
60 | else:
61 | msg = "Give ID or Reply To message of whom you want to Promote."
62 | sendMessage(msg, context.bot, update.message)
63 |
64 | def removeSudo(update, context):
65 | id_ = ""
66 | reply_message = update.message.reply_to_message
67 | if len(context.args) == 1:
68 | id_ = int(context.args[0])
69 | elif reply_message:
70 | id_ = reply_message.from_user.id
71 | if id_ and (id_ not in user_data or user_data[id_].get('is_sudo')):
72 | update_user_ldata(id_, 'is_sudo', False)
73 | if DATABASE_URL:
74 | DbManger().update_user_data(id_)
75 | msg = 'Demoted'
76 | else:
77 | msg = "Give ID or Reply To message of whom you want to remove from Sudo"
78 | sendMessage(msg, context.bot, update.message)
79 |
80 |
81 | authorize_handler = CommandHandler(BotCommands.AuthorizeCommand, authorize,
82 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user)
83 | unauthorize_handler = CommandHandler(BotCommands.UnAuthorizeCommand, unauthorize,
84 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user)
85 | addsudo_handler = CommandHandler(BotCommands.AddSudoCommand, addSudo,
86 | filters=CustomFilters.owner_filter)
87 | removesudo_handler = CommandHandler(BotCommands.RmSudoCommand, removeSudo,
88 | filters=CustomFilters.owner_filter)
89 |
90 | dispatcher.add_handler(authorize_handler)
91 | dispatcher.add_handler(unauthorize_handler)
92 | dispatcher.add_handler(addsudo_handler)
93 | dispatcher.add_handler(removesudo_handler)
94 |
--------------------------------------------------------------------------------
/bot/modules/eval.py:
--------------------------------------------------------------------------------
1 | from contextlib import redirect_stdout
2 | from io import BytesIO, StringIO
3 | from os import chdir, getcwd, path
4 | from textwrap import indent
5 | from traceback import format_exc
6 |
7 | from telegram.ext import CommandHandler
8 |
9 | from bot import LOGGER, dispatcher
10 | from bot.helper.telegram_helper.bot_commands import BotCommands
11 | from bot.helper.telegram_helper.filters import CustomFilters
12 |
13 | namespaces = {}
14 |
15 | def namespace_of(chat, update, bot):
16 | if chat not in namespaces:
17 | namespaces[chat] = {
18 | '__builtins__': globals()['__builtins__'],
19 | 'bot': bot,
20 | 'effective_message': update.effective_message,
21 | 'effective_user': update.effective_user,
22 | 'effective_chat': update.effective_chat,
23 | 'update': update
24 | }
25 |
26 | return namespaces[chat]
27 |
28 | def log_input(update):
29 | user = update.effective_user.id
30 | chat = update.effective_chat.id
31 | LOGGER.info(
32 | f"IN: {update.effective_message.text} (user={user}, chat={chat})")
33 |
34 | def send(msg, bot, update):
35 | if len(str(msg)) > 2000:
36 | with BytesIO(str(msg).encode()) as out_file:
37 | out_file.name = "output.txt"
38 | bot.send_document(
39 | chat_id=update.effective_chat.id, document=out_file)
40 | else:
41 | LOGGER.info(f"OUT: '{msg}'")
42 | bot.send_message(
43 | chat_id=update.effective_chat.id,
44 | text=f"`{msg}`",
45 | parse_mode='Markdown')
46 |
47 | def evaluate(update, context):
48 | bot = context.bot
49 | send(do(eval, bot, update), bot, update)
50 |
51 | def execute(update, context):
52 | bot = context.bot
53 | send(do(exec, bot, update), bot, update)
54 |
55 | def cleanup_code(code):
56 | if code.startswith('```') and code.endswith('```'):
57 | return '\n'.join(code.split('\n')[1:-1])
58 | return code.strip('` \n')
59 |
60 | def do(func, bot, update):
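# Wraps the user-supplied code in a temporary function, runs it with stdout captured, and returns the printed output and/or the return value (or the traceback on error).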
61 | log_input(update)
62 | content = update.message.text.split(maxsplit=1)[-1]
63 | body = cleanup_code(content)
64 | env = namespace_of(update.message.chat_id, update, bot)
65 |
66 | chdir(getcwd())
67 | with open(
68 | path.join(getcwd(),
69 | 'bot/modules/temp.txt'),
70 | 'w') as temp:
71 | temp.write(body)
72 |
73 | stdout = StringIO()
74 |
75 | to_compile = f'def func():\n{indent(body, " ")}'
76 |
77 | try:
78 | exec(to_compile, env)
79 | except Exception as e:
80 | return f'{e.__class__.__name__}: {e}'
81 |
82 | func = env['func']
83 |
84 | try:
85 | with redirect_stdout(stdout):
86 | func_return = func()
87 | except Exception as e:
88 | value = stdout.getvalue()
89 | return f'{value}{format_exc()}'
90 | else:
91 | value = stdout.getvalue()
92 | result = None
93 | if func_return is None:
94 | if value:
95 | result = f'{value}'
96 | else:
97 | try:
98 | result = f'{repr(eval(body, env))}'
99 | except:
100 | pass
101 | else:
102 | result = f'{value}{func_return}'
103 | if result:
104 | return result
105 |
106 | def clear(update, context):
107 | bot = context.bot
108 | log_input(update)
109 | global namespaces
110 | if update.message.chat_id in namespaces:
111 | del namespaces[update.message.chat_id]
112 | send("Cleared locals.", bot, update)
113 |
114 | eval_handler = CommandHandler(BotCommands.EvalCommand, evaluate, filters=CustomFilters.owner_filter)
115 | exec_handler = CommandHandler(BotCommands.ExecCommand, execute, filters=CustomFilters.owner_filter)
116 | clear_handler = CommandHandler(BotCommands.ClearLocalsCommand, clear, filters=CustomFilters.owner_filter)
117 |
118 | dispatcher.add_handler(eval_handler)
119 | dispatcher.add_handler(exec_handler)
120 | dispatcher.add_handler(clear_handler)
121 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/html_helper.py:
--------------------------------------------------------------------------------
1 | hmtl_content = """
[HTML template body stripped from this dump; remainder of html_helper.py truncated]
--------------------------------------------------------------------------------
/bot/modules/category_select.py:
--------------------------------------------------------------------------------
[lines 1-53 missing from this dump]
54 | sendMessage(f"GID: {gid} Not Found.", context.bot, message)
55 | return
56 | if reply_to := message.reply_to_message:
57 | with download_dict_lock:
58 | dl = download_dict.get(reply_to.message_id, None)
59 | if not dl:
60 | sendMessage("This is not an active task!", context.bot, message)
61 | return
62 | if not dl:
63 | msg = """
64 | Reply to an active /{cmd} message which was used to start the download, or add the gid along with {cmd}.
65 | This command is mainly for changing the category in case you decide to change it for an already added download.
66 | But you can always use /{mir} to select a category before the download starts.
67 |
68 | Upload Custom Drive
69 | /{cmd} id:drive_folder_link or drive_id index:https://anything.in/0: gid or by replying to active download
70 | drive_id must be a folder id and index must be a url, else they will not be accepted
71 | """.format_map({'cmd': BotCommands.CategorySelect,'mir': BotCommands.MirrorCommand[0]})
72 | sendMessage(msg, context.bot, message)
73 | return
74 | if not CustomFilters.owner_query(user_id) and dl.message.from_user.id != user_id:
75 | sendMessage("This task is not for you!", context.bot, message)
76 | return
77 | if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_QUEUEDL]:
78 | sendMessage(f'Task should be on {MirrorStatus.STATUS_DOWNLOADING} or {MirrorStatus.STATUS_PAUSED} or {MirrorStatus.STATUS_QUEUEDL}', context.bot, message)
79 | return
80 | listener = dl.listener() if dl and hasattr(dl, 'listener') else None
81 | if listener and not listener.isLeech:
82 | if not index_link and not drive_id and categories:
83 | drive_id, index_link = open_category_btns(message)
84 | if not index_link and not drive_id:
85 | return sendMessage("Time out", context.bot, message)
86 | msg = 'Task has been Updated Successfully!'
87 | if drive_id:
88 | if not (folder_name:= GoogleDriveHelper().getFolderData(drive_id)):
89 | return sendMessage("Google Drive id validation failed!!", context.bot, message)
90 | if listener.drive_id and listener.drive_id == drive_id:
91 | msg += f'\n\nFolder name : {folder_name} Already selected'
92 | else:
93 | msg += f'\n\nFolder name : {folder_name}'
94 | listener.drive_id = drive_id
95 | if index_link:
96 | listener.index_link = index_link
97 | msg += f'\n\nIndex Link : {index_link}'
98 | return sendMessage(msg, context.bot, message)
99 | else:
100 | sendMessage("Can not change Category for this task!", context.bot, message)
101 |
102 | @ratelimiter
103 | def confirm_category(update, context):
104 | query = update.callback_query
105 | user_id = query.from_user.id
106 | data = query.data
107 | data = data.split(maxsplit=3)
108 | msg_id = int(data[2])
109 | if msg_id not in btn_listener:
110 | return editMessage('Old Task', query.message)
111 | if user_id != int(data[1]) and not CustomFilters.owner_query(user_id):
112 | return query.answer(text="This task is not for you!", show_alert=True)
113 | query.answer()
114 | btn_listener[msg_id][1] = categories[data[3]].get('drive_id')
115 | btn_listener[msg_id][2] = categories[data[3]].get('index_link')
116 | btn_listener[msg_id][0] = False
117 |
118 |
119 | confirm_category_handler = CallbackQueryHandler(confirm_category, pattern="scat")
120 | change_category_handler = CommandHandler(BotCommands.CategorySelect, change_category,
121 | filters=(CustomFilters.authorized_chat | CustomFilters.authorized_user))
122 | dispatcher.add_handler(confirm_category_handler)
123 | dispatcher.add_handler(change_category_handler)
124 |
--------------------------------------------------------------------------------
/bot/modules/bt_select.py:
--------------------------------------------------------------------------------
1 | from os import path, remove
2 |
3 | from telegram.ext import CallbackQueryHandler, CommandHandler
4 |
5 | from bot import LOGGER, aria2, dispatcher, download_dict, download_dict_lock
6 | from bot.helper.ext_utils.bot_utils import (MirrorStatus, bt_selection_buttons,
7 | getDownloadByGid)
8 | from bot.helper.ext_utils.rate_limiter import ratelimiter
9 | from bot.helper.telegram_helper.bot_commands import BotCommands
10 | from bot.helper.telegram_helper.filters import CustomFilters
11 | from bot.helper.telegram_helper.message_utils import (anno_checker,
12 | sendMessage,
13 | sendStatusMessage)
14 |
15 |
16 | @ratelimiter
17 | def select(update, context):
18 | message = update.message
19 | if message.from_user.id in [1087968824, 136817688]:
20 | message.from_user.id = anno_checker(message)
21 | if not message.from_user.id:
22 | return
23 | user_id = message.from_user.id
24 | if len(context.args) == 1:
25 | gid = context.args[0]
26 | dl = getDownloadByGid(gid)
27 | if not dl:
28 | sendMessage(f"GID: {gid} Not Found.", context.bot, message)
29 | return
30 | elif message.reply_to_message:
31 | mirror_message = message.reply_to_message
32 | with download_dict_lock:
33 | if mirror_message.message_id in download_dict:
34 | dl = download_dict[mirror_message.message_id]
35 | else:
36 | dl = None
37 | if not dl:
38 | sendMessage("This is not an active task!", context.bot, message)
39 | return
40 | elif len(context.args) == 0:
41 | msg = "Reply to an active /{cmd} which was used to start the qb-download or add gid along with {cmd}\n\n" \
42 | "This command mainly for selection incase you decided to select files from already added torrent. " \
43 | "But you can always use /{mir} with arg `s` to select files before download start."
44 | sendMessage(msg.format_map({'cmd': BotCommands.BtSelectCommand,'mir': BotCommands.MirrorCommand[0]}), context.bot, message)
45 | return
46 |
47 | if not CustomFilters.owner_query(user_id) and dl.message.from_user.id != user_id:
48 | sendMessage("This task is not for you!", context.bot, message)
49 | return
50 | if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_QUEUEDL]:
51 | sendMessage('Task should be in download, pause (in case the message was deleted by mistake) or queued state (in case you used a torrent file)!', context.bot, message)
52 | return
53 | if dl.name().startswith('[METADATA]'):
54 | sendMessage('Try after downloading metadata finished!', context.bot, message)
55 | return
56 |
57 | try:
58 | listener = dl.listener()
59 | if listener.isQbit:
60 | id_ = dl.hash()
61 | client = dl.client()
62 | client.torrents_pause(torrent_hashes=id_)
63 | else:
64 | id_ = dl.gid()
65 | try:
66 | aria2.client.force_pause(id_)
67 | except Exception as e:
68 | LOGGER.error(f"{e} Error in pause, this mostly happens after abuse aria2")
69 | listener.select = True
70 | except:
71 | sendMessage("This is not a bittorrent task!", context.bot, message)
72 | return
73 | SBUTTONS = bt_selection_buttons(id_, False)
74 | msg = f"Name: {dl.name()}\n\nYour download paused. Choose files then press Done Selecting button to resume downloading." \
75 | "\nYour download will not start automatically"
76 | sendMessage(msg, context.bot, message, SBUTTONS)
77 |
78 | @ratelimiter
79 | def get_confirm(update, context):
80 | query = update.callback_query
81 | user_id = query.from_user.id
82 | data = query.data
83 | data = data.split()
84 | dl = getDownloadByGid(data[2])
85 | if not dl:
86 | query.answer(text="This task has been cancelled!", show_alert=True)
87 | query.message.delete()
88 | return
89 | if hasattr(dl, 'listener'):
90 | listener = dl.listener()
91 | else:
92 | query.answer(text="Not in download state anymore! Keep this message to resume the seed if seed enabled!", show_alert=True)
93 | return
94 | if user_id != listener.message.from_user.id and not CustomFilters.owner_query(user_id):
95 | query.answer(text="This task is not for you!", show_alert=True)
96 | elif data[1] == "pin":
97 | query.answer(text=data[3], show_alert=True)
98 | elif data[1] == "done":
99 | query.answer()
100 | id_ = data[3]
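# An id_ longer than 20 characters is a qBittorrent info-hash; shorter ids are aria2 GIDs.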
101 | if len(id_) > 20:
102 | client = dl.client()
103 | tor_info = client.torrents_info(torrent_hash=id_)[0]
104 | path_ = tor_info.content_path.rsplit('/', 1)[0]
105 | res = client.torrents_files(torrent_hash=id_)
106 | for f in res:
107 | if f.priority == 0:
108 | f_paths = [f"{path_}/{f.name}", f"{path_}/{f.name}.!qB"]
109 | for f_path in f_paths:
110 | if path.exists(f_path):
111 | try:
112 | remove(f_path)
113 | except:
114 | pass
115 | client.torrents_resume(torrent_hashes=id_)
116 | else:
117 | res = aria2.client.get_files(id_)
118 | for f in res:
119 | if f['selected'] == 'false' and path.exists(f['path']):
120 | try:
121 | remove(f['path'])
122 | except:
123 | pass
124 | try:
125 | aria2.client.unpause(id_)
126 | except Exception as e:
127 | LOGGER.error(f"{e} Error in resume, this mostly happens after abuse aria2. Try to use select cmd again!")
128 | sendStatusMessage(listener.message, listener.bot)
129 | query.message.delete()
130 | elif data[1] == "rm":
131 | query.answer()
132 | dl.download().cancel_download()
133 | query.message.delete()
134 |
135 |
136 | select_handler = CommandHandler(BotCommands.BtSelectCommand, select,
137 | filters=(CustomFilters.authorized_chat | CustomFilters.authorized_user))
138 | bts_handler = CallbackQueryHandler(get_confirm, pattern="btsel")
139 |
140 | dispatcher.add_handler(select_handler)
141 | dispatcher.add_handler(bts_handler)
142 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/telegram_downloader.py:
--------------------------------------------------------------------------------
1 | from logging import WARNING, getLogger
2 | from threading import Lock, RLock
3 | from time import time
4 |
5 | from bot import (LOGGER, app, config_dict, download_dict, download_dict_lock,
6 | non_queued_dl, non_queued_up, queue_dict_lock, queued_dl)
7 | from bot.helper.ext_utils.bot_utils import get_readable_file_size
8 | from bot.helper.ext_utils.fs_utils import check_storage_threshold
9 | from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus
10 | from bot.helper.mirror_utils.status_utils.telegram_download_status import TelegramDownloadStatus
11 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
12 | from bot.helper.telegram_helper.message_utils import (sendMessage, delete_links,
13 | sendStatusMessage)
14 |
15 | global_lock = Lock()
16 | GLOBAL_GID = set()
17 | getLogger("pyrogram").setLevel(WARNING)
18 |
19 |
20 | class TelegramDownloadHelper:
21 |
22 | def __init__(self, listener):
23 | self.name = ""
24 | self.size = 0
25 | self.progress = 0
26 | self.downloaded_bytes = 0
27 | self.__start_time = time()
28 | self.__listener = listener
29 | self.__id = ""
30 | self.__is_cancelled = False
31 | self.__resource_lock = RLock()
32 |
33 | @property
34 | def download_speed(self):
35 | with self.__resource_lock:
36 | return self.downloaded_bytes / (time() - self.__start_time)
37 |
38 | def __onDownloadStart(self, name, size, file_id, from_queue):
39 | with global_lock:
40 | GLOBAL_GID.add(file_id)
41 | with self.__resource_lock:
42 | self.name = name
43 | self.size = size
44 | self.__id = file_id
45 | with download_dict_lock:
46 | download_dict[self.__listener.uid] = TelegramDownloadStatus(self, self.__listener, self.__id)
47 | with queue_dict_lock:
48 | non_queued_dl.add(self.__listener.uid)
49 | if not from_queue:
50 | self.__listener.onDownloadStart()
51 | sendStatusMessage(self.__listener.message, self.__listener.bot)
52 | LOGGER.info(f'Download from Telegram: {name}')
53 | else:
54 | LOGGER.info(f'Start Queued Download from Telegram: {name}')
55 |
56 | def __onDownloadProgress(self, current, total):
57 | if self.__is_cancelled:
58 | app.stop_transmission()
59 | return
60 | with self.__resource_lock:
61 | self.downloaded_bytes = current
62 | try:
63 | self.progress = current / self.size * 100
64 | except ZeroDivisionError:
65 | pass
66 |
67 | def __onDownloadError(self, error):
68 | with global_lock:
69 | try:
70 | GLOBAL_GID.remove(self.__id)
71 | except:
72 | pass
73 | self.__listener.onDownloadError(error)
74 |
75 | def __onDownloadComplete(self):
76 | with global_lock:
77 | GLOBAL_GID.remove(self.__id)
78 | self.__listener.onDownloadComplete()
79 |
80 | def __download(self, message, path):
81 | try:
82 | download = message.download(file_name=path, progress=self.__onDownloadProgress)
83 | if self.__is_cancelled:
84 | self.__onDownloadError('Cancelled by user!')
85 | return
86 | except Exception as e:
87 | LOGGER.error(str(e))
88 | return self.__onDownloadError(str(e))
89 | if download:
90 | self.__onDownloadComplete()
91 | elif not self.__is_cancelled:
92 | self.__onDownloadError('Internal error occurred')
93 |
94 | def add_download(self, message, path, filename, from_queue=False):
95 | _dmsg = app.get_messages(message.chat.id, reply_to_message_ids=message.message_id)
96 | media = _dmsg.document or _dmsg.video or _dmsg.audio or None
97 | if media:
98 | with global_lock:
99 | # For avoiding locking the thread lock for long time unnecessarily
100 | download = media.file_unique_id not in GLOBAL_GID
101 | if filename == "":
102 | name = media.file_name
103 | else:
104 | name = filename
105 | path = path + name
106 |
107 | if from_queue or download:
108 | size = media.file_size
109 | gid = media.file_unique_id
110 | if config_dict['STOP_DUPLICATE'] and not self.__listener.isLeech and not self.__listener.select:
111 | LOGGER.info('Checking File/Folder if already in Drive...')
112 | smsg, button = GoogleDriveHelper().drive_list(name, True, True)
113 | if smsg:
114 | delete_links(self.__listener.bot, self.__listener.message)
115 | msg = "File/Folder is already available in Drive.\nHere are the search results:"
116 | return sendMessage(msg, self.__listener.bot, self.__listener.message, button)
117 | if STORAGE_THRESHOLD:= config_dict['STORAGE_THRESHOLD']:
118 | limit = STORAGE_THRESHOLD * 1024**3
119 | arch = any([self.__listener.isZip, self.__listener.extract])
120 | acpt = check_storage_threshold(size, limit, arch)
121 | if not acpt:
122 | msg = f'You must leave {get_readable_file_size(limit)} free storage.'
123 | msg += f'\nYour File/Folder size is {get_readable_file_size(size)}'
124 | return sendMessage(msg, self.__listener.bot, self.__listener.message)
125 | all_limit = config_dict['QUEUE_ALL']
126 | dl_limit = config_dict['QUEUE_DOWNLOAD']
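# If concurrency limits are configured and already reached, park this task in queued_dl and show it as queued instead of starting the download.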
127 | if all_limit or dl_limit:
128 | added_to_queue = False
129 | with queue_dict_lock:
130 | dl = len(non_queued_dl)
131 | up = len(non_queued_up)
132 | if (all_limit and dl + up >= all_limit and (not dl_limit or dl >= dl_limit)) or (dl_limit and dl >= dl_limit):
133 | added_to_queue = True
134 | queued_dl[self.__listener.uid] = ['tg', message, path, filename, self.__listener]
135 | if added_to_queue:
136 | LOGGER.info(f"Added to Queue/Download: {name}")
137 | with download_dict_lock:
138 | download_dict[self.__listener.uid] = QueueStatus(name, size, gid, self.__listener, 'Dl')
139 | self.__listener.onDownloadStart()
140 | sendStatusMessage(self.__listener.message, self.__listener.bot)
141 | with global_lock:
142 | GLOBAL_GID.add(gid)
143 | return
144 | self.__onDownloadStart(name, size, gid, from_queue)
145 | self.__download(_dmsg, path)
146 | else:
147 | self.__onDownloadError('File already being downloaded!')
148 | else:
149 | self.__onDownloadError('No document in the replied message')
150 |
151 | def cancel_download(self):
152 | LOGGER.info(f'Cancelling download on user request: {self.__id}')
153 | self.__is_cancelled = True
154 |
--------------------------------------------------------------------------------
/bot/modules/cancel_mirror.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 | from time import sleep
3 |
4 | from telegram.ext import CallbackQueryHandler, CommandHandler
5 |
6 | from bot import dispatcher, download_dict, download_dict_lock
7 | from bot.helper.ext_utils.bot_utils import (MirrorStatus, getAllDownload,
8 | getDownloadByGid)
9 | from bot.helper.ext_utils.rate_limiter import ratelimiter
10 | from bot.helper.telegram_helper.bot_commands import BotCommands
11 | from bot.helper.telegram_helper.button_build import ButtonMaker
12 | from bot.helper.telegram_helper.filters import CustomFilters
13 | from bot.helper.telegram_helper.message_utils import (anno_checker,
14 | editMessage, sendMessage)
15 |
16 | @ratelimiter
17 | def cancel_mirror(update, context):
18 | message = update.message
19 | if message.from_user.id in [1087968824, 136817688]:
20 | message.from_user.id = anno_checker(message)
21 | if not message.from_user.id:
22 | return
23 | user_id = message.from_user.id
24 | if len(context.args) == 1:
25 | gid = context.args[0]
26 | dl = getDownloadByGid(gid)
27 | if not dl:
28 | sendMessage(f"GID: {gid} Not Found.", context.bot, message)
29 | return
30 | elif mirror_message:= message.reply_to_message:
31 | with download_dict_lock:
32 | dl = download_dict.get(mirror_message.message_id)
33 | if not dl:
34 | return sendMessage("This is not an active task!", context.bot, message)
35 | elif len(context.args) == 0:
36 |         msg = f"Reply to the active command message that was used to start the download," \
37 |               f" or send /{BotCommands.CancelMirror} GID to cancel it!"
38 | return sendMessage(msg, context.bot, message)
39 |
40 | if not CustomFilters.owner_query(user_id) and dl.message.from_user.id != user_id:
41 | sendMessage("This task is not for you!", context.bot, message)
42 | return
43 |
44 | if dl.status() == MirrorStatus.STATUS_CONVERTING:
45 | sendMessage("Converting... Can't cancel this task!", context.bot, message)
46 | return
47 |
48 | dl.download().cancel_download()
49 |
50 | cancel_listener = {}
51 |
52 | def cancel_all(status, info):
53 | user_id = info[0]
54 | msg = info[1]
55 | umsg = info[2]
56 | editMessage(f"Canceling tasks for {user_id or 'All'} in {status}", msg)
57 | if dls:= getAllDownload(status, user_id, False):
58 | canceled = 0
59 | cant_cancel = 0
60 | for dl in dls:
61 | try:
62 | if dl.status() == MirrorStatus.STATUS_CONVERTING:
63 | cant_cancel += 1
64 | continue
65 | dl.download().cancel_download()
66 | canceled += 1
67 | sleep(1)
68 | except:
69 | cant_cancel += 1
70 | continue
71 |             editMessage(f"Canceling tasks for {user_id or 'All'} in {status}: canceled {canceled}/{len(dls)}", msg)
72 | sleep(1)
73 | if umsg.from_user.username:
74 | tag = f"@{umsg.from_user.username}"
75 | else:
76 | tag = umsg.from_user.mention_html()
77 |         _msg = "Canceling tasks done!\n"
78 |         _msg += f"Success: {canceled}\n"
79 |         _msg += f"Failed: {cant_cancel}\n"
80 | _msg += f"Total: {len(dls)}\n"
81 | _msg += f"#cancel_all : {tag}"
82 | editMessage(_msg, msg)
83 | else:
84 |         editMessage(f"{user_id} doesn't have any active task!", msg)
85 |
86 | @ratelimiter
87 | def cancell_all_buttons(update, context):
88 | message = update.message
89 | with download_dict_lock:
90 | count = len(download_dict)
91 | if count == 0:
92 | return sendMessage("No active tasks!", context.bot, message)
93 | if message.from_user.id in [1087968824, 136817688]:
94 | message.from_user.id = anno_checker(message)
95 | if not message.from_user.id:
96 | return
97 | user_id = message.from_user.id
98 | if CustomFilters.owner_query(user_id):
99 | if reply_to:= message.reply_to_message:
100 | user_id = reply_to.from_user.id
101 | elif context.args and context.args[0].lower() == 'all':
102 | user_id = None
103 | elif context.args and context.args[0].isdigit():
104 | try:
105 | user_id = int(context.args[0])
106 | except:
107 |                 return sendMessage("Invalid argument! Send a user ID or reply to a user's message.", context.bot, message)
108 | if user_id and not getAllDownload('all', user_id):
109 |         return sendMessage(f"{user_id} doesn't have any active task!", context.bot, message)
110 | msg_id = message.message_id
111 | buttons = ButtonMaker()
112 | buttons.sbutton("Downloading", f"cnall {MirrorStatus.STATUS_DOWNLOADING} {msg_id}")
113 | buttons.sbutton("Uploading", f"cnall {MirrorStatus.STATUS_UPLOADING} {msg_id}")
114 | buttons.sbutton("Seeding", f"cnall {MirrorStatus.STATUS_SEEDING} {msg_id}")
115 | buttons.sbutton("Cloning", f"cnall {MirrorStatus.STATUS_CLONING} {msg_id}")
116 | buttons.sbutton("Extracting", f"cnall {MirrorStatus.STATUS_EXTRACTING} {msg_id}")
117 | buttons.sbutton("Archiving", f"cnall {MirrorStatus.STATUS_ARCHIVING} {msg_id}")
118 |     buttons.sbutton("QueuedDl", f"cnall {MirrorStatus.STATUS_QUEUEDL} {msg_id}")
119 |     buttons.sbutton("QueuedUp", f"cnall {MirrorStatus.STATUS_QUEUEUP} {msg_id}")
120 | buttons.sbutton("Splitting", f"cnall {MirrorStatus.STATUS_SPLITTING} {msg_id}")
121 | buttons.sbutton("All", f"cnall all {msg_id}")
122 | buttons.sbutton("Close", f"cnall close {msg_id}")
123 |     bmgs = sendMessage('Choose tasks to cancel. You have 30 seconds only.', context.bot, message, buttons.build_menu(2))
124 | cancel_listener[msg_id] = [user_id, bmgs, message]
125 | Thread(target=_auto_cancel, args=(bmgs, msg_id)).start()
126 |
127 | def cancel_all_update(update, context):
128 | query = update.callback_query
129 | user_id = query.from_user.id
130 | data = query.data
131 | data = data.split()
132 | message = query.message
133 | msg_id = int(data[2])
134 | try:
135 | info = cancel_listener[msg_id]
136 | except:
137 | return editMessage("This is an old message", message)
138 | if info[0] and info[2].from_user.id != user_id:
139 | return query.answer(text="You are not allowed to do this!", show_alert=True)
140 | elif data[1] == 'close':
141 | query.answer()
142 | del cancel_listener[msg_id]
143 | return editMessage("Cancellation Listener Closed.", message)
144 | if info[0] and not getAllDownload(data[1], info[0]):
145 | return query.answer(text=f"You don't have any active task in {data[1]}", show_alert=True)
146 | query.answer()
147 | del cancel_listener[msg_id]
148 | Thread(target=cancel_all, args=(data[1], info)).start()
149 |
150 | def _auto_cancel(msg, msg_id):
151 | sleep(30)
152 | try:
153 | if cancel_listener.get(msg_id):
154 | del cancel_listener[msg_id]
155 | editMessage('Timed out!', msg)
156 | except:
157 | pass
158 |
159 |
160 | cancel_mirror_handler = CommandHandler(BotCommands.CancelMirror, cancel_mirror,
161 | filters=(CustomFilters.authorized_chat | CustomFilters.authorized_user))
162 |
163 | cancel_all_handler = CommandHandler(BotCommands.CancelAllCommand, cancell_all_buttons,
164 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
165 |
166 | cancel_all_buttons_handler = CallbackQueryHandler(cancel_all_update, pattern="cnall")
167 |
168 | dispatcher.add_handler(cancel_all_handler)
169 | dispatcher.add_handler(cancel_mirror_handler)
170 | dispatcher.add_handler(cancel_all_buttons_handler)
171 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/db_handler.py:
--------------------------------------------------------------------------------
1 | from os import environ, makedirs
2 | from os import path as ospath
3 |
4 | from pymongo import MongoClient
5 | from pymongo.errors import PyMongoError
6 |
7 | from bot import (DATABASE_URL, LOGGER, aria2_options, bot_id, botname,
8 | config_dict, qbit_options, rss_dict, user_data)
9 |
10 |
11 | class DbManger:
12 | def __init__(self):
13 | self.__err = False
14 | self.__db = None
15 | self.__conn = None
16 | self.__connect()
17 |
18 | def __connect(self):
19 | try:
20 | self.__conn = MongoClient(DATABASE_URL)
21 | self.__db = self.__conn.z
22 | except PyMongoError as e:
23 | LOGGER.error(f"Error in DB connection: {e}")
24 | self.__err = True
25 |
26 | def db_load(self):
27 | if self.__err:
28 | return
29 | # Save bot settings
30 | self.__db.settings.config.update_one({'_id': bot_id}, {'$set': config_dict}, upsert=True)
31 | # Save Aria2c options
32 | if self.__db.settings.aria2c.find_one({'_id': bot_id}) is None:
33 | self.__db.settings.aria2c.update_one({'_id': bot_id}, {'$set': aria2_options}, upsert=True)
34 | # Save qbittorrent options
35 | if self.__db.settings.qbittorrent.find_one({'_id': bot_id}) is None:
36 | self.__db.settings.qbittorrent.update_one({'_id': bot_id}, {'$set': qbit_options}, upsert=True)
37 | # User Data
38 | if self.__db.users[bot_id].find_one():
39 | rows = self.__db.users[bot_id].find({})
40 | # return a dict ==> {_id, is_sudo, is_auth, as_doc, thumb, yt_ql, media_group, equal_splits, split_size}
41 | for row in rows:
42 | uid = row['_id']
43 | del row['_id']
44 | path = f"Thumbnails/{uid}.jpg"
45 | if row.get('thumb'):
46 | if not ospath.exists('Thumbnails'):
47 | makedirs('Thumbnails')
48 | with open(path, 'wb+') as f:
49 | f.write(row['thumb'])
50 | row['thumb'] = path
51 | user_data[uid] = row
52 |             LOGGER.info("Users' data has been imported from the database")
53 | # Rss Data
54 | if self.__db.rss[bot_id].find_one():
55 | rows = self.__db.rss[bot_id].find({}) # return a dict ==> {_id, link, last_feed, last_name, filters}
56 | for row in rows:
57 | title = row['_id']
58 | del row['_id']
59 | rss_dict[title] = row
60 |             LOGGER.info("RSS data has been imported from the database.")
61 | self.__conn.close()
62 |
63 | def update_config(self, dict_):
64 | if self.__err:
65 | return
66 | self.__db.settings.config.update_one({'_id': bot_id}, {'$set': dict_}, upsert=True)
67 | self.__conn.close()
68 |
69 | def load_configs(self):
70 | if self.__err:
71 | return
72 | if db_dict := self.__db.settings.config.find_one({'_id': bot_id}):
73 | del db_dict['_id']
74 | for key, value in db_dict.items():
75 | environ[key] = str(value)
76 | if pf_dict := self.__db.settings.files.find_one({'_id': bot_id}):
77 | del pf_dict['_id']
78 | for key, value in pf_dict.items():
79 | if value:
80 | file_ = key.replace('__', '.')
81 | with open(file_, 'wb+') as f:
82 | f.write(value)
83 |
84 | def update_aria2(self, key, value):
85 | if self.__err:
86 | return
87 | self.__db.settings.aria2c.update_one({'_id': bot_id}, {'$set': {key: value}}, upsert=True)
88 | self.__conn.close()
89 |
90 | def update_qbittorrent(self, key, value):
91 | if self.__err:
92 | return
93 | self.__db.settings.qbittorrent.update_one({'_id': bot_id}, {'$set': {key: value}}, upsert=True)
94 | self.__conn.close()
95 |
96 | def update_private_file(self, path):
97 | if self.__err:
98 | return
99 | if ospath.exists(path):
100 | with open(path, 'rb+') as pf:
101 | pf_bin = pf.read()
102 | else:
103 | pf_bin = ''
104 | path = path.replace('.', '__')
105 | self.__db.settings.files.update_one({'_id': bot_id}, {'$set': {path: pf_bin}}, upsert=True)
106 | self.__conn.close()
107 |
108 | def update_user_data(self, user_id):
109 | if self.__err:
110 | return
111 | data = user_data[user_id]
112 | if data.get('thumb'):
113 | del data['thumb']
114 | self.__db.users[bot_id].update_one({'_id': user_id}, {'$set': data}, upsert=True)
115 | self.__conn.close()
116 |
117 | def update_thumb(self, user_id, path=None):
118 | if self.__err:
119 | return
120 | if path:
121 | with open(path, 'rb+') as image:
122 | image_bin = image.read()
123 | else:
124 | image_bin = ''
125 | self.__db.users[bot_id].update_one({'_id': user_id}, {'$set': {'thumb': image_bin}}, upsert=True)
126 | self.__conn.close()
127 |
128 | def rss_update(self, title):
129 | if self.__err:
130 | return
131 | self.__db.rss[bot_id].update_one({'_id': title}, {'$set': rss_dict[title]}, upsert=True)
132 | self.__conn.close()
133 |
134 | def rss_delete(self, title):
135 | if self.__err:
136 | return
137 | self.__db.rss[bot_id].delete_one({'_id': title})
138 | self.__conn.close()
139 |
140 | def add_incomplete_task(self, cid, link, tag):
141 | if self.__err:
142 | return
143 | self.__db.tasks[bot_id].insert_one({'_id': link, 'cid': cid, 'tag': tag})
144 | self.__conn.close()
145 |
146 | def rm_complete_task(self, link):
147 | if self.__err:
148 | return
149 | self.__db.tasks[bot_id].delete_one({'_id': link})
150 | self.__conn.close()
151 |
152 | def get_incomplete_tasks(self):
153 | notifier_dict = {}
154 | if self.__err:
155 | return notifier_dict
156 | if self.__db.tasks[bot_id].find_one():
157 | rows = self.__db.tasks[bot_id].find({}) # return a dict ==> {_id, cid, tag}
158 | for row in rows:
159 | if row['cid'] in list(notifier_dict.keys()):
160 | if row['tag'] in list(notifier_dict[row['cid']]):
161 | notifier_dict[row['cid']][row['tag']].append(row['_id'])
162 | else:
163 | notifier_dict[row['cid']][row['tag']] = [row['_id']]
164 | else:
165 | usr_dict = {row['tag']: [row['_id']]}
166 | notifier_dict[row['cid']] = usr_dict
167 | self.__db.tasks[bot_id].drop()
168 | self.__conn.close()
169 | return notifier_dict # return a dict ==> {cid: {tag: [_id, _id, ...]}}
170 |
171 | def trunc_table(self, name):
172 | if self.__err:
173 | return
174 | self.__db[name][bot_id].drop()
175 | self.__conn.close()
176 |
177 | def add_download_url(self, url: str, tag: str):
178 | if self.__err:
179 | return
180 | download = {'_id': url, 'tag': tag, 'botname': botname}
181 | self.__db.download_links.update_one({'_id': url}, {'$set': download}, upsert=True)
182 | self.__conn.close()
183 |
184 | def check_download(self, url:str):
185 | if self.__err:
186 | return
187 | exist = self.__db.download_links.find_one({'_id': url})
188 | self.__conn.close()
189 | return exist
190 |
191 | def clear_download_links(self, bot_name=None):
192 | if self.__err:
193 | return
194 | if not bot_name:
195 | bot_name = botname
196 | self.__db.download_links.delete_many({'botname': bot_name})
197 | self.__conn.close()
198 |
199 | def remove_download(self, url: str):
200 | if self.__err:
201 | return
202 | self.__db.download_links.delete_one({'_id': url})
203 | self.__conn.close()
204 |
205 | if DATABASE_URL:
206 | DbManger().db_load()
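
Every DbManger method above opens the MongoDB connection in __init__ and closes it when the operation finishes, so callers are expected to build a fresh instance per call, exactly as the module itself does with DbManger().db_load(). Below is a short sketch of typical call sites, assuming DATABASE_URL is configured; the wrapper functions are illustrative, not part of the bot.

# A sketch of how callers elsewhere in the bot are expected to use DbManger.
# Each method closes the MongoDB connection when it finishes, so a fresh
# instance is created per operation. The config key used below is illustrative.

from bot import DATABASE_URL, config_dict
from bot.helper.ext_utils.db_handler import DbManger

def set_stop_duplicate(enabled: bool):
    config_dict['STOP_DUPLICATE'] = enabled
    if DATABASE_URL:
        # persist the in-memory config so it survives a restart
        DbManger().update_config({'STOP_DUPLICATE': enabled})

def save_user_thumbnail(user_id: int, thumb_path: str):
    if DATABASE_URL:
        DbManger().update_thumb(user_id, thumb_path)
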
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/mega_downloader.py:
--------------------------------------------------------------------------------
1 | from os import makedirs
2 | from random import SystemRandom
3 | from string import ascii_letters, digits
4 | from threading import Event
5 |
6 | from mega import MegaApi, MegaError, MegaListener, MegaRequest, MegaTransfer
7 |
8 | from bot import (LOGGER, config_dict, download_dict, download_dict_lock,
9 | non_queued_dl, non_queued_up, queue_dict_lock, queued_dl)
10 | from bot.helper.ext_utils.bot_utils import (get_mega_link_type,
11 | get_readable_file_size)
12 | from bot.helper.ext_utils.fs_utils import (check_storage_threshold,
13 | get_base_name)
14 | from bot.helper.mirror_utils.status_utils.mega_download_status import MegaDownloadStatus
15 | from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus
16 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
17 | from bot.helper.telegram_helper.message_utils import (sendMessage, delete_links,
18 | sendStatusMessage)
19 |
20 |
21 | class MegaAppListener(MegaListener):
22 |     _NO_EVENT_ON = (MegaRequest.TYPE_LOGIN, MegaRequest.TYPE_FETCH_NODES)
23 | NO_ERROR = "no error"
24 |
25 | def __init__(self, continue_event: Event, listener):
26 | self.continue_event = continue_event
27 | self.node = None
28 | self.public_node = None
29 | self.listener = listener
30 | self.__bytes_transferred = 0
31 | self.is_cancelled = False
32 | self.__speed = 0
33 | self.__name = ''
34 | self.__size = 0
35 | self.error = None
36 | self.gid = ""
37 | super(MegaAppListener, self).__init__()
38 |
39 | @property
40 | def speed(self):
41 | """Returns speed of the download in bytes/second"""
42 | return self.__speed
43 |
44 | @property
45 | def name(self):
46 | """Returns name of the download"""
47 | return self.__name
48 |
49 | def setValues(self, name, size, gid):
50 | self.__name = name
51 | self.__size = size
52 | self.gid = gid
53 |
54 | @property
55 | def size(self):
56 | """Size of download in bytes"""
57 | return self.__size
58 |
59 | @property
60 | def downloaded_bytes(self):
61 | return self.__bytes_transferred
62 |
63 | def onRequestFinish(self, api, request, error):
64 | if str(error).lower() != "no error":
65 | self.error = error.copy()
66 | LOGGER.error(self.error)
67 | self.continue_event.set()
68 | return
69 | request_type = request.getType()
70 | if request_type == MegaRequest.TYPE_LOGIN:
71 | api.fetchNodes()
72 | elif request_type == MegaRequest.TYPE_GET_PUBLIC_NODE:
73 | self.public_node = request.getPublicMegaNode()
74 | elif request_type == MegaRequest.TYPE_FETCH_NODES:
75 | LOGGER.info("Fetching Root Node.")
76 | self.node = api.getRootNode()
77 | LOGGER.info(f"Node Name: {self.node.getName()}")
78 | if request_type not in self._NO_EVENT_ON or self.node and "cloud drive" not in self.node.getName().lower():
79 | self.continue_event.set()
80 |
81 | def onRequestTemporaryError(self, api, request, error: MegaError):
82 |         LOGGER.error(f'Mega request error: {error}')
83 | if not self.is_cancelled:
84 | self.is_cancelled = True
85 | self.listener.onDownloadError(f"RequestTempError: {error.toString()}")
86 | self.error = error.toString()
87 | self.continue_event.set()
88 |
89 | def onTransferUpdate(self, api: MegaApi, transfer: MegaTransfer):
90 | if self.is_cancelled:
91 | api.cancelTransfer(transfer, None)
92 | self.continue_event.set()
93 | return
94 | self.__speed = transfer.getSpeed()
95 | self.__bytes_transferred = transfer.getTransferredBytes()
96 |
97 | def onTransferFinish(self, api: MegaApi, transfer: MegaTransfer, error):
98 | try:
99 | if self.is_cancelled:
100 | self.continue_event.set()
101 | elif transfer.isFinished() and (transfer.isFolderTransfer() or transfer.getFileName() == self.name):
102 | self.listener.onDownloadComplete()
103 | self.continue_event.set()
104 | except Exception as e:
105 | LOGGER.error(e)
106 |
107 | def onTransferTemporaryError(self, api, transfer, error):
108 | filen = transfer.getFileName()
109 | state = transfer.getState()
110 | errStr = error.toString()
111 | LOGGER.error(f'Mega download error in file {transfer} {filen}: {error}')
112 | if state in [1, 4]:
113 |             # Sometimes MEGA (official client) can't stream a node either and raises a temporary failure error.
114 |             # Don't break the transfer queue if the transfer is in the queued (1) or retrying (4) state [causes seg fault]
115 | return
116 |
117 | self.error = errStr
118 | if not self.is_cancelled:
119 | self.is_cancelled = True
120 | self.listener.onDownloadError(f"TransferTempError: {errStr} ({filen})")
121 | self.continue_event.set()
122 |
123 | def cancel_download(self):
124 | self.is_cancelled = True
125 | self.listener.onDownloadError("Download Canceled by user")
126 |
127 |
128 | class AsyncExecutor:
129 |
130 | def __init__(self):
131 | self.continue_event = Event()
132 |
133 | def do(self, function, args):
134 | self.continue_event.clear()
135 | function(*args)
136 | self.continue_event.wait()
137 |
138 |
139 | def add_mega_download(mega_link, path, listener, name, from_queue=False):
140 | MEGA_API_KEY = config_dict['MEGA_API_KEY']
141 | executor = AsyncExecutor()
142 | api = MegaApi(MEGA_API_KEY, None, None, 'z')
143 | folder_api = None
144 | mega_listener = MegaAppListener(executor.continue_event, listener)
145 | api.addListener(mega_listener)
146 | if (MEGA_EMAIL_ID:= config_dict['MEGA_EMAIL_ID']) and (MEGA_PASSWORD:= config_dict['MEGA_PASSWORD']):
147 | executor.do(api.login, (MEGA_EMAIL_ID, MEGA_PASSWORD))
148 | if get_mega_link_type(mega_link) == "file":
149 | executor.do(api.getPublicNode, (mega_link,))
150 | node = mega_listener.public_node
151 | else:
152 | folder_api = MegaApi(MEGA_API_KEY, None, None, 'z')
153 | folder_api.addListener(mega_listener)
154 | executor.do(folder_api.loginToFolder, (mega_link,))
155 | node = folder_api.authorizeNode(mega_listener.node)
156 | if mega_listener.error:
157 | sendMessage(str(mega_listener.error), listener.bot, listener.message)
158 | listener.ismega.delete()
159 | api.removeListener(mega_listener)
160 | if folder_api:
161 | folder_api.removeListener(mega_listener)
162 | return
163 | mname = name or node.getName()
164 | if config_dict['STOP_DUPLICATE'] and not listener.isLeech and not listener.select:
165 |         LOGGER.info('Checking if File/Folder is already in Drive')
166 | if listener.isZip:
167 | mname = f"{mname}.zip"
168 | elif listener.extract:
169 | try:
170 | mname = get_base_name(mname)
171 | except:
172 | mname = None
173 | if mname:
174 | smsg, button = GoogleDriveHelper().drive_list(mname, True)
175 | if smsg:
176 | listener.ismega.delete()
177 | delete_links(listener.bot, listener.message)
178 | msg1 = "File/Folder is already available in Drive.\nHere are the search results:"
179 | sendMessage(msg1, listener.bot, listener.message, button)
180 | api.removeListener(mega_listener)
181 | if folder_api:
182 | folder_api.removeListener(mega_listener)
183 | return
184 | size = api.getSize(node)
185 | limit_exceeded = ''
186 | if not limit_exceeded and (STORAGE_THRESHOLD:= config_dict['STORAGE_THRESHOLD']):
187 | limit = STORAGE_THRESHOLD * 1024**3
188 | arch = any([listener.isZip, listener.extract])
189 | acpt = check_storage_threshold(size, limit, arch)
190 | if not acpt:
191 | limit_exceeded = f'You must leave {get_readable_file_size(limit)} free storage.'
192 | if not limit_exceeded and (MEGA_LIMIT:= config_dict['MEGA_LIMIT']):
193 | limit = MEGA_LIMIT * 1024**3
194 | if size > limit:
195 | limit_exceeded = f'Mega limit is {get_readable_file_size(limit)}'
196 | if not limit_exceeded and (LEECH_LIMIT:= config_dict['LEECH_LIMIT']) and listener.isLeech:
197 | limit = LEECH_LIMIT * 1024**3
198 | if size > limit:
199 | limit_exceeded = f'Leech limit is {get_readable_file_size(limit)}'
200 | if limit_exceeded:
201 | listener.ismega.delete()
202 | return sendMessage(f"{limit_exceeded}.\nYour File/Folder size is {get_readable_file_size(size)}.", listener.bot, listener.message)
203 | mname = name or node.getName()
204 | gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=8))
205 | all_limit = config_dict['QUEUE_ALL']
206 | dl_limit = config_dict['QUEUE_DOWNLOAD']
207 | if all_limit or dl_limit:
208 | added_to_queue = False
209 | with queue_dict_lock:
210 | dl = len(non_queued_dl)
211 | up = len(non_queued_up)
212 | if (all_limit and dl + up >= all_limit and (not dl_limit or dl >= dl_limit)) or (dl_limit and dl >= dl_limit):
213 | added_to_queue = True
214 | queued_dl[listener.uid] = ['mega', mega_link, path, listener, name]
215 | if added_to_queue:
216 | LOGGER.info(f"Added to Queue/Download: {mname}")
217 | with download_dict_lock:
218 | download_dict[listener.uid] = QueueStatus(mname, size, gid, listener, 'Dl')
219 | listener.ismega.delete()
220 | listener.onDownloadStart()
221 | sendStatusMessage(listener.message, listener.bot)
222 | api.removeListener(mega_listener)
223 | if folder_api is not None:
224 | folder_api.removeListener(mega_listener)
225 | return
226 | with download_dict_lock:
227 | download_dict[listener.uid] = MegaDownloadStatus(mega_listener, listener)
228 | with queue_dict_lock:
229 | non_queued_dl.add(listener.uid)
230 | makedirs(path, exist_ok=True)
231 | mega_listener.setValues(mname, size, gid)
232 | if not from_queue:
233 | listener.ismega.delete()
234 | listener.onDownloadStart()
235 | sendStatusMessage(listener.message, listener.bot)
236 | LOGGER.info(f"Download from Mega: {mname}")
237 | else:
238 | LOGGER.info(f'Start Queued Download from Mega: {mname}')
239 | executor.do(api.startDownload, (node, path, name, None, False, None))
240 | api.removeListener(mega_listener)
241 | if folder_api:
242 | folder_api.removeListener(mega_listener)
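
The MEGA SDK reports results through listener callbacks, and AsyncExecutor.do() above turns each SDK call into a blocking step: clear the Event, start the call, then wait until a MegaAppListener callback sets continue_event. Below is a stripped-down sketch of that pattern with a fake asynchronous API standing in for MegaApi; all names in the fake class are hypothetical.

# A minimal sketch of the Event-based pattern AsyncExecutor uses above:
# start an asynchronous operation, then block until its callback fires.
# FakeAsyncApi is purely illustrative and stands in for MegaApi.

from threading import Event, Thread
from time import sleep

class FakeAsyncApi:
    """Calls `on_done` from another thread, like the MEGA SDK's listener callbacks."""
    def __init__(self, on_done):
        self.on_done = on_done

    def login(self, email, password):
        Thread(target=self.__work, args=('login',)).start()

    def __work(self, what):
        sleep(0.2)          # pretend to do network I/O
        self.on_done(what)  # corresponds to onRequestFinish -> continue_event.set()

class AsyncExecutor:
    def __init__(self):
        self.continue_event = Event()

    def do(self, function, args):
        self.continue_event.clear()
        function(*args)              # kick off the asynchronous call
        self.continue_event.wait()   # block until the callback sets the event

if __name__ == '__main__':
    executor = AsyncExecutor()
    api = FakeAsyncApi(on_done=lambda what: executor.continue_event.set())
    executor.do(api.login, ('user@example.com', 'secret'))
    print('login finished, safe to continue')
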
--------------------------------------------------------------------------------
/bot/helper/ext_utils/fs_utils.py:
--------------------------------------------------------------------------------
1 | from json import loads as jsonloads
2 | from math import ceil
3 | from os import listdir, makedirs, mkdir, rmdir, walk
4 | from os import path as ospath
5 | from os import remove as osremove
6 | from re import I
7 | from re import split as re_split
8 | from shutil import disk_usage, rmtree
9 | from subprocess import Popen, check_output
10 | from subprocess import run as srun
11 | from sys import exit as sysexit
12 | from time import time
13 |
14 | from magic import Magic
15 | from PIL import Image
16 |
17 | from bot import (DOWNLOAD_DIR, LOGGER, MAX_SPLIT_SIZE, app, aria2, config_dict,
18 | get_client, user_data)
19 | from bot.helper.ext_utils.exceptions import NotSupportedExtractionArchive
20 | from bot.helper.ext_utils.telegraph_helper import telegraph
21 |
22 | ARCH_EXT = [".tar.bz2", ".tar.gz", ".bz2", ".gz", ".tar.xz", ".tar", ".tbz2", ".tgz", ".lzma2",
23 | ".zip", ".7z", ".z", ".rar", ".iso", ".wim", ".cab", ".apm", ".arj", ".chm",
24 | ".cpio", ".cramfs", ".deb", ".dmg", ".fat", ".hfs", ".lzh", ".lzma", ".mbr",
25 | ".msi", ".mslz", ".nsis", ".ntfs", ".rpm", ".squashfs", ".udf", ".vhd", ".xar"]
26 |
27 | def clean_target(path: str):
28 | if ospath.exists(path):
29 | LOGGER.info(f"Cleaning Target: {path}")
30 | if ospath.isdir(path):
31 | try:
32 | rmtree(path)
33 | except:
34 | pass
35 | elif ospath.isfile(path):
36 | try:
37 | osremove(path)
38 | except:
39 | pass
40 |
41 | def clean_download(path: str):
42 | if ospath.exists(path):
43 | LOGGER.info(f"Cleaning Download: {path}")
44 | try:
45 | rmtree(path)
46 | except:
47 | pass
48 |
49 | def start_cleanup():
50 | get_client().torrents_delete(torrent_hashes="all")
51 | try:
52 | rmtree(DOWNLOAD_DIR)
53 | except:
54 | pass
55 | makedirs(DOWNLOAD_DIR, exist_ok=True)
56 |
57 | def clean_all():
58 | aria2.remove_all(True)
59 | qb = get_client()
60 | qb.torrents_delete(torrent_hashes="all")
61 | qb.auth_log_out()
62 | app.stop()
63 | telegraph.revoke_access_token()
64 | try:
65 | rmtree(DOWNLOAD_DIR)
66 | except:
67 | pass
68 |
69 | def exit_clean_up(signal, frame):
70 | try:
71 | LOGGER.info("Please wait, while we clean up the downloads and stop running downloads")
72 | clean_all()
73 | sysexit(0)
74 | except KeyboardInterrupt:
75 | LOGGER.warning("Force Exiting before the cleanup finishes!")
76 | sysexit(1)
77 |
78 | def clean_unwanted(path: str):
79 | LOGGER.info(f"Cleaning unwanted files/folders: {path}")
80 | for dirpath, subdir, files in walk(path, topdown=False):
81 | for filee in files:
82 |             if filee.endswith(".!qB") or (filee.endswith('.parts') and filee.startswith('.')):
83 | osremove(ospath.join(dirpath, filee))
84 | if dirpath.endswith((".unwanted", "splited_files_z")):
85 | rmtree(dirpath)
86 | for dirpath, subdir, files in walk(path, topdown=False):
87 | if not listdir(dirpath):
88 | rmdir(dirpath)
89 |
90 | def get_path_size(path: str):
91 | if ospath.isfile(path):
92 | return ospath.getsize(path)
93 | total_size = 0
94 | for root, dirs, files in walk(path):
95 | for f in files:
96 | abs_path = ospath.join(root, f)
97 | total_size += ospath.getsize(abs_path)
98 | return total_size
99 |
100 | def get_base_name(orig_path: str):
101 | if ext := [ext for ext in ARCH_EXT if orig_path.lower().endswith(ext)]:
102 | ext = ext[0]
103 | return re_split(f'{ext}$', orig_path, maxsplit=1, flags=I)[0]
104 | else:
105 | raise NotSupportedExtractionArchive('File format not supported for extraction')
106 |
107 | def get_mime_type(file_path):
108 | mime = Magic(mime=True)
109 | mime_type = mime.from_file(file_path)
110 | mime_type = mime_type or "text/plain"
111 | return mime_type
112 |
113 | def take_ss(video_file, duration):
114 | des_dir = 'Thumbnails'
115 | if not ospath.exists(des_dir):
116 | mkdir(des_dir)
117 | des_dir = ospath.join(des_dir, f"{time()}.jpg")
118 | if duration is None:
119 | duration = get_media_info(video_file)[0]
120 | if duration == 0:
121 | duration = 3
122 | duration = duration // 2
123 |
124 | status = srun(["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(duration),
125 | "-i", video_file, "-frames:v", "1", des_dir])
126 |
127 | if status.returncode != 0 or not ospath.lexists(des_dir):
128 | return None
129 |
130 | with Image.open(des_dir) as img:
131 | img.convert("RGB").save(des_dir, "JPEG")
132 |
133 | return des_dir
134 |
135 | def split_file(path, size, file_, dirpath, split_size, listener, start_time=0, i=1, inLoop=False, noMap=False):
136 | if listener.seed and not listener.newDir:
137 | dirpath = f"{dirpath}/splited_files_z"
138 | if not ospath.exists(dirpath):
139 | mkdir(dirpath)
140 | user_id = listener.message.from_user.id
141 | user_dict = user_data.get(user_id, {})
142 | leech_split_size = user_dict.get('split_size') or config_dict['LEECH_SPLIT_SIZE']
143 | parts = ceil(size/leech_split_size)
144 | if (user_dict.get('equal_splits') or config_dict['EQUAL_SPLITS']) and not inLoop:
145 | split_size = ceil(size/parts) + 1000
146 | if get_media_streams(path)[0]:
147 | duration = get_media_info(path)[0]
148 | base_name, extension = ospath.splitext(file_)
149 | split_size = split_size - 5000000
150 | while i <= parts or start_time < duration - 4:
151 | parted_name = f"{str(base_name)}.part{str(i).zfill(3)}{str(extension)}"
152 | out_path = ospath.join(dirpath, parted_name)
153 | cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(start_time),
154 | "-i", path, "-fs", str(split_size), "-map_chapters", "-1", "-async", "1",
155 | "-strict", "-2","-c", "copy", out_path]
156 | if not noMap:
157 | cmd.insert(10, '-map')
158 | cmd.insert(11, '0')
159 | listener.suproc = Popen(cmd)
160 | listener.suproc.wait()
161 | if listener.suproc.returncode == -9:
162 | return False
163 | elif listener.suproc.returncode != 0 and not noMap:
164 |                 LOGGER.warning(f"Retrying without -map 0; it does not work in all situations. Path: {path}")
165 | try:
166 | osremove(out_path)
167 | except:
168 | pass
169 | return split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, True)
170 | elif listener.suproc.returncode != 0:
171 |                 LOGGER.warning(f"Unable to split this video. If its size is less than {MAX_SPLIT_SIZE}, it will be uploaded as is. Path: {path}")
172 | try:
173 | osremove(out_path)
174 | except:
175 | pass
176 | return "errored"
177 | out_size = get_path_size(out_path)
178 | if out_size > MAX_SPLIT_SIZE:
179 | dif = out_size - MAX_SPLIT_SIZE
180 | split_size = split_size - dif + 5000000
181 | osremove(out_path)
182 | return split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, noMap)
183 | lpd = get_media_info(out_path)[0]
184 | if lpd == 0:
185 |                 LOGGER.error(f'Something went wrong while splitting; the file is most likely corrupted. Path: {path}')
186 | break
187 | elif duration == lpd:
188 | if not noMap:
189 |                     LOGGER.warning(f"Retrying without -map 0; it does not work in all situations. Path: {path}")
190 | try:
191 | osremove(out_path)
192 | except:
193 | pass
194 | return split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, True)
195 | else:
196 |                     LOGGER.warning(f"This file was split with only the default video and audio streams, so you will see just one part, smaller than the original, because it does not contain all streams and audio tracks. This happens mostly with MKV videos. noMap={noMap}. Path: {path}")
197 | break
198 | elif lpd <= 3:
199 | osremove(out_path)
200 | break
201 | start_time += lpd - 3
202 | i = i + 1
203 | else:
204 | out_path = ospath.join(dirpath, f"{file_}.")
205 | listener.suproc = Popen(["split", "--numeric-suffixes=1", "--suffix-length=3",
206 | f"--bytes={split_size}", path, out_path])
207 | listener.suproc.wait()
208 | if listener.suproc.returncode == -9:
209 | return False
210 | return True
211 |
212 | def get_media_info(path):
213 |
214 | try:
215 | result = check_output(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
216 | "json", "-show_format", "-show_streams", path]).decode('utf-8')
217 | except Exception as e:
218 |         LOGGER.error(f'{e}. The file was most likely not found!')
219 | return 0, None, None
220 |
221 |     fields = jsonloads(result).get('format')
222 | if fields is None:
223 | LOGGER.error(f"get_media_info: {result}")
224 | return 0, None, None
225 |
226 | duration = round(float(fields.get('duration', 0)))
227 |
228 | if fields := fields.get('tags'):
229 | artist = fields.get('artist')
230 | if artist is None:
231 | artist = fields.get('ARTIST')
232 | title = fields.get('title')
233 | if title is None:
234 | title = fields.get('TITLE')
235 | else:
236 | title = None
237 | artist = None
238 |
239 | return duration, artist, title
240 |
241 | def get_media_streams(path):
242 |
243 | is_video = False
244 | is_audio = False
245 | is_image = False
246 |
247 | mime_type = get_mime_type(path)
248 | if mime_type.startswith('audio'):
249 | is_audio = True
250 | return is_video, is_audio, is_image
251 |
252 | if mime_type.startswith('image'):
253 | is_image = True
254 | return is_video, is_audio, is_image
255 |
256 | if path.endswith('.bin') or not mime_type.startswith('video') and not mime_type.endswith('octet-stream'):
257 | return is_video, is_audio, is_image
258 |
259 | try:
260 | result = check_output(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
261 | "json", "-show_streams", path]).decode('utf-8')
262 | except Exception as e:
263 | if not mime_type.endswith('octet-stream'):
264 |             LOGGER.error(f'{e}. The file was most likely not found!')
265 | return is_video, is_audio, is_image
266 |
267 |     fields = jsonloads(result).get('streams')
268 | if fields is None:
269 | LOGGER.error(f"get_media_streams: {result}")
270 | return is_video, is_audio, is_image
271 |
272 | for stream in fields:
273 | if stream.get('codec_type') == 'video':
274 | is_video = True
275 | elif stream.get('codec_type') == 'audio':
276 | is_audio = True
277 |
278 | return is_video, is_audio, is_image
279 |
280 | def check_storage_threshold(size, threshold, arch=False, alloc=False):
281 | if not alloc:
282 | if not arch:
283 | if disk_usage(DOWNLOAD_DIR).free - size < threshold:
284 | return False
285 | elif disk_usage(DOWNLOAD_DIR).free - (size * 2) < threshold:
286 | return False
287 | elif not arch:
288 | if disk_usage(DOWNLOAD_DIR).free < threshold:
289 | return False
290 | elif disk_usage(DOWNLOAD_DIR).free - size < threshold:
291 | return False
292 | return True
293 |
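
check_storage_threshold() above rejects a task when the free space that would remain under DOWNLOAD_DIR falls below STORAGE_THRESHOLD; with arch=True it reserves room for a second (zipped or extracted) copy, and with alloc=True it treats the downloaded bytes as already allocated on disk. Below is a worked example of the same rule written as a pure function so it runs anywhere; the GiB figures are illustrative.

# A worked example of the check_storage_threshold() rule, with the disk-free
# value passed in explicitly instead of reading disk_usage(DOWNLOAD_DIR).

GIB = 1024**3

def storage_ok(free, size, threshold, arch=False, alloc=False):
    """Pure version of check_storage_threshold(): True if the task may proceed."""
    if not alloc:
        needed = size * 2 if arch else size   # archive/extract needs a second copy
        return free - needed >= threshold
    # alloc=True: the downloaded bytes already count against `free`
    needed = size if arch else 0
    return free - needed >= threshold

if __name__ == '__main__':
    free = 100 * GIB
    threshold = 20 * GIB                                          # STORAGE_THRESHOLD = 20
    assert storage_ok(free, 50 * GIB, threshold)                  # 100 - 50 = 50 GiB left
    assert not storage_ok(free, 50 * GIB, threshold, arch=True)   # 100 - 100 = 0 GiB left
    assert storage_ok(free, 70 * GIB, threshold, alloc=True)      # already on disk
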
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/message_utils.py:
--------------------------------------------------------------------------------
1 | from io import BytesIO
2 | from time import sleep, time
3 |
4 | from pyrogram.errors import FloodWait
5 | from telegram.error import RetryAfter, Unauthorized
6 |
7 | from bot import (LOGGER, Interval, bot, btn_listener, categories, config_dict,
8 | rss_session, status_reply_dict, status_reply_dict_lock)
9 | from bot.helper.ext_utils.bot_utils import get_readable_message, setInterval
10 | from bot.helper.telegram_helper.button_build import ButtonMaker
11 |
12 |
13 | def sendMessage(text, bot, message, reply_markup=None):
14 | try:
15 | return bot.sendMessage(message.chat_id, reply_to_message_id=message.message_id,
16 | text=text, reply_markup=reply_markup)
17 | except RetryAfter as r:
18 | LOGGER.warning(str(r))
19 | sleep(r.retry_after * 1.5)
20 | return sendMessage(text, bot, message, reply_markup)
21 | except Exception as e:
22 | LOGGER.error(str(e))
23 | return
24 |
25 | def sendPhoto(text, bot, message, photo):
26 | try:
27 | return bot.sendPhoto(message.chat_id, photo, text, reply_to_message_id=message.message_id)
28 | except RetryAfter as r:
29 | LOGGER.warning(str(r))
30 | sleep(r.retry_after * 1.5)
31 | return sendPhoto(text, bot, message, photo)
32 | except Exception as e:
33 | LOGGER.error(str(e))
34 | return
35 |
36 | def editMessage(text, message, reply_markup=None):
37 | try:
38 | bot.editMessageText(text=text, message_id=message.message_id, chat_id=message.chat_id, reply_markup=reply_markup)
39 | except RetryAfter as r:
40 | LOGGER.warning(str(r))
41 | sleep(r.retry_after * 1.5)
42 | return editMessage(text, message, reply_markup)
43 | except Exception as e:
44 | LOGGER.error(str(e))
45 | return str(e)
46 |
47 | def sendRss(text, bot):
48 | if not rss_session:
49 | try:
50 | return bot.sendMessage(config_dict['RSS_CHAT_ID'], text)
51 | except RetryAfter as r:
52 | LOGGER.warning(str(r))
53 | sleep(r.retry_after * 1.5)
54 | return sendRss(text, bot)
55 | except Exception as e:
56 | LOGGER.error(str(e))
57 | return
58 | else:
59 | try:
60 | with rss_session:
61 | return rss_session.send_message(config_dict['RSS_CHAT_ID'], text, disable_web_page_preview=True)
62 | except FloodWait as e:
63 | LOGGER.warning(str(e))
64 | sleep(e.value * 1.5)
65 | return sendRss(text, bot)
66 | except Exception as e:
67 | LOGGER.error(str(e))
68 | return
69 |
70 | def deleteMessage(bot, message):
71 | try:
72 | bot.deleteMessage(chat_id=message.chat_id, message_id=message.message_id)
73 | except:
74 | pass
75 |
76 | def sendLogFile(bot, message):
77 | with open('log.txt', 'rb') as f:
78 | bot.sendDocument(document=f, filename=f.name,
79 | reply_to_message_id=message.message_id,
80 | chat_id=message.chat_id)
81 |
82 | def sendFile(bot, message, txt, fileName, caption=""):
83 | try:
84 | with BytesIO(str.encode(txt)) as document:
85 | document.name = fileName
86 | return bot.sendDocument(document=document, reply_to_message_id=message.message_id,
87 | caption=caption, chat_id=message.chat_id)
88 | except RetryAfter as r:
89 | LOGGER.warning(str(r))
90 | sleep(r.retry_after * 1.5)
91 | return sendFile(bot, message, txt, fileName, caption)
92 | except Exception as e:
93 | LOGGER.error(str(e))
94 | return
95 |
96 | def auto_delete_message(bot, cmd_message=None, bot_message=None):
97 | if config_dict['AUTO_DELETE_MESSAGE_DURATION'] != -1:
98 | sleep(config_dict['AUTO_DELETE_MESSAGE_DURATION'])
99 | if cmd_message:
100 | deleteMessage(bot, cmd_message)
101 | if bot_message:
102 | deleteMessage(bot, bot_message)
103 |
104 | def delete_all_messages():
105 | with status_reply_dict_lock:
106 | for data in list(status_reply_dict.values()):
107 | try:
108 | deleteMessage(bot, data[0])
109 | del status_reply_dict[data[0].chat_id]
110 | except Exception as e:
111 | LOGGER.error(str(e))
112 |
113 | def update_all_messages(force=False):
114 | with status_reply_dict_lock:
115 | if not status_reply_dict or not Interval or (not force and time() - list(status_reply_dict.values())[0][1] < 3):
116 | return
117 | for chat_id in status_reply_dict:
118 | status_reply_dict[chat_id][1] = time()
119 |
120 | msg, buttons = get_readable_message()
121 | if not msg:
122 | return
123 | with status_reply_dict_lock:
124 | for chat_id in status_reply_dict:
125 | if status_reply_dict[chat_id] and msg != status_reply_dict[chat_id][0].text:
126 | rmsg = editMessage(msg, status_reply_dict[chat_id][0], buttons)
127 | if rmsg == "Message to edit not found":
128 | del status_reply_dict[chat_id]
129 | return
130 | status_reply_dict[chat_id][0].text = msg
131 | status_reply_dict[chat_id][1] = time()
132 |
133 | def sendStatusMessage(msg, bot):
134 | progress, buttons = get_readable_message()
135 | if not progress:
136 | return
137 | with status_reply_dict_lock:
138 | if msg.chat_id in status_reply_dict:
139 | message = status_reply_dict[msg.chat_id][0]
140 | deleteMessage(bot, message)
141 | del status_reply_dict[msg.chat_id]
142 | message = sendMessage(progress, bot, msg, buttons)
143 | status_reply_dict[msg.chat_id] = [message, time()]
144 | if not Interval:
145 | Interval.append(setInterval(config_dict['DOWNLOAD_STATUS_UPDATE_INTERVAL'], update_all_messages))
146 |
147 | def sendDmMessage(bot, message, dmMode, isLeech=False):
148 | if dmMode == 'mirror' and isLeech or dmMode == 'leech' and not isLeech:
149 | return
150 | try:
151 | return bot.sendMessage(message.from_user.id, disable_notification=True, text=message.link)
152 | except RetryAfter as r:
153 | LOGGER.warning(str(r))
154 | sleep(r.retry_after * 1.5)
155 |         return sendDmMessage(bot, message, dmMode, isLeech)
156 | except Unauthorized:
157 | delete_links(bot, message)
158 | buttons = ButtonMaker()
159 | buttons.buildbutton("Start", f"{bot.link}?start=start")
160 | tag = message.from_user.mention_html(message.from_user.username)
161 |         sendMessage(f"Hey @{tag}!\nYou haven't STARTED me in DM.\nI have to send all your files in DM.\n\nStart me and try again.", bot, message, buttons.build_menu(1))
162 | return 'BotNotStarted'
163 | except Exception as e:
164 | LOGGER.error(str(e))
165 | return
166 |
167 | def sendLogMessage(bot, message, link, tag):
168 | if not (log_chat := config_dict['LOG_CHAT']):
169 | return
170 | try:
171 |
172 |         if (reply_to := message.reply_to_message) or "https://api.telegram.org/file/" in link:
173 |             if reply_to and (reply_to.document or reply_to.video or reply_to.audio or reply_to.photo):
174 | __forwarded = reply_to.forward(log_chat)
175 | __forwarded.delete()
176 | __temp = reply_to.copy(
177 | log_chat,
178 | caption=f'Source | #cc: {tag} ({message.from_user.id})'
179 | )
180 | __forwarded.message_id = __temp['message_id']
181 | return __forwarded
182 | msg = f'Source: {link}\n\n#cc: {tag} ({message.from_user.id})'
183 | return bot.sendMessage(log_chat, disable_notification=True, text=msg)
184 | except RetryAfter as r:
185 | LOGGER.warning(str(r))
186 | sleep(r.retry_after * 1.5)
187 | return sendLogMessage(bot, message, link, tag)
188 | except Exception as e:
189 | LOGGER.error(str(e))
190 | return
191 |
192 | def isAdmin(message, user_id=None):
193 | if message.chat.type != message.chat.PRIVATE:
194 | if user_id:
195 | member = message.chat.get_member(user_id)
196 | else:
197 | member = message.chat.get_member(message.from_user.id)
198 | return member.status in [member.ADMINISTRATOR, member.CREATOR] or member.is_anonymous
199 |
200 | def forcesub(bot, message, tag):
201 | if not (FSUB_IDS := config_dict['FSUB_IDS']):
202 | return
203 | join_button = {}
204 | for channel_id in FSUB_IDS.split():
205 | if not str(channel_id).startswith('-100'):
206 | continue
207 | chat = bot.get_chat(channel_id)
208 | member = chat.get_member(message.from_user.id)
209 | if member.status in [member.LEFT, member.KICKED]:
210 | delete_links(bot, message)
211 | join_button[chat.title] = chat.link or chat.invite_link
212 | if join_button:
213 | btn = ButtonMaker()
214 | for key, value in join_button.items():
215 | btn.buildbutton(key, value)
216 | return sendMessage(f'Hey {tag}!\nPlease join our channel to use me!\nJoin And Try Again!\nThank You.', bot, message, btn.build_menu(2))
217 |
218 | def message_filter(bot, message, tag):
219 | if not config_dict['ENABLE_MESSAGE_FILTER']:
220 | return
221 | _msg = ''
222 | if message.reply_to_message:
223 | if message.reply_to_message.forward_date:
224 | message.reply_to_message.delete()
225 |             _msg = "You can't mirror or leech forwarded messages to this bot.\n\nRemove it and try again"
226 | elif message.reply_to_message.caption:
227 | message.reply_to_message.delete()
228 |             _msg = "You can't mirror or leech with caption text to this bot.\n\nRemove it and try again"
229 | elif message.forward_date:
230 | message.delete()
231 |         _msg = "You can't mirror or leech forwarded messages to this bot.\n\nRemove it and try again"
232 | if _msg:
233 | message.message_id = None
234 | return sendMessage(f"{tag} {_msg}", bot, message)
235 |
236 |
237 | def delete_links(bot, message):
238 | if config_dict['DELETE_LINKS']:
239 | if reply_to := message.reply_to_message:
240 | deleteMessage(bot, reply_to)
241 | deleteMessage(bot, message)
242 |
243 | def anno_checker(message):
244 | user_id = message.from_user.id
245 | msg_id = message.message_id
246 | buttons = ButtonMaker()
247 | if user_id == 1087968824:
248 | _msg = "Group Anonymous Admin"
249 | buttons.sbutton('Verify Anonymous', f'verify admin {msg_id}')
250 | elif user_id == 136817688:
251 | _msg = "Channel"
252 | buttons.sbutton('Verify Channel', f'verify channel {msg_id}')
253 | buttons.sbutton('Cancel', f'verify no {msg_id}')
254 |     sendMessage(f'{_msg} Verification\nIf you hit Verify, your username and ID will be exposed in the bot logs!', message.bot, message, buttons.build_menu(2))
255 | user_id = None
256 | btn_listener[msg_id] = [True, user_id]
257 | start_time = time()
258 | while btn_listener[msg_id][0] and time() - start_time <= 7:
259 | if btn_listener[msg_id][1]:
260 | user_id = btn_listener[msg_id][1]
261 | break
262 | del btn_listener[msg_id]
263 | return user_id
264 |
265 | def open_category_btns(message):
266 | user_id = message.from_user.id
267 | msg_id = message.message_id
268 | buttons = ButtonMaker()
269 | for _name in categories.keys():
270 | buttons.sbutton(f'{_name}', f'scat {user_id} {msg_id} {_name}')
271 | prompt = sendMessage('Select the category where you want to upload', message.bot, message, buttons.build_menu(2))
272 | drive_id = None
273 | index_link = None
274 | btn_listener[msg_id] = [True, drive_id, index_link]
275 | start_time = time()
276 | while btn_listener[msg_id][0] and time() - start_time <= 30:
277 | if btn_listener[msg_id][1]:
278 | drive_id = btn_listener[msg_id][1]
279 | index_link = btn_listener[msg_id][2]
280 | break
281 | deleteMessage(message.bot, prompt)
282 | del btn_listener[msg_id]
283 | return drive_id, index_link
284 |
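
The send*/edit* helpers above all handle Telegram flood control the same way: catch RetryAfter, sleep for 1.5x the advised delay, then retry the call. Below is a generic sketch of that pattern as a decorator; retry_on_flood() is only an illustration, not part of the bot.

# A sketch of the flood-control retry pattern used by the helpers above:
# on RetryAfter, back off retry_after * 1.5 and try again; on any other
# exception, log it and give up.

from functools import wraps
from time import sleep

from telegram.error import RetryAfter

from bot import LOGGER

def retry_on_flood(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        while True:
            try:
                return func(*args, **kwargs)
            except RetryAfter as r:
                LOGGER.warning(str(r))
                sleep(r.retry_after * 1.5)   # same back-off factor as the helpers above
            except Exception as e:
                LOGGER.error(str(e))
                return
    return wrapper

@retry_on_flood
def send_text(bot, chat_id, text):
    # hypothetical helper showing the decorator in use
    return bot.sendMessage(chat_id, text=text)
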
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/aria2_download.py:
--------------------------------------------------------------------------------
1 | from os import path, remove
2 | from time import sleep, time
3 |
4 | from bot import (LOGGER, aria2, aria2_options, aria2c_global, config_dict,
5 | download_dict, download_dict_lock)
6 | from bot.helper.ext_utils.bot_utils import (bt_selection_buttons,
7 | get_readable_file_size,
8 | getDownloadByGid, is_magnet,
9 | new_thread)
10 | from bot.helper.ext_utils.fs_utils import (check_storage_threshold,
11 | clean_unwanted, get_base_name)
12 | from bot.helper.mirror_utils.status_utils.aria_download_status import AriaDownloadStatus
13 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
14 | from bot.helper.telegram_helper.message_utils import (deleteMessage, delete_links,
15 | sendMessage,
16 | sendStatusMessage,
17 | update_all_messages)
18 |
19 |
20 | @new_thread
21 | def __onDownloadStarted(api, gid):
22 | download = api.get_download(gid)
23 | if download.is_metadata:
24 | LOGGER.info(f'onDownloadStarted: {gid} METADATA')
25 | sleep(1)
26 | if dl:= getDownloadByGid(gid):
27 | listener = dl.listener()
28 | if listener.select:
29 |             metamsg = "Downloading metadata. Please wait, then you can select the files. Use a torrent file to avoid this wait."
30 | meta = sendMessage(metamsg, listener.bot, listener.message)
31 | while True:
32 | if download.is_removed or download.followed_by_ids:
33 | deleteMessage(listener.bot, meta)
34 | break
35 | download = download.live
36 | return
37 | else:
38 | LOGGER.info(f'onDownloadStarted: {download.name} - Gid: {gid}')
39 | try:
40 | if config_dict['STOP_DUPLICATE']:
41 | sleep(1)
42 | if dl:= getDownloadByGid(gid):
43 | listener = dl.listener()
44 | download = api.get_download(gid)
45 | if not listener.isLeech and not listener.select:
46 | if not download.is_torrent:
47 | sleep(3)
48 | download = download.live
49 |                         LOGGER.info('Checking if File/Folder is already in Drive...')
50 | sname = download.name
51 | if listener.isZip:
52 | sname = f"{sname}.zip"
53 | elif listener.extract:
54 | try:
55 | sname = get_base_name(sname)
56 | except:
57 | sname = None
58 | if sname:
59 | smsg, button = GoogleDriveHelper().drive_list(sname, True)
60 | if smsg:
61 | delete_links(listener.bot, listener.message)
62 | listener.onDownloadError('File/Folder already available in Drive.\nHere are the search results:\n', button)
63 | api.remove([download], force=True, files=True, clean=True)
64 | return
65 | if any([(DIRECT_LIMIT:= config_dict['DIRECT_LIMIT']),
66 | (TORRENT_LIMIT:= config_dict['TORRENT_LIMIT']),
67 | (LEECH_LIMIT:= config_dict['LEECH_LIMIT']),
68 | (STORAGE_THRESHOLD:= config_dict['STORAGE_THRESHOLD'])]):
69 | sleep(1)
70 | dl = getDownloadByGid(gid)
71 | if dl and hasattr(dl, 'listener'):
72 | listener = dl.listener()
73 | else:
74 | return
75 | download = api.get_download(gid)
76 | download = download.live
77 | if download.total_length == 0:
78 | start_time = time()
79 | while time() - start_time <= 15:
80 | download = api.get_download(gid)
81 | download = download.live
82 | if download.followed_by_ids:
83 | download = api.get_download(download.followed_by_ids[0])
84 | if download.total_length > 0:
85 | break
86 | size = download.total_length
87 | limit_exceeded = ''
88 | if not limit_exceeded and STORAGE_THRESHOLD:
89 | limit = STORAGE_THRESHOLD * 1024**3
90 | arch = any([listener.isZip, listener.extract])
91 | acpt = check_storage_threshold(size, limit, arch, True)
92 | if not acpt:
93 | limit_exceeded = f'You must leave {get_readable_file_size(limit)} free storage.'
94 | if not limit_exceeded and DIRECT_LIMIT and not download.is_torrent:
95 | limit = DIRECT_LIMIT * 1024**3
96 | if size > limit:
97 | limit_exceeded = f'Direct limit is {get_readable_file_size(limit)}'
98 | if not limit_exceeded and TORRENT_LIMIT and download.is_torrent:
99 | limit = TORRENT_LIMIT * 1024**3
100 | if size > limit:
101 | limit_exceeded = f'Torrent limit is {get_readable_file_size(limit)}'
102 | if not limit_exceeded and LEECH_LIMIT and listener.isLeech:
103 | limit = LEECH_LIMIT * 1024**3
104 | if size > limit:
105 | limit_exceeded = f'Leech limit is {get_readable_file_size(limit)}'
106 | if limit_exceeded:
107 | listener.onDownloadError(f'{limit_exceeded}.\nYour File/Folder size is {get_readable_file_size(size)}')
108 | api.remove([download], force=True, files=True, clean=True)
109 | return
110 | except Exception as e:
111 |         LOGGER.error(f"{e} onDownloadStarted: {gid} - duplicate and limit checks did not complete")
112 |
113 | @new_thread
114 | def __onDownloadComplete(api, gid):
115 | try:
116 | download = api.get_download(gid)
117 | except:
118 | return
119 | if download.followed_by_ids:
120 | new_gid = download.followed_by_ids[0]
121 | LOGGER.info(f'Gid changed from {gid} to {new_gid}')
122 | if dl:= getDownloadByGid(new_gid):
123 | listener = dl.listener()
124 | if config_dict['BASE_URL'] and listener.select:
125 | api.client.force_pause(new_gid)
126 | SBUTTONS = bt_selection_buttons(new_gid)
127 |             msg = f"Name: {dl.name()}\n\nYour download is paused. Choose files, then press the Done Selecting button to start downloading."
128 | sendMessage(msg, listener.bot, listener.message, SBUTTONS)
129 | elif download.is_torrent:
130 | if dl:= getDownloadByGid(gid):
131 | if hasattr(dl, 'listener') and dl.seeding:
132 | LOGGER.info(f"Cancelling Seed: {download.name} onDownloadComplete")
133 | dl.listener().onUploadError(f"Seeding stopped with Ratio: {dl.ratio()} and Time: {dl.seeding_time()}")
134 | api.remove([download], force=True, files=True, clean=True)
135 | else:
136 | LOGGER.info(f"onDownloadComplete: {download.name} - Gid: {gid}")
137 | if dl:= getDownloadByGid(gid):
138 | dl.listener().onDownloadComplete()
139 | api.remove([download], force=True, files=True, clean=True)
140 |
141 | @new_thread
142 | def __onBtDownloadComplete(api, gid):
143 | seed_start_time = time()
144 | sleep(1)
145 | download = api.get_download(gid)
146 | LOGGER.info(f"onBtDownloadComplete: {download.name} - Gid: {gid}")
147 | if dl:= getDownloadByGid(gid):
148 | listener = dl.listener()
149 | if listener.select:
150 | res = download.files
151 | for file_o in res:
152 | f_path = file_o.path
153 | if not file_o.selected and path.exists(f_path):
154 | try:
155 | remove(f_path)
156 | except:
157 | pass
158 | clean_unwanted(download.dir)
159 | if listener.seed:
160 | try:
161 | api.set_options({'max-upload-limit': '0'}, [download])
162 | except Exception as e:
163 |                 LOGGER.error(f'{e} Unable to seed because the global option seed-time=0 is set without a specific seed_time for this torrent. GID: {gid}')
164 | else:
165 | try:
166 | api.client.force_pause(gid)
167 | except Exception as e:
168 |                 LOGGER.error(f"{e} GID: {gid}")
169 | listener.onDownloadComplete()
170 | download = download.live
171 | if listener.seed:
172 | if download.is_complete:
173 | if dl:= getDownloadByGid(gid):
174 | LOGGER.info(f"Cancelling Seed: {download.name}")
175 | listener.onUploadError(f"Seeding stopped with Ratio: {dl.ratio()} and Time: {dl.seeding_time()}")
176 | api.remove([download], force=True, files=True, clean=True)
177 | else:
178 | with download_dict_lock:
179 | if listener.uid not in download_dict:
180 | api.remove([download], force=True, files=True, clean=True)
181 | return
182 | download_dict[listener.uid] = AriaDownloadStatus(gid, listener, True)
183 | download_dict[listener.uid].start_time = seed_start_time
184 | LOGGER.info(f"Seeding started: {download.name} - Gid: {gid}")
185 | update_all_messages()
186 | else:
187 | api.remove([download], force=True, files=True, clean=True)
188 |
189 | @new_thread
190 | def __onDownloadStopped(api, gid):
191 | sleep(6)
192 | if dl:=getDownloadByGid(gid):
193 |         dl.listener().onDownloadError('Dead torrent! Find a torrent with good seeders.\n\nYou can try with the qBittorrent engine.')
194 |
195 | @new_thread
196 | def __onDownloadError(api, gid):
197 | LOGGER.info(f"onDownloadError: {gid}")
198 | error = "None"
199 | try:
200 | download = api.get_download(gid)
201 | error = download.error_message
202 | LOGGER.info(f"Download Error: {error}")
203 | except:
204 | pass
205 | if dl:= getDownloadByGid(gid):
206 | dl.listener().onDownloadError(error)
207 |
208 | def start_listener():
209 | aria2.listen_to_notifications(threaded=True,
210 | on_download_start=__onDownloadStarted,
211 | on_download_error=__onDownloadError,
212 | on_download_stop=__onDownloadStopped,
213 | on_download_complete=__onDownloadComplete,
214 | on_bt_download_complete=__onBtDownloadComplete,
215 | timeout=60)
216 |
217 | def add_aria2c_download(link: str, path, listener, filename, auth, ratio, seed_time):
218 | args = {'dir': path, 'max-upload-limit': '1K', 'netrc-path': '/usr/src/app/.netrc'}
219 | a2c_opt = {**aria2_options}
220 | [a2c_opt.pop(k) for k in aria2c_global if k in aria2_options]
221 | args.update(a2c_opt)
222 | if filename:
223 | args['out'] = filename
224 | if auth:
225 | args['header'] = f"authorization: {auth}"
226 | if ratio:
227 | args['seed-ratio'] = ratio
228 | if seed_time:
229 | args['seed-time'] = seed_time
230 | if TORRENT_TIMEOUT:= config_dict['TORRENT_TIMEOUT']:
231 | args['bt-stop-timeout'] = str(TORRENT_TIMEOUT)
232 | if is_magnet(link):
233 | download = aria2.add_magnet(link, args)
234 | else:
235 | download = aria2.add_uris([link], args)
236 | if download.error_message:
237 | error = str(download.error_message).replace('<', ' ').replace('>', ' ')
238 | LOGGER.info(f"Download Error: {error}")
239 | return sendMessage(error, listener.bot, listener.message)
240 | with download_dict_lock:
241 | download_dict[listener.uid] = AriaDownloadStatus(download.gid, listener)
242 | LOGGER.info(f"Aria2Download started: {download.gid}")
243 | listener.onDownloadStart()
244 | if not listener.select:
245 | sendStatusMessage(listener.message, listener.bot)
246 |
247 | start_listener()
248 |
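
add_aria2c_download() above builds the per-download aria2 options by copying the global aria2_options, dropping the keys aria2 only accepts globally, and layering the request-specific values (out, header, seed-ratio, seed-time, bt-stop-timeout) on top. Below is a small sketch of that merge in isolation; the option values shown are illustrative.

# A sketch of how add_aria2c_download() assembles per-download aria2 options:
# start from the global option set, strip global-only keys, then overlay the
# request-specific values. All values below are illustrative.

def build_args(aria2_options, aria2c_global, path, filename=None, auth=None,
               ratio=None, seed_time=None, torrent_timeout=None):
    args = {'dir': path, 'max-upload-limit': '1K'}
    per_dl = {k: v for k, v in aria2_options.items() if k not in aria2c_global}
    args.update(per_dl)
    if filename:
        args['out'] = filename
    if auth:
        args['header'] = f"authorization: {auth}"
    if ratio:
        args['seed-ratio'] = ratio
    if seed_time:
        args['seed-time'] = seed_time
    if torrent_timeout:
        args['bt-stop-timeout'] = str(torrent_timeout)
    return args

if __name__ == '__main__':
    opts = {'min-split-size': '10M', 'max-connection-per-server': '10',
            'max-overall-download-limit': '0'}      # example global option set
    global_only = ['max-overall-download-limit']    # keys aria2 rejects per-download
    print(build_args(opts, global_only, '/usr/src/app/downloads/',
                     filename='file.iso', torrent_timeout=600))
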
--------------------------------------------------------------------------------