├── .netrc ├── bot ├── modules │ ├── __init__.py │ ├── deleteme.txt │ ├── delete.py │ ├── mirror_status.py │ ├── count.py │ ├── shell.py │ ├── speedtest.py │ ├── list.py │ ├── watch.py │ ├── updates.py │ ├── cancel_mirror.py │ ├── clone.py │ ├── eval.py │ ├── authorize.py │ ├── config.py │ └── torrent_search.py ├── helper │ ├── ext_utils │ │ ├── __init__.py │ │ ├── delete-me.txt │ │ ├── exceptions.py │ │ ├── db_handler.py │ │ ├── fs_utils.py │ │ └── bot_utils.py │ ├── mirror_utils │ │ ├── __init__.py │ │ ├── download_utils │ │ │ ├── deleteme.txt │ │ │ ├── __init__.py │ │ │ ├── download_helper.py │ │ │ ├── telegram_downloader.py │ │ │ ├── aria2_download.py │ │ │ ├── youtube_dl_download_helper.py │ │ │ ├── mega_downloader.py │ │ │ └── qbit_downloader.py │ │ ├── status_utils │ │ │ ├── __init__.py │ │ │ ├── deleteme.txt │ │ │ ├── listeners.py │ │ │ ├── tar_status.py │ │ │ ├── extract_status.py │ │ │ ├── status.py │ │ │ ├── telegram_download_status.py │ │ │ ├── clone_status.py │ │ │ ├── upload_status.py │ │ │ ├── youtube_dl_download_status.py │ │ │ ├── gdownload_status.py │ │ │ ├── mega_download_status.py │ │ │ ├── qbit_download_status.py │ │ │ └── aria_download_status.py │ │ └── upload_utils │ │ │ ├── __init__.py │ │ │ ├── deleteme.txt │ │ │ └── gdtot_helper.py │ ├── telegram_helper │ │ ├── __init__.py │ │ ├── deleteme.txt │ │ ├── button_build.py │ │ ├── bot_commands.py │ │ ├── filters.py │ │ └── message_utils.py │ ├── custom_filters.py │ └── __init__.py ├── __main__.py └── __init__.py ├── _config.yml ├── heroku.yml ├── captain-definition ├── requirements-cli.txt ├── aria.bat ├── vps.md ├── début.sh ├── Dockerfile ├── .github └── workflows │ └── manual.yml ├── alive.py ├── generate_drive_token.py ├── qBittorrent.conf ├── aria.sh ├── config.env ├── heroku-guide.md ├── add_to_team_drive.py ├── extract ├── pextract └── nodes.py /.netrc: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/modules/__init__.py: -------------------------------------------------------------------------------- 1 | #TEKEAYUSH -------------------------------------------------------------------------------- /bot/modules/deleteme.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-cayman -------------------------------------------------------------------------------- /bot/helper/ext_utils/__init__.py: -------------------------------------------------------------------------------- 1 | #TEKEAYUSH -------------------------------------------------------------------------------- /bot/helper/ext_utils/delete-me.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/__init__.py: -------------------------------------------------------------------------------- 1 | #TEKEAYUSH -------------------------------------------------------------------------------- /bot/helper/telegram_helper/__init__.py: -------------------------------------------------------------------------------- 1 | #TEKEAYUSH -------------------------------------------------------------------------------- /bot/helper/telegram_helper/deleteme.txt: 
-------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/download_utils/deleteme.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/__init__.py: -------------------------------------------------------------------------------- 1 | #TEKEAYUSH -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/deleteme.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/upload_utils/__init__.py: -------------------------------------------------------------------------------- 1 | #TEKEAYUSH -------------------------------------------------------------------------------- /bot/helper/mirror_utils/upload_utils/deleteme.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/download_utils/__init__.py: -------------------------------------------------------------------------------- 1 | #TEKEAYUSH -------------------------------------------------------------------------------- /heroku.yml: -------------------------------------------------------------------------------- 1 | build: 2 | docker: 3 | web: Dockerfile 4 | run: 5 | web: bash début.sh 6 | -------------------------------------------------------------------------------- /captain-definition: -------------------------------------------------------------------------------- 1 | { 2 | "schemaVersion": 2, 3 | "dockerfilePath": "./Dockerfile" 4 | } 5 | -------------------------------------------------------------------------------- /requirements-cli.txt: -------------------------------------------------------------------------------- 1 | oauth2client 2 | google-api-python-client 3 | progress 4 | progressbar2 5 | httplib2shim 6 | google_auth_oauthlib 7 | pyrogram 8 | -------------------------------------------------------------------------------- /aria.bat: -------------------------------------------------------------------------------- 1 | aria2c --enable-rpc --rpc-listen-all=false --rpc-listen-port 6800 --max-connection-per-server=10 --rpc-max-request-size=1024M --seed-time=0.01 --min-split-size=10M --follow-torrent=mem --split=10 --daemon=true --allow-overwrite=true 2 | -------------------------------------------------------------------------------- /bot/helper/ext_utils/exceptions.py: -------------------------------------------------------------------------------- 1 | class DirectDownloadLinkException(Exception): 2 | """Not method found for extracting direct download link from the http link""" 3 | pass 4 | 5 | 6 | class NotSupportedExtractionArchive(Exception): 7 | """The archive format use is trying to extract is not supported""" 8 | pass 9 | -------------------------------------------------------------------------------- /vps.md: -------------------------------------------------------------------------------- 1 | ## Run the Following: 2 | 1.To clone the Repository: 3 | ``` 4 | git clone https://github.com/TheCaduceus/Dr.Torrent/ 5 | cd Dr.Torrent 6 | ``` 7 | 2.Install Requirements: 8 | ``` 9 | sudo apt install 
python3 10 | sudo snap install docker 11 | ``` 12 | 3.Configuration: 13 | ``` 14 | sudo pacman -S docker python 15 | ``` 16 | -------------------------------------------------------------------------------- /début.sh: -------------------------------------------------------------------------------- 1 | if [[ -n $TOKEN_PICKLE_URL ]]; then 2 | wget -q $TOKEN_PICKLE_URL -O /usr/src/app/token.pickle 3 | fi 4 | 5 | if [[ -n $ACCOUNTS_ZIP_URL ]]; then 6 | wget -q $ACCOUNTS_ZIP_URL -O /usr/src/app/accounts.zip 7 | unzip accounts.zip -d /usr/src/app/accounts 8 | rm accounts.zip 9 | fi 10 | 11 | gunicorn wserver:start_server --bind 0.0.0.0:$PORT --worker-class aiohttp.GunicornWebWorker & qbittorrent-nox -d --webui-port=8090 & python3 alive.py & ./aria.sh; python3 -m bot 12 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM breakdowns/mega-sdk-python:latest 2 | 3 | WORKDIR /usr/src/app 4 | RUN chmod 777 /usr/src/app 5 | 6 | COPY extract /usr/local/bin 7 | COPY pextract /usr/local/bin 8 | RUN chmod +x /usr/local/bin/extract && chmod +x /usr/local/bin/pextract 9 | 10 | COPY requirements.txt . 11 | RUN pip3 install --no-cache-dir -r requirements.txt 12 | 13 | COPY . . 14 | COPY .netrc /root/.netrc 15 | RUN chmod 600 /usr/src/app/.netrc 16 | RUN chmod +x aria.sh 17 | 18 | CMD ["bash","début.sh"] 19 | -------------------------------------------------------------------------------- /.github/workflows/manual.yml: -------------------------------------------------------------------------------- 1 | name: Deploy on Heroku 2 | on: workflow_dispatch 3 | 4 | jobs: 5 | deploy: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/checkout@v2 9 | - uses: akhileshns/heroku-deploy@v3.12.12 10 | with: 11 | heroku_api_key: ${{secrets.HEROKU_API_KEY}} 12 | heroku_app_name: ${{secrets.HEROKU_APP_NAME}} 13 | heroku_email: ${{secrets.HEROKU_EMAIL}} 14 | usedocker: true 15 | docker_heroku_process_type: web 16 | stack: "container" 17 | region: "us" 18 | env: 19 | HD_CONFIG_FILE_URL: ${{secrets.CONFIG_FILE_URL}} 20 | -------------------------------------------------------------------------------- /alive.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | import time 4 | import requests 5 | import os 6 | from dotenv import load_dotenv 7 | 8 | load_dotenv('config.env') 9 | 10 | try: 11 | BASE_URL = os.environ.get('BASE_URL_OF_BOT', None) 12 | if len(BASE_URL) == 0: 13 | BASE_URL = None 14 | except KeyError: 15 | BASE_URL = None 16 | 17 | try: 18 | IS_VPS = os.environ.get('IS_VPS', 'False') 19 | if IS_VPS.lower() == 'true': 20 | IS_VPS = True 21 | else: 22 | IS_VPS = False 23 | except KeyError: 24 | IS_VPS = False 25 | 26 | if not IS_VPS and BASE_URL is not None: 27 | while True: 28 | time.sleep(1000) 29 | status = requests.get(BASE_URL).status_code 30 | -------------------------------------------------------------------------------- /bot/helper/custom_filters.py: -------------------------------------------------------------------------------- 1 | from pyrogram import filters 2 | 3 | def callback_data(data): 4 | def func(flt, client, callback_query): 5 | return callback_query.data in flt.data 6 | 7 | data = data if isinstance(data, list) else [data] 8 | return filters.create( 9 | func, 10 | 'CustomCallbackDataFilter', 11 | data=data 12 | ) 13 | 14 | def callback_chat(chats): 15 | def func(flt, client, callback_query): 16 | return 
callback_query.message.chat.id in flt.chats 17 | 18 | chats = chats if isinstance(chats, list) else [chats] 19 | return filters.create( 20 | func, 21 | 'CustomCallbackChatsFilter', 22 | chats=chats 23 | ) 24 | -------------------------------------------------------------------------------- /bot/helper/telegram_helper/button_build.py: -------------------------------------------------------------------------------- 1 | from telegram import InlineKeyboardButton 2 | 3 | 4 | class ButtonMaker: 5 | def __init__(self): 6 | self.button = [] 7 | 8 | def buildbutton(self, key, link): 9 | self.button.append(InlineKeyboardButton(text = key, url = link)) 10 | 11 | def sbutton(self, key, data): 12 | self.button.append(InlineKeyboardButton(text = key, callback_data = data)) 13 | 14 | def build_menu(self, n_cols, footer_buttons=None, header_buttons=None): 15 | menu = [self.button[i:i + n_cols] for i in range(0, len(self.button), n_cols)] 16 | if header_buttons: 17 | menu.insert(0, header_buttons) 18 | if footer_buttons: 19 | menu.append(footer_buttons) 20 | return menu 21 | -------------------------------------------------------------------------------- /generate_drive_token.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | import os 3 | from google_auth_oauthlib.flow import InstalledAppFlow 4 | from google.auth.transport.requests import Request 5 | 6 | credentials = None 7 | __G_DRIVE_TOKEN_FILE = "token.pickle" 8 | __OAUTH_SCOPE = ["https://www.googleapis.com/auth/drive"] 9 | if os.path.exists(__G_DRIVE_TOKEN_FILE): 10 | with open(__G_DRIVE_TOKEN_FILE, 'rb') as f: 11 | credentials = pickle.load(f) 12 | if credentials is None or not credentials.valid: 13 | if credentials and credentials.expired and credentials.refresh_token: 14 | credentials.refresh(Request()) 15 | else: 16 | flow = InstalledAppFlow.from_client_secrets_file( 17 | 'credentials.json', __OAUTH_SCOPE) 18 | credentials = flow.run_console(port=0) 19 | 20 | # Save the credentials for the next run 21 | with open(__G_DRIVE_TOKEN_FILE, 'wb') as token: 22 | pickle.dump(credentials, token) -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/listeners.py: -------------------------------------------------------------------------------- 1 | class MirrorListeners: 2 | def __init__(self, context, update): 3 | self.bot = context 4 | self.update = update 5 | self.message = update.message 6 | self.uid = self.message.message_id 7 | 8 | def onDownloadStarted(self): 9 | raise NotImplementedError 10 | 11 | def onDownloadProgress(self): 12 | raise NotImplementedError 13 | 14 | def onDownloadComplete(self): 15 | raise NotImplementedError 16 | 17 | def onDownloadError(self, error: str): 18 | raise NotImplementedError 19 | 20 | def onUploadStarted(self): 21 | raise NotImplementedError 22 | 23 | def onUploadProgress(self): 24 | raise NotImplementedError 25 | 26 | def onUploadComplete(self, link: str): 27 | raise NotImplementedError 28 | 29 | def onUploadError(self, error: str): 30 | raise NotImplementedError 31 | -------------------------------------------------------------------------------- /qBittorrent.conf: -------------------------------------------------------------------------------- 1 | [AutoRun] 2 | enabled=true 3 | program= 4 | 5 | [LegalNotice] 6 | Accepted=true 7 | 8 | [BitTorrent] 9 | Session\AsyncIOThreadsCount=8 10 | Session\SlowTorrentsDownloadRate=100 11 | Session\SlowTorrentsInactivityTimer=600 12 | 13 | [Preferences] 14 | 
Advanced\AnnounceToAllTrackers=true 15 | Advanced\AnonymousMode=false 16 | Advanced\IgnoreLimitsLAN=true 17 | Advanced\RecheckOnCompletion=true 18 | Advanced\LtTrackerExchange=true 19 | Bittorrent\MaxConnecs=3000 20 | Bittorrent\MaxConnecsPerTorrent=500 21 | Bittorrent\DHT=true 22 | Bittorrent\DHTPort=6881 23 | Bittorrent\PeX=true 24 | Bittorrent\LSD=true 25 | Bittorrent\sameDHTPortAsBT=true 26 | Downloads\DiskWriteCacheSize=32 27 | Downloads\PreAllocation=true 28 | Downloads\UseIncompleteExtension=true 29 | General\PreventFromSuspendWhenDownloading=true 30 | Queueing\IgnoreSlowTorrents=true 31 | Queueing\MaxActiveDownloads=15 32 | Queueing\MaxActiveTorrents=50 33 | Queueing\QueueingEnabled=false 34 | WebUI\Enabled=true 35 | WebUI\Port=8090 36 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/tar_status.py: -------------------------------------------------------------------------------- 1 | from .status import Status 2 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus 3 | 4 | 5 | class TarStatus(Status): 6 | def __init__(self, name, path, size): 7 | self.__name = name 8 | self.__path = path 9 | self.__size = size 10 | 11 | # The progress of Tar function cannot be tracked. So we just return dummy values. 12 | # If this is possible in future,we should implement it 13 | 14 | def progress(self): 15 | return '0' 16 | 17 | def speed(self): 18 | return '0' 19 | 20 | def name(self): 21 | return self.__name 22 | 23 | def path(self): 24 | return self.__path 25 | 26 | def size(self): 27 | return get_readable_file_size(self.__size) 28 | 29 | def eta(self): 30 | return '0s' 31 | 32 | def status(self): 33 | return MirrorStatus.STATUS_ARCHIVING 34 | 35 | def processed_bytes(self): 36 | return 0 37 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/extract_status.py: -------------------------------------------------------------------------------- 1 | from .status import Status 2 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus 3 | 4 | 5 | class ExtractStatus(Status): 6 | def __init__(self, name, path, size): 7 | self.__name = name 8 | self.__path = path 9 | self.__size = size 10 | 11 | # The progress of extract function cannot be tracked. So we just return dummy values. 
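# (One possible approach, not implemented here: periodically compare get_path_size() from
# bot.helper.ext_utils.fs_utils on the extraction directory against the known archive size,
# the same fallback youtube_dl_download_status.py uses when no downloaded bytes are reported.)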
12 | # If this becomes possible in the future, we should implement it 13 | 14 | def progress(self): 15 | return '0' 16 | 17 | def speed(self): 18 | return '0' 19 | 20 | def name(self): 21 | return self.__name 22 | 23 | def path(self): 24 | return self.__path 25 | 26 | def size(self): 27 | return get_readable_file_size(self.__size) 28 | 29 | def eta(self): 30 | return '0s' 31 | 32 | def status(self): 33 | return MirrorStatus.STATUS_EXTRACTING 34 | 35 | def processed_bytes(self): 36 | return 0 37 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/download_utils/download_helper.py: -------------------------------------------------------------------------------- 1 | import threading 2 | 3 | 4 | class MethodNotImplementedError(NotImplementedError): 5 | def __init__(self): 6 | super().__init__('Not implemented method') 7 | 8 | 9 | class DownloadHelper: 10 | def __init__(self): 11 | self.name = '' # Name of the download; empty string if no download has been started 12 | self.size = 0.0 # Size of the download 13 | self.downloaded_bytes = 0.0 # Bytes downloaded 14 | self.speed = 0.0 # Download speed in bytes per second 15 | self.progress = 0.0 16 | self.progress_string = '0.00%' 17 | self.eta = 0 # Estimated time for the download to complete 18 | self.eta_string = '0s' # Human-readable ETA string 19 | self._resource_lock = threading.Lock() 20 | 21 | def add_download(self, link: str, path): 22 | raise MethodNotImplementedError 23 | 24 | def cancel_download(self): 25 | # Returns None if successfully cancelled, else an error string 26 | raise MethodNotImplementedError 27 | -------------------------------------------------------------------------------- /bot/modules/delete.py: -------------------------------------------------------------------------------- 1 | from telegram.ext import CommandHandler 2 | import threading 3 | from telegram import Update 4 | from bot import dispatcher, LOGGER 5 | from bot.helper.telegram_helper.message_utils import auto_delete_message, sendMessage 6 | from bot.helper.telegram_helper.filters import CustomFilters 7 | from bot.helper.telegram_helper.bot_commands import BotCommands 8 | from bot.helper.mirror_utils.upload_utils import gdriveTools 9 | 10 | 11 | def deletefile(update, context): 12 | msg_args = update.message.text.split(None, 1) 13 | msg = '' 14 | try: 15 | link = msg_args[1] 16 | LOGGER.info(msg_args[1]) 17 | except IndexError: 18 | msg = 'Send a link along with the command' 19 | 20 | if msg == '': 21 | drive = gdriveTools.GoogleDriveHelper() 22 | msg = drive.deletefile(link) 23 | LOGGER.info(f"DeleteFileCmd : {msg}") 24 | reply_message = sendMessage(msg, context.bot, update) 25 | 26 | threading.Thread(target=auto_delete_message, args=(context.bot, update.message, reply_message)).start() 27 | 28 | delete_handler = CommandHandler(command=BotCommands.DeleteCommand, callback=deletefile, filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True) 29 | dispatcher.add_handler(delete_handler) 30 | -------------------------------------------------------------------------------- /aria.sh: -------------------------------------------------------------------------------- 1 | export MAX_DOWNLOAD_SPEED=0 2 | tracker_list=$(curl -Ns https://raw.githubusercontent.com/XIU2/TrackersListCollection/master/all.txt https://ngosang.github.io/trackerslist/trackers_all_http.txt https://raw.githubusercontent.com/DeSireFire/animeTrackerList/master/AT_all.txt 
https://raw.githubusercontent.com/hezhijie0327/Trackerslist/main/trackerslist_combine.txt | awk '$0' | tr '\n' ',') 3 | export MAX_CONCURRENT_DOWNLOADS=4 4 | 5 | aria2c --enable-rpc --rpc-listen-all=false --check-certificate=false \ 6 | --max-connection-per-server=10 --rpc-max-request-size=1024M \ 7 | --bt-tracker="[$tracker_list]" --bt-max-peers=0 --bt-tracker-connect-timeout=300 --bt-stop-timeout=1200 --min-split-size=10M \ 8 | --follow-torrent=mem --split=10 \ 9 | --daemon=true --allow-overwrite=true --max-overall-download-limit=$MAX_DOWNLOAD_SPEED \ 10 | --max-overall-upload-limit=1K --max-concurrent-downloads=$MAX_CONCURRENT_DOWNLOADS \ 11 | --peer-id-prefix=-qB4360- --user-agent=qBittorrent/4.3.6 --peer-agent=qBittorrent/4.3.6 \ 12 | --disk-cache=64M --file-allocation=prealloc --continue=true \ 13 | --max-file-not-found=0 --max-tries=20 --auto-file-renaming=true \ 14 | --bt-enable-lpd=true --seed-time=0.01 --seed-ratio=1.0 \ 15 | --content-disposition-default-utf8=true --http-accept-gzip=true --reuse-uri=true --netrc-path=/usr/src/app/.netrc 16 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/status.py: -------------------------------------------------------------------------------- 1 | # Generic status class. All other status classes must inherit this class 2 | 3 | 4 | class Status: 5 | 6 | def progress(self): 7 | """ 8 | Calculates the progress of the mirror (upload or download) 9 | :return: progress in percentage 10 | """ 11 | raise NotImplementedError 12 | 13 | def speed(self): 14 | """:return: speed in bytes per second""" 15 | raise NotImplementedError 16 | 17 | def name(self): 18 | """:return name of file/directory being processed""" 19 | raise NotImplementedError 20 | 21 | def path(self): 22 | """:return path of the file/directory""" 23 | raise NotImplementedError 24 | 25 | def size(self): 26 | """:return Size of file folder""" 27 | raise NotImplementedError 28 | 29 | def eta(self): 30 | """:return ETA of the process to complete""" 31 | raise NotImplementedError 32 | 33 | def status(self): 34 | """:return String describing what is the object of this class will be tracking (upload/download/something 35 | else) """ 36 | raise NotImplementedError 37 | 38 | def processed_bytes(self): 39 | """:return The size of file that has been processed (downloaded/uploaded/archived)""" 40 | raise NotImplementedError 41 | -------------------------------------------------------------------------------- /bot/modules/mirror_status.py: -------------------------------------------------------------------------------- 1 | from telegram.ext import CommandHandler 2 | from bot import dispatcher, status_reply_dict, status_reply_dict_lock, download_dict, download_dict_lock 3 | from bot.helper.telegram_helper.message_utils import * 4 | from telegram.error import BadRequest 5 | from bot.helper.telegram_helper.filters import CustomFilters 6 | from bot.helper.telegram_helper.bot_commands import BotCommands 7 | import threading 8 | 9 | 10 | def mirror_status(update, context): 11 | with download_dict_lock: 12 | if len(download_dict) == 0: 13 | message = "No active downloads" 14 | reply_message = sendMessage(message, context.bot, update) 15 | threading.Thread(target=auto_delete_message, args=(bot, update.message, reply_message)).start() 16 | return 17 | index = update.effective_chat.id 18 | with status_reply_dict_lock: 19 | if index in status_reply_dict.keys(): 20 | deleteMessage(bot, status_reply_dict[index]) 21 | del 
status_reply_dict[index] 22 | sendStatusMessage(update, context.bot) 23 | deleteMessage(context.bot, update.message) 24 | 25 | 26 | mirror_status_handler = CommandHandler(BotCommands.StatusCommand, mirror_status, 27 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True) 28 | dispatcher.add_handler(mirror_status_handler) 29 | -------------------------------------------------------------------------------- /bot/helper/telegram_helper/bot_commands.py: -------------------------------------------------------------------------------- 1 | class _BotCommands: 2 | def __init__(self): 3 | self.StartCommand = 'start' 4 | self.MirrorCommand = 'mirror' 5 | self.UnzipMirrorCommand = 'unzip' 6 | self.TarMirrorCommand = 'tar' 7 | self.ZipMirrorCommand = 'zip' 8 | self.CancelMirror = 'cancel' 9 | self.CancelAllCommand = 'cnlall' 10 | self.ListCommand = 'list' 11 | self.StatusCommand = 'status' 12 | self.AuthorizedUsersCommand = 'users' 13 | self.AuthorizeCommand = 'auth' 14 | self.UnAuthorizeCommand = 'unauth' 15 | self.AddSudoCommand = 'addsudo' 16 | self.RmSudoCommand = 'rmsudo' 17 | self.PingCommand = 'ping' 18 | self.RestartCommand = 'restart' 19 | self.StatsCommand = 'stat' 20 | self.HelpCommand = 'hlp' 21 | self.LogCommand = 'log' 22 | self.SpeedCommand = 'speedtest' 23 | self.CloneCommand = 'clone' 24 | self.CountCommand = 'count' 25 | self.WatchCommand = 'watch' 26 | self.TarWatchCommand = 'tarwatch' 27 | self.DeleteCommand = 'del' 28 | self.ConfigMenuCommand = 'config' 29 | self.ShellCommand = 'shell' 30 | self.UpdateCommand = 'update' 31 | self.ExecHelpCommand = 'exechelp' 32 | self.TsHelpCommand = 'tshelp' 33 | self.GDTOTCommand = 'gdtot' 34 | 35 | BotCommands = _BotCommands() 36 | -------------------------------------------------------------------------------- /bot/modules/count.py: -------------------------------------------------------------------------------- 1 | from telegram.ext import CommandHandler 2 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper 3 | from bot.helper.telegram_helper.message_utils import deleteMessage, sendMessage 4 | from bot.helper.telegram_helper.filters import CustomFilters 5 | from bot.helper.telegram_helper.bot_commands import BotCommands 6 | from bot import dispatcher 7 | 8 | 9 | def countNode(update, context): 10 | args = update.message.text.split(" ", maxsplit=1) 11 | if len(args) > 1: 12 | link = args[1] 13 | msg = sendMessage(f"Counting: {link}", context.bot, update) 14 | gd = GoogleDriveHelper() 15 | result = gd.count(link) 16 | deleteMessage(context.bot, msg) 17 | if update.message.from_user.username: 18 | uname = f'@{update.message.from_user.username}' 19 | else: 20 | uname = f'{update.message.from_user.first_name}' 21 | if uname is not None: 22 | cc = f'\n\ncc: {uname}' 23 | sendMessage(result + cc, context.bot, update) 24 | else: 25 | sendMessage("Provide G-Drive Shareable Link to Count.", context.bot, update) 26 | 27 | count_handler = CommandHandler(BotCommands.CountCommand, countNode, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True) 28 | dispatcher.add_handler(count_handler) 29 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/telegram_download_status.py: -------------------------------------------------------------------------------- 1 | from bot import DOWNLOAD_DIR 2 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time 3 | from 
.status import Status 4 | 5 | 6 | class TelegramDownloadStatus(Status): 7 | def __init__(self, obj, listener): 8 | self.obj = obj 9 | self.uid = listener.uid 10 | self.message = listener.message 11 | 12 | def gid(self): 13 | return self.obj.gid 14 | 15 | def path(self): 16 | return f"{DOWNLOAD_DIR}{self.uid}" 17 | 18 | def processed_bytes(self): 19 | return self.obj.downloaded_bytes 20 | 21 | def size_raw(self): 22 | return self.obj.size 23 | 24 | def size(self): 25 | return get_readable_file_size(self.size_raw()) 26 | 27 | def status(self): 28 | return MirrorStatus.STATUS_DOWNLOADING 29 | 30 | def name(self): 31 | return self.obj.name 32 | 33 | def progress_raw(self): 34 | return self.obj.progress 35 | 36 | def progress(self): 37 | return f'{round(self.progress_raw(), 2)}%' 38 | 39 | def speed_raw(self): 40 | """ 41 | :return: Download speed in Bytes/Seconds 42 | """ 43 | return self.obj.download_speed 44 | 45 | def speed(self): 46 | return f'{get_readable_file_size(self.speed_raw())}/s' 47 | 48 | def eta(self): 49 | try: 50 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw() 51 | return f'{get_readable_time(seconds)}' 52 | except ZeroDivisionError: 53 | return '-' 54 | 55 | def download(self): 56 | return self.obj 57 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/clone_status.py: -------------------------------------------------------------------------------- 1 | from .status import Status 2 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time 3 | 4 | 5 | class CloneStatus(Status): 6 | def __init__(self, obj, size, update, gid): 7 | self.cobj = obj 8 | self.__csize = size 9 | self.message = update.message 10 | self.__cgid = gid 11 | 12 | def processed_bytes(self): 13 | return self.cobj.transferred_size 14 | 15 | def size_raw(self): 16 | return self.__csize 17 | 18 | def size(self): 19 | return get_readable_file_size(self.__csize) 20 | 21 | def status(self): 22 | return MirrorStatus.STATUS_CLONING 23 | 24 | def name(self): 25 | return self.cobj.name 26 | 27 | def gid(self) -> str: 28 | return self.__cgid 29 | 30 | def progress_raw(self): 31 | try: 32 | return self.cobj.transferred_size / self.__csize * 100 33 | except ZeroDivisionError: 34 | return 0 35 | 36 | def progress(self): 37 | return f'{round(self.progress_raw(), 2)}%' 38 | 39 | def speed_raw(self): 40 | """ 41 | :return: Download speed in Bytes/Seconds 42 | """ 43 | return self.cobj.cspeed() 44 | 45 | def speed(self): 46 | return f'{get_readable_file_size(self.speed_raw())}/s' 47 | 48 | def eta(self): 49 | try: 50 | seconds = (self.__csize - self.cobj.transferred_size) / self.speed_raw() 51 | return f'{get_readable_time(seconds)}' 52 | except ZeroDivisionError: 53 | return '-' 54 | 55 | def download(self): 56 | return self.cobj 57 | -------------------------------------------------------------------------------- /bot/modules/shell.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | from bot import LOGGER, dispatcher 3 | from telegram import ParseMode 4 | from telegram.ext import CommandHandler 5 | from bot.helper.telegram_helper.filters import CustomFilters 6 | from bot.helper.telegram_helper.bot_commands import BotCommands 7 | 8 | 9 | def shell(update, context): 10 | message = update.effective_message 11 | cmd = message.text.split(' ', 1) 12 | if len(cmd) == 1: 13 | message.reply_text('No command to execute was given.') 14 | 
return 15 | cmd = cmd[1] 16 | process = subprocess.Popen( 17 | cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) 18 | stdout, stderr = process.communicate() 19 | reply = '' 20 | stderr = stderr.decode() 21 | stdout = stdout.decode() 22 | if stdout: 23 | reply += f"*Stdout*\n`{stdout}`\n" 24 | LOGGER.info(f"Shell - {cmd} - {stdout}") 25 | if stderr: 26 | reply += f"*Stderr*\n`{stderr}`\n" 27 | LOGGER.error(f"Shell - {cmd} - {stderr}") 28 | if len(reply) > 3000: 29 | with open('shell_output.txt', 'w') as file: 30 | file.write(reply) 31 | with open('shell_output.txt', 'rb') as doc: 32 | context.bot.send_document( 33 | document=doc, 34 | filename=doc.name, 35 | reply_to_message_id=message.message_id, 36 | chat_id=message.chat_id) 37 | else: 38 | message.reply_text(reply, parse_mode=ParseMode.MARKDOWN) 39 | 40 | 41 | SHELL_HANDLER = CommandHandler(BotCommands.ShellCommand, shell, 42 | filters=CustomFilters.owner_filter, run_async=True) 43 | dispatcher.add_handler(SHELL_HANDLER) 44 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/upload_status.py: -------------------------------------------------------------------------------- 1 | from .status import Status 2 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time 3 | from bot import DOWNLOAD_DIR 4 | 5 | 6 | class UploadStatus(Status): 7 | def __init__(self, obj, size, gid, listener): 8 | self.obj = obj 9 | self.__size = size 10 | self.uid = listener.uid 11 | self.message = listener.message 12 | self.__gid = gid 13 | 14 | def path(self): 15 | return f"{DOWNLOAD_DIR}{self.uid}" 16 | 17 | def processed_bytes(self): 18 | return self.obj.uploaded_bytes 19 | 20 | def size_raw(self): 21 | return self.__size 22 | 23 | def size(self): 24 | return get_readable_file_size(self.__size) 25 | 26 | def status(self): 27 | return MirrorStatus.STATUS_UPLOADING 28 | 29 | def name(self): 30 | return self.obj.name 31 | 32 | def progress_raw(self): 33 | try: 34 | return self.obj.uploaded_bytes / self.__size * 100 35 | except ZeroDivisionError: 36 | return 0 37 | 38 | def progress(self): 39 | return f'{round(self.progress_raw(), 2)}%' 40 | 41 | def speed_raw(self): 42 | """ 43 | :return: Upload speed in Bytes/Seconds 44 | """ 45 | return self.obj.speed() 46 | 47 | def speed(self): 48 | return f'{get_readable_file_size(self.speed_raw())}/s' 49 | 50 | def eta(self): 51 | try: 52 | seconds = (self.__size - self.obj.uploaded_bytes) / self.speed_raw() 53 | return f'{get_readable_time(seconds)}' 54 | except ZeroDivisionError: 55 | return '-' 56 | 57 | def gid(self) -> str: 58 | return self.__gid 59 | 60 | def download(self): 61 | return self.obj 62 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/youtube_dl_download_status.py: -------------------------------------------------------------------------------- 1 | from bot import DOWNLOAD_DIR 2 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time 3 | from .status import Status 4 | from bot.helper.ext_utils.fs_utils import get_path_size 5 | 6 | class YoutubeDLDownloadStatus(Status): 7 | def __init__(self, obj, listener): 8 | self.obj = obj 9 | self.uid = listener.uid 10 | self.message = listener.message 11 | 12 | def gid(self): 13 | return self.obj.gid 14 | 15 | def path(self): 16 | return f"{DOWNLOAD_DIR}{self.uid}" 17 | 18 | def processed_bytes(self): 19 | if 
self.obj.downloaded_bytes != 0: 20 | return self.obj.downloaded_bytes 21 | else: 22 | return get_path_size(f"{DOWNLOAD_DIR}{self.uid}") 23 | 24 | def size_raw(self): 25 | return self.obj.size 26 | 27 | def size(self): 28 | return get_readable_file_size(self.size_raw()) 29 | 30 | def status(self): 31 | return MirrorStatus.STATUS_DOWNLOADING 32 | 33 | def name(self): 34 | return self.obj.name 35 | 36 | def progress_raw(self): 37 | return self.obj.progress 38 | 39 | def progress(self): 40 | return f'{round(self.progress_raw(), 2)}%' 41 | 42 | def speed_raw(self): 43 | """ 44 | :return: Download speed in Bytes/Seconds 45 | """ 46 | return self.obj.download_speed 47 | 48 | def speed(self): 49 | return f'{get_readable_file_size(self.speed_raw())}/s' 50 | 51 | def eta(self): 52 | try: 53 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw() 54 | return f'{get_readable_time(seconds)}' 55 | except: 56 | return '-' 57 | 58 | def download(self): 59 | return self.obj 60 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/gdownload_status.py: -------------------------------------------------------------------------------- 1 | from .status import Status 2 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time 3 | from bot import DOWNLOAD_DIR 4 | 5 | 6 | class DownloadStatus(Status): 7 | def __init__(self, obj, size, listener, gid): 8 | self.dobj = obj 9 | self.__dsize = size 10 | self.uid = listener.uid 11 | self.message = listener.message 12 | self.__dgid = gid 13 | 14 | def path(self): 15 | return f"{DOWNLOAD_DIR}{self.uid}" 16 | 17 | def processed_bytes(self): 18 | return self.dobj.downloaded_bytes 19 | 20 | def size_raw(self): 21 | return self.__dsize 22 | 23 | def size(self): 24 | return get_readable_file_size(self.__dsize) 25 | 26 | def status(self): 27 | return MirrorStatus.STATUS_DOWNLOADING 28 | 29 | def name(self): 30 | return self.dobj.name 31 | 32 | def gid(self) -> str: 33 | return self.__dgid 34 | 35 | def progress_raw(self): 36 | try: 37 | return self.dobj.downloaded_bytes / self.__dsize * 100 38 | except ZeroDivisionError: 39 | return 0 40 | 41 | def progress(self): 42 | return f'{round(self.progress_raw(), 2)}%' 43 | 44 | def speed_raw(self): 45 | """ 46 | :return: Download speed in Bytes/Seconds 47 | """ 48 | return self.dobj.dspeed() 49 | 50 | def speed(self): 51 | return f'{get_readable_file_size(self.speed_raw())}/s' 52 | 53 | def eta(self): 54 | try: 55 | seconds = (self.__dsize - self.dobj.downloaded_bytes) / self.speed_raw() 56 | return f'{get_readable_time(seconds)}' 57 | except ZeroDivisionError: 58 | return '-' 59 | 60 | def download(self): 61 | return self.dobj 62 | -------------------------------------------------------------------------------- /bot/modules/speedtest.py: -------------------------------------------------------------------------------- 1 | from speedtest import Speedtest 2 | from bot.helper.telegram_helper.filters import CustomFilters 3 | from bot import dispatcher 4 | from bot.helper.telegram_helper.bot_commands import BotCommands 5 | from bot.helper.telegram_helper.message_utils import sendMessage, editMessage 6 | from telegram.ext import CommandHandler 7 | 8 | 9 | def speedtest(update, context): 10 | speed = sendMessage("Running Speed Test . . . 
", context.bot, update) 11 | test = Speedtest() 12 | test.get_best_server() 13 | test.download() 14 | test.upload() 15 | test.results.share() 16 | result = test.results.dict() 17 | string_speed = f''' 18 | Server 19 | Name: {result['server']['name']} 20 | Country: {result['server']['country']}, {result['server']['cc']} 21 | Sponsor: {result['server']['sponsor']} 22 | ISP: {result['client']['isp']} 23 | SpeedTest Results 24 | Upload: {speed_convert(result['upload'] / 8)} 25 | Download: {speed_convert(result['download'] / 8)} 26 | Ping: {result['ping']} ms 27 | ISP Rating: {result['client']['isprating']} 28 | ''' 29 | editMessage(string_speed, speed) 30 | 31 | 32 | def speed_convert(size): 33 | """Hi human, you can't read bytes?""" 34 | power = 2 ** 10 35 | zero = 0 36 | units = {0: "", 1: "Kb/s", 2: "MB/s", 3: "Gb/s", 4: "Tb/s"} 37 | while size > power: 38 | size /= power 39 | zero += 1 40 | return f"{round(size, 2)} {units[zero]}" 41 | 42 | 43 | SPEED_HANDLER = CommandHandler(BotCommands.SpeedCommand, speedtest, 44 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True) 45 | 46 | dispatcher.add_handler(SPEED_HANDLER) 47 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/mega_download_status.py: -------------------------------------------------------------------------------- 1 | from bot.helper.ext_utils.bot_utils import get_readable_file_size,MirrorStatus, get_readable_time 2 | from bot import DOWNLOAD_DIR 3 | from .status import Status 4 | 5 | 6 | class MegaDownloadStatus(Status): 7 | 8 | def __init__(self, obj, listener): 9 | self.uid = obj.uid 10 | self.listener = listener 11 | self.obj = obj 12 | self.message = listener.message 13 | 14 | def name(self) -> str: 15 | return self.obj.name 16 | 17 | def progress_raw(self): 18 | try: 19 | return round(self.processed_bytes() / self.obj.size * 100,2) 20 | except ZeroDivisionError: 21 | return 0.0 22 | 23 | def progress(self): 24 | """Progress of download in percentage""" 25 | return f"{self.progress_raw()}%" 26 | 27 | def status(self) -> str: 28 | return MirrorStatus.STATUS_DOWNLOADING 29 | 30 | def processed_bytes(self): 31 | return self.obj.downloaded_bytes 32 | 33 | def eta(self): 34 | try: 35 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw() 36 | return f'{get_readable_time(seconds)}' 37 | except ZeroDivisionError: 38 | return '-' 39 | 40 | def size_raw(self): 41 | return self.obj.size 42 | 43 | def size(self) -> str: 44 | return get_readable_file_size(self.size_raw()) 45 | 46 | def downloaded(self) -> str: 47 | return get_readable_file_size(self.obj.downloadedBytes) 48 | 49 | def speed_raw(self): 50 | return self.obj.speed 51 | 52 | def speed(self) -> str: 53 | return f'{get_readable_file_size(self.speed_raw())}/s' 54 | 55 | def gid(self) -> str: 56 | return self.obj.gid 57 | 58 | def path(self) -> str: 59 | return f"{DOWNLOAD_DIR}{self.uid}" 60 | 61 | def download(self): 62 | return self.obj 63 | -------------------------------------------------------------------------------- /bot/helper/telegram_helper/filters.py: -------------------------------------------------------------------------------- 1 | from telegram.ext import MessageFilter 2 | from telegram import Message 3 | from bot import AUTHORIZED_CHATS, SUDO_USERS, OWNER_ID, download_dict, download_dict_lock 4 | 5 | 6 | class CustomFilters: 7 | class _OwnerFilter(MessageFilter): 8 | def filter(self, message): 9 | return bool(message.from_user.id == OWNER_ID) 
10 | 11 | owner_filter = _OwnerFilter() 12 | 13 | class _AuthorizedUserFilter(MessageFilter): 14 | def filter(self, message): 15 | id = message.from_user.id 16 | return bool(id in AUTHORIZED_CHATS or id in SUDO_USERS or id == OWNER_ID) 17 | 18 | authorized_user = _AuthorizedUserFilter() 19 | 20 | class _AuthorizedChat(MessageFilter): 21 | def filter(self, message): 22 | return bool(message.chat.id in AUTHORIZED_CHATS) 23 | 24 | authorized_chat = _AuthorizedChat() 25 | 26 | class _SudoUser(MessageFilter): 27 | def filter(self,message): 28 | return bool(message.from_user.id in SUDO_USERS) 29 | 30 | sudo_user = _SudoUser() 31 | 32 | class _MirrorOwner(MessageFilter): 33 | def filter(self, message: Message): 34 | user_id = message.from_user.id 35 | if user_id == OWNER_ID: 36 | return True 37 | args = str(message.text).split(' ') 38 | if len(args) > 1: 39 | # Cancelling by gid 40 | with download_dict_lock: 41 | for message_id, status in download_dict.items(): 42 | if status.gid() == args[1] and status.message.from_user.id == user_id: 43 | return True 44 | else: 45 | return False 46 | if not message.reply_to_message and len(args) == 1: 47 | return True 48 | # Cancelling by replying to original mirror message 49 | reply_user = message.reply_to_message.from_user.id 50 | return bool(reply_user == user_id) 51 | mirror_owner_filter = _MirrorOwner() 52 | -------------------------------------------------------------------------------- /bot/helper/__init__.py: -------------------------------------------------------------------------------- 1 | import heroku3 2 | 3 | from functools import wraps 4 | from pyrogram.types import Message 5 | from bot import HEROKU_API_KEY, HEROKU_APP_NAME 6 | 7 | def get_text(message: Message) -> [None, str]: 8 | """Extract Text From Commands""" 9 | text_to_return = message.text 10 | if message.text is None: 11 | return None 12 | if " " in text_to_return: 13 | try: 14 | return message.text.split(None, 1)[1] 15 | except IndexError: 16 | return None 17 | else: 18 | return None 19 | 20 | heroku_client = None 21 | if HEROKU_API_KEY: 22 | heroku_client = heroku3.from_key(HEROKU_API_KEY) 23 | 24 | def check_heroku(func): 25 | @wraps(func) 26 | async def heroku_cli(client, message): 27 | heroku_app = None 28 | if not heroku_client: 29 | await message.reply_text("`Please Add HEROKU_API_KEY Key For This To Function To Work!`", parse_mode="markdown") 30 | elif not HEROKU_APP_NAME: 31 | await message.reply_text("`Please Add HEROKU_APP_NAME For This To Function To Work!`", parse_mode="markdown") 32 | if HEROKU_APP_NAME and heroku_client: 33 | try: 34 | heroku_app = heroku_client.app(HEROKU_APP_NAME) 35 | except: 36 | await message.reply_text(message, "`Heroku Api Key And App Name Doesn't Match!`", parse_mode="markdown") 37 | if heroku_app: 38 | await func(client, message, heroku_app) 39 | 40 | return heroku_cli 41 | 42 | def fetch_heroku_git_url(api_key, app_name): 43 | if not api_key: 44 | return None 45 | if not app_name: 46 | return None 47 | heroku = heroku3.from_key(api_key) 48 | try: 49 | heroku_applications = heroku.apps() 50 | except: 51 | return None 52 | heroku_app = None 53 | for app in heroku_applications: 54 | if app.name == app_name: 55 | heroku_app = app 56 | break 57 | if not heroku_app: 58 | return None 59 | return heroku_app.git_url.replace("https://", "https://api:" + api_key + "@") 60 | 61 | HEROKU_URL = fetch_heroku_git_url(HEROKU_API_KEY, HEROKU_APP_NAME) 62 | -------------------------------------------------------------------------------- /config.env: 
-------------------------------------------------------------------------------- 1 | # REQUIRED CONFIG 2 | BOT_TOKEN = "" 3 | GDRIVE_FOLDER_ID = "" 4 | OWNER_ID = 5 | DOWNLOAD_DIR = "/usr/src/app/downloads" 6 | DOWNLOAD_STATUS_UPDATE_INTERVAL = 5 # Keep this 5 or 7 7 | AUTO_DELETE_MESSAGE_DURATION = 20 # Change it to -1 to disable it 8 | IS_TEAM_DRIVE = "" # True if using Shared Drive 9 | TELEGRAM_API = 10 | TELEGRAM_HASH = "" 11 | UPSTREAM_REPO = "https://github.com/TheCaduceus/DrTorrent" # Don't touch this 12 | UPSTREAM_BRANCH = "master" # Don't touch this 13 | # OPTIONAL CONFIG 14 | DATABASE_URL = "" # Not required if Heroku 15 | AUTHORIZED_CHATS = "" # Split by space 16 | SUDO_USERS = "" # Split by space 17 | IGNORE_PENDING_REQUESTS = "False" 18 | USE_SERVICE_ACCOUNTS = "" # True if using Service Accounts 19 | INDEX_URL = "" # INDEX URL without / 20 | STATUS_LIMIT = "" # Recommend limit status to 4 tasks max 21 | UPTOBOX_TOKEN = "" 22 | # Mega Configurations 23 | MEGA_API_KEY = "" # Required to clone Mega files 24 | MEGA_EMAIL_ID = "" # Required to clone Mega files 25 | MEGA_PASSWORD = "" # Required to clone Mega files 26 | BLOCK_MEGA_FOLDER = "" 27 | BLOCK_MEGA_LINKS = "" 28 | STOP_DUPLICATE = "" 29 | # Link Shortener Configurations 30 | SHORTENER = "" 31 | SHORTENER_API = "" 32 | # GDTOT Configurations 33 | GDTOT_COOKIES = "crypt= ; PHPSESSID= " # Required to use GDTOT Links 34 | # VPS Configurations 35 | IS_VPS = "" # True if deployed on VPS 36 | SERVER_PORT = "80" # Only for VPS 37 | BASE_URL_OF_BOT = "" # Required for Heroku 38 | # If you want to use Credentials externally from Index Links, fill these vars with the direct links 39 | # These are optional, if you don't know, simply leave them, don't fill anything in them. 40 | ACCOUNTS_ZIP_URL = "" 41 | TOKEN_PICKLE_URL = "" 42 | # Limit Configurations 43 | TORRENT_DIRECT_LIMIT = "" 44 | TAR_UNZIP_LIMIT = "" 45 | CLONE_LIMIT = "" 46 | MEGA_LIMIT = "" 47 | # Heroku Configurations 48 | HEROKU_API_KEY = "" # Mandatory 49 | HEROKU_APP_NAME = "" # Mandatory 50 | VIEW_LINK = "False" # True if using Index 51 | # Add more buttons (Three buttons are already added of Drive Link, Index Link, and View Link, you can add extra buttons too, these are optional) 52 | # If you don't know what are below entries, simply leave them, Don't fill anything in them. 53 | BUTTON_FOUR_NAME = "Official Website" 54 | BUTTON_FOUR_URL = "https://www.caduceus.ml/" 55 | BUTTON_FIVE_NAME = "" 56 | BUTTON_FIVE_URL = "" 57 | BUTTON_SIX_NAME = "" 58 | BUTTON_SIX_URL = "" 59 | -------------------------------------------------------------------------------- /heroku-guide.md: -------------------------------------------------------------------------------- 1 | ## Deploying slam-mirrorbot on Heroku with Github Workflows. 2 | 3 | ## Pre-requisites 4 | 5 | - [Heroku](heroku.com) accounts 6 | - Recommended to use 1 App in 1 Heroku accounts 7 | - Don't use bin/fake credits card, because your Heroku account will banned 8 | 9 | ## Deployment 10 | 11 | 1. Give stars and Fork this repo then upload **token.pickle** to your forks, or you can upload your **token.pickle** to your Index and put your **token.pickle** link to **TOKEN_PICKLE_URL** (**NOTE**: If you didn't upload **token.pickle** uploading will not work). How to generate **token.pickle**? [Read here](https://github.com/breakdowns/slam-mirrorbot#getting-google-oauth-api-credential-file) 12 | 13 | 2. Go to Repository `Settings` -> `Secrets` 14 | 15 | ![secrets](https://telegra.ph/file/bb8cb0eced5caad68a41b.jpg) 16 | 17 | 3. 
Add the below Required Variables one by one by clicking `New Repository Secret` everytime. 18 | 19 | ``` 20 | HEROKU_EMAIL 21 | HEROKU_API_KEY 22 | HEROKU_APP_NAME 23 | CONFIG_FILE_URL 24 | ``` 25 | 26 | ### Description of the above Required Variables 27 | * `HEROKU_EMAIL` Heroku Account email Id in which the above app will be deployed 28 | * `HEROKU_API_KEY` Go to your Heroku account and go to Account Settings. Scroll to the bottom until you see API Key. Copy this key and add it 29 | * `HEROKU_APP_NAME` Your Heroku app name, Name Must be unique 30 | * `CONFIG_FILE_URL` Fill This in any text editor. Remove the _____REMOVE_THIS_LINE_____=True line and fill the variables. For details about config you can see Here. Go to https://gist.github.com and paste your config data. Rename the file to config.env then create secret gist. Click on Raw, copy the link. This will be your CONFIG_FILE_URL. 31 | 4. After adding all the above Required Variables go to Github Actions tab in your repo 32 | 33 | 5. Select `Manually Deploy to heroku` workflow as shown below: 34 | 35 | ![Example Manually Deploy to Heroku](https://telegra.ph/file/38ffda0165d9671f1d5dc.jpg) 36 | 37 | 6. Then click on Run workflow 38 | 39 | ![Run workflow](https://telegra.ph/file/c5b4c2e02f585cb59fe5c.jpg) 40 | 41 | 7. _Done!_ your bot will be deployed now. 42 | 43 | ## NOTE 44 | - Don't change/edit variables from Heroku if you want to change/edit do it from Github Secrets 45 | - If you want to set optional variables, go to your Heroku app settings and add the variables 46 | 47 | ## Credits 48 | - [arghyac35](https://github.com/arghyac35) for Tutorial 49 | -------------------------------------------------------------------------------- /bot/modules/list.py: -------------------------------------------------------------------------------- 1 | from telegram.ext import CommandHandler 2 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper 3 | from bot import LOGGER, dispatcher 4 | from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, editMessage 5 | from bot.helper.telegram_helper.filters import CustomFilters 6 | import threading 7 | from bot.helper.telegram_helper.bot_commands import BotCommands 8 | from bot.helper.mirror_utils.upload_utils.gdtot_helper import GDTOT 9 | 10 | def list_drive(update, context): 11 | try: 12 | search = update.message.text.split(' ',maxsplit=1)[1] 13 | LOGGER.info(f"Searching: {search}") 14 | reply = sendMessage('Searching..... 
Please wait!', context.bot, update) 15 | gdrive = GoogleDriveHelper(None) 16 | msg, button = gdrive.drive_list(search) 17 | 18 | if button: 19 | editMessage(msg, reply, button) 20 | else: 21 | editMessage(f'No result found for {search}', reply, button) 22 | 23 | except IndexError: 24 | sendMessage('Send a search key along with command', context.bot, update) 25 | 26 | 27 | def gdtot(update, context): 28 | try: 29 | search = update.message.text.split(' ', 1)[1] 30 | search_list = search.split(' ') 31 | for glink in search_list: 32 | LOGGER.info(f"Extracting gdtot link: {glink}") 33 | button = None 34 | reply = sendMessage('Getting Your GDTOT File Wait....', context.bot, update) 35 | file_name, file_url = GDTOT().parse(url=glink) 36 | if file_name == 404: 37 | sendMessage(file_url, context.bot, update) 38 | return 39 | if file_url != 404: 40 | gdrive = GoogleDriveHelper(None) 41 | msg, button = gdrive.clone(file_url) 42 | if button: 43 | editMessage(msg, reply, button) 44 | else: 45 | editMessage(file_name, reply, button) 46 | except IndexError: 47 | sendMessage('Send cmd along with url', context.bot, update) 48 | except Exception as e: 49 | LOGGER.info(e) 50 | 51 | list_handler = CommandHandler(BotCommands.ListCommand, list_drive, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True) 52 | gdtot_handler = CommandHandler(BotCommands.GDTOTCommand, gdtot, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True) 53 | 54 | 55 | 56 | dispatcher.add_handler(list_handler) 57 | dispatcher.add_handler(gdtot_handler) 58 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/qbit_download_status.py: -------------------------------------------------------------------------------- 1 | from bot import DOWNLOAD_DIR, LOGGER, get_client 2 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time 3 | from .status import Status 4 | 5 | 6 | class QbDownloadStatus(Status): 7 | 8 | def __init__(self, gid, listener, qbhash, client): 9 | super().__init__() 10 | self.__gid = gid 11 | self.__hash = qbhash 12 | self.client = client 13 | self.__uid = listener.uid 14 | self.listener = listener 15 | self.message = listener.message 16 | 17 | 18 | def progress(self): 19 | """ 20 | Calculates the progress of the mirror (upload or download) 21 | :return: returns progress in percentage 22 | """ 23 | return f'{round(self.torrent_info().progress*100,2)}%' 24 | 25 | def size_raw(self): 26 | """ 27 | Gets total size of the mirror file/folder 28 | :return: total size of mirror 29 | """ 30 | return self.torrent_info().size 31 | 32 | def processed_bytes(self): 33 | return self.torrent_info().downloaded 34 | 35 | def speed(self): 36 | return f"{get_readable_file_size(self.torrent_info().dlspeed)}/s" 37 | 38 | def name(self): 39 | return self.torrent_info().name 40 | 41 | def path(self): 42 | return f"{DOWNLOAD_DIR}{self.__uid}" 43 | 44 | def size(self): 45 | return get_readable_file_size(self.torrent_info().size) 46 | 47 | def eta(self): 48 | return get_readable_time(self.torrent_info().eta) 49 | 50 | def status(self): 51 | download = self.torrent_info().state 52 | if download == "queuedDL": 53 | status = MirrorStatus.STATUS_WAITING 54 | elif download == "metaDL" or download == "checkingResumeData": 55 | status = MirrorStatus.STATUS_DOWNLOADING + " (Metadata)" 56 | elif download == "pausedDL": 57 | status = MirrorStatus.STATUS_PAUSE 58 | else: 59 | status = 
MirrorStatus.STATUS_DOWNLOADING 60 | return status 61 | 62 | def torrent_info(self): 63 | return self.client.torrents_info(torrent_hashes=self.__hash)[0] 64 | 65 | def download(self): 66 | return self 67 | 68 | def uid(self): 69 | return self.__uid 70 | 71 | def gid(self): 72 | return self.__gid 73 | 74 | def cancel_download(self): 75 | LOGGER.info(f"Cancelling Download: {self.name()}") 76 | self.listener.onDownloadError('Download stopped by user!') 77 | self.client.torrents_delete(torrent_hashes=self.__hash, delete_files=True) 78 | -------------------------------------------------------------------------------- /bot/modules/watch.py: -------------------------------------------------------------------------------- 1 | from telegram.ext import CommandHandler 2 | from telegram import Bot, Update 3 | from bot import DOWNLOAD_DIR, dispatcher, LOGGER 4 | from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage 5 | from .mirror import MirrorListener 6 | from bot.helper.mirror_utils.download_utils.youtube_dl_download_helper import YoutubeDLHelper 7 | from bot.helper.telegram_helper.bot_commands import BotCommands 8 | from bot.helper.telegram_helper.filters import CustomFilters 9 | import threading 10 | 11 | 12 | def _watch(bot: Bot, update, isTar=False): 13 | mssg = update.message.text 14 | message_args = mssg.split(' ') 15 | name_args = mssg.split('|') 16 | 17 | try: 18 | link = message_args[1] 19 | except IndexError: 20 | msg = f"/{BotCommands.WatchCommand} [youtube-dl supported link] [quality] |[CustomName] to mirror with youtube-dl.\n\n" 21 | msg += "Note: Quality and custom name are optional\n\nExample of quality: audio, 144, 240, 360, 480, 720, 1080, 2160." 22 | msg += "\n\nIf you want to use custom filename, enter it after |" 23 | msg += f"\n\nExample:\n/{BotCommands.WatchCommand} https://youtu.be/Pk_TthHfLeE 720 |Slam\n\n" 24 | msg += "This file will be downloaded in 720p quality and it's name will be Slam" 25 | sendMessage(msg, bot, update) 26 | return 27 | 28 | try: 29 | if "|" in mssg: 30 | mssg = mssg.split("|") 31 | qual = mssg[0].split(" ")[2] 32 | if qual == "": 33 | raise IndexError 34 | else: 35 | qual = message_args[2] 36 | if qual != "audio": 37 | qual = f'bestvideo[height<={qual}]+bestaudio/best[height<={qual}]' 38 | except IndexError: 39 | qual = "bestvideo+bestaudio/best" 40 | 41 | try: 42 | name = name_args[1] 43 | except IndexError: 44 | name = "" 45 | 46 | pswd = "" 47 | listener = MirrorListener(bot, update, pswd, isTar) 48 | ydl = YoutubeDLHelper(listener) 49 | threading.Thread(target=ydl.add_download,args=(link, f'{DOWNLOAD_DIR}{listener.uid}', qual, name)).start() 50 | sendStatusMessage(update, bot) 51 | 52 | 53 | def watchTar(update, context): 54 | _watch(context.bot, update, True) 55 | 56 | 57 | def watch(update, context): 58 | _watch(context.bot, update) 59 | 60 | 61 | mirror_handler = CommandHandler(BotCommands.WatchCommand, watch, 62 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True) 63 | tar_mirror_handler = CommandHandler(BotCommands.TarWatchCommand, watchTar, 64 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True) 65 | 66 | 67 | dispatcher.add_handler(mirror_handler) 68 | dispatcher.add_handler(tar_mirror_handler) 69 | -------------------------------------------------------------------------------- /bot/helper/ext_utils/db_handler.py: -------------------------------------------------------------------------------- 1 | import psycopg2 2 | from psycopg2 import 
Error 3 | from bot import AUTHORIZED_CHATS, SUDO_USERS, DB_URI, LOGGER 4 | 5 | class DbManger: 6 | def __init__(self): 7 | self.err = False 8 | 9 | def connect(self): 10 | try: 11 | self.conn = psycopg2.connect(DB_URI) 12 | self.cur = self.conn.cursor() 13 | except psycopg2.DatabaseError as error : 14 | LOGGER.error("Error in dbMang : ", error) 15 | self.err = True 16 | 17 | def disconnect(self): 18 | self.cur.close() 19 | self.conn.close() 20 | 21 | def db_auth(self,chat_id: int): 22 | self.connect() 23 | if self.err : 24 | return "There's some error check log for details" 25 | else: 26 | sql = 'INSERT INTO users VALUES ({});'.format(chat_id) 27 | self.cur.execute(sql) 28 | self.conn.commit() 29 | self.disconnect() 30 | AUTHORIZED_CHATS.add(chat_id) 31 | return 'Authorized successfully' 32 | 33 | def db_unauth(self,chat_id: int): 34 | self.connect() 35 | if self.err : 36 | return "There's some error check log for details" 37 | else: 38 | sql = 'DELETE from users where uid = {};'.format(chat_id) 39 | self.cur.execute(sql) 40 | self.conn.commit() 41 | self.disconnect() 42 | AUTHORIZED_CHATS.remove(chat_id) 43 | return 'Unauthorized successfully' 44 | 45 | def db_addsudo(self,chat_id: int): 46 | self.connect() 47 | if self.err : 48 | return "There's some error check log for details" 49 | else: 50 | if chat_id in AUTHORIZED_CHATS: 51 | sql = 'UPDATE users SET sudo = TRUE where uid = {};'.format(chat_id) 52 | self.cur.execute(sql) 53 | self.conn.commit() 54 | self.disconnect() 55 | SUDO_USERS.add(chat_id) 56 | return 'Successfully promoted as Sudo' 57 | else: 58 | sql = 'INSERT INTO users VALUES ({},TRUE);'.format(chat_id) 59 | self.cur.execute(sql) 60 | self.conn.commit() 61 | self.disconnect() 62 | SUDO_USERS.add(chat_id) 63 | return 'Successfully Authorized and promoted as Sudo' 64 | 65 | def db_rmsudo(self,chat_id: int): 66 | self.connect() 67 | if self.err : 68 | return "There's some error check log for details" 69 | else: 70 | sql = 'UPDATE users SET sudo = FALSE where uid = {};'.format(chat_id) 71 | self.cur.execute(sql) 72 | self.conn.commit() 73 | self.disconnect() 74 | SUDO_USERS.remove(chat_id) 75 | return 'Successfully removed from Sudo' 76 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/status_utils/aria_download_status.py: -------------------------------------------------------------------------------- 1 | from bot import aria2, DOWNLOAD_DIR, LOGGER 2 | from bot.helper.ext_utils.bot_utils import MirrorStatus 3 | from .status import Status 4 | 5 | def get_download(gid): 6 | return aria2.get_download(gid) 7 | 8 | 9 | class AriaDownloadStatus(Status): 10 | 11 | def __init__(self, gid, listener): 12 | super().__init__() 13 | self.upload_name = None 14 | self.__gid = gid 15 | self.__download = get_download(self.__gid) 16 | self.__uid = listener.uid 17 | self.__listener = listener 18 | self.message = listener.message 19 | 20 | def __update(self): 21 | self.__download = get_download(self.__gid) 22 | download = self.__download 23 | if download.followed_by_ids: 24 | self.__gid = download.followed_by_ids[0] 25 | 26 | def progress(self): 27 | """ 28 | Calculates the progress of the mirror (upload or download) 29 | :return: returns progress in percentage 30 | """ 31 | self.__update() 32 | return self.__download.progress_string() 33 | 34 | def size_raw(self): 35 | """ 36 | Gets total size of the mirror file/folder 37 | :return: total size of mirror 38 | """ 39 | return self.aria_download().total_length 40 | 41 | def 
processed_bytes(self): 42 | return self.aria_download().completed_length 43 | 44 | def speed(self): 45 | return self.aria_download().download_speed_string() 46 | 47 | def name(self): 48 | return self.aria_download().name 49 | 50 | def path(self): 51 | return f"{DOWNLOAD_DIR}{self.__uid}" 52 | 53 | def size(self): 54 | return self.aria_download().total_length_string() 55 | 56 | def eta(self): 57 | return self.aria_download().eta_string() 58 | 59 | def status(self): 60 | download = self.aria_download() 61 | if download.is_waiting: 62 | status = MirrorStatus.STATUS_WAITING 63 | elif download.has_failed: 64 | status = MirrorStatus.STATUS_FAILED 65 | else: 66 | status = MirrorStatus.STATUS_DOWNLOADING 67 | return status 68 | 69 | def aria_download(self): 70 | self.__update() 71 | return self.__download 72 | 73 | def download(self): 74 | return self 75 | 76 | def getListener(self): 77 | return self.__listener 78 | 79 | def uid(self): 80 | return self.__uid 81 | 82 | def gid(self): 83 | self.__update() 84 | return self.__gid 85 | 86 | def cancel_download(self): 87 | LOGGER.info(f"Cancelling Download: {self.name()}") 88 | download = self.aria_download() 89 | if download.is_waiting: 90 | self.__listener.onDownloadError("★ 𝗗𝗼𝘄𝗻𝗹𝗼𝗮𝗱 𝗖𝗮𝗻𝗰𝗲𝗹𝗹𝗲𝗱 𝗕𝘆 𝗨𝘀𝗲𝗿!! ★") 91 | aria2.remove([download], force=True) 92 | return 93 | if len(download.followed_by_ids) != 0: 94 | downloads = aria2.get_downloads(download.followed_by_ids) 95 | aria2.remove(downloads, force=True) 96 | self.__listener.onDownloadError('★ 𝗗𝗼𝘄𝗻𝗹𝗼𝗮𝗱 𝗖𝗮𝗻𝗰𝗲𝗹𝗹𝗲𝗱 𝗕𝘆 𝗨𝘀𝗲𝗿!! ★') 97 | aria2.remove([download], force=True) 98 | -------------------------------------------------------------------------------- /bot/modules/updates.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import subprocess 3 | import heroku3 4 | 5 | from datetime import datetime 6 | from os import environ, execle, path, remove 7 | 8 | from git import Repo 9 | from git.exc import GitCommandError, InvalidGitRepositoryError, NoSuchPathError 10 | 11 | from pyrogram import filters 12 | 13 | from bot import app, OWNER_ID, UPSTREAM_REPO, UPSTREAM_BRANCH, bot 14 | from bot.helper import get_text, HEROKU_URL 15 | from bot.helper.telegram_helper.bot_commands import BotCommands 16 | 17 | REPO_ = UPSTREAM_REPO 18 | BRANCH_ = UPSTREAM_BRANCH 19 | 20 | 21 | # Update Command 22 | 23 | @app.on_message(filters.command([BotCommands.UpdateCommand, f'{BotCommands.UpdateCommand}@{bot.username}']) & filters.user(OWNER_ID)) 24 | async def update_it(client, message): 25 | msg_ = await message.reply_text("`Updating Please Wait!`") 26 | try: 27 | repo = Repo() 28 | except GitCommandError: 29 | return await msg_.edit( 30 | "**Invalid Git Command. Please Report This Bug To [Support Group](https://t.me/SlamMirrorSupport)**" 31 | ) 32 | except InvalidGitRepositoryError: 33 | repo = Repo.init() 34 | if "upstream" in repo.remotes: 35 | origin = repo.remote("upstream") 36 | else: 37 | origin = repo.create_remote("upstream", REPO_) 38 | origin.fetch() 39 | repo.create_head(UPSTREAM_BRANCH, origin.refs.master) 40 | repo.heads.master.set_tracking_branch(origin.refs.master) 41 | repo.heads.master.checkout(True) 42 | if repo.active_branch.name != UPSTREAM_BRANCH: 43 | return await msg_.edit( 44 | f"`Seems Like You Are Using Custom Branch - {repo.active_branch.name}! 
Please Switch To {UPSTREAM_BRANCH} To Make This Updater Function!`" 45 | ) 46 | try: 47 | repo.create_remote("upstream", REPO_) 48 | except BaseException: 49 | pass 50 | ups_rem = repo.remote("upstream") 51 | ups_rem.fetch(UPSTREAM_BRANCH) 52 | if not HEROKU_URL: 53 | try: 54 | ups_rem.pull(UPSTREAM_BRANCH) 55 | except GitCommandError: 56 | repo.git.reset("--hard", "FETCH_HEAD") 57 | subprocess.run(["pip3", "install", "--no-cache-dir", "-r", "requirements.txt"]) 58 | await msg_.edit("`Updated Sucessfully! Give Me Some Time To Restart!`") 59 | with open("./aria.sh", 'rb') as file: 60 | script = file.read() 61 | subprocess.call("./aria.sh", shell=True) 62 | args = [sys.executable, "-m", "bot"] 63 | execle(sys.executable, *args, environ) 64 | exit() 65 | return 66 | else: 67 | await msg_.edit("`Heroku Detected! Pushing, Please wait!`") 68 | ups_rem.fetch(UPSTREAM_BRANCH) 69 | repo.git.reset("--hard", "FETCH_HEAD") 70 | if "heroku" in repo.remotes: 71 | remote = repo.remote("heroku") 72 | remote.set_url(HEROKU_URL) 73 | else: 74 | remote = repo.create_remote("heroku", HEROKU_URL) 75 | try: 76 | remote.push(refspec="HEAD:refs/heads/master", force=True) 77 | except BaseException as error: 78 | await msg_.edit(f"**Updater Error** \nTraceBack : `{error}`") 79 | return repo.__del__() 80 | await msg_.edit(f"`Updated Sucessfully! \n\nCheck your config with` `/{BotCommands.ConfigMenuCommand}`") 81 | -------------------------------------------------------------------------------- /add_to_team_drive.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | from google.oauth2.service_account import Credentials 3 | import googleapiclient.discovery, json, progress.bar, glob, sys, argparse, time 4 | from google_auth_oauthlib.flow import InstalledAppFlow 5 | from google.auth.transport.requests import Request 6 | import os, pickle 7 | 8 | stt = time.time() 9 | 10 | parse = argparse.ArgumentParser( 11 | description='A tool to add service accounts to a shared drive from a folder containing credential files.') 12 | parse.add_argument('--path', '-p', default='accounts', 13 | help='Specify an alternative path to the service accounts folder.') 14 | parse.add_argument('--credentials', '-c', default='./credentials.json', 15 | help='Specify the relative path for the credentials file.') 16 | parse.add_argument('--yes', '-y', default=False, action='store_true', help='Skips the sanity prompt.') 17 | parsereq = parse.add_argument_group('required arguments') 18 | parsereq.add_argument('--drive-id', '-d', help='The ID of the Shared Drive.', required=True) 19 | 20 | args = parse.parse_args() 21 | acc_dir = args.path 22 | did = args.drive_id 23 | credentials = glob.glob(args.credentials) 24 | 25 | try: 26 | open(credentials[0], 'r') 27 | print('>> Found credentials.') 28 | except IndexError: 29 | print('>> No credentials found.') 30 | sys.exit(0) 31 | 32 | if not args.yes: 33 | # input('Make sure the following client id is added to the shared drive as Manager:\n' + json.loads((open( 34 | # credentials[0],'r').read()))['installed']['client_id']) 35 | input('>> Make sure the **Google account** that has generated credentials.json\n is added into your Team Drive ' 36 | '(shared drive) as Manager\n>> (Press any key to continue)') 37 | 38 | creds = None 39 | if os.path.exists('token_sa.pickle'): 40 | with open('token_sa.pickle', 'rb') as token: 41 | creds = pickle.load(token) 42 | # If there are no (valid) credentials available, let the user log in. 
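# The pickled token (token_sa.pickle) is reused across runs: an expired token that
# carries a refresh_token is refreshed silently, otherwise a console-based OAuth flow
# is started with the admin.directory.group scopes, and the resulting credentials are
# written back to token_sa.pickle for the next invocation.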
43 | if not creds or not creds.valid: 44 | if creds and creds.expired and creds.refresh_token: 45 | creds.refresh(Request()) 46 | else: 47 | flow = InstalledAppFlow.from_client_secrets_file(credentials[0], scopes=[ 48 | 'https://www.googleapis.com/auth/admin.directory.group', 49 | 'https://www.googleapis.com/auth/admin.directory.group.member' 50 | ]) 51 | # creds = flow.run_local_server(port=0) 52 | creds = flow.run_console() 53 | # Save the credentials for the next run 54 | with open('token_sa.pickle', 'wb') as token: 55 | pickle.dump(creds, token) 56 | 57 | drive = googleapiclient.discovery.build("drive", "v3", credentials=creds) 58 | batch = drive.new_batch_http_request() 59 | 60 | aa = glob.glob('%s/*.json' % acc_dir) 61 | pbar = progress.bar.Bar("Readying accounts", max=len(aa)) 62 | for i in aa: 63 | ce = json.loads(open(i, 'r').read())['client_email'] 64 | batch.add(drive.permissions().create(fileId=did, supportsAllDrives=True, body={ 65 | "role": "organizer", 66 | "type": "user", 67 | "emailAddress": ce 68 | })) 69 | pbar.next() 70 | pbar.finish() 71 | print('Adding...') 72 | batch.execute() 73 | 74 | print('Complete.') 75 | hours, rem = divmod((time.time() - stt), 3600) 76 | minutes, sec = divmod(rem, 60) 77 | print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), sec)) 78 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/upload_utils/gdtot_helper.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import requests as rq 4 | import re 5 | import json as js 6 | from bs4 import BeautifulSoup as bt 7 | import random 8 | from bot import GDTOT_COOKIES 9 | 10 | 11 | def cookie_checker(): 12 | """added support to use many GDtot cookies to Bypass limit""" 13 | GDTOT_COOKIES_SET = set() 14 | try: 15 | a_gdtot_cookies = GDTOT_COOKIES.split("||") 16 | for cookie in a_gdtot_cookies: 17 | GDTOT_COOKIES_SET.add(str(cookie)) 18 | GDTOT_COOKIES_LIST = list(GDTOT_COOKIES_SET) 19 | return {"cookie": random.choice(GDTOT_COOKIES_LIST)} 20 | except: 21 | return "" 22 | 23 | class GDTOT: 24 | def __init__(self): 25 | self.r = 'https://new.gdtot.top/' 26 | self.COOKIES = cookie_checker() 27 | self.c = GDTOT.check(self) 28 | self.h = { 29 | 'upgrade-insecure-requests': '1', 30 | 'save-data': 'on', 31 | 'user-agent': 'Mozilla/5.0 (Linux; Android 10; Redmi 8A Dual) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.101 Mobile Safari/537.36', 32 | 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9', 33 | 'sec-fetch-site': 'same-origin', 34 | 'sec-fetch-mode': 'navigate', 35 | 'sec-fetch-dest': 'document', 36 | 'referer': self.r, 37 | 'prefetchAd_3621940': 'true', 38 | 'accept-language': 'en-IN,en-GB;q=0.9,en-US;q=0.8,en;q=0.7' 39 | } 40 | 41 | def check(self): 42 | """Check cookies and Ready it for work""" 43 | try: 44 | j = js.loads(js.dumps(self.COOKIES))['cookie'].replace('=',': ').replace(';',',') 45 | f = re.sub(r'([a-zA-Z_0-9.%]+)', r'"\1"', "{%s}" %j) 46 | c = js.loads(f) 47 | return c 48 | except Exception as e: 49 | print(e) 50 | return "" 51 | 52 | def parse(self, url): 53 | """Main function to get the URL""" 54 | if url == "": 55 | return 56 | if self.c == "": 57 | print("Please provide cookies") 58 | return 59 | else: 60 | try: 61 | r1 = rq.get(url, headers=self.h, cookies=self.c).content 62 | p = bt(r1, 'html.parser').find('button', 
id="down").get('onclick').split("'")[1] 63 | self.r = url 64 | r2 = bt(rq.get(p, headers=self.h, cookies=self.c).content, 'html.parser').find('meta').get('content').split('=',1)[1] 65 | self.r = p 66 | r3 = bt(rq.get(r2, headers=self.h, cookies=self.c).content, 'html.parser').find('div', align="center") 67 | if r3 == None: 68 | r3 = bt(rq.get(r2, headers=self.h, cookies=self.c).content, 'html.parser') 69 | f = r3.find('h4').text 70 | return 404, f 71 | else: 72 | s = r3.find('h6').text 73 | i = r3.find('a', class_="btn btn-outline-light btn-user font-weight-bold").get('href') 74 | return s,i 75 | except Exception as e: 76 | print(e) 77 | return 404, 404 78 | -------------------------------------------------------------------------------- /bot/modules/cancel_mirror.py: -------------------------------------------------------------------------------- 1 | from telegram.ext import CommandHandler 2 | from bot import download_dict, dispatcher, download_dict_lock, DOWNLOAD_DIR 3 | from bot.helper.ext_utils.fs_utils import clean_download 4 | from bot.helper.telegram_helper.bot_commands import BotCommands 5 | from bot.helper.telegram_helper.filters import CustomFilters 6 | from bot.helper.telegram_helper.message_utils import * 7 | 8 | from time import sleep 9 | from bot.helper.ext_utils.bot_utils import getDownloadByGid, MirrorStatus, getAllDownload 10 | 11 | 12 | def cancel_mirror(update, context): 13 | args = update.message.text.split(" ", maxsplit=1) 14 | mirror_message = None 15 | if len(args) > 1: 16 | gid = args[1] 17 | dl = getDownloadByGid(gid) 18 | if not dl: 19 | sendMessage(f"GID: {gid} Not Found.", context.bot, update) 20 | return 21 | mirror_message = dl.message 22 | elif update.message.reply_to_message: 23 | mirror_message = update.message.reply_to_message 24 | with download_dict_lock: 25 | keys = list(download_dict.keys()) 26 | try: 27 | dl = download_dict[mirror_message.message_id] 28 | except: 29 | pass 30 | if len(args) == 1: 31 | msg = f"Please reply to the /{BotCommands.MirrorCommand} message which was used to start the download or send /{BotCommands.CancelMirror} GID to cancel it!" 
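# The checks below handle references that did not resolve to an active task:
# replying to a mirror command whose download is no longer tracked gets the
# "already cancelled" notice, while anything else falls back to the usage hint in msg.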
32 | if mirror_message and mirror_message.message_id not in keys: 33 | if BotCommands.MirrorCommand in mirror_message.text or \ 34 | BotCommands.TarMirrorCommand in mirror_message.text or \ 35 | BotCommands.UnzipMirrorCommand in mirror_message.text: 36 | msg1 = "Mirror Already Have Been Cancelled" 37 | sendMessage(msg1, context.bot, update) 38 | return 39 | else: 40 | sendMessage(msg, context.bot, update) 41 | return 42 | elif not mirror_message: 43 | sendMessage(msg, context.bot, update) 44 | return 45 | if dl.status() == MirrorStatus.STATUS_ARCHIVING: 46 | sendMessage("Archival in Progress, You Can't Cancel It.", context.bot, update) 47 | elif dl.status() == MirrorStatus.STATUS_EXTRACTING: 48 | sendMessage("Extract in Progress, You Can't Cancel It.", context.bot, update) 49 | else: 50 | dl.download().cancel_download() 51 | sleep(3) # incase of any error with ondownloaderror listener 52 | clean_download(f'{DOWNLOAD_DIR}{mirror_message.message_id}/') 53 | 54 | 55 | def cancel_all(update, context): 56 | count = 0 57 | gid = 0 58 | while True: 59 | dl = getAllDownload() 60 | if dl: 61 | if dl.gid() != gid: 62 | gid = dl.gid() 63 | dl.download().cancel_download() 64 | count += 1 65 | sleep(0.3) 66 | else: 67 | break 68 | sendMessage(f'{count} Download(s) has been Cancelled!', context.bot, update) 69 | 70 | 71 | 72 | cancel_mirror_handler = CommandHandler(BotCommands.CancelMirror, cancel_mirror, 73 | filters=(CustomFilters.authorized_chat | CustomFilters.authorized_user) & CustomFilters.mirror_owner_filter | CustomFilters.sudo_user, run_async=True) 74 | cancel_all_handler = CommandHandler(BotCommands.CancelAllCommand, cancel_all, 75 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True) 76 | dispatcher.add_handler(cancel_all_handler) 77 | dispatcher.add_handler(cancel_mirror_handler) 78 | -------------------------------------------------------------------------------- /bot/modules/clone.py: -------------------------------------------------------------------------------- 1 | from telegram.ext import CommandHandler 2 | from bot.helper.mirror_utils.upload_utils import gdriveTools 3 | from bot.helper.telegram_helper.message_utils import * 4 | from bot.helper.telegram_helper.filters import CustomFilters 5 | from bot.helper.telegram_helper.bot_commands import BotCommands 6 | from bot.helper.mirror_utils.status_utils.clone_status import CloneStatus 7 | from bot import dispatcher, LOGGER, CLONE_LIMIT, STOP_DUPLICATE, download_dict, download_dict_lock, Interval 8 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, check_limit 9 | import random 10 | import string 11 | 12 | 13 | def cloneNode(update, context): 14 | args = update.message.text.split(" ", maxsplit=1) 15 | if len(args) > 1: 16 | link = args[1] 17 | gd = gdriveTools.GoogleDriveHelper() 18 | res, size, name, files = gd.clonehelper(link) 19 | if res != "": 20 | sendMessage(res, context.bot, update) 21 | return 22 | if STOP_DUPLICATE: 23 | LOGGER.info(f"Checking File/Folder if already in Drive...") 24 | smsg, button = gd.drive_list(name) 25 | if smsg: 26 | msg3 = "File/Folder is already available in Drive.\nHere are the search results:" 27 | sendMarkup(msg3, context.bot, update, button) 28 | return 29 | if CLONE_LIMIT is not None: 30 | result = check_limit(size, CLONE_LIMIT) 31 | if result: 32 | msg2 = f'Failed, Clone limit is {CLONE_LIMIT}.\nYour File/Folder size is {get_readable_file_size(clonesize)}.' 
33 | sendMessage(msg2, context.bot, update) 34 | return 35 | if files < 15: 36 | msg = sendMessage(f"Cloning: {link}", context.bot, update) 37 | result, button = gd.clone(link) 38 | deleteMessage(context.bot, msg) 39 | else: 40 | drive = gdriveTools.GoogleDriveHelper(name) 41 | gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12)) 42 | clone_status = CloneStatus(drive, size, update, gid) 43 | with download_dict_lock: 44 | download_dict[update.message.message_id] = clone_status 45 | sendStatusMessage(update, context.bot) 46 | result, button = drive.clone(link) 47 | with download_dict_lock: 48 | del download_dict[update.message.message_id] 49 | count = len(download_dict) 50 | try: 51 | if count == 0: 52 | Interval[0].cancel() 53 | del Interval[0] 54 | delete_all_messages() 55 | else: 56 | update_all_messages() 57 | except IndexError: 58 | pass 59 | if update.message.from_user.username: 60 | uname = f'@{update.message.from_user.username}' 61 | else: 62 | uname = f'{update.message.from_user.first_name}' 63 | if uname is not None: 64 | cc = f'\n\ncc: {uname}' 65 | men = f'{uname} ' 66 | if button == "cancelled" or button == "": 67 | sendMessage(men + result, context.bot, update) 68 | else: 69 | sendMarkup(result + cc, context.bot, update, button) 70 | else: 71 | sendMessage('Provide G-Drive Shareable Link to Clone.', context.bot, update) 72 | 73 | clone_handler = CommandHandler(BotCommands.CloneCommand, cloneNode, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True) 74 | dispatcher.add_handler(clone_handler) 75 | -------------------------------------------------------------------------------- /bot/modules/eval.py: -------------------------------------------------------------------------------- 1 | import io 2 | import os 3 | # Common imports for eval 4 | import textwrap 5 | import traceback 6 | from contextlib import redirect_stdout 7 | from bot.helper.telegram_helper.filters import CustomFilters 8 | from bot.helper.telegram_helper.bot_commands import BotCommands 9 | from bot.helper.telegram_helper.message_utils import sendMessage 10 | from bot import LOGGER, dispatcher 11 | from telegram import ParseMode 12 | from telegram.ext import CommandHandler 13 | 14 | namespaces = {} 15 | 16 | 17 | def namespace_of(chat, update, bot): 18 | if chat not in namespaces: 19 | namespaces[chat] = { 20 | '__builtins__': globals()['__builtins__'], 21 | 'bot': bot, 22 | 'effective_message': update.effective_message, 23 | 'effective_user': update.effective_user, 24 | 'effective_chat': update.effective_chat, 25 | 'update': update 26 | } 27 | 28 | return namespaces[chat] 29 | 30 | 31 | def log_input(update): 32 | user = update.effective_user.id 33 | chat = update.effective_chat.id 34 | LOGGER.info( 35 | f"IN: {update.effective_message.text} (user={user}, chat={chat})") 36 | 37 | 38 | def send(msg, bot, update): 39 | if len(str(msg)) > 2000: 40 | with io.BytesIO(str.encode(msg)) as out_file: 41 | out_file.name = "output.txt" 42 | bot.send_document( 43 | chat_id=update.effective_chat.id, document=out_file) 44 | else: 45 | LOGGER.info(f"OUT: '{msg}'") 46 | bot.send_message( 47 | chat_id=update.effective_chat.id, 48 | text=f"`{msg}`", 49 | parse_mode=ParseMode.MARKDOWN) 50 | 51 | 52 | def evaluate(update, context): 53 | bot = context.bot 54 | send(do(eval, bot, update), bot, update) 55 | 56 | 57 | def execute(update, context): 58 | bot = context.bot 59 | send(do(exec, bot, update), bot, update) 60 | 61 | 62 | def cleanup_code(code): 63 | if 
code.startswith('```') and code.endswith('```'): 64 | return '\n'.join(code.split('\n')[1:-1]) 65 | return code.strip('` \n') 66 | 67 | 68 | def do(func, bot, update): 69 | log_input(update) 70 | content = update.message.text.split(' ', 1)[-1] 71 | body = cleanup_code(content) 72 | env = namespace_of(update.message.chat_id, update, bot) 73 | 74 | os.chdir(os.getcwd()) 75 | with open( 76 | os.path.join(os.getcwd(), 77 | 'bot/modules/temp.txt'), 78 | 'w') as temp: 79 | temp.write(body) 80 | 81 | stdout = io.StringIO() 82 | 83 | to_compile = f'def func():\n{textwrap.indent(body, " ")}' 84 | 85 | try: 86 | exec(to_compile, env) 87 | except Exception as e: 88 | return f'{e.__class__.__name__}: {e}' 89 | 90 | func = env['func'] 91 | 92 | try: 93 | with redirect_stdout(stdout): 94 | func_return = func() 95 | except Exception as e: 96 | value = stdout.getvalue() 97 | return f'{value}{traceback.format_exc()}' 98 | else: 99 | value = stdout.getvalue() 100 | result = None 101 | if func_return is None: 102 | if value: 103 | result = f'{value}' 104 | else: 105 | try: 106 | result = f'{repr(eval(body, env))}' 107 | except: 108 | pass 109 | else: 110 | result = f'{value}{func_return}' 111 | if result: 112 | return result 113 | 114 | 115 | def clear(update, context): 116 | bot = context.bot 117 | log_input(update) 118 | global namespaces 119 | if update.message.chat_id in namespaces: 120 | del namespaces[update.message.chat_id] 121 | send("Cleared locals.", bot, update) 122 | 123 | 124 | def exechelp(update, context): 125 | help_string = ''' 126 | Executor 127 | • /eval Run Python Code Line | Lines 128 | • /exec Run Commands In Exec 129 | • /clearlocals Cleared locals 130 | ''' 131 | sendMessage(help_string, context.bot, update) 132 | 133 | 134 | EVAL_HANDLER = CommandHandler(('eval'), evaluate, filters=CustomFilters.owner_filter, run_async=True) 135 | EXEC_HANDLER = CommandHandler(('exec'), execute, filters=CustomFilters.owner_filter, run_async=True) 136 | CLEAR_HANDLER = CommandHandler('clearlocals', clear, filters=CustomFilters.owner_filter, run_async=True) 137 | EXECHELP_HANDLER = CommandHandler(BotCommands.ExecHelpCommand, exechelp, filters=CustomFilters.owner_filter, run_async=True) 138 | 139 | dispatcher.add_handler(EVAL_HANDLER) 140 | dispatcher.add_handler(EXEC_HANDLER) 141 | dispatcher.add_handler(CLEAR_HANDLER) 142 | dispatcher.add_handler(EXECHELP_HANDLER) 143 | -------------------------------------------------------------------------------- /extract: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ $# -lt 1 ]; then 4 | echo "Usage: $(basename $0) FILES" 5 | exit 1 6 | fi 7 | 8 | extract() { 9 | arg="$1" 10 | cd "$(dirname "$arg")" || exit 11 | case "$arg" in 12 | *.tar.bz2) 13 | tar xjf "$arg" --one-top-level 14 | local code=$? 15 | ;; 16 | *.tar.gz) 17 | tar xzf "$arg" --one-top-level 18 | local code=$? 19 | ;; 20 | *.bz2) 21 | bunzip2 "$arg" 22 | local code=$? 23 | ;; 24 | *.gz) 25 | gunzip "$arg" 26 | local code=$? 27 | ;; 28 | *.tar) 29 | tar xf "$arg" --one-top-level 30 | local code=$? 31 | ;; 32 | *.tbz2) 33 | (tar xjf "$arg" --one-top-level) 34 | local code=$? 35 | ;; 36 | *.tgz) 37 | tar xzf "$arg" --one-top-level 38 | local code=$? 39 | ;; 40 | *.tar.xz) 41 | a_dir=$(expr "$arg" : '\(.*\).tar.xz') 42 | 7z x "$arg" -o"$a_dir" 43 | local code=$? 44 | ;; 45 | *.zip) 46 | a_dir=$(expr "$arg" : '\(.*\).zip') 47 | 7z x "$arg" -o"$a_dir" 48 | local code=$? 
49 | ;; 50 | *.7z) 51 | a_dir=$(expr "$arg" : '\(.*\).7z') 52 | 7z x "$arg" -o"$a_dir" 53 | local code=$? 54 | ;; 55 | *.Z) 56 | uncompress "$arg" 57 | local code=$? 58 | ;; 59 | *.rar) 60 | a_dir=$(expr "$arg" : '\(.*\).rar') 61 | mkdir "$a_dir" 62 | 7z x "$arg" -o"$a_dir" 63 | local code=$? 64 | ;; 65 | *.iso) 66 | a_dir=$(expr "$arg" : '\(.*\).iso') 67 | 7z x "$arg" -o"$a_dir" 68 | local code=$? 69 | ;; 70 | *.wim) 71 | a_dir=$(expr "$arg" : '\(.*\).wim') 72 | 7z x "$arg" -o"$a_dir" 73 | local code=$? 74 | ;; 75 | *.cab) 76 | a_dir=$(expr "$arg" : '\(.*\).cab') 77 | 7z x "$arg" -o"$a_dir" 78 | local code=$? 79 | ;; 80 | *.apm) 81 | a_dir=$(expr "$arg" : '\(.*\).apm') 82 | 7z x "$arg" -o"$a_dir" 83 | local code=$? 84 | ;; 85 | *.arj) 86 | a_dir=$(expr "$arg" : '\(.*\).arj') 87 | 7z x "$arg" -o"$a_dir" 88 | local code=$? 89 | ;; 90 | *.chm) 91 | a_dir=$(expr "$arg" : '\(.*\).chm') 92 | 7z x "$arg" -o"$a_dir" 93 | local code=$? 94 | ;; 95 | *.cpio) 96 | a_dir=$(expr "$arg" : '\(.*\).cpio') 97 | 7z x "$arg" -o"$a_dir" 98 | local code=$? 99 | ;; 100 | *.cramfs) 101 | a_dir=$(expr "$arg" : '\(.*\).cramfs') 102 | 7z x "$arg" -o"$a_dir" 103 | local code=$? 104 | ;; 105 | *.deb) 106 | a_dir=$(expr "$arg" : '\(.*\).deb') 107 | 7z x "$arg" -o"$a_dir" 108 | local code=$? 109 | ;; 110 | *.dmg) 111 | a_dir=$(expr "$arg" : '\(.*\).dmg') 112 | 7z x "$arg" -o"$a_dir" 113 | local code=$? 114 | ;; 115 | *.fat) 116 | a_dir=$(expr "$arg" : '\(.*\).fat') 117 | 7z x "$arg" -o"$a_dir" 118 | local code=$? 119 | ;; 120 | *.hfs) 121 | a_dir=$(expr "$arg" : '\(.*\).hfs') 122 | 7z x "$arg" -o"$a_dir" 123 | local code=$? 124 | ;; 125 | *.lzh) 126 | a_dir=$(expr "$arg" : '\(.*\).lzh') 127 | 7z x "$arg" -o"$a_dir" 128 | local code=$? 129 | ;; 130 | *.lzma) 131 | a_dir=$(expr "$arg" : '\(.*\).lzma') 132 | 7z x "$arg" -o"$a_dir" 133 | local code=$? 134 | ;; 135 | *.lzma2) 136 | a_dir=$(expr "$arg" : '\(.*\).lzma2') 137 | 7z x "$arg" -o"$a_dir" 138 | local code=$? 139 | ;; 140 | *.mbr) 141 | a_dir=$(expr "$arg" : '\(.*\).mbr') 142 | 7z x "$arg" -o"$a_dir" 143 | local code=$? 144 | ;; 145 | *.msi) 146 | a_dir=$(expr "$arg" : '\(.*\).msi') 147 | 7z x "$arg" -o"$a_dir" 148 | local code=$? 149 | ;; 150 | *.mslz) 151 | a_dir=$(expr "$arg" : '\(.*\).mslz') 152 | 7z x "$arg" -o"$a_dir" 153 | local code=$? 154 | ;; 155 | *.nsis) 156 | a_dir=$(expr "$arg" : '\(.*\).nsis') 157 | 7z x "$arg" -o"$a_dir" 158 | local code=$? 159 | ;; 160 | *.ntfs) 161 | a_dir=$(expr "$arg" : '\(.*\).ntfs') 162 | 7z x "$arg" -o"$a_dir" 163 | local code=$? 164 | ;; 165 | *.rpm) 166 | a_dir=$(expr "$arg" : '\(.*\).rpm') 167 | 7z x "$arg" -o"$a_dir" 168 | local code=$? 169 | ;; 170 | *.squashfs) 171 | a_dir=$(expr "$arg" : '\(.*\).squashfs') 172 | 7z x "$arg" -o"$a_dir" 173 | local code=$? 174 | ;; 175 | *.udf) 176 | a_dir=$(expr "$arg" : '\(.*\).udf') 177 | 7z x "$arg" -o"$a_dir" 178 | local code=$? 179 | ;; 180 | *.vhd) 181 | a_dir=$(expr "$arg" : '\(.*\).vhd') 182 | 7z x "$arg" -o"$a_dir" 183 | local code=$? 184 | ;; 185 | *.xar) 186 | a_dir=$(expr "$arg" : '\(.*\).xar') 187 | 7z x "$arg" -o"$a_dir" 188 | local code=$? 189 | ;; 190 | *) 191 | echo "'$arg' cannot be extracted via extract()" 1>&2 192 | exit 1 193 | ;; 194 | esac 195 | cd - || exit $? 
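# Exit with the status code captured from the archive tool above, so the caller
# can tell whether the extraction actually succeeded.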
196 | exit $code 197 | } 198 | 199 | extract "$1" 200 | -------------------------------------------------------------------------------- /pextract: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ $# -lt 1 ]; then 4 | echo "Usage: $(basename $0) FILES" 5 | exit 1 6 | fi 7 | 8 | extract() { 9 | arg="$1" 10 | pswd="$2" 11 | cd "$(dirname "$arg")" || exit 12 | case "$arg" in 13 | *.tar.bz2) 14 | tar xjf "$arg" --one-top-level 15 | local code=$? 16 | ;; 17 | *.tar.gz) 18 | tar xzf "$arg" --one-top-level 19 | local code=$? 20 | ;; 21 | *.bz2) 22 | bunzip2 "$arg" 23 | local code=$? 24 | ;; 25 | *.gz) 26 | gunzip "$arg" 27 | local code=$? 28 | ;; 29 | *.tar) 30 | tar xf "$arg" --one-top-level 31 | local code=$? 32 | ;; 33 | *.tbz2) 34 | (tar xjf "$arg" --one-top-level) 35 | local code=$? 36 | ;; 37 | *.tgz) 38 | tar xzf "$arg" --one-top-level 39 | local code=$? 40 | ;; 41 | *.tar.xz) 42 | a_dir=$(expr "$arg" : '\(.*\).tar.xz') 43 | 7z x "$arg" -o"$a_dir" -p"$pswd" 44 | local code=$? 45 | ;; 46 | *.zip) 47 | a_dir=$(expr "$arg" : '\(.*\).zip') 48 | 7z x "$arg" -o"$a_dir" -p"$pswd" 49 | local code=$? 50 | ;; 51 | *.7z) 52 | a_dir=$(expr "$arg" : '\(.*\).7z') 53 | 7z x "$arg" -o"$a_dir" -p"$pswd" 54 | local code=$? 55 | ;; 56 | *.Z) 57 | uncompress "$arg" 58 | local code=$? 59 | ;; 60 | *.rar) 61 | a_dir=$(expr "$arg" : '\(.*\).rar') 62 | mkdir "$a_dir" 63 | 7z x "$arg" -o"$a_dir" -p"$pswd" 64 | local code=$? 65 | ;; 66 | *.iso) 67 | a_dir=$(expr "$arg" : '\(.*\).iso') 68 | 7z x "$arg" -o"$a_dir" -p"$pswd" 69 | local code=$? 70 | ;; 71 | *.wim) 72 | a_dir=$(expr "$arg" : '\(.*\).wim') 73 | 7z x "$arg" -o"$a_dir" -p"$pswd" 74 | local code=$? 75 | ;; 76 | *.cab) 77 | a_dir=$(expr "$arg" : '\(.*\).cab') 78 | 7z x "$arg" -o"$a_dir" -p"$pswd" 79 | local code=$? 80 | ;; 81 | *.apm) 82 | a_dir=$(expr "$arg" : '\(.*\).apm') 83 | 7z x "$arg" -o"$a_dir" -p"$pswd" 84 | local code=$? 85 | ;; 86 | *.arj) 87 | a_dir=$(expr "$arg" : '\(.*\).arj') 88 | 7z x "$arg" -o"$a_dir" -p"$pswd" 89 | local code=$? 90 | ;; 91 | *.chm) 92 | a_dir=$(expr "$arg" : '\(.*\).chm') 93 | 7z x "$arg" -o"$a_dir" -p"$pswd" 94 | local code=$? 95 | ;; 96 | *.cpio) 97 | a_dir=$(expr "$arg" : '\(.*\).cpio') 98 | 7z x "$arg" -o"$a_dir" -p"$pswd" 99 | local code=$? 100 | ;; 101 | *.cramfs) 102 | a_dir=$(expr "$arg" : '\(.*\).cramfs') 103 | 7z x "$arg" -o"$a_dir" -p"$pswd" 104 | local code=$? 105 | ;; 106 | *.deb) 107 | a_dir=$(expr "$arg" : '\(.*\).deb') 108 | 7z x "$arg" -o"$a_dir" -p"$pswd" 109 | local code=$? 110 | ;; 111 | *.dmg) 112 | a_dir=$(expr "$arg" : '\(.*\).dmg') 113 | 7z x "$arg" -o"$a_dir" -p"$pswd" 114 | local code=$? 115 | ;; 116 | *.fat) 117 | a_dir=$(expr "$arg" : '\(.*\).fat') 118 | 7z x "$arg" -o"$a_dir" -p"$pswd" 119 | local code=$? 120 | ;; 121 | *.hfs) 122 | a_dir=$(expr "$arg" : '\(.*\).hfs') 123 | 7z x "$arg" -o"$a_dir" -p"$pswd" 124 | local code=$? 125 | ;; 126 | *.lzh) 127 | a_dir=$(expr "$arg" : '\(.*\).lzh') 128 | 7z x "$arg" -o"$a_dir" -p"$pswd" 129 | local code=$? 130 | ;; 131 | *.lzma) 132 | a_dir=$(expr "$arg" : '\(.*\).lzma') 133 | 7z x "$arg" -o"$a_dir" -p"$pswd" 134 | local code=$? 135 | ;; 136 | *.lzma2) 137 | a_dir=$(expr "$arg" : '\(.*\).lzma2') 138 | 7z x "$arg" -o"$a_dir" -p"$pswd" 139 | local code=$? 140 | ;; 141 | *.mbr) 142 | a_dir=$(expr "$arg" : '\(.*\).mbr') 143 | 7z x "$arg" -o"$a_dir" -p"$pswd" 144 | local code=$? 145 | ;; 146 | *.msi) 147 | a_dir=$(expr "$arg" : '\(.*\).msi') 148 | 7z x "$arg" -o"$a_dir" -p"$pswd" 149 | local code=$? 
150 | ;; 151 | *.mslz) 152 | a_dir=$(expr "$arg" : '\(.*\).mslz') 153 | 7z x "$arg" -o"$a_dir" -p"$pswd" 154 | local code=$? 155 | ;; 156 | *.nsis) 157 | a_dir=$(expr "$arg" : '\(.*\).nsis') 158 | 7z x "$arg" -o"$a_dir" -p"$pswd" 159 | local code=$? 160 | ;; 161 | *.ntfs) 162 | a_dir=$(expr "$arg" : '\(.*\).ntfs') 163 | 7z x "$arg" -o"$a_dir" -p"$pswd" 164 | local code=$? 165 | ;; 166 | *.rpm) 167 | a_dir=$(expr "$arg" : '\(.*\).rpm') 168 | 7z x "$arg" -o"$a_dir" -p"$pswd" 169 | local code=$? 170 | ;; 171 | *.squashfs) 172 | a_dir=$(expr "$arg" : '\(.*\).squashfs') 173 | 7z x "$arg" -o"$a_dir" -p"$pswd" 174 | local code=$? 175 | ;; 176 | *.udf) 177 | a_dir=$(expr "$arg" : '\(.*\).udf') 178 | 7z x "$arg" -o"$a_dir" -p"$pswd" 179 | local code=$? 180 | ;; 181 | *.vhd) 182 | a_dir=$(expr "$arg" : '\(.*\).vhd') 183 | 7z x "$arg" -o"$a_dir" -p"$pswd" 184 | local code=$? 185 | ;; 186 | *.xar) 187 | a_dir=$(expr "$arg" : '\(.*\).xar') 188 | 7z x "$arg" -o"$a_dir" -p"$pswd" 189 | local code=$? 190 | ;; 191 | *) 192 | echo "'$arg' cannot be extracted via extract()" 1>&2 193 | exit 1 194 | ;; 195 | esac 196 | cd - || exit $? 197 | exit $code 198 | } 199 | 200 | extract "$1" "$2" 201 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/download_utils/telegram_downloader.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import threading 3 | import time 4 | from bot import LOGGER, download_dict, download_dict_lock, app, STOP_DUPLICATE 5 | from .download_helper import DownloadHelper 6 | from ..status_utils.telegram_download_status import TelegramDownloadStatus 7 | from bot.helper.telegram_helper.message_utils import sendMarkup, sendStatusMessage 8 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper 9 | 10 | global_lock = threading.Lock() 11 | GLOBAL_GID = set() 12 | logging.getLogger("pyrogram").setLevel(logging.WARNING) 13 | 14 | 15 | class TelegramDownloadHelper(DownloadHelper): 16 | def __init__(self, listener): 17 | super().__init__() 18 | self.__listener = listener 19 | self.__resource_lock = threading.RLock() 20 | self.__name = "" 21 | self.__start_time = time.time() 22 | self.__gid = "" 23 | self._bot = app 24 | self.__is_cancelled = False 25 | 26 | @property 27 | def gid(self): 28 | with self.__resource_lock: 29 | return self.__gid 30 | 31 | @property 32 | def download_speed(self): 33 | with self.__resource_lock: 34 | return self.downloaded_bytes / (time.time() - self.__start_time) 35 | 36 | def __onDownloadStart(self, name, size, file_id): 37 | with download_dict_lock: 38 | download_dict[self.__listener.uid] = TelegramDownloadStatus(self, self.__listener) 39 | with global_lock: 40 | GLOBAL_GID.add(file_id) 41 | with self.__resource_lock: 42 | self.name = name 43 | self.size = size 44 | self.__gid = file_id 45 | self.__listener.onDownloadStarted() 46 | 47 | def __onDownloadProgress(self, current, total): 48 | if self.__is_cancelled: 49 | self.__onDownloadError('Cancelled by user!') 50 | self._bot.stop_transmission() 51 | return 52 | with self.__resource_lock: 53 | self.downloaded_bytes = current 54 | try: 55 | self.progress = current / self.size * 100 56 | except ZeroDivisionError: 57 | self.progress = 0 58 | 59 | def __onDownloadError(self, error): 60 | with global_lock: 61 | try: 62 | GLOBAL_GID.remove(self.gid) 63 | except KeyError: 64 | pass 65 | self.__listener.onDownloadError(error) 66 | 67 | def __onDownloadComplete(self): 68 | with global_lock: 69 | 
GLOBAL_GID.remove(self.gid) 70 | self.__listener.onDownloadComplete() 71 | 72 | def __download(self, message, path): 73 | download = self._bot.download_media( 74 | message, 75 | progress = self.__onDownloadProgress, 76 | file_name = path 77 | ) 78 | if download is not None: 79 | self.__onDownloadComplete() 80 | else: 81 | if not self.__is_cancelled: 82 | self.__onDownloadError('Internal error occurred') 83 | 84 | def add_download(self, message, path, filename): 85 | _message = self._bot.get_messages(message.chat.id, reply_to_message_ids=message.message_id) 86 | media = None 87 | media_array = [_message.document, _message.video, _message.audio] 88 | for i in media_array: 89 | if i is not None: 90 | media = i 91 | break 92 | if media is not None: 93 | with global_lock: 94 | # For avoiding locking the thread lock for long time unnecessarily 95 | download = media.file_id not in GLOBAL_GID 96 | if filename == "": 97 | name = media.file_name 98 | else: 99 | name = filename 100 | path = path + name 101 | 102 | if download: 103 | if STOP_DUPLICATE: 104 | LOGGER.info(f"Checking File/Folder if already in Drive...") 105 | if self.__listener.isTar: 106 | name = name + ".tar" 107 | if self.__listener.extract: 108 | smsg = None 109 | else: 110 | gd = GoogleDriveHelper() 111 | smsg, button = gd.drive_list(name) 112 | if smsg: 113 | sendMarkup("File/Folder is already available in Drive.\nHere are the search results:", self.__listener.bot, self.__listener.update, button) 114 | return 115 | sendStatusMessage(self.__listener.update, self.__listener.bot) 116 | self.__onDownloadStart(name, media.file_size, media.file_id) 117 | LOGGER.info(f'Downloading Telegram file with id: {media.file_id}') 118 | threading.Thread(target=self.__download, args=(_message, path)).start() 119 | else: 120 | self.__onDownloadError('File already being downloaded!') 121 | else: 122 | self.__onDownloadError('No document in the replied message') 123 | 124 | def cancel_download(self): 125 | LOGGER.info(f'Cancelling download on user request: {self.gid}') 126 | self.__is_cancelled = True 127 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/download_utils/aria2_download.py: -------------------------------------------------------------------------------- 1 | from bot import aria2, download_dict_lock, STOP_DUPLICATE, TORRENT_DIRECT_LIMIT, TAR_UNZIP_LIMIT 2 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper 3 | from bot.helper.ext_utils.bot_utils import * 4 | from .download_helper import DownloadHelper 5 | from bot.helper.mirror_utils.status_utils.aria_download_status import AriaDownloadStatus 6 | from bot.helper.telegram_helper.message_utils import * 7 | import threading 8 | from aria2p import API 9 | from time import sleep 10 | 11 | 12 | class AriaDownloadHelper(DownloadHelper): 13 | 14 | def __init__(self): 15 | super().__init__() 16 | 17 | @new_thread 18 | def __onDownloadStarted(self, api, gid): 19 | if STOP_DUPLICATE or TORRENT_DIRECT_LIMIT is not None or TAR_UNZIP_LIMIT is not None: 20 | sleep(2) 21 | dl = getDownloadByGid(gid) 22 | download = aria2.get_download(gid) 23 | if STOP_DUPLICATE and dl is not None: 24 | LOGGER.info(f"Checking File/Folder if already in Drive...") 25 | sname = aria2.get_download(gid).name 26 | if dl.getListener().isTar: 27 | sname = sname + ".tar" 28 | if dl.getListener().extract: 29 | smsg = None 30 | else: 31 | gdrive = GoogleDriveHelper(None) 32 | smsg, button = gdrive.drive_list(sname) 33 | if smsg: 34 | 
dl.getListener().onDownloadError(f'File/Folder already available in Drive.\n\n') 35 | aria2.remove([download], force=True) 36 | sendMarkup("Here are the search results:", dl.getListener().bot, dl.getListener().update, button) 37 | return 38 | if (TORRENT_DIRECT_LIMIT is not None or TAR_UNZIP_LIMIT is not None) and dl is not None: 39 | size = aria2.get_download(gid).total_length 40 | if dl.getListener().isTar or dl.getListener().extract: 41 | is_tar_ext = True 42 | mssg = f'Tar/Unzip limit is {TAR_UNZIP_LIMIT}' 43 | else: 44 | is_tar_ext = False 45 | mssg = f'Torrent/Direct limit is {TORRENT_DIRECT_LIMIT}' 46 | result = check_limit(size, TORRENT_DIRECT_LIMIT, TAR_UNZIP_LIMIT, is_tar_ext) 47 | if result: 48 | dl.getListener().onDownloadError(f'{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}') 49 | aria2.remove([download], force=True) 50 | return 51 | update_all_messages() 52 | 53 | def __onDownloadComplete(self, api: API, gid): 54 | dl = getDownloadByGid(gid) 55 | download = aria2.get_download(gid) 56 | if download.followed_by_ids: 57 | new_gid = download.followed_by_ids[0] 58 | new_download = aria2.get_download(new_gid) 59 | if dl is None: 60 | dl = getDownloadByGid(new_gid) 61 | with download_dict_lock: 62 | download_dict[dl.uid()] = AriaDownloadStatus(new_gid, dl.getListener()) 63 | if new_download.is_torrent: 64 | download_dict[dl.uid()].is_torrent = True 65 | update_all_messages() 66 | LOGGER.info(f'Changed gid from {gid} to {new_gid}') 67 | else: 68 | if dl: 69 | threading.Thread(target=dl.getListener().onDownloadComplete).start() 70 | 71 | @new_thread 72 | def __onDownloadStopped(self, api, gid): 73 | sleep(4) 74 | dl = getDownloadByGid(gid) 75 | if dl: 76 | dl.getListener().onDownloadError('★ 𝗠𝗔𝗚𝗡𝗘𝗧/𝗧𝗢𝗥𝗥𝗘𝗡𝗧 𝗟𝗜𝗡𝗞 𝗜𝗦 𝗗𝗘𝗔𝗗 ❌ ★') 77 | 78 | @new_thread 79 | def __onDownloadError(self, api, gid): 80 | LOGGER.info(f"onDownloadError: {gid}") 81 | sleep(0.5) # sleep for split second to ensure proper dl gid update from onDownloadComplete 82 | dl = getDownloadByGid(gid) 83 | download = aria2.get_download(gid) 84 | error = download.error_message 85 | LOGGER.info(f"Download Error: {error}") 86 | if dl: 87 | dl.getListener().onDownloadError(error) 88 | 89 | def start_listener(self): 90 | aria2.listen_to_notifications(threaded=True, on_download_start=self.__onDownloadStarted, 91 | on_download_error=self.__onDownloadError, 92 | on_download_stop=self.__onDownloadStopped, 93 | on_download_complete=self.__onDownloadComplete, 94 | timeout=1) 95 | 96 | def add_download(self, link: str, path, listener, filename): 97 | if is_magnet(link): 98 | download = aria2.add_magnet(link, {'dir': path, 'out': filename}) 99 | else: 100 | download = aria2.add_uris([link], {'dir': path, 'out': filename}) 101 | if download.error_message: # no need to proceed further at this point 102 | listener.onDownloadError(download.error_message) 103 | return 104 | with download_dict_lock: 105 | download_dict[listener.uid] = AriaDownloadStatus(download.gid, listener) 106 | LOGGER.info(f"Started: {download.gid} DIR:{download.dir} ") 107 | -------------------------------------------------------------------------------- /nodes.py: -------------------------------------------------------------------------------- 1 | from anytree import NodeMixin, RenderTree, PreOrderIter 2 | import qbittorrentapi as qba 3 | 4 | SIZE_UNITS = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] 5 | 6 | class TorNode(NodeMixin): 7 | def __init__(self, name, is_folder=False, is_file=False, parent=None, progress=None, size=None, priority=None, file_id=None): 8 
| super().__init__() 9 | self.name = name 10 | self.is_folder = is_folder 11 | self.is_file = is_file 12 | 13 | if parent is not None: 14 | self.parent = parent 15 | if progress is not None: 16 | self.progress = progress 17 | if size is not None: 18 | self.size = size 19 | if priority is not None: 20 | self.priority = priority 21 | if file_id is not None: 22 | self.file_id = file_id 23 | 24 | 25 | def get_folders(path): 26 | path_seperator = "/" 27 | folders = path.split(path_seperator) 28 | return folders 29 | 30 | 31 | def make_tree(res): 32 | """This function takes the list of all the torrent files. The files are name hierarchically. 33 | Felt a need to document to save time. 34 | 35 | Args: 36 | res (list): Torrent files list. 37 | 38 | Returns: 39 | TorNode: Parent node of the tree constructed and can be used further. 40 | """ 41 | parent = TorNode("Torrent") 42 | #nodes = dict() 43 | l = 0 44 | 45 | for i in res: 46 | # Get the hierarchy of the folders by splitting based on '/' 47 | folders = get_folders(i.name) 48 | # Check if the file is alone for if its in folder 49 | if len(folders) > 1: 50 | # Enter here if in folder 51 | 52 | # Set the parent 53 | previous_node = parent 54 | 55 | # Traverse till second last assuming the last is a file. 56 | for j in range(len(folders)-1): 57 | current_node = None 58 | 59 | if previous_node is not None: 60 | # As we are traversing the folder from top to bottom we are searching 61 | # the first folder (folders list) under the parent node in first iteration. 62 | # If the node is found then it becomes the current node else the current node 63 | # is left None. 64 | for k in previous_node.children: 65 | if k.name == folders[j]: 66 | current_node = k 67 | break 68 | else: 69 | # think its useless afterall 70 | for k in parent.children: 71 | if k.name == folders[j]: 72 | current_node = k 73 | break 74 | 75 | # if the node is not found then create the folder node 76 | # if the node is found then use it as base for the next 77 | if current_node is None: 78 | previous_node = TorNode(folders[j],parent=previous_node,is_folder=True) 79 | else: 80 | previous_node = current_node 81 | # at this point the previous_node will contain the deepest folder in it so add the file to it 82 | TorNode(folders[-1],is_file=True,parent=previous_node,progress=i.progress,size=i.size,priority=i.priority,file_id=l) 83 | l += 1 84 | else: 85 | # at the file to the parent if no folders are there 86 | TorNode(folders[-1],is_file=True,parent=parent,progress=i.progress,size=i.size,priority=i.priority,file_id=l) 87 | l += 1 88 | 89 | 90 | return parent 91 | 92 | 93 | def print_tree(parent): 94 | for pre, _, node in RenderTree(parent): 95 | treestr = u"%s%s" % (pre, node.name) 96 | print(treestr.ljust(8), node.is_folder, node.is_file) 97 | 98 | 99 | def create_list(par, msg): 100 | if par.name != ".unwanted": 101 | msg[0] += "" 125 | 126 | def get_readable_file_size(size_in_bytes) -> str: 127 | if size_in_bytes is None: 128 | return '0B' 129 | index = 0 130 | while size_in_bytes >= 1024: 131 | size_in_bytes /= 1024 132 | index += 1 133 | try: 134 | return f'{round(size_in_bytes, 2)}{SIZE_UNITS[index]}' 135 | except IndexError: 136 | return 'File too large' 137 | -------------------------------------------------------------------------------- /bot/helper/ext_utils/fs_utils.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from bot import aria2, LOGGER, DOWNLOAD_DIR, get_client 3 | import shutil 4 | import os 5 | import pathlib 6 | 
import magic 7 | import tarfile 8 | from .exceptions import NotSupportedExtractionArchive 9 | 10 | 11 | def clean_download(path: str): 12 | if os.path.exists(path): 13 | LOGGER.info(f"Cleaning download: {path}") 14 | shutil.rmtree(path) 15 | 16 | 17 | def start_cleanup(): 18 | try: 19 | shutil.rmtree(DOWNLOAD_DIR) 20 | except FileNotFoundError: 21 | pass 22 | 23 | 24 | def clean_all(): 25 | aria2.remove_all(True) 26 | get_client().torrents_delete(torrent_hashes="all", delete_files=True) 27 | try: 28 | shutil.rmtree(DOWNLOAD_DIR) 29 | except FileNotFoundError: 30 | pass 31 | 32 | 33 | def exit_clean_up(signal, frame): 34 | try: 35 | LOGGER.info("Please wait, while we clean up the downloads and stop running downloads") 36 | clean_all() 37 | sys.exit(0) 38 | except KeyboardInterrupt: 39 | LOGGER.warning("Force Exiting before the cleanup finishes!") 40 | sys.exit(1) 41 | 42 | 43 | def get_path_size(path): 44 | if os.path.isfile(path): 45 | return os.path.getsize(path) 46 | total_size = 0 47 | for root, dirs, files in os.walk(path): 48 | for f in files: 49 | abs_path = os.path.join(root, f) 50 | total_size += os.path.getsize(abs_path) 51 | return total_size 52 | 53 | 54 | def tar(org_path): 55 | tar_path = org_path + ".tar" 56 | #path = pathlib.PurePath(org_path) 57 | LOGGER.info(f'Tar: orig_path: {org_path}, tar_path: {tar_path}') 58 | tar = tarfile.open(tar_path, "w") 59 | tar.add(org_path, arcname=os.path.basename(org_path)) 60 | tar.close() 61 | return tar_path 62 | 63 | 64 | def zip(name, path): 65 | root_dir = os.path.dirname(path) 66 | base_dir = os.path.basename(path.strip(os.sep)) 67 | zip_file = shutil.make_archive(name, "zip", root_dir, base_dir) 68 | zip_path = shutil.move(zip_file, root_dir) 69 | LOGGER.info(f"Zip: {zip_path}") 70 | return zip_path 71 | 72 | 73 | def get_base_name(orig_path: str): 74 | if orig_path.endswith(".tar.bz2"): 75 | return orig_path.replace(".tar.bz2", "") 76 | elif orig_path.endswith(".tar.gz"): 77 | return orig_path.replace(".tar.gz", "") 78 | elif orig_path.endswith(".bz2"): 79 | return orig_path.replace(".bz2", "") 80 | elif orig_path.endswith(".gz"): 81 | return orig_path.replace(".gz", "") 82 | elif orig_path.endswith(".tar.xz"): 83 | return orig_path.replace(".tar.xz", "") 84 | elif orig_path.endswith(".tar"): 85 | return orig_path.replace(".tar", "") 86 | elif orig_path.endswith(".tbz2"): 87 | return orig_path.replace("tbz2", "") 88 | elif orig_path.endswith(".tgz"): 89 | return orig_path.replace(".tgz", "") 90 | elif orig_path.endswith(".zip"): 91 | return orig_path.replace(".zip", "") 92 | elif orig_path.endswith(".7z"): 93 | return orig_path.replace(".7z", "") 94 | elif orig_path.endswith(".Z"): 95 | return orig_path.replace(".Z", "") 96 | elif orig_path.endswith(".rar"): 97 | return orig_path.replace(".rar", "") 98 | elif orig_path.endswith(".iso"): 99 | return orig_path.replace(".iso", "") 100 | elif orig_path.endswith(".wim"): 101 | return orig_path.replace(".wim", "") 102 | elif orig_path.endswith(".cab"): 103 | return orig_path.replace(".cab", "") 104 | elif orig_path.endswith(".apm"): 105 | return orig_path.replace(".apm", "") 106 | elif orig_path.endswith(".arj"): 107 | return orig_path.replace(".arj", "") 108 | elif orig_path.endswith(".chm"): 109 | return orig_path.replace(".chm", "") 110 | elif orig_path.endswith(".cpio"): 111 | return orig_path.replace(".cpio", "") 112 | elif orig_path.endswith(".cramfs"): 113 | return orig_path.replace(".cramfs", "") 114 | elif orig_path.endswith(".deb"): 115 | return orig_path.replace(".deb", "") 
116 | elif orig_path.endswith(".dmg"): 117 | return orig_path.replace(".dmg", "") 118 | elif orig_path.endswith(".fat"): 119 | return orig_path.replace(".fat", "") 120 | elif orig_path.endswith(".hfs"): 121 | return orig_path.replace(".hfs", "") 122 | elif orig_path.endswith(".lzh"): 123 | return orig_path.replace(".lzh", "") 124 | elif orig_path.endswith(".lzma"): 125 | return orig_path.replace(".lzma", "") 126 | elif orig_path.endswith(".lzma2"): 127 | return orig_path.replace(".lzma2", "") 128 | elif orig_path.endswith(".mbr"): 129 | return orig_path.replace(".mbr", "") 130 | elif orig_path.endswith(".msi"): 131 | return orig_path.replace(".msi", "") 132 | elif orig_path.endswith(".mslz"): 133 | return orig_path.replace(".mslz", "") 134 | elif orig_path.endswith(".nsis"): 135 | return orig_path.replace(".nsis", "") 136 | elif orig_path.endswith(".ntfs"): 137 | return orig_path.replace(".ntfs", "") 138 | elif orig_path.endswith(".rpm"): 139 | return orig_path.replace(".rpm", "") 140 | elif orig_path.endswith(".squashfs"): 141 | return orig_path.replace(".squashfs", "") 142 | elif orig_path.endswith(".udf"): 143 | return orig_path.replace(".udf", "") 144 | elif orig_path.endswith(".vhd"): 145 | return orig_path.replace(".vhd", "") 146 | elif orig_path.endswith(".xar"): 147 | return orig_path.replace(".xar", "") 148 | else: 149 | raise NotSupportedExtractionArchive('File format not supported for extraction') 150 | 151 | 152 | def get_mime_type(file_path): 153 | mime = magic.Magic(mime=True) 154 | mime_type = mime.from_file(file_path) 155 | mime_type = mime_type if mime_type else "text/plain" 156 | return mime_type 157 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/download_utils/youtube_dl_download_helper.py: -------------------------------------------------------------------------------- 1 | from .download_helper import DownloadHelper 2 | import time 3 | from yt_dlp import YoutubeDL, DownloadError 4 | from bot import download_dict_lock, download_dict 5 | from ..status_utils.youtube_dl_download_status import YoutubeDLDownloadStatus 6 | import logging 7 | import re 8 | import threading 9 | 10 | LOGGER = logging.getLogger(__name__) 11 | 12 | 13 | class MyLogger: 14 | def __init__(self, obj): 15 | self.obj = obj 16 | 17 | def debug(self, msg): 18 | LOGGER.debug(msg) 19 | match = re.search(r'.ffmpeg..Merging formats into..(.*?).$', msg) 20 | if match and not self.obj.is_playlist: 21 | newname = match.group(1) 22 | newname = newname.split("/") 23 | newname = newname[-1] 24 | self.obj.name = newname 25 | 26 | @staticmethod 27 | def warning(msg): 28 | LOGGER.warning(msg) 29 | 30 | @staticmethod 31 | def error(msg): 32 | LOGGER.error(msg) 33 | 34 | 35 | class YoutubeDLHelper(DownloadHelper): 36 | def __init__(self, listener): 37 | super().__init__() 38 | self.name = "" 39 | self.__start_time = time.time() 40 | self.__listener = listener 41 | self.__gid = "" 42 | self.opts = { 43 | 'progress_hooks': [self.__onDownloadProgress], 44 | 'logger': MyLogger(self), 45 | 'usenetrc': True 46 | } 47 | self.__download_speed = 0 48 | self.downloaded_bytes = 0 49 | self.size = 0 50 | self.is_playlist = False 51 | self.last_downloaded = 0 52 | self.is_cancelled = False 53 | self.vid_id = '' 54 | self.__resource_lock = threading.RLock() 55 | 56 | @property 57 | def download_speed(self): 58 | with self.__resource_lock: 59 | return self.__download_speed 60 | 61 | @property 62 | def gid(self): 63 | with self.__resource_lock: 64 | return self.__gid 65 | 66 
| def __onDownloadProgress(self, d): 67 | if self.is_cancelled: 68 | raise ValueError("Cancelling Download..") 69 | if d['status'] == "finished": 70 | if self.is_playlist: 71 | self.last_downloaded = 0 72 | elif d['status'] == "downloading": 73 | with self.__resource_lock: 74 | self.__download_speed = d['speed'] 75 | try: 76 | tbyte = d['total_bytes'] 77 | except KeyError: 78 | tbyte = d['total_bytes_estimate'] 79 | if self.is_playlist: 80 | progress = d['downloaded_bytes'] / tbyte 81 | chunk_size = d['downloaded_bytes'] - self.last_downloaded 82 | self.last_downloaded = tbyte * progress 83 | self.downloaded_bytes += chunk_size 84 | else: 85 | self.size = tbyte 86 | self.downloaded_bytes = d['downloaded_bytes'] 87 | try: 88 | self.progress = (self.downloaded_bytes / self.size) * 100 89 | except ZeroDivisionError: 90 | pass 91 | 92 | def __onDownloadStart(self): 93 | with download_dict_lock: 94 | download_dict[self.__listener.uid] = YoutubeDLDownloadStatus(self, self.__listener) 95 | 96 | def __onDownloadComplete(self): 97 | self.__listener.onDownloadComplete() 98 | 99 | def onDownloadError(self, error): 100 | self.__listener.onDownloadError(error) 101 | 102 | def extractMetaData(self, link, qual, name): 103 | if "hotstar" in link or "sonyliv" in link: 104 | self.opts['geo_bypass_country'] = 'IN' 105 | 106 | with YoutubeDL(self.opts) as ydl: 107 | try: 108 | result = ydl.extract_info(link, download=False) 109 | name = ydl.prepare_filename(result) if name == "" else name 110 | if qual == "audio": 111 | name = name.replace(".mp4", ".mp3").replace(".webm", ".mp3") 112 | except DownloadError as e: 113 | self.onDownloadError(str(e)) 114 | return 115 | if result.get('direct'): 116 | return None 117 | if 'entries' in result: 118 | video = result['entries'][0] 119 | for v in result['entries']: 120 | if v and v.get('filesize'): 121 | self.size += float(v['filesize']) 122 | # For playlists, ydl.prepare-filename returns the following format: -.NA 123 | self.name = name.split(f"-{result['id']}")[0] 124 | self.vid_id = video.get('id') 125 | self.is_playlist = True 126 | else: 127 | video = result 128 | if video.get('filesize'): 129 | self.size = float(video.get('filesize')) 130 | self.name = name 131 | self.vid_id = video.get('id') 132 | return video 133 | 134 | def __download(self, link): 135 | try: 136 | with YoutubeDL(self.opts) as ydl: 137 | try: 138 | ydl.download([link]) 139 | except DownloadError as e: 140 | self.onDownloadError(str(e)) 141 | return 142 | self.__onDownloadComplete() 143 | except ValueError: 144 | LOGGER.info("Download Cancelled by User!") 145 | self.onDownloadError("Download Cancelled by User!") 146 | 147 | def add_download(self, link, path, qual, name): 148 | pattern = '^.*(youtu\.be\/|youtube.com\/)(playlist?)' 149 | if re.match(pattern, link): 150 | self.opts['ignoreerrors'] = True 151 | self.__onDownloadStart() 152 | self.extractMetaData(link, qual, name) 153 | LOGGER.info(f"Downloading with YT-DLP: {link}") 154 | self.__gid = f"{self.vid_id}{self.__listener.uid}" 155 | if qual == "audio": 156 | self.opts['format'] = 'bestaudio/best' 157 | self.opts['postprocessors'] = [{'key': 'FFmpegExtractAudio','preferredcodec': 'mp3','preferredquality': '320',}] 158 | else: 159 | self.opts['format'] = qual 160 | if not self.is_playlist: 161 | self.opts['outtmpl'] = f"{path}/{self.name}" 162 | else: 163 | self.opts['outtmpl'] = f"{path}/{self.name}/%(title)s.%(ext)s" 164 | self.__download(link) 165 | 166 | def cancel_download(self): 167 | self.is_cancelled = True 168 | 
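# Illustrative sketch (standalone; not part of the bot's code path): how add_download()
# above assembles yt-dlp options for an audio rip versus a resolution-capped download.
# The progress hook, custom logger and usenetrc option used by YoutubeDLHelper are
# omitted here, and the URL/paths are placeholders taken from the /watch help text.
from yt_dlp import YoutubeDL


def build_opts(qual: str, path: str, name: str) -> dict:
    opts = {}
    if qual == "audio":
        # Best available audio stream, re-encoded by ffmpeg to 320 kbps MP3.
        opts['format'] = 'bestaudio/best'
        opts['postprocessors'] = [{
            'key': 'FFmpegExtractAudio',
            'preferredcodec': 'mp3',
            'preferredquality': '320',
        }]
    else:
        # watch.py passes e.g. 'bestvideo[height<=720]+bestaudio/best[height<=720]'.
        opts['format'] = qual
    opts['outtmpl'] = f"{path}/{name}"
    return opts


if __name__ == '__main__':
    opts = build_opts('bestvideo[height<=720]+bestaudio/best[height<=720]',
                      '/tmp/downloads', '%(title)s.%(ext)s')
    with YoutubeDL(opts) as ydl:
        ydl.download(['https://youtu.be/Pk_TthHfLeE'])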
-------------------------------------------------------------------------------- /bot/helper/telegram_helper/message_utils.py: -------------------------------------------------------------------------------- 1 | from telegram import InlineKeyboardMarkup 2 | from telegram.message import Message 3 | from telegram.update import Update 4 | import psutil, shutil 5 | import time 6 | from bot import AUTO_DELETE_MESSAGE_DURATION, LOGGER, bot, \ 7 | status_reply_dict, status_reply_dict_lock, download_dict, download_dict_lock, botStartTime, Interval, DOWNLOAD_STATUS_UPDATE_INTERVAL 8 | from bot.helper.ext_utils.bot_utils import get_readable_message, get_readable_file_size, get_readable_time, MirrorStatus, setInterval 9 | from telegram.error import TimedOut, BadRequest 10 | 11 | 12 | def sendMessage(text: str, bot, update: Update): 13 | try: 14 | return bot.send_message(update.message.chat_id, 15 | reply_to_message_id=update.message.message_id, 16 | text=text, allow_sending_without_reply=True, parse_mode='HTMl') 17 | except Exception as e: 18 | LOGGER.error(str(e)) 19 | def sendMarkup(text: str, bot, update: Update, reply_markup: InlineKeyboardMarkup): 20 | return bot.send_message(update.message.chat_id, 21 | reply_to_message_id=update.message.message_id, 22 | text=text, reply_markup=reply_markup, allow_sending_without_reply=True, parse_mode='HTMl') 23 | 24 | def editMessage(text: str, message: Message, reply_markup=None): 25 | try: 26 | bot.edit_message_text(text=text, message_id=message.message_id, 27 | chat_id=message.chat.id,reply_markup=reply_markup, 28 | parse_mode='HTMl') 29 | except Exception as e: 30 | LOGGER.error(str(e)) 31 | 32 | 33 | def deleteMessage(bot, message: Message): 34 | try: 35 | bot.delete_message(chat_id=message.chat.id, 36 | message_id=message.message_id) 37 | except Exception as e: 38 | LOGGER.error(str(e)) 39 | 40 | 41 | def sendLogFile(bot, update: Update): 42 | with open('log.txt', 'rb') as f: 43 | bot.send_document(document=f, filename=f.name, 44 | reply_to_message_id=update.message.message_id, 45 | chat_id=update.message.chat_id) 46 | 47 | 48 | def auto_delete_message(bot, cmd_message: Message, bot_message: Message): 49 | if AUTO_DELETE_MESSAGE_DURATION != -1: 50 | time.sleep(AUTO_DELETE_MESSAGE_DURATION) 51 | try: 52 | # Skip if None is passed meaning we don't want to delete bot xor cmd message 53 | deleteMessage(bot, cmd_message) 54 | deleteMessage(bot, bot_message) 55 | except AttributeError: 56 | pass 57 | 58 | 59 | def delete_all_messages(): 60 | with status_reply_dict_lock: 61 | for message in list(status_reply_dict.values()): 62 | try: 63 | deleteMessage(bot, message) 64 | del status_reply_dict[message.chat.id] 65 | except Exception as e: 66 | LOGGER.error(str(e)) 67 | 68 | 69 | def update_all_messages(): 70 | total, used, free = shutil.disk_usage('.') 71 | free = get_readable_file_size(free) 72 | currentTime = get_readable_time(time.time() - botStartTime) 73 | msg, buttons = get_readable_message() 74 | if msg is None: 75 | return 76 | msg += f"CPU: {psutil.cpu_percent()}%" \ 77 | f" RAM: {psutil.virtual_memory().percent}%" \ 78 | f" DISK: {psutil.disk_usage('/').percent}%" 79 | with download_dict_lock: 80 | dlspeed_bytes = 0 81 | uldl_bytes = 0 82 | for download in list(download_dict.values()): 83 | speedy = download.speed() 84 | if download.status() == MirrorStatus.STATUS_DOWNLOADING: 85 | if 'K' in speedy: 86 | dlspeed_bytes += float(speedy.split('K')[0]) * 1024 87 | elif 'M' in speedy: 88 | dlspeed_bytes += float(speedy.split('M')[0]) * 1048576 89 | if 
download.status() == MirrorStatus.STATUS_UPLOADING: 90 | if 'KB/s' in speedy: 91 | uldl_bytes += float(speedy.split('K')[0]) * 1024 92 | elif 'MB/s' in speedy: 93 | uldl_bytes += float(speedy.split('M')[0]) * 1048576 94 | dlspeed = get_readable_file_size(dlspeed_bytes) 95 | ulspeed = get_readable_file_size(uldl_bytes) 96 | msg += f"\nFREESPACE: {free} | BOTUPTIME: {currentTime}\nDL: {dlspeed}/s 🔻 | UL: {ulspeed}/s 🔺\n" 97 | with status_reply_dict_lock: 98 | for chat_id in list(status_reply_dict.keys()): 99 | if status_reply_dict[chat_id] and msg != status_reply_dict[chat_id].text: 100 | try: 101 | if buttons == "": 102 | editMessage(msg, status_reply_dict[chat_id]) 103 | else: 104 | editMessage(msg, status_reply_dict[chat_id], buttons) 105 | except Exception as e: 106 | LOGGER.error(str(e)) 107 | status_reply_dict[chat_id].text = msg 108 | 109 | 110 | def sendStatusMessage(msg, bot): 111 | if len(Interval) == 0: 112 | Interval.append(setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages)) 113 | total, used, free = shutil.disk_usage('.') 114 | free = get_readable_file_size(free) 115 | currentTime = get_readable_time(time.time() - botStartTime) 116 | progress, buttons = get_readable_message() 117 | if progress is None: 118 | progress, buttons = get_readable_message() 119 | progress += f"CPU: {psutil.cpu_percent()}%" \ 120 | f" RAM: {psutil.virtual_memory().percent}%" \ 121 | f" DISK: {psutil.disk_usage('/').percent}%" 122 | with download_dict_lock: 123 | dlspeed_bytes = 0 124 | uldl_bytes = 0 125 | for download in list(download_dict.values()): 126 | speedy = download.speed() 127 | if download.status() == MirrorStatus.STATUS_DOWNLOADING: 128 | if 'K' in speedy: 129 | dlspeed_bytes += float(speedy.split('K')[0]) * 1024 130 | elif 'M' in speedy: 131 | dlspeed_bytes += float(speedy.split('M')[0]) * 1048576 132 | if download.status() == MirrorStatus.STATUS_UPLOADING: 133 | if 'KB/s' in speedy: 134 | uldl_bytes += float(speedy.split('K')[0]) * 1024 135 | elif 'MB/s' in speedy: 136 | uldl_bytes += float(speedy.split('M')[0]) * 1048576 137 | dlspeed = get_readable_file_size(dlspeed_bytes) 138 | ulspeed = get_readable_file_size(uldl_bytes) 139 | progress += f"\nFREESPACE: {free} | BOTUPTIME: {currentTime}\nDL: {dlspeed}/s 🔻 | UL: {ulspeed}/s 🔺\n" 140 | with status_reply_dict_lock: 141 | if msg.message.chat.id in list(status_reply_dict.keys()): 142 | try: 143 | message = status_reply_dict[msg.message.chat.id] 144 | deleteMessage(bot, message) 145 | del status_reply_dict[msg.message.chat.id] 146 | except Exception as e: 147 | LOGGER.error(str(e)) 148 | del status_reply_dict[msg.message.chat.id] 149 | pass 150 | if buttons == "": 151 | message = sendMessage(progress, bot, msg) 152 | else: 153 | message = sendMarkup(progress, bot, msg, buttons) 154 | status_reply_dict[msg.message.chat.id] = message 155 | -------------------------------------------------------------------------------- /bot/modules/authorize.py: -------------------------------------------------------------------------------- 1 | from bot.helper.telegram_helper.message_utils import sendMessage 2 | from bot import AUTHORIZED_CHATS, SUDO_USERS, dispatcher, DB_URI 3 | from telegram.ext import CommandHandler 4 | from bot.helper.telegram_helper.filters import CustomFilters 5 | from telegram.ext import Filters 6 | from telegram import Update 7 | from bot.helper.telegram_helper.bot_commands import BotCommands 8 | from bot.helper.ext_utils.db_handler import DbManger 9 | 10 | 11 | def authorize(update, context): 12 | reply_message = None 
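# /authorize can target an explicit user ID argument, the current chat, or the author of a replied-to message; the branches below handle each case.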
13 | message_ = None 14 | reply_message = update.message.reply_to_message 15 | message_ = update.message.text.split(' ') 16 | if len(message_) == 2: 17 | user_id = int(message_[1]) 18 | if user_id not in AUTHORIZED_CHATS: 19 | if DB_URI is not None: 20 | msg = DbManger().db_auth(user_id) 21 | else: 22 | with open('authorized_chats.txt', 'a') as file: 23 | file.write(f'{user_id}\n') 24 | AUTHORIZED_CHATS.add(user_id) 25 | msg = 'User Authorized' 26 | else: 27 | msg = 'User Already Authorized' 28 | else: 29 | if reply_message is None: 30 | # Trying to authorize a chat 31 | chat_id = update.effective_chat.id 32 | if chat_id not in AUTHORIZED_CHATS: 33 | if DB_URI is not None: 34 | msg = DbManger().db_auth(chat_id) 35 | else: 36 | with open('authorized_chats.txt', 'a') as file: 37 | file.write(f'{chat_id}\n') 38 | AUTHORIZED_CHATS.add(chat_id) 39 | msg = 'Chat Authorized' 40 | else: 41 | msg = 'Chat Already Authorized' 42 | 43 | else: 44 | # Trying to authorize someone by replying 45 | user_id = reply_message.from_user.id 46 | if user_id not in AUTHORIZED_CHATS: 47 | if DB_URI is not None: 48 | msg = DbManger().db_auth(user_id) 49 | else: 50 | with open('authorized_chats.txt', 'a') as file: 51 | file.write(f'{user_id}\n') 52 | AUTHORIZED_CHATS.add(user_id) 53 | msg = 'User Authorized' 54 | else: 55 | msg = 'User Already Authorized' 56 | sendMessage(msg, context.bot, update) 57 | 58 | 59 | def unauthorize(update, context): 60 | reply_message = None 61 | message_ = None 62 | reply_message = update.message.reply_to_message 63 | message_ = update.message.text.split(' ') 64 | if len(message_) == 2: 65 | user_id = int(message_[1]) 66 | if user_id in AUTHORIZED_CHATS: 67 | if DB_URI is not None: 68 | msg = DbManger().db_unauth(user_id) 69 | else: 70 | AUTHORIZED_CHATS.remove(user_id) 71 | msg = 'User Unauthorized' 72 | else: 73 | msg = 'User Already Unauthorized' 74 | else: 75 | if reply_message is None: 76 | # Trying to unauthorize a chat 77 | chat_id = update.effective_chat.id 78 | if chat_id in AUTHORIZED_CHATS: 79 | if DB_URI is not None: 80 | msg = DbManger().db_unauth(chat_id) 81 | else: 82 | AUTHORIZED_CHATS.remove(chat_id) 83 | msg = 'Chat Unauthorized' 84 | else: 85 | msg = 'Chat Already Unauthorized' 86 | else: 87 | # Trying to authorize someone by replying 88 | user_id = reply_message.from_user.id 89 | if user_id in AUTHORIZED_CHATS: 90 | if DB_URI is not None: 91 | msg = DbManger().db_unauth(user_id) 92 | else: 93 | AUTHORIZED_CHATS.remove(user_id) 94 | msg = 'User Unauthorized' 95 | else: 96 | msg = 'User Already Unauthorized' 97 | with open('authorized_chats.txt', 'a') as file: 98 | file.truncate(0) 99 | for i in AUTHORIZED_CHATS: 100 | file.write(f'{i}\n') 101 | sendMessage(msg, context.bot, update) 102 | 103 | 104 | def addSudo(update, context): 105 | reply_message = None 106 | message_ = None 107 | reply_message = update.message.reply_to_message 108 | message_ = update.message.text.split(' ') 109 | if len(message_) == 2: 110 | user_id = int(message_[1]) 111 | if user_id not in SUDO_USERS: 112 | if DB_URI is not None: 113 | msg = DbManger().db_addsudo(user_id) 114 | else: 115 | with open('sudo_users.txt', 'a') as file: 116 | file.write(f'{user_id}\n') 117 | SUDO_USERS.add(user_id) 118 | msg = 'Promoted as Sudo' 119 | else: 120 | msg = 'Already Sudo' 121 | else: 122 | if reply_message is None: 123 | msg = "Give ID or Reply To message of whom you want to Promote" 124 | else: 125 | # Trying to authorize someone by replying 126 | user_id = reply_message.from_user.id 127 | if user_id not 
in SUDO_USERS: 128 | if DB_URI is not None: 129 | msg = DbManger().db_addsudo(user_id) 130 | else: 131 | with open('sudo_users.txt', 'a') as file: 132 | file.write(f'{user_id}\n') 133 | SUDO_USERS.add(user_id) 134 | msg = 'Promoted as Sudo' 135 | else: 136 | msg = 'Already Sudo' 137 | sendMessage(msg, context.bot, update) 138 | 139 | 140 | def removeSudo(update, context): 141 | reply_message = None 142 | message_ = None 143 | reply_message = update.message.reply_to_message 144 | message_ = update.message.text.split(' ') 145 | if len(message_) == 2: 146 | user_id = int(message_[1]) 147 | if user_id in SUDO_USERS: 148 | if DB_URI is not None: 149 | msg = DbManger().db_rmsudo(user_id) 150 | else: 151 | SUDO_USERS.remove(user_id) 152 | msg = 'Demoted' 153 | else: 154 | msg = 'Not a Sudo' 155 | else: 156 | if reply_message is None: 157 | msg = "Give ID or Reply To message of whom you want to remove from Sudo" 158 | else: 159 | user_id = reply_message.from_user.id 160 | if user_id in SUDO_USERS: 161 | if DB_URI is not None: 162 | msg = DbManger().db_rmsudo(user_id) 163 | else: 164 | SUDO_USERS.remove(user_id) 165 | msg = 'Demoted' 166 | else: 167 | msg = 'Not a Sudo' 168 | if DB_URI is None: 169 | with open('sudo_users.txt', 'a') as file: 170 | file.truncate(0) 171 | for i in SUDO_USERS: 172 | file.write(f'{i}\n') 173 | sendMessage(msg, context.bot, update) 174 | 175 | 176 | def sendAuthChats(update, context): 177 | user = sudo = '' 178 | user += '\n'.join(str(id) for id in AUTHORIZED_CHATS) 179 | sudo += '\n'.join(str(id) for id in SUDO_USERS) 180 | sendMessage(f'Authorized Chats\n{user}\nSudo Users\n{sudo}', context.bot, update) 181 | 182 | 183 | send_auth_handler = CommandHandler(command=BotCommands.AuthorizedUsersCommand, callback=sendAuthChats, 184 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True) 185 | authorize_handler = CommandHandler(command=BotCommands.AuthorizeCommand, callback=authorize, 186 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True) 187 | unauthorize_handler = CommandHandler(command=BotCommands.UnAuthorizeCommand, callback=unauthorize, 188 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True) 189 | addsudo_handler = CommandHandler(command=BotCommands.AddSudoCommand, callback=addSudo, 190 | filters=CustomFilters.owner_filter, run_async=True) 191 | removesudo_handler = CommandHandler(command=BotCommands.RmSudoCommand, callback=removeSudo, 192 | filters=CustomFilters.owner_filter, run_async=True) 193 | 194 | dispatcher.add_handler(send_auth_handler) 195 | dispatcher.add_handler(authorize_handler) 196 | dispatcher.add_handler(unauthorize_handler) 197 | dispatcher.add_handler(addsudo_handler) 198 | dispatcher.add_handler(removesudo_handler) 199 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/download_utils/mega_downloader.py: -------------------------------------------------------------------------------- 1 | from bot import LOGGER, MEGA_API_KEY, download_dict_lock, download_dict, MEGA_EMAIL_ID, MEGA_PASSWORD 2 | import threading 3 | from mega import (MegaApi, MegaListener, MegaRequest, MegaTransfer, MegaError) 4 | from bot.helper.telegram_helper.message_utils import * 5 | import os 6 | from bot.helper.ext_utils.bot_utils import new_thread, get_mega_link_type, get_readable_file_size, check_limit 7 | from bot.helper.mirror_utils.status_utils.mega_download_status import MegaDownloadStatus 8 | from 
bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper 9 | from bot import MEGA_LIMIT, STOP_DUPLICATE, TAR_UNZIP_LIMIT 10 | import random 11 | import string 12 | 13 | class MegaDownloaderException(Exception): 14 | pass 15 | 16 | 17 | class MegaAppListener(MegaListener): 18 | _NO_EVENT_ON = (MegaRequest.TYPE_LOGIN,MegaRequest.TYPE_FETCH_NODES) 19 | NO_ERROR = "no error" 20 | 21 | def __init__(self, continue_event: threading.Event, listener): 22 | self.continue_event = continue_event 23 | self.node = None 24 | self.public_node = None 25 | self.listener = listener 26 | self.uid = listener.uid 27 | self.__bytes_transferred = 0 28 | self.is_cancelled = False 29 | self.__speed = 0 30 | self.__name = '' 31 | self.__size = 0 32 | self.error = None 33 | self.gid = "" 34 | super(MegaAppListener, self).__init__() 35 | 36 | @property 37 | def speed(self): 38 | """Returns speed of the download in bytes/second""" 39 | return self.__speed 40 | 41 | @property 42 | def name(self): 43 | """Returns name of the download""" 44 | return self.__name 45 | 46 | def setValues(self, name, size, gid): 47 | self.__name = name 48 | self.__size = size 49 | self.gid = gid 50 | 51 | @property 52 | def size(self): 53 | """Size of download in bytes""" 54 | return self.__size 55 | 56 | @property 57 | def downloaded_bytes(self): 58 | return self.__bytes_transferred 59 | 60 | def onRequestStart(self, api, request): 61 | pass 62 | 63 | def onRequestFinish(self, api, request, error): 64 | if str(error).lower() != "no error": 65 | self.error = error.copy() 66 | return 67 | request_type = request.getType() 68 | if request_type == MegaRequest.TYPE_LOGIN: 69 | api.fetchNodes() 70 | elif request_type == MegaRequest.TYPE_GET_PUBLIC_NODE: 71 | self.public_node = request.getPublicMegaNode() 72 | elif request_type == MegaRequest.TYPE_FETCH_NODES: 73 | LOGGER.info("Fetching Root Node.") 74 | self.node = api.getRootNode() 75 | LOGGER.info(f"Node Name: {self.node.getName()}") 76 | if request_type not in self._NO_EVENT_ON or self.node and "cloud drive" not in self.node.getName().lower(): 77 | self.continue_event.set() 78 | 79 | def onRequestTemporaryError(self, api, request, error: MegaError): 80 | LOGGER.info(f'Mega Request error in {error}') 81 | if not self.is_cancelled: 82 | self.listener.onDownloadError("RequestTempError: " + error.toString()) 83 | self.is_cancelled = True 84 | self.error = error.toString() 85 | self.continue_event.set() 86 | 87 | def onTransferStart(self, api: MegaApi, transfer: MegaTransfer): 88 | pass 89 | 90 | def onTransferUpdate(self, api: MegaApi, transfer: MegaTransfer): 91 | if self.is_cancelled: 92 | api.cancelTransfer(transfer, None) 93 | self.__speed = transfer.getSpeed() 94 | self.__bytes_transferred = transfer.getTransferredBytes() 95 | 96 | def onTransferFinish(self, api: MegaApi, transfer: MegaTransfer, error): 97 | try: 98 | if transfer.isFolderTransfer() and transfer.isFinished() or transfer.getFileName() == self.name and not self.is_cancelled: 99 | self.listener.onDownloadComplete() 100 | self.continue_event.set() 101 | except Exception as e: 102 | LOGGER.error(e) 103 | 104 | def onTransferTemporaryError(self, api, transfer, error): 105 | filen = transfer.getFileName() 106 | state = transfer.getState() 107 | errStr = error.toString() 108 | LOGGER.info(f'Mega download error in file {transfer} {filen}: {error}') 109 | 110 | if state == 1 or state == 4: 111 | # Sometimes MEGA (offical client) can't stream a node either and raises a temp failed error. 
112 | # Don't break the transfer queue if transfer's in queued (1) or retrying (4) state [causes seg fault] 113 | return 114 | 115 | self.error = errStr 116 | if not self.is_cancelled: 117 | self.is_cancelled = True 118 | self.listener.onDownloadError(f"TransferTempError: {errStr} ({filen})") 119 | 120 | def cancel_download(self): 121 | self.is_cancelled = True 122 | self.listener.onDownloadError("Download Canceled by user") 123 | 124 | 125 | class AsyncExecutor: 126 | 127 | def __init__(self): 128 | self.continue_event = threading.Event() 129 | 130 | def do(self, function, args): 131 | self.continue_event.clear() 132 | function(*args) 133 | self.continue_event.wait() 134 | 135 | listeners = [] 136 | 137 | class MegaDownloadHelper: 138 | def __init__(self): 139 | pass 140 | 141 | @staticmethod 142 | @new_thread 143 | def add_download(mega_link: str, path: str, listener): 144 | if MEGA_API_KEY is None: 145 | raise MegaDownloaderException('Mega API KEY not provided! Cannot mirror Mega links') 146 | executor = AsyncExecutor() 147 | api = MegaApi(MEGA_API_KEY, None, None, 'telegram-mirror-bot') 148 | global listeners 149 | mega_listener = MegaAppListener(executor.continue_event, listener) 150 | listeners.append(mega_listener) 151 | api.addListener(mega_listener) 152 | if MEGA_EMAIL_ID is not None and MEGA_PASSWORD is not None: 153 | executor.do(api.login, (MEGA_EMAIL_ID, MEGA_PASSWORD)) 154 | link_type = get_mega_link_type(mega_link) 155 | if link_type == "file": 156 | LOGGER.info("File. If your download didn't start, then check your link if it's available to download") 157 | executor.do(api.getPublicNode, (mega_link,)) 158 | node = mega_listener.public_node 159 | else: 160 | LOGGER.info("Folder. If your download didn't start, then check your link if it's available to download") 161 | folder_api = MegaApi(MEGA_API_KEY,None,None,'TgBot') 162 | folder_api.addListener(mega_listener) 163 | executor.do(folder_api.loginToFolder, (mega_link,)) 164 | node = folder_api.authorizeNode(mega_listener.node) 165 | if mega_listener.error is not None: 166 | return listener.onDownloadError(str(mega_listener.error)) 167 | if STOP_DUPLICATE: 168 | LOGGER.info(f'Checking File/Folder if already in Drive') 169 | mname = node.getName() 170 | if listener.isTar: 171 | mname = mname + ".tar" 172 | if listener.extract: 173 | smsg = None 174 | else: 175 | gd = GoogleDriveHelper() 176 | smsg, button = gd.drive_list(mname) 177 | if smsg: 178 | msg1 = "File/Folder is already available in Drive.\nHere are the search results:" 179 | sendMarkup(msg1, listener.bot, listener.update, button) 180 | executor.continue_event.set() 181 | return 182 | if MEGA_LIMIT is not None or TAR_UNZIP_LIMIT is not None: 183 | size = api.getSize(node) 184 | if listener.isTar or listener.extract: 185 | is_tar_ext = True 186 | msg3 = f'Failed, Tar/Unzip limit is {TAR_UNZIP_LIMIT}.\nYour File/Folder size is {get_readable_file_size(api.getSize(node))}.' 187 | else: 188 | is_tar_ext = False 189 | msg3 = f'Failed, Mega limit is {MEGA_LIMIT}.\nYour File/Folder size is {get_readable_file_size(api.getSize(node))}.' 
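# check_limit() returns True when the node size exceeds MEGA_LIMIT, or TAR_UNZIP_LIMIT when the task will be archived/extracted (see bot_utils.check_limit).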
190 | result = check_limit(size, MEGA_LIMIT, TAR_UNZIP_LIMIT, is_tar_ext) 191 | if result: 192 | sendMessage(msg3, listener.bot, listener.update) 193 | executor.continue_event.set() 194 | return 195 | with download_dict_lock: 196 | download_dict[listener.uid] = MegaDownloadStatus(mega_listener, listener) 197 | os.makedirs(path) 198 | gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=8)) 199 | mega_listener.setValues(node.getName(), api.getSize(node), gid) 200 | sendStatusMessage(listener.update, listener.bot) 201 | executor.do(api.startDownload,(node,path)) 202 | -------------------------------------------------------------------------------- /bot/helper/ext_utils/bot_utils.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | import threading 4 | import time 5 | import math 6 | 7 | from bot.helper.telegram_helper.bot_commands import BotCommands 8 | from bot import dispatcher, download_dict, download_dict_lock, STATUS_LIMIT 9 | from telegram import InlineKeyboardMarkup 10 | from telegram.ext import CallbackQueryHandler 11 | from bot.helper.telegram_helper import button_build, message_utils 12 | 13 | LOGGER = logging.getLogger(__name__) 14 | 15 | MAGNET_REGEX = r"magnet:\?xt=urn:btih:[a-zA-Z0-9]*" 16 | 17 | URL_REGEX = r"(?:(?:https?|ftp):\/\/)?[\w/\-?=%.]+\.[\w/\-?=%.]+" 18 | 19 | COUNT = 0 20 | PAGE_NO = 1 21 | 22 | 23 | class MirrorStatus: 24 | STATUS_UPLOADING = "Uploading...📤" 25 | STATUS_DOWNLOADING = "Downloading...📥" 26 | STATUS_CLONING = "Cloning...♻️" 27 | STATUS_WAITING = "Queued...📝" 28 | STATUS_FAILED = "Failed 🚫. Cleaning Download..." 29 | STATUS_PAUSE = "Paused...⭕️" 30 | STATUS_ARCHIVING = "Archiving...🔐" 31 | STATUS_EXTRACTING = "Extracting...📂" 32 | 33 | 34 | PROGRESS_MAX_SIZE = 100 // 8 35 | PROGRESS_INCOMPLETE = ['☆', '☆', '☆', '☆', '☆', '☆', '☆'] 36 | 37 | SIZE_UNITS = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] 38 | 39 | 40 | class setInterval: 41 | def __init__(self, interval, action): 42 | self.interval = interval 43 | self.action = action 44 | self.stopEvent = threading.Event() 45 | thread = threading.Thread(target=self.__setInterval) 46 | thread.start() 47 | 48 | def __setInterval(self): 49 | nextTime = time.time() + self.interval 50 | while not self.stopEvent.wait(nextTime - time.time()): 51 | nextTime += self.interval 52 | self.action() 53 | 54 | def cancel(self): 55 | self.stopEvent.set() 56 | 57 | 58 | def get_readable_file_size(size_in_bytes) -> str: 59 | if size_in_bytes is None: 60 | return '0B' 61 | index = 0 62 | while size_in_bytes >= 1024: 63 | size_in_bytes /= 1024 64 | index += 1 65 | try: 66 | return f'{round(size_in_bytes, 2)}{SIZE_UNITS[index]}' 67 | except IndexError: 68 | return 'File too large' 69 | 70 | 71 | def getDownloadByGid(gid): 72 | with download_dict_lock: 73 | for dl in download_dict.values(): 74 | status = dl.status() 75 | if status != MirrorStatus.STATUS_ARCHIVING and status != MirrorStatus.STATUS_EXTRACTING: 76 | if dl.gid() == gid: 77 | return dl 78 | return None 79 | 80 | 81 | def getAllDownload(): 82 | with download_dict_lock: 83 | for dlDetails in download_dict.values(): 84 | if dlDetails.status() == MirrorStatus.STATUS_DOWNLOADING or dlDetails.status() == MirrorStatus.STATUS_WAITING: 85 | if dlDetails: 86 | return dlDetails 87 | return None 88 | 89 | 90 | def get_progress_bar_string(status): 91 | completed = status.processed_bytes() / 8 92 | total = status.size_raw() / 8 93 | if total == 0: 94 | p = 0 95 | else: 96 | p = round(completed 
* 100 / total) 97 | p = min(max(p, 0), 100) 98 | cFull = p // 8 99 | cPart = p % 8 - 1 100 | p_str = '★' * cFull 101 | if cPart >= 0: 102 | p_str += PROGRESS_INCOMPLETE[cPart] 103 | p_str += '☆' * (PROGRESS_MAX_SIZE - cFull) 104 | p_str = f"[{p_str}]" 105 | return p_str 106 | 107 | 108 | def get_readable_message(): 109 | with download_dict_lock: 110 | msg = "" 111 | INDEX = 0 112 | if STATUS_LIMIT is not None: 113 | dick_no = len(download_dict) 114 | global pages 115 | pages = math.ceil(dick_no/STATUS_LIMIT) 116 | if PAGE_NO > pages and pages != 0: 117 | globals()['COUNT'] -= STATUS_LIMIT 118 | globals()['PAGE_NO'] -= 1 119 | for download in list(download_dict.values()): 120 | INDEX += 1 121 | if INDEX > COUNT: 122 | msg += f"☞ 🗃️Filename : {download.name()}" 123 | msg += f"\n☞ 🚦Status : {download.status()}" 124 | if download.status() != MirrorStatus.STATUS_ARCHIVING and download.status() != MirrorStatus.STATUS_EXTRACTING: 125 | msg += f"\n{get_progress_bar_string(download)} {download.progress()}" 126 | if download.status() == MirrorStatus.STATUS_CLONING: 127 | msg += f"\n☞ 🚦Cloned: {get_readable_file_size(download.processed_bytes())} of {download.size()}" 128 | elif download.status() == MirrorStatus.STATUS_UPLOADING: 129 | msg += f"\n☞ 📤Uploaded : {get_readable_file_size(download.processed_bytes())} of {download.size()}" 130 | else: 131 | msg += f"\n☞ 📥Downloaded : {get_readable_file_size(download.processed_bytes())} of {download.size()}" 132 | msg += f"\n☞ ⚡️Speed : {download.speed()}" \ 133 | f"\n☞ ⏰ETA : {download.eta()} " 134 | # if hasattr(download, 'is_torrent'): 135 | try: 136 | msg += f"\n☞ Seeders : {download.aria_download().num_seeders}" \ 137 | f" | ☞ Peers : {download.aria_download().connections}" 138 | except: 139 | pass 140 | try: 141 | msg += f"\n☞ Seeders : {download.torrent_info().num_seeds}" \ 142 | f" | ☞ Leechers : {download.torrent_info().num_leechs}" 143 | except: 144 | pass 145 | msg += f"\n☞ To cancel ❌ : /{BotCommands.CancelMirror} {download.gid()}" 146 | msg += "\n\n" 147 | if STATUS_LIMIT is not None: 148 | if INDEX >= COUNT + STATUS_LIMIT: 149 | break 150 | if STATUS_LIMIT is not None: 151 | if INDEX > COUNT + STATUS_LIMIT: 152 | return None, None 153 | if dick_no > STATUS_LIMIT: 154 | msg += f"Page: {PAGE_NO}/{pages} | Tasks: {dick_no}\n" 155 | buttons = button_build.ButtonMaker() 156 | buttons.sbutton("Previous", "pre") 157 | buttons.sbutton("Next", "nex") 158 | button = InlineKeyboardMarkup(buttons.build_menu(2)) 159 | return msg, button 160 | return msg, "" 161 | 162 | 163 | def flip(update, context): 164 | query = update.callback_query 165 | query.answer() 166 | global COUNT, PAGE_NO 167 | if query.data == "nex": 168 | if PAGE_NO == pages: 169 | COUNT = 0 170 | PAGE_NO = 1 171 | else: 172 | COUNT += STATUS_LIMIT 173 | PAGE_NO += 1 174 | elif query.data == "pre": 175 | if PAGE_NO == 1: 176 | COUNT = STATUS_LIMIT * (pages - 1) 177 | PAGE_NO = pages 178 | else: 179 | COUNT -= STATUS_LIMIT 180 | PAGE_NO -= 1 181 | message_utils.update_all_messages() 182 | 183 | 184 | def check_limit(size, limit, tar_unzip_limit=None, is_tar_ext=False): 185 | LOGGER.info(f"Checking File/Folder Size...") 186 | if is_tar_ext and tar_unzip_limit is not None: 187 | limit = tar_unzip_limit 188 | if limit is not None: 189 | limit = limit.split(' ', maxsplit=1) 190 | limitint = int(limit[0]) 191 | if 'G' in limit[1] or 'g' in limit[1]: 192 | if size > limitint * 1024**3: 193 | return True 194 | elif 'T' in limit[1] or 't' in limit[1]: 195 | if size > limitint * 1024**4: 196 | return 
True 197 | 198 | def get_readable_time(seconds: int) -> str: 199 | result = '' 200 | (days, remainder) = divmod(seconds, 86400) 201 | days = int(days) 202 | if days != 0: 203 | result += f'{days}d' 204 | (hours, remainder) = divmod(remainder, 3600) 205 | hours = int(hours) 206 | if hours != 0: 207 | result += f'{hours}h' 208 | (minutes, seconds) = divmod(remainder, 60) 209 | minutes = int(minutes) 210 | if minutes != 0: 211 | result += f'{minutes}m' 212 | seconds = int(seconds) 213 | result += f'{seconds}s' 214 | return result 215 | 216 | 217 | def is_url(url: str): 218 | url = re.findall(URL_REGEX, url) 219 | if url: 220 | return True 221 | return False 222 | 223 | 224 | def is_gdrive_link(url: str): 225 | return "drive.google.com" in url 226 | 227 | 228 | def is_mega_link(url: str): 229 | return "mega.nz" in url or "mega.co.nz" in url 230 | 231 | 232 | def get_mega_link_type(url: str): 233 | if "folder" in url: 234 | return "folder" 235 | elif "file" in url: 236 | return "file" 237 | elif "/#F!" in url: 238 | return "folder" 239 | return "file" 240 | 241 | 242 | def is_magnet(url: str): 243 | magnet = re.findall(MAGNET_REGEX, url) 244 | if magnet: 245 | return True 246 | return False 247 | 248 | 249 | def new_thread(fn): 250 | """To use as decorator to make a function call threaded. 251 | Needs import 252 | from threading import Thread""" 253 | 254 | def wrapper(*args, **kwargs): 255 | thread = threading.Thread(target=fn, args=args, kwargs=kwargs) 256 | thread.start() 257 | return thread 258 | 259 | return wrapper 260 | 261 | 262 | next_handler = CallbackQueryHandler(flip, pattern="nex", run_async=True) 263 | previous_handler = CallbackQueryHandler(flip, pattern="pre", run_async=True) 264 | dispatcher.add_handler(next_handler) 265 | dispatcher.add_handler(previous_handler) 266 | -------------------------------------------------------------------------------- /bot/helper/mirror_utils/download_utils/qbit_downloader.py: -------------------------------------------------------------------------------- 1 | import os 2 | import random 3 | import string 4 | import time 5 | import logging 6 | import shutil 7 | 8 | import qbittorrentapi as qba 9 | from fnmatch import fnmatch 10 | from urllib.parse import urlparse, parse_qs 11 | from torrentool.api import Torrent 12 | from telegram import InlineKeyboardMarkup 13 | from telegram.ext import CallbackQueryHandler 14 | 15 | from bot import download_dict, download_dict_lock, BASE_URL, dispatcher, get_client, TORRENT_DIRECT_LIMIT, TAR_UNZIP_LIMIT 16 | from bot.helper.mirror_utils.status_utils.qbit_download_status import QbDownloadStatus 17 | from bot.helper.telegram_helper.message_utils import * 18 | from bot.helper.ext_utils.bot_utils import setInterval, new_thread, MirrorStatus, getDownloadByGid, get_readable_file_size, check_limit 19 | from bot.helper.telegram_helper import button_build 20 | 21 | LOGGER = logging.getLogger(__name__) 22 | 23 | 24 | class qbittorrent: 25 | 26 | 27 | def __init__(self): 28 | self.update_interval = 2 29 | self.meta_time = time.time() 30 | self.stalled_time = time.time() 31 | self.checked = False 32 | 33 | @new_thread 34 | def add_torrent(self, link, dire, listener, qbitsel): 35 | self.client = get_client() 36 | self.listener = listener 37 | self.dire = dire 38 | self.qbitsel = qbitsel 39 | is_file = False 40 | count = 0 41 | pincode = "" 42 | try: 43 | if os.path.exists(link): 44 | is_file = True 45 | self.ext_hash = get_hash_file(link) 46 | else: 47 | self.ext_hash = get_hash_magnet(link) 48 | tor_info = 
self.client.torrents_info(torrent_hashes=self.ext_hash) 49 | if len(tor_info) > 0: 50 | sendMessage("This torrent is already in list.", listener.bot, listener.update) 51 | return 52 | if is_file: 53 | op = self.client.torrents_add(torrent_files=[link], save_path=dire) 54 | os.remove(link) 55 | else: 56 | op = self.client.torrents_add(link, save_path=dire) 57 | if op.lower() == "ok.": 58 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash) 59 | if len(tor_info) == 0: 60 | while True: 61 | if time.time() - self.meta_time >= 300: 62 | sendMessage("The torrent was not added. report when u see this error", listener.bot, listener.update) 63 | return False 64 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash) 65 | if len(tor_info) > 0: 66 | break 67 | else: 68 | sendMessage("This is an unsupported/invalid link.", listener.bot, listener.update) 69 | return 70 | gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=14)) 71 | with download_dict_lock: 72 | download_dict[listener.uid] = QbDownloadStatus(gid, listener, self.ext_hash, self.client) 73 | tor_info = tor_info[0] 74 | LOGGER.info(f"QbitDownload started: {tor_info.name}") 75 | self.updater = setInterval(self.update_interval, self.update) 76 | if BASE_URL is not None and qbitsel: 77 | if is_file: 78 | self.client.torrents_pause(torrent_hashes=self.ext_hash) 79 | else: 80 | meta = sendMessage("Downloading Metadata...Please wait then you can select files or mirror torrent file if it have low seeders", listener.bot, listener.update) 81 | while True: 82 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash) 83 | if len(tor_info) == 0: 84 | deleteMessage(listener.bot, meta) 85 | return False 86 | try: 87 | tor_info = tor_info[0] 88 | if tor_info.state == "metaDL" or tor_info.state == "checkingResumeData": 89 | time.sleep(0.5) 90 | else: 91 | self.client.torrents_pause(torrent_hashes=self.ext_hash) 92 | deleteMessage(listener.bot, meta) 93 | break 94 | except: 95 | deleteMessage(listener.bot, meta) 96 | return False 97 | for n in str(self.ext_hash): 98 | if n.isdigit(): 99 | pincode += str(n) 100 | count += 1 101 | if count == 4: 102 | break 103 | URL = f"{BASE_URL}/slam/files/{self.ext_hash}" 104 | pindata = f"pin {gid} {pincode}" 105 | donedata = f"done {gid} {self.ext_hash}" 106 | buttons = button_build.ButtonMaker() 107 | buttons.buildbutton("Select Files", URL) 108 | buttons.sbutton("Pincode", pindata) 109 | buttons.sbutton("Done Selecting", donedata) 110 | QBBUTTONS = InlineKeyboardMarkup(buttons.build_menu(2)) 111 | msg = "Your download paused. Choose files then press Done Selecting button to start downloading." 112 | sendMarkup(msg, listener.bot, listener.update, QBBUTTONS) 113 | else: 114 | sendStatusMessage(listener.update, listener.bot) 115 | except qba.UnsupportedMediaType415Error as e: 116 | LOGGER.error(str(e)) 117 | sendMessage("This is an unsupported/invalid link. 
{str(e)}", listener.bot, listener.update) 118 | except Exception as e: 119 | LOGGER.error(str(e)) 120 | sendMessage(str(e), listener.bot, listener.update) 121 | self.client.torrents_delete(torrent_hashes=self.ext_hash, delete_files=True) 122 | 123 | 124 | def update(self): 125 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash) 126 | if len(tor_info) == 0: 127 | self.updater.cancel() 128 | return 129 | try: 130 | tor_info = tor_info[0] 131 | if tor_info.state == "metaDL": 132 | self.stalled_time = time.time() 133 | if time.time() - self.meta_time >= 600: 134 | self.listener.onDownloadError("Dead Torrent!") 135 | self.client.torrents_delete(torrent_hashes=self.ext_hash, delete_files=True) 136 | self.updater.cancel() 137 | return 138 | elif tor_info.state == "downloading": 139 | self.stalled_time = time.time() 140 | if (TORRENT_DIRECT_LIMIT is not None or TAR_UNZIP_LIMIT is not None) and not self.checked: 141 | if self.listener.isTar or self.listener.extract: 142 | is_tar_ext = True 143 | mssg = f'Tar/Unzip limit is {TAR_UNZIP_LIMIT}' 144 | else: 145 | is_tar_ext = False 146 | mssg = f'Torrent/Direct limit is {TORRENT_DIRECT_LIMIT}' 147 | size = tor_info.size 148 | result = check_limit(size, TORRENT_DIRECT_LIMIT, TAR_UNZIP_LIMIT, is_tar_ext) 149 | self.checked = True 150 | if result: 151 | self.listener.onDownloadError(f"{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}") 152 | self.client.torrents_delete(torrent_hashes=self.ext_hash, delete_files=True) 153 | self.updater.cancel() 154 | return 155 | elif tor_info.state == "stalledDL": 156 | if time.time() - self.stalled_time >= 900: 157 | self.listener.onDownloadError("Dead Torrent!") 158 | self.client.torrents_delete(torrent_hashes=self.ext_hash, delete_files=True) 159 | self.updater.cancel() 160 | return 161 | elif tor_info.state == "error": 162 | self.listener.onDownloadError("Error. 
IDK why, report in support group") 163 | self.client.torrents_delete(torrent_hashes=self.ext_hash, delete_files=True) 164 | self.updater.cancel() 165 | return 166 | elif tor_info.state == "uploading" or tor_info.state.lower().endswith("up"): 167 | self.client.torrents_pause(torrent_hashes=self.ext_hash) 168 | if self.qbitsel: 169 | for dirpath, subdir, files in os.walk(f"{self.dire}", topdown=False): 170 | for file in files: 171 | if fnmatch(file, "*.!qB"): 172 | os.remove(os.path.join(dirpath, file)) 173 | for folder in subdir: 174 | if fnmatch(folder, ".unwanted"): 175 | shutil.rmtree(os.path.join(dirpath, folder)) 176 | if not os.listdir(dirpath): 177 | os.rmdir(dirpath) 178 | self.listener.onDownloadComplete() 179 | self.client.torrents_delete(torrent_hashes=self.ext_hash, delete_files=True) 180 | self.updater.cancel() 181 | except: 182 | self.updater.cancel() 183 | 184 | 185 | def get_confirm(update, context): 186 | query = update.callback_query 187 | user_id = query.from_user.id 188 | data = query.data 189 | data = data.split(" ") 190 | qdl = getDownloadByGid(data[1]) 191 | if qdl is not None: 192 | if user_id != qdl.listener.message.from_user.id: 193 | query.answer(text="Don't waste your time!", show_alert=True) 194 | return 195 | if data[0] == "pin": 196 | query.answer(text=data[2], show_alert=True) 197 | elif data[0] == "done": 198 | query.answer() 199 | qdl.client.torrents_resume(torrent_hashes=data[2]) 200 | sendStatusMessage(qdl.listener.update, qdl.listener.bot) 201 | query.message.delete() 202 | else: 203 | query.answer(text="This task has been cancelled!", show_alert=True) 204 | query.message.delete() 205 | 206 | 207 | def get_hash_magnet(mgt): 208 | if mgt.startswith('magnet:'): 209 | _, _, _, _, query, _ = urlparse(mgt) 210 | 211 | qs = parse_qs(query) 212 | v = qs.get('xt', None) 213 | 214 | if v == None or v == []: 215 | LOGGER.error('Invalid magnet URI: no "xt" query parameter.') 216 | return False 217 | 218 | v = v[0] 219 | if not v.startswith('urn:btih:'): 220 | LOGGER.error('Invalid magnet URI: "xt" value not valid for BitTorrent.') 221 | return False 222 | 223 | mgt = v[len('urn:btih:'):] 224 | return mgt.lower() 225 | 226 | 227 | def get_hash_file(path): 228 | tr = Torrent.from_file(path) 229 | mgt = tr.magnet_link 230 | return get_hash_magnet(mgt) 231 | 232 | 233 | pin_handler = CallbackQueryHandler(get_confirm, pattern="pin", run_async=True) 234 | done_handler = CallbackQueryHandler(get_confirm, pattern="done", run_async=True) 235 | dispatcher.add_handler(pin_handler) 236 | dispatcher.add_handler(done_handler) 237 | -------------------------------------------------------------------------------- /bot/modules/config.py: -------------------------------------------------------------------------------- 1 | from pyrogram import filters, types, emoji 2 | from bot.helper.telegram_helper.bot_commands import BotCommands 3 | from bot import app, OWNER_ID, bot 4 | from bot.helper import get_text, check_heroku 5 | from bot import * 6 | 7 | # Add Variable 8 | 9 | @app.on_message(filters.command(['setvar', f'setvar@{bot.username}']) & filters.user(OWNER_ID)) 10 | @check_heroku 11 | async def set_varr(client, message, app_): 12 | msg_ = await message.reply_text("`Please Wait!`") 13 | heroku_var = app_.config() 14 | _var = get_text(message) 15 | if not _var: 16 | await msg_.edit("`Here is Usage Syntax: /setvar KEY VALUE`", parse_mode="markdown") 17 | return 18 | if not " " in _var: 19 | await msg_.edit("`Variable VALUE needed !`", parse_mode="markdown") 20 | return 21 | var_ = 
_var.split(" ", 1) 22 | if len(var_) > 2: 23 | await msg_.edit("`Here is Usage Syntax: /setvar KEY VALUE`", parse_mode="markdown") 24 | return 25 | _varname, _varvalue = var_ 26 | await msg_.edit( 27 | f"`Variable {_varname} Added With Value {_varvalue}!`" \ 28 | f"\nYour Heroku app will restart. Be patient." 29 | ) 30 | heroku_var[_varname] = _varvalue 31 | 32 | # Delete Variable 33 | 34 | @app.on_message(filters.command(['delvar', f'delvar@{bot.username}']) & filters.user(OWNER_ID)) 35 | @check_heroku 36 | async def del_varr(client, message, app_): 37 | msg_ = await message.reply_text("`Please Wait!`", parse_mode="markdown") 38 | heroku_var = app_.config() 39 | _var = get_text(message) 40 | if not _var: 41 | await msg_.edit("`Give Var Name As Input!`", parse_mode="markdown") 42 | return 43 | if not _var in heroku_var: 44 | await msg_.edit("`This Var Doesn't Exists!`", parse_mode="markdown") 45 | return 46 | await msg_.edit( 47 | f"`Sucessfully Deleted {_var} Var!`" \ 48 | f"\nYour Heroku app will restart. Be patient.", 49 | parse_mode="markdown") 50 | del heroku_var[_var] 51 | 52 | @app.on_message(filters.command(['reboot', f'reboot@{bot.username}']) & filters.user(OWNER_ID)) 53 | @check_heroku 54 | async def gib_restart(client, message, hap): 55 | msg_ = await message.reply_text("[HEROKU] - Restarting") 56 | hap.restart() 57 | 58 | # CONFIG LIST # 59 | 60 | __header__='📕 **Page** **{}**\n\n' 61 | 62 | @app.on_message(filters.command([BotCommands.ConfigMenuCommand, f'{BotCommands.ConfigMenuCommand}@{bot.username}']) & filters.user(OWNER_ID)) 63 | async def config_menu(_, message): 64 | await message.reply( 65 | f"**Hello {message.from_user.mention}**,\n\n**If you want to add or set Variable in Heroku use** `/setvar`\n\n**If you want to delete Variable in Heroku use `/delvar`**\n\n**WARNING! 
Very Recommended to do this command in private since it's contain Bot info.**\n\n**Here's This is Slam-MirrorBot Current Configs**", 66 | reply_markup=types.InlineKeyboardMarkup( 67 | [[types.InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data='docs_end'), types.InlineKeyboardButton(f"BOT CONFIG", callback_data='docs_1')]] 68 | ) 69 | ) 70 | 71 | @app.on_callback_query(filters.regex('^docs_') & filters.user(OWNER_ID)) 72 | async def config_button(_, query): 73 | data = query.data.split('_')[1] 74 | if data == '1': 75 | return await query.message.edit( 76 | __header__.format(data) 77 | + f"**[ Telegram Config ]**\n\n**BOT_TOKEN:** `{BOT_TOKEN}`\n\n**TELEGRAM_API:** `{TELEGRAM_API}`\n\n**TELEGRAM_HASH:** `{TELEGRAM_HASH}`\n\n**TELEGRAPH_TOKEN:** `{telegraph_token}`", 78 | reply_markup=types.InlineKeyboardMarkup( 79 | [ 80 | [ 81 | types.InlineKeyboardButton(f"{emoji.LEFT_ARROW}", callback_data='docs_10'), 82 | types.InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data='docs_end'), 83 | types.InlineKeyboardButton(f"{emoji.RIGHT_ARROW}", callback_data='docs_2') 84 | ] 85 | ] 86 | ) 87 | ) 88 | elif data == '2': 89 | return await query.message.edit( 90 | __header__.format(data) 91 | + f"**[ Drive and Index Config ]**\n\n**GDRIVE_FOLDER_ID:** `{parent_id}`\n\n**IS_TEAM_DRIVE:** `{IS_TEAM_DRIVE}`\n\n**USE_SERVICE_ACCOUNTS:** `{USE_SERVICE_ACCOUNTS}`\n\n**INDEX_URL:** `{INDEX_URL}`", 92 | reply_markup=types.InlineKeyboardMarkup( 93 | [ 94 | [ 95 | types.InlineKeyboardButton(f"{emoji.LEFT_ARROW}", callback_data='docs_1'), 96 | types.InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data='docs_end'), 97 | types.InlineKeyboardButton(f"{emoji.RIGHT_ARROW}", callback_data='docs_3') 98 | ] 99 | ] 100 | ) 101 | ) 102 | elif data == '3': 103 | return await query.message.edit( 104 | __header__.format(data) 105 | + f"**[ Mega and Uptobox Config ]**\n\n**MEGA_API_KEY:** `{MEGA_API_KEY}`\n\n**MEGA_EMAIL_ID:** `{MEGA_EMAIL_ID}`\n\n**MEGA_PASSWORD:** `{MEGA_PASSWORD}`\n\n**UPTOBOX_TOKEN:** `{UPTOBOX_TOKEN}`", 106 | reply_markup=types.InlineKeyboardMarkup( 107 | [ 108 | [ 109 | types.InlineKeyboardButton(f"{emoji.LEFT_ARROW}", callback_data='docs_2'), 110 | types.InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data='docs_end'), 111 | types.InlineKeyboardButton(f"{emoji.RIGHT_ARROW}", callback_data='docs_4') 112 | ] 113 | ] 114 | ) 115 | ) 116 | elif data == '4': 117 | return await query.message.edit( 118 | __header__.format(data) 119 | + f"**[ Stop and Block Config ]**\n\n**STOP_DUPLICATE:** `{STOP_DUPLICATE}`\n\n**BLOCK_MEGA_FOLDER:** `{BLOCK_MEGA_FOLDER}`\n\n**BLOCK_MEGA_LINKS:** `{BLOCK_MEGA_LINKS}`", 120 | reply_markup=types.InlineKeyboardMarkup( 121 | [ 122 | [ 123 | types.InlineKeyboardButton(f"{emoji.LEFT_ARROW}", callback_data='docs_3'), 124 | types.InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data='docs_end'), 125 | types.InlineKeyboardButton(f"{emoji.RIGHT_ARROW}", callback_data='docs_5') 126 | ] 127 | ] 128 | ) 129 | ) 130 | elif data == '5': 131 | return await query.message.edit( 132 | __header__.format(data) 133 | + f"**[ Limit Size Config ]**\n\n**TORRENT_DIRECT_LIMIT:** `{TORRENT_DIRECT_LIMIT}`\n\n**TAR_UNZIP_LIMIT:** `{TAR_UNZIP_LIMIT}`\n\n**CLONE_LIMIT:** `{CLONE_LIMIT}`\n\n**MEGA_LIMIT:** `{MEGA_LIMIT}`", 134 | reply_markup=types.InlineKeyboardMarkup( 135 | [ 136 | [ 137 | types.InlineKeyboardButton(f"{emoji.LEFT_ARROW}", callback_data='docs_4'), 138 | types.InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data='docs_end'), 139 | 
types.InlineKeyboardButton(f"{emoji.RIGHT_ARROW}", callback_data='docs_6') 140 | ] 141 | ] 142 | ) 143 | ) 144 | elif data == '6': 145 | user = sudo = '' 146 | user += '\n'.join(str(id) for id in AUTHORIZED_CHATS) 147 | sudo += '\n'.join(str(id) for id in SUDO_USERS) 148 | return await query.message.edit( 149 | __header__.format(data) 150 | + f"**[ User ID Config ]**\n\n**OWNER_ID:** `{OWNER_ID}`\n\n**AUTHORIZED_CHATS:**\n`{user}`\n\n**SUDO_USERS:**\n`{sudo}`", 151 | reply_markup=types.InlineKeyboardMarkup( 152 | [ 153 | [ 154 | types.InlineKeyboardButton(f"{emoji.LEFT_ARROW}", callback_data='docs_5'), 155 | types.InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data='docs_end'), 156 | types.InlineKeyboardButton(f"{emoji.RIGHT_ARROW}", callback_data='docs_7') 157 | ] 158 | ] 159 | ) 160 | ) 161 | elif data == '7': 162 | return await query.message.edit( 163 | __header__.format(data) 164 | + f"**[ Button Config ]**\n\n**BUTTON_FOUR_NAME:** `{BUTTON_FOUR_NAME}`\n\n**BUTTON_FOUR_URL:** `{BUTTON_FOUR_URL}`\n\n**BUTTON_FIVE_NAME:** `{BUTTON_FIVE_NAME}`\n\n**BUTTON_FIVE_URL:** `{BUTTON_FIVE_URL}`\n\n**BUTTON_SIX_NAME:** `{BUTTON_SIX_NAME}`\n\n**BUTTON_SIX_URL:** `{BUTTON_SIX_URL}`", 165 | reply_markup=types.InlineKeyboardMarkup( 166 | [ 167 | [ 168 | types.InlineKeyboardButton(f"{emoji.LEFT_ARROW}", callback_data='docs_6'), 169 | types.InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data='docs_end'), 170 | types.InlineKeyboardButton(f"{emoji.RIGHT_ARROW}", callback_data='docs_8') 171 | ] 172 | ] 173 | ) 174 | ) 175 | elif data == '8': 176 | return await query.message.edit( 177 | __header__.format(data) 178 | + f"**[ Heroku Config ]**\n\n**HEROKU_API_KEY:** `{HEROKU_API_KEY}`\n\n**HEROKU_APP_NAME:** `{HEROKU_APP_NAME}`\n\n**[ Shortener Config ]**\n\n**SHORTENER:** `{SHORTENER}`\n\n**SHORTENER_API:** `{SHORTENER_API}`", 179 | reply_markup=types.InlineKeyboardMarkup( 180 | [ 181 | [ 182 | types.InlineKeyboardButton(f"{emoji.LEFT_ARROW}", callback_data='docs_7'), 183 | types.InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data='docs_end'), 184 | types.InlineKeyboardButton(f"{emoji.RIGHT_ARROW}", callback_data='docs_9') 185 | ] 186 | ] 187 | ) 188 | ) 189 | elif data == '9': 190 | return await query.message.edit( 191 | __header__.format(data) 192 | + f"**[ Others Config ]**\n\n**VIEW_LINK:** `{VIEW_LINK}`\n\n**STATUS_LIMIT:** `{STATUS_LIMIT}`\n\n**DOWNLOAD_STATUS_UPDATE_INTERVAL:** `{DOWNLOAD_STATUS_UPDATE_INTERVAL}`\n\n**IGNORE_PENDING_REQUESTS:** `{IGNORE_PENDING_REQUESTS}`\n\n**AUTO_DELETE_MESSAGE_DURATION:** `{AUTO_DELETE_MESSAGE_DURATION}`\n\n**DOWNLOAD_DIR:** `{DOWNLOAD_DIR}`\n\n**DATABASE_URL:** `{DB_URI}`", 193 | reply_markup=types.InlineKeyboardMarkup( 194 | [ 195 | [ 196 | types.InlineKeyboardButton(f"{emoji.LEFT_ARROW}", callback_data='docs_8'), 197 | types.InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data='docs_end'), 198 | types.InlineKeyboardButton(f"{emoji.RIGHT_ARROW}", callback_data='docs_10') 199 | ] 200 | ] 201 | ) 202 | ) 203 | elif data == '10': 204 | return await query.message.edit( 205 | __header__.format(data) 206 | + f"**[ qBittorrent Config ]**\n\n**IS_VPS:** `{IS_VPS}`\n\n**SERVER_PORT:** `{SERVER_PORT}`\n\n**BASE_URL_OF_BOT:** `{BASE_URL}`\n\n**[ Updater Config ]**\n\n**UPSTREAM_REPO:** `{UPSTREAM_REPO}`\n\n**UPSTREAM_BRANCH:** `{UPSTREAM_BRANCH}`\n\n**ACCOUNTS_ZIP_URL:** `{ACCOUNTS_ZIP_URL}`\n\n**TOKEN_PICKLE_URL:** `{TOKEN_PICKLE_URL}`", 207 | reply_markup=types.InlineKeyboardMarkup( 208 | [ 209 | [ 210 | 
types.InlineKeyboardButton(f"{emoji.LEFT_ARROW}", callback_data='docs_9'), 211 | types.InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data='docs_end'), 212 | types.InlineKeyboardButton(f"{emoji.RIGHT_ARROW}", callback_data='docs_1') 213 | ] 214 | ] 215 | ) 216 | ) 217 | elif data == 'end': 218 | return await query.message.delete() 219 | -------------------------------------------------------------------------------- /bot/__main__.py: -------------------------------------------------------------------------------- 1 | import shutil, psutil 2 | import signal 3 | import os 4 | import asyncio 5 | 6 | from pyrogram import idle 7 | from bot import app 8 | from sys import executable 9 | 10 | from telegram import ParseMode 11 | from telegram.ext import CommandHandler 12 | from wserver import start_server_async 13 | from bot import bot, IMAGE_URL, dispatcher, updater, botStartTime, IGNORE_PENDING_REQUESTS, IS_VPS, SERVER_PORT, OWNER_ID, AUTHORIZED_CHATS 14 | from bot.helper.ext_utils import fs_utils 15 | from bot.helper.telegram_helper.bot_commands import BotCommands 16 | from bot.helper.telegram_helper.message_utils import * 17 | from .helper.ext_utils.bot_utils import get_readable_file_size, get_readable_time 18 | from .helper.telegram_helper.filters import CustomFilters 19 | from bot.helper.telegram_helper import button_build 20 | from .modules import authorize, list, cancel_mirror, mirror_status, mirror, clone, watch, shell, eval, torrent_search, delete, speedtest, count, config, updates 21 | 22 | 23 | def stats(update, context): 24 | currentTime = get_readable_time(time.time() - botStartTime) 25 | total, used, free = shutil.disk_usage('.') 26 | total = get_readable_file_size(total) 27 | used = get_readable_file_size(used) 28 | free = get_readable_file_size(free) 29 | sent = get_readable_file_size(psutil.net_io_counters().bytes_sent) 30 | recv = get_readable_file_size(psutil.net_io_counters().bytes_recv) 31 | cpuUsage = psutil.cpu_percent(interval=0.5) 32 | memory = psutil.virtual_memory().percent 33 | disk = psutil.disk_usage('/').percent 34 | stats = f'╭──「⭕️ BOT STATISTICS ⭕️」\n' \ 35 | f'\n' \ 36 | f'├ ⏰ Bot Uptime : {currentTime}\n' \ 37 | f'├ 💾 Total Disk Space : {total}\n' \ 38 | f'├ 📀 Total Used Space : {used}\n' \ 39 | f'├ 💿 Total Free Space : {free}\n' \ 40 | f'├ 🔼 Total Upload : {sent}\n' \ 41 | f'├ 🔽 Total Download : {recv}\n' \ 42 | f'├ 🖥️ CPU : {cpuUsage}%\n' \ 43 | f'├ 🎮 RAM : {memory}%\n' \ 44 | f'├ 💽 DISK : {disk}%\n' \ 45 | f'\n' \ 46 | f'╰──「 🚸 @HelpAutomatted_Bot 🚸 」' 47 | update.effective_message.reply_photo(IMAGE_URL, stats, parse_mode=ParseMode.HTML) 48 | 49 | 50 | def start(update, context): 51 | start_string = f''' 52 | This bot can mirror all your links to Google Drive! 
53 | Type /{BotCommands.HelpCommand} to get a list of available commands 54 | ''' 55 | buttons = button_build.ButtonMaker() 56 | buttons.buildbutton("Web", "https://www.caduceus.ml/") 57 | buttons.buildbutton("Channel", "https://t.me/AT_BOTs") 58 | reply_markup = InlineKeyboardMarkup(buttons.build_menu(2)) 59 | LOGGER.info('UID: {} - UN: {} - MSG: {}'.format(update.message.chat.id, update.message.chat.username, update.message.text)) 60 | uptime = get_readable_time((time.time() - botStartTime)) 61 | if CustomFilters.authorized_user(update) or CustomFilters.authorized_chat(update): 62 | if update.message.chat.type == "private" : 63 | sendMessage(f"Hey I'm Alive 🙂\nSince: {uptime}", context.bot, update) 64 | else : 65 | sendMarkup(IMAGE_URL, start_string, context.bot, update, reply_markup) 66 | else : 67 | sendMarkup(f"Oops! You are not allowed to use me..", context.bot, update, reply_markup) 68 | 69 | 70 | def restart(update, context): 71 | restart_message = sendMessage("Restarting, Please wait!", context.bot, update) 72 | # Save restart message object in order to reply to it after restarting 73 | with open(".restartmsg", "w") as f: 74 | f.truncate(0) 75 | f.write(f"{restart_message.chat.id}\n{restart_message.message_id}\n") 76 | fs_utils.clean_all() 77 | os.execl(executable, executable, "-m", "bot") 78 | 79 | 80 | def ping(update, context): 81 | start_time = int(round(time.time() * 1000)) 82 | reply = sendMessage("Starting Ping", context.bot, update) 83 | end_time = int(round(time.time() * 1000)) 84 | editMessage(f'{end_time - start_time} ms', reply) 85 | 86 | 87 | def log(update, context): 88 | sendLogFile(context.bot, update) 89 | 90 | 91 | def bot_help(update, context): 92 | help_string_adm = f''' 93 | /{BotCommands.HelpCommand}: To get this message 94 | /{BotCommands.MirrorCommand} [download_url][magnet_link]: Start mirroring the link to Google Drive. Use /{BotCommands.MirrorCommand} qb to mirror with qBittorrent, and use /{BotCommands.MirrorCommand} qbs to select files before downloading 95 | /{BotCommands.TarMirrorCommand} [download_url][magnet_link]: Start mirroring and upload the archived (.tar) version of the download 96 | /{BotCommands.ZipMirrorCommand} [download_url][magnet_link]: Start mirroring and upload the archived (.zip) version of the download 97 | /{BotCommands.UnzipMirrorCommand} [download_url][magnet_link]: Starts mirroring and if downloaded file is any archive, extracts it to Google Drive 98 | /{BotCommands.CloneCommand} [drive_url]: Copy file/folder to Google Drive 99 | /{BotCommands.CountCommand} [drive_url]: Count file/folder of Google Drive Links 100 | /{BotCommands.DeleteCommand} [drive_url]: Delete file from Google Drive (Only Owner & Sudo) 101 | /{BotCommands.WatchCommand} [youtube-dl supported link]: Mirror through youtube-dl. 
Click /{BotCommands.WatchCommand} for more help 102 | /{BotCommands.TarWatchCommand} [youtube-dl supported link]: Mirror through youtube-dl and tar before uploading 103 | /{BotCommands.CancelMirror}: Reply to the message by which the download was initiated and that download will be cancelled 104 | /{BotCommands.CancelAllCommand}: Cancel all running tasks 105 | /{BotCommands.ListCommand} [search term]: Searches the search term in the Google Drive, If found replies with the link 106 | /{BotCommands.StatusCommand}: Shows a status of all the downloads 107 | /{BotCommands.StatsCommand}: Show Stats of the machine the bot is hosted on 108 | /{BotCommands.PingCommand}: Check how long it takes to Ping the Bot 109 | /{BotCommands.AuthorizeCommand}: Authorize a chat or a user to use the bot (Can only be invoked by Owner & Sudo of the bot) 110 | /{BotCommands.UnAuthorizeCommand}: Unauthorize a chat or a user to use the bot (Can only be invoked by Owner & Sudo of the bot) 111 | /{BotCommands.AuthorizedUsersCommand}: Show authorized users (Only Owner & Sudo) 112 | /{BotCommands.AddSudoCommand}: Add sudo user (Only Owner) 113 | /{BotCommands.RmSudoCommand}: Remove sudo users (Only Owner) 114 | /{BotCommands.RestartCommand}: Restart the bot 115 | /{BotCommands.LogCommand}: Get a log file of the bot. Handy for getting crash reports 116 | /{BotCommands.ConfigMenuCommand}: Get Info Menu about bot config (Owner Only) 117 | /{BotCommands.UpdateCommand}: Update Bot from Upstream Repo (Owner Only) 118 | /{BotCommands.SpeedCommand}: Check Internet Speed of the Host 119 | /{BotCommands.ShellCommand}: Run commands in Shell (Terminal) 120 | /{BotCommands.ExecHelpCommand}: Get help for Executor module (Only Owner) 121 | /{BotCommands.GDTOTCommand}: Send GDTOT link along with command 122 | ''' 123 | 124 | help_string = f''' 125 | /{BotCommands.HelpCommand}: To get this message 126 | /{BotCommands.MirrorCommand} [download_url][magnet_link]: Start mirroring the link to Google Drive. Use /{BotCommands.MirrorCommand} qb to mirror with qBittorrent, and use /{BotCommands.MirrorCommand} qbs to select files before downloading 127 | /{BotCommands.TarMirrorCommand} [download_url][magnet_link]: Start mirroring and upload the archived (.tar) version of the download 128 | /{BotCommands.ZipMirrorCommand} [download_url][magnet_link]: Start mirroring and upload the archived (.zip) version of the download 129 | /{BotCommands.UnzipMirrorCommand} [download_url][magnet_link]: Starts mirroring and if downloaded file is any archive, extracts it to Google Drive 130 | /{BotCommands.CloneCommand} [drive_url]: Copy file/folder to Google Drive 131 | /{BotCommands.CountCommand} [drive_url]: Count file/folder of Google Drive Links 132 | /{BotCommands.WatchCommand} [youtube-dl supported link]: Mirror through youtube-dl. 
Click /{BotCommands.WatchCommand} for more help 133 | /{BotCommands.TarWatchCommand} [youtube-dl supported link]: Mirror through youtube-dl and tar before uploading 134 | /{BotCommands.CancelMirror}: Reply to the message by which the download was initiated and that download will be cancelled 135 | /{BotCommands.ListCommand} [search term]: Search for the given term in Google Drive and reply with the link if found 136 | /{BotCommands.StatusCommand}: Show the status of all the downloads 137 | /{BotCommands.StatsCommand}: Show stats of the machine the bot is hosted on 138 | /{BotCommands.PingCommand}: Check how long it takes to ping the bot 139 | /{BotCommands.GDTOTCommand}: Send GDTOT link along with command 140 | ''' 141 | 142 | if CustomFilters.sudo_user(update) or CustomFilters.owner_filter(update): 143 | sendMessage(help_string_adm, context.bot, update) 144 | else: 145 | sendMessage(help_string, context.bot, update) 146 | 147 | 148 | botcmds = [ 149 | (f'{BotCommands.HelpCommand}','Get Detailed Help'), 150 | (f'{BotCommands.MirrorCommand}', 'Start Mirroring'), 151 | (f'{BotCommands.TarMirrorCommand}','Start mirroring and upload as .tar'), 152 | (f'{BotCommands.UnzipMirrorCommand}','Extract files'), 153 | (f'{BotCommands.ZipMirrorCommand}','Start mirroring and upload as .zip'), 154 | (f'{BotCommands.CloneCommand}','Copy file/folder to Drive'), 155 | (f'{BotCommands.CountCommand}','Count file/folder of Drive link'), 156 | (f'{BotCommands.DeleteCommand}','Delete file from Drive'), 157 | (f'{BotCommands.WatchCommand}','Mirror youtube-dl supported link'), 158 | (f'{BotCommands.TarWatchCommand}','Mirror Youtube playlist link as .tar'), 159 | (f'{BotCommands.CancelMirror}','Cancel a task'), 160 | (f'{BotCommands.CancelAllCommand}','Cancel all tasks'), 161 | (f'{BotCommands.ListCommand}','Searches files in Drive'), 162 | (f'{BotCommands.StatusCommand}','Get Mirror Status message'), 163 | (f'{BotCommands.StatsCommand}','Bot Usage Stats'), 164 | (f'{BotCommands.PingCommand}','Ping the Bot'), 165 | (f'{BotCommands.RestartCommand}','Restart the bot [owner/sudo only]'), 166 | (f'{BotCommands.LogCommand}','Get the Bot Log [owner/sudo only]'), 167 | (f'{BotCommands.GDTOTCommand}','Send GDTOT link along with command') 168 | ] 169 | 170 | 171 | def main(): 172 | fs_utils.start_cleanup() 173 | if IS_VPS: 174 | asyncio.get_event_loop().run_until_complete(start_server_async(PORT)) 175 | # Check if the bot is restarting 176 | if os.path.isfile(".restartmsg"): 177 | with open(".restartmsg") as f: 178 | chat_id, msg_id = map(int, f) 179 | bot.edit_message_text("Restarted successfully!", chat_id, msg_id) 180 | os.remove(".restartmsg") 181 | elif OWNER_ID: 182 | try: 183 | text = "Bot Restarted!"
184 | bot.sendMessage(chat_id=OWNER_ID, text=text, parse_mode=ParseMode.HTML) 185 | if AUTHORIZED_CHATS: 186 | for i in AUTHORIZED_CHATS: 187 | bot.sendMessage(chat_id=i, text=text, parse_mode=ParseMode.HTML) 188 | except Exception as e: 189 | LOGGER.warning(e) 190 | 191 | bot.set_my_commands(botcmds) 192 | 193 | start_handler = CommandHandler(BotCommands.StartCommand, start, run_async=True) 194 | ping_handler = CommandHandler(BotCommands.PingCommand, ping, 195 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True) 196 | restart_handler = CommandHandler(BotCommands.RestartCommand, restart, 197 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True) 198 | help_handler = CommandHandler(BotCommands.HelpCommand, 199 | bot_help, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True) 200 | stats_handler = CommandHandler(BotCommands.StatsCommand, 201 | stats, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True) 202 | log_handler = CommandHandler(BotCommands.LogCommand, log, filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True) 203 | dispatcher.add_handler(start_handler) 204 | dispatcher.add_handler(ping_handler) 205 | dispatcher.add_handler(restart_handler) 206 | dispatcher.add_handler(help_handler) 207 | dispatcher.add_handler(stats_handler) 208 | dispatcher.add_handler(log_handler) 209 | updater.start_polling(drop_pending_updates=IGNORE_PENDING_REQUESTS) 210 | LOGGER.info("Bot Started!") 211 | signal.signal(signal.SIGINT, fs_utils.exit_clean_up) 212 | 213 | app.start() 214 | main() 215 | idle() 216 | -------------------------------------------------------------------------------- /bot/modules/torrent_search.py: -------------------------------------------------------------------------------- 1 | import os 2 | import time 3 | import html 4 | import asyncio 5 | import aiohttp 6 | import json 7 | import feedparser 8 | import requests 9 | import itertools 10 | 11 | from telegram.ext import CommandHandler 12 | from telegram import ParseMode 13 | 14 | from urllib.parse import quote as urlencode, urlsplit 15 | 16 | from pyrogram import Client, filters, emoji 17 | from pyrogram.parser import html as pyrogram_html 18 | from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton 19 | from pyrogram.handlers import MessageHandler, CallbackQueryHandler 20 | 21 | from bot import app, dispatcher, bot 22 | from bot.helper import custom_filters 23 | from bot.helper.telegram_helper.bot_commands import BotCommands 24 | from bot.helper.telegram_helper.filters import CustomFilters 25 | from bot.helper.telegram_helper.message_utils import sendMessage 26 | 27 | search_lock = asyncio.Lock() 28 | search_info = {False: dict(), True: dict()} 29 | 30 | async def return_search(query, page=1, sukebei=False): 31 | page -= 1 32 | query = query.lower().strip() 33 | used_search_info = search_info[sukebei] 34 | async with search_lock: 35 | results, get_time = used_search_info.get(query, (None, 0)) 36 | if (time.time() - get_time) > 3600: 37 | results = [] 38 | async with aiohttp.ClientSession() as session: 39 | async with session.get(f'https://{"sukebei." 
if sukebei else ""}nyaa.si/?page=rss&q={urlencode(query)}') as resp: 40 | d = feedparser.parse(await resp.text()) 41 | text = '' 42 | a = 0 43 | parser = pyrogram_html.HTML(None) 44 | for i in sorted(d['entries'], key=lambda i: int(i['nyaa_seeders']), reverse=True): 45 | if i['nyaa_size'].startswith('0'): 46 | continue 47 | if not int(i['nyaa_seeders']): 48 | break 49 | link = i['link'] 50 | splitted = urlsplit(link) 51 | if splitted.scheme == 'magnet' and splitted.query: 52 | link = f'{link}' 53 | newtext = f'''{a + 1}. {html.escape(i["title"])} 54 | Link: {link} 55 | Size: {i["nyaa_size"]} 56 | Seeders: {i["nyaa_seeders"]} 57 | Leechers: {i["nyaa_leechers"]} 58 | Category: {i["nyaa_category"]}\n\n''' 59 | futtext = text + newtext 60 | if (a and not a % 10) or len((await parser.parse(futtext))['message']) > 4096: 61 | results.append(text) 62 | futtext = newtext 63 | text = futtext 64 | a += 1 65 | results.append(text) 66 | ttl = time.time() 67 | used_search_info[query] = results, ttl 68 | try: 69 | return results[page], len(results), ttl 70 | except IndexError: 71 | return '', len(results), ttl 72 | 73 | message_info = dict() 74 | ignore = set() 75 | 76 | @app.on_message(filters.command(['nyaasi', f'nyaasi@{bot.username}'])) 77 | async def nyaa_search(client, message): 78 | text = message.text.split(' ') 79 | text.pop(0) 80 | query = ' '.join(text) 81 | await init_search(client, message, query, False) 82 | 83 | @app.on_message(filters.command(['sukebei', f'sukebei@{bot.username}'])) 84 | async def nyaa_search_sukebei(client, message): 85 | text = message.text.split(' ') 86 | text.pop(0) 87 | query = ' '.join(text) 88 | await init_search(client, message, query, True) 89 | 90 | async def init_search(client, message, query, sukebei): 91 | result, pages, ttl = await return_search(query, sukebei=sukebei) 92 | if not result: 93 | await message.reply_text('No results found') 94 | else: 95 | buttons = [InlineKeyboardButton(f'1/{pages}', 'nyaa_nop'), InlineKeyboardButton(f'Next', 'nyaa_next')] 96 | if pages == 1: 97 | buttons.pop() 98 | reply = await message.reply_text(result, reply_markup=InlineKeyboardMarkup([ 99 | buttons 100 | ])) 101 | message_info[(reply.chat.id, reply.message_id)] = message.from_user.id, ttl, query, 1, pages, sukebei 102 | 103 | @app.on_callback_query(custom_filters.callback_data('nyaa_nop')) 104 | async def nyaa_nop(client, callback_query): 105 | await callback_query.answer(cache_time=3600) 106 | 107 | callback_lock = asyncio.Lock() 108 | @app.on_callback_query(custom_filters.callback_data(['nyaa_back', 'nyaa_next'])) 109 | async def nyaa_callback(client, callback_query): 110 | message = callback_query.message 111 | message_identifier = (message.chat.id, message.message_id) 112 | data = callback_query.data 113 | async with callback_lock: 114 | if message_identifier in ignore: 115 | await callback_query.answer() 116 | return 117 | user_id, ttl, query, current_page, pages, sukebei = message_info.get(message_identifier, (None, 0, None, 0, 0, None)) 118 | og_current_page = current_page 119 | if data == 'nyaa_back': 120 | current_page -= 1 121 | elif data == 'nyaa_next': 122 | current_page += 1 123 | if current_page < 1: 124 | current_page = 1 125 | elif current_page > pages: 126 | current_page = pages 127 | ttl_ended = (time.time() - ttl) > 3600 128 | if ttl_ended: 129 | text = getattr(message.text, 'html', 'Search expired') 130 | else: 131 | if callback_query.from_user.id != user_id: 132 | await callback_query.answer('...no', cache_time=3600) 133 | return 134 | text, pages, 
ttl = await return_search(query, current_page, sukebei) 135 | buttons = [InlineKeyboardButton(f'Prev', 'nyaa_back'), InlineKeyboardButton(f'{current_page}/{pages}', 'nyaa_nop'), InlineKeyboardButton(f'Next', 'nyaa_next')] 136 | if ttl_ended: 137 | buttons = [InlineKeyboardButton('Search Expired', 'nyaa_nop')] 138 | else: 139 | if current_page == 1: 140 | buttons.pop(0) 141 | if current_page == pages: 142 | buttons.pop() 143 | if ttl_ended or current_page != og_current_page: 144 | await callback_query.edit_message_text(text, reply_markup=InlineKeyboardMarkup([ 145 | buttons 146 | ])) 147 | message_info[message_identifier] = user_id, ttl, query, current_page, pages, sukebei 148 | if ttl_ended: 149 | ignore.add(message_identifier) 150 | await callback_query.answer() 151 | 152 | 153 | class TorrentSearch: 154 | index = 0 155 | query = None 156 | message = None 157 | response = None 158 | response_range = None 159 | 160 | RESULT_LIMIT = 4 161 | RESULT_STR = None 162 | 163 | def __init__(self, command: str, source: str, result_str: str): 164 | self.command = command 165 | self.source = source.rstrip('/') 166 | self.RESULT_STR = result_str 167 | 168 | app.add_handler(MessageHandler(self.find, filters.command([command, f'{self.command}@{bot.username}']))) 169 | app.add_handler(CallbackQueryHandler(self.previous, filters.regex(f"{self.command}_previous"))) 170 | app.add_handler(CallbackQueryHandler(self.delete, filters.regex(f"{self.command}_delete"))) 171 | app.add_handler(CallbackQueryHandler(self.next, filters.regex(f"{self.command}_next"))) 172 | 173 | @staticmethod 174 | def format_magnet(string: str): 175 | if not string: 176 | return "" 177 | return string.split('&tr', 1)[0] 178 | 179 | def get_formatted_string(self, values): 180 | string = self.RESULT_STR.format(**values) 181 | extra = "" 182 | if "Files" in values: 183 | tmp_str = "➲[{Quality} - {Type} ({Size})]({Torrent}): `{magnet}`" 184 | extra += "\n".join( 185 | tmp_str.format(**f, magnet=self.format_magnet(f['Magnet'])) 186 | for f in values['Files'] 187 | ) 188 | else: 189 | magnet = values.get('magnet', values.get('Magnet')) # Avoid updating source dict 190 | if magnet: 191 | extra += f"➲Magnet: `{self.format_magnet(magnet)}`" 192 | if (extra): 193 | string += "\n" + extra 194 | return string 195 | 196 | async def update_message(self): 197 | prevBtn = InlineKeyboardButton(f"Prev", callback_data=f"{self.command}_previous") 198 | delBtn = InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data=f"{self.command}_delete") 199 | nextBtn = InlineKeyboardButton(f"Next", callback_data=f"{self.command}_next") 200 | 201 | inline = [] 202 | if (self.index != 0): 203 | inline.append(prevBtn) 204 | inline.append(delBtn) 205 | if (self.index != len(self.response_range) - 1): 206 | inline.append(nextBtn) 207 | 208 | res_lim = min(self.RESULT_LIMIT, len(self.response) - self.RESULT_LIMIT*self.index) 209 | result = f"**Page - {self.index+1}**\n\n" 210 | result += "\n\n=======================\n\n".join( 211 | self.get_formatted_string(self.response[self.response_range[self.index]+i]) 212 | for i in range(res_lim) 213 | ) 214 | 215 | await self.message.edit( 216 | result, 217 | reply_markup=InlineKeyboardMarkup([inline]), 218 | parse_mode="markdown", 219 | ) 220 | 221 | async def find(self, client, message): 222 | if len(message.command) < 2: 223 | await message.reply_text(f"Usage: /{self.command} query") 224 | return 225 | 226 | query = urlencode(message.text.split(None, 1)[1]) 227 | self.message = await message.reply_text("Searching") 228 | try: 
229 | async with aiohttp.ClientSession() as session: 230 | async with session.get(f"{self.source}/{query}") as resp: 231 | if (resp.status != 200): 232 | raise Exception('unsuccessful request') 233 | result = await resp.json() 234 | if (result and isinstance(result[0], list)): 235 | result = list(itertools.chain(*result)) 236 | self.response = result 237 | self.response_range = range(0, len(self.response), self.RESULT_LIMIT) 238 | except Exception: 239 | await self.message.edit("No Results Found.") 240 | return 241 | await self.update_message() 242 | 243 | async def delete(self, client, message): 244 | await self.message.delete()  # remove the results message, then reset the cached search state 245 | self.index = 0 246 | self.query = None 247 | self.response = None 248 | self.response_range = None 249 | self.message = None 250 | 251 | async def previous(self, client, message): 252 | self.index -= 1 253 | await self.update_message() 254 | 255 | async def next(self, client, message): 256 | self.index += 1 257 | await self.update_message() 258 | 259 | RESULT_STR_1337 = ( 260 | "➲Name: `{Name}`\n" 261 | "➲Size: {Size}\n" 262 | "➲Seeders: {Seeders} || ➲Leechers: {Leechers}" 263 | ) 264 | RESULT_STR_PIRATEBAY = ( 265 | "➲Name: `{Name}`\n" 266 | "➲Size: {Size}\n" 267 | "➲Seeders: {Seeders} || ➲Leechers: {Leechers}" 268 | ) 269 | RESULT_STR_TGX = ( 270 | "➲Name: `{Name}`\n" 271 | "➲Size: {Size}\n" 272 | "➲Seeders: {Seeders} || ➲Leechers: {Leechers}" 273 | ) 274 | RESULT_STR_YTS = ( 275 | "➲Name: `{Name}`\n" 276 | "➲Released on: {ReleasedDate}\n" 277 | "➲Genre: {Genre}\n" 278 | "➲Rating: {Rating}\n" 279 | "➲Likes: {Likes}\n" 280 | "➲Duration: {Runtime}\n" 281 | "➲Language: {Language}" 282 | ) 283 | RESULT_STR_EZTV = ( 284 | "➲Name: `{Name}`\n" 285 | "➲Size: {Size}\n" 286 | "➲Seeders: {Seeders}" 287 | ) 288 | RESULT_STR_TORLOCK = ( 289 | "➲Name: `{Name}`\n" 290 | "➲Size: {Size}\n" 291 | "➲Seeders: {Seeders} || ➲Leechers: {Leechers}" 292 | ) 293 | RESULT_STR_RARBG = ( 294 | "➲Name: `{Name}`\n" 295 | "➲Size: {Size}\n" 296 | "➲Seeders: {Seeders} || ➲Leechers: {Leechers}" 297 | ) 298 | RESULT_STR_ALL = ( 299 | "➲Name: `{Name}`\n" 300 | "➲Size: {Size}\n" 301 | "➲Seeders: {Seeders} || ➲Leechers: {Leechers}" 302 | ) 303 | 304 | torrents_dict = { 305 | '1337x': {'source': "https://torrenter-api.herokuapp.com/api/1337x/", 'result_str': RESULT_STR_1337}, 306 | 'piratebay': {'source': "https://torrenter-api.herokuapp.com/api/piratebay/", 'result_str': RESULT_STR_PIRATEBAY}, 307 | 'tgx': {'source': "https://torrenter-api.herokuapp.com/api/tgx/", 'result_str': RESULT_STR_TGX}, 308 | 'yts': {'source': "https://torrenter-api.herokuapp.com/api/yts/", 'result_str': RESULT_STR_YTS}, 309 | 'eztv': {'source': "https://torrenter-api.herokuapp.com/api/eztv/", 'result_str': RESULT_STR_EZTV}, 310 | 'torlock': {'source': "https://torrenter-api.herokuapp.com/api/torlock/", 'result_str': RESULT_STR_TORLOCK}, 311 | 'rarbg': {'source': "https://torrenter-api.herokuapp.com/api/rarbg/", 'result_str': RESULT_STR_RARBG}, 312 | 'ts': {'source': "https://torrenter-api.herokuapp.com/api/all/", 'result_str': RESULT_STR_ALL} 313 | } 314 | 315 | torrent_handlers = [] 316 | for command, value in torrents_dict.items(): 317 | torrent_handlers.append(TorrentSearch(command, value['source'], value['result_str'])) 318 | 319 | def searchhelp(update, context): 320 | help_string = ''' 321 | Torrent Search 322 | • /nyaasi [search query] 323 | • /sukebei [search query] 324 | • /1337x [search query] 325 | • /piratebay [search query] 326 | • /tgx [search query] 327 | • /yts [search query] 328 | • /eztv [search query] 329 | • /torlock [search query]
330 | • /rarbg [search query] 331 | • /ts [search query] 332 | ''' 333 | sendMessage(help_string, context.bot, update) 334 | 335 | 336 | SEARCHHELP_HANDLER = CommandHandler(BotCommands.TsHelpCommand, searchhelp, filters=(CustomFilters.authorized_chat | CustomFilters.authorized_user) & CustomFilters.mirror_owner_filter, run_async=True) 337 | dispatcher.add_handler(SEARCHHELP_HANDLER) 338 | -------------------------------------------------------------------------------- /bot/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import threading 4 | import time 5 | import random 6 | import string 7 | import subprocess 8 | import requests 9 | 10 | import aria2p 11 | import qbittorrentapi as qba 12 | import telegram.ext as tg 13 | from dotenv import load_dotenv 14 | from pyrogram import Client 15 | from telegraph import Telegraph 16 | 17 | import psycopg2 18 | from psycopg2 import Error 19 | 20 | import socket 21 | import faulthandler 22 | faulthandler.enable() 23 | 24 | socket.setdefaulttimeout(600) 25 | 26 | botStartTime = time.time() 27 | if os.path.exists('log.txt'): 28 | with open('log.txt', 'r+') as f: 29 | f.truncate(0) 30 | 31 | logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', 32 | handlers=[logging.FileHandler('log.txt'), logging.StreamHandler()], 33 | level=logging.INFO) 34 | 35 | LOGGER = logging.getLogger(__name__) 36 | 37 | 38 | CONFIG_FILE_URL = os.environ.get('CONFIG_FILE_URL', None) 39 | if CONFIG_FILE_URL is not None: 40 | res = requests.get(CONFIG_FILE_URL) 41 | if res.status_code == 200: 42 | with open('config.env', 'wb+') as f: 43 | f.write(res.content) 44 | f.close() 45 | else: 46 | logging.error(res.status_code) 47 | 48 | load_dotenv('config.env') 49 | 50 | 51 | SERVER_PORT = os.environ.get('SERVER_PORT', None) 52 | PORT = os.environ.get('PORT', SERVER_PORT) 53 | web = subprocess.Popen([f"gunicorn wserver:start_server --bind 0.0.0.0:{PORT} --worker-class aiohttp.GunicornWebWorker"], shell=True) 54 | time.sleep(1) 55 | alive = subprocess.Popen(["python3", "alive.py"]) 56 | subprocess.run(["mkdir", "-p", "qBittorrent/config"]) 57 | subprocess.run(["cp", "qBittorrent.conf", "qBittorrent/config/qBittorrent.conf"]) 58 | subprocess.run(["qbittorrent-nox", "-d", "--profile=."]) 59 | Interval = [] 60 | DRIVES_NAMES = [] 61 | DRIVES_IDS = [] 62 | INDEX_URL = [] 63 | 64 | 65 | def getConfig(name: str): 66 | return os.environ[name] 67 | 68 | def mktable(): 69 | try: 70 | conn = psycopg2.connect(DB_URI) 71 | cur = conn.cursor() 72 | sql = "CREATE TABLE users (uid bigint, sudo boolean DEFAULT FALSE);" 73 | cur.execute(sql) 74 | conn.commit() 75 | LOGGER.info("Table Created!") 76 | except Error as e: 77 | LOGGER.error(e) 78 | exit(1) 79 | 80 | try: 81 | if bool(getConfig('_____REMOVE_THIS_LINE_____')): 82 | logging.error('The README.md file there to be read! 
Exiting now!') 83 | exit() 84 | except KeyError: 85 | pass 86 | 87 | aria2 = aria2p.API( 88 | aria2p.Client( 89 | host="http://localhost", 90 | port=6800, 91 | secret="", 92 | ) 93 | ) 94 | 95 | 96 | def get_client() -> qba.TorrentsAPIMixIn: 97 | qb_client = qba.Client(host="localhost", port=8090, username="admin", password="adminadmin") 98 | try: 99 | qb_client.auth_log_in() 100 | qb_client.application.set_preferences({"disk_cache":64, "incomplete_files_ext":True, "max_connec":3000, "max_connec_per_torrent":300, "async_io_threads":8, "preallocate_all":True, "upnp":True, "dl_limit":-1, "up_limit":-1, "dht":True, "pex":True, "lsd":True, "encryption":0, "queueing_enabled":True, "max_active_downloads":15, "max_active_torrents":50, "dont_count_slow_torrents":True, "bittorrent_protocol":0, "recheck_completed_torrents":True, "enable_multi_connections_from_same_ip":True, "slow_torrent_dl_rate_threshold":100,"slow_torrent_inactive_timer":600}) 101 | return qb_client 102 | except qba.LoginFailed as e: 103 | LOGGER.error(str(e)) 104 | return None 105 | 106 | 107 | DOWNLOAD_DIR = None 108 | BOT_TOKEN = None 109 | 110 | download_dict_lock = threading.Lock() 111 | status_reply_dict_lock = threading.Lock() 112 | # Key: update.effective_chat.id 113 | # Value: telegram.Message 114 | status_reply_dict = {} 115 | # Key: update.message.message_id 116 | # Value: An object of Status 117 | download_dict = {} 118 | # Stores list of users and chats the bot is authorized to use in 119 | AUTHORIZED_CHATS = set() 120 | SUDO_USERS = set() 121 | if os.path.exists('authorized_chats.txt'): 122 | with open('authorized_chats.txt', 'r+') as f: 123 | lines = f.readlines() 124 | for line in lines: 125 | AUTHORIZED_CHATS.add(int(line.split()[0])) 126 | if os.path.exists('sudo_users.txt'): 127 | with open('sudo_users.txt', 'r+') as f: 128 | lines = f.readlines() 129 | for line in lines: 130 | SUDO_USERS.add(int(line.split()[0])) 131 | try: 132 | achats = getConfig('AUTHORIZED_CHATS') 133 | achats = achats.split(" ") 134 | for chats in achats: 135 | AUTHORIZED_CHATS.add(int(chats)) 136 | except: 137 | pass 138 | try: 139 | schats = getConfig('SUDO_USERS') 140 | schats = schats.split(" ") 141 | for chats in schats: 142 | SUDO_USERS.add(int(chats)) 143 | except: 144 | pass 145 | try: 146 | BOT_TOKEN = getConfig('BOT_TOKEN') 147 | parent_id = getConfig('GDRIVE_FOLDER_ID') 148 | DOWNLOAD_DIR = getConfig('DOWNLOAD_DIR') 149 | if not DOWNLOAD_DIR.endswith("/"): 150 | DOWNLOAD_DIR = DOWNLOAD_DIR + '/' 151 | DOWNLOAD_STATUS_UPDATE_INTERVAL = int(getConfig('DOWNLOAD_STATUS_UPDATE_INTERVAL')) 152 | OWNER_ID = int(getConfig('OWNER_ID')) 153 | AUTO_DELETE_MESSAGE_DURATION = int(getConfig('AUTO_DELETE_MESSAGE_DURATION')) 154 | TELEGRAM_API = getConfig('TELEGRAM_API') 155 | TELEGRAM_HASH = getConfig('TELEGRAM_HASH') 156 | UPSTREAM_REPO = getConfig('UPSTREAM_REPO') 157 | UPSTREAM_BRANCH = getConfig('UPSTREAM_BRANCH') 158 | except KeyError as e: 159 | LOGGER.error("One or more env variables missing! 
Exiting now") 160 | exit(1) 161 | try: 162 | DB_URI = getConfig('DATABASE_URL') 163 | if len(DB_URI) == 0: 164 | raise KeyError 165 | except KeyError: 166 | logging.warning('Database not provided!') 167 | DB_URI = None 168 | if DB_URI is not None: 169 | try: 170 | conn = psycopg2.connect(DB_URI) 171 | cur = conn.cursor() 172 | sql = "SELECT * from users;" 173 | cur.execute(sql) 174 | rows = cur.fetchall() #returns a list ==> (uid, sudo) 175 | for row in rows: 176 | AUTHORIZED_CHATS.add(row[0]) 177 | if row[1]: 178 | SUDO_USERS.add(row[0]) 179 | except Error as e: 180 | if 'relation "users" does not exist' in str(e): 181 | mktable() 182 | else: 183 | LOGGER.error(e) 184 | exit(1) 185 | finally: 186 | cur.close() 187 | conn.close() 188 | 189 | LOGGER.info("Generating USER_SESSION_STRING") 190 | app = Client(':memory:', api_id=int(TELEGRAM_API), api_hash=TELEGRAM_HASH, bot_token=BOT_TOKEN) 191 | 192 | # Generate Telegraph Token 193 | sname = ''.join(random.SystemRandom().choices(string.ascii_letters, k=8)) 194 | LOGGER.info("Generating TELEGRAPH_TOKEN using '" + sname + "' name") 195 | telegraph = Telegraph() 196 | telegraph.create_account(short_name=sname) 197 | telegraph_token = telegraph.get_access_token() 198 | 199 | try: 200 | STATUS_LIMIT = getConfig('STATUS_LIMIT') 201 | if len(STATUS_LIMIT) == 0: 202 | raise KeyError 203 | else: 204 | STATUS_LIMIT = int(getConfig('STATUS_LIMIT')) 205 | except KeyError: 206 | STATUS_LIMIT = None 207 | try: 208 | MEGA_API_KEY = getConfig('MEGA_API_KEY') 209 | except KeyError: 210 | logging.warning('MEGA API KEY not provided!') 211 | MEGA_API_KEY = None 212 | try: 213 | MEGA_EMAIL_ID = getConfig('MEGA_EMAIL_ID') 214 | MEGA_PASSWORD = getConfig('MEGA_PASSWORD') 215 | if len(MEGA_EMAIL_ID) == 0 or len(MEGA_PASSWORD) == 0: 216 | raise KeyError 217 | except KeyError: 218 | logging.warning('MEGA Credentials not provided!') 219 | MEGA_EMAIL_ID = None 220 | MEGA_PASSWORD = None 221 | try: 222 | HEROKU_API_KEY = getConfig('HEROKU_API_KEY') 223 | except KeyError: 224 | logging.warning('HEROKU API KEY not provided!') 225 | HEROKU_API_KEY = None 226 | try: 227 | HEROKU_APP_NAME = getConfig('HEROKU_APP_NAME') 228 | except KeyError: 229 | logging.warning('HEROKU APP NAME not provided!') 230 | HEROKU_APP_NAME = None 231 | try: 232 | UPTOBOX_TOKEN = getConfig('UPTOBOX_TOKEN') 233 | except KeyError: 234 | logging.warning('UPTOBOX_TOKEN not provided!') 235 | UPTOBOX_TOKEN = None 236 | try: 237 | INDEX_URL = getConfig('INDEX_URL') 238 | if len(INDEX_URL) == 0: 239 | INDEX_URL = None 240 | except KeyError: 241 | INDEX_URL = None 242 | try: 243 | TORRENT_DIRECT_LIMIT = getConfig('TORRENT_DIRECT_LIMIT') 244 | if len(TORRENT_DIRECT_LIMIT) == 0: 245 | TORRENT_DIRECT_LIMIT = None 246 | except KeyError: 247 | TORRENT_DIRECT_LIMIT = None 248 | try: 249 | CLONE_LIMIT = getConfig('CLONE_LIMIT') 250 | if len(CLONE_LIMIT) == 0: 251 | CLONE_LIMIT = None 252 | except KeyError: 253 | CLONE_LIMIT = None 254 | try: 255 | MEGA_LIMIT = getConfig('MEGA_LIMIT') 256 | if len(MEGA_LIMIT) == 0: 257 | MEGA_LIMIT = None 258 | except KeyError: 259 | MEGA_LIMIT = None 260 | try: 261 | TAR_UNZIP_LIMIT = getConfig('TAR_UNZIP_LIMIT') 262 | if len(TAR_UNZIP_LIMIT) == 0: 263 | TAR_UNZIP_LIMIT = None 264 | except KeyError: 265 | TAR_UNZIP_LIMIT = None 266 | try: 267 | BUTTON_FOUR_NAME = getConfig('BUTTON_FOUR_NAME') 268 | BUTTON_FOUR_URL = getConfig('BUTTON_FOUR_URL') 269 | if len(BUTTON_FOUR_NAME) == 0 or len(BUTTON_FOUR_URL) == 0: 270 | raise KeyError 271 | except KeyError: 272 | BUTTON_FOUR_NAME = None 
273 | BUTTON_FOUR_URL = None 274 | try: 275 | BUTTON_FIVE_NAME = getConfig('BUTTON_FIVE_NAME') 276 | BUTTON_FIVE_URL = getConfig('BUTTON_FIVE_URL') 277 | if len(BUTTON_FIVE_NAME) == 0 or len(BUTTON_FIVE_URL) == 0: 278 | raise KeyError 279 | except KeyError: 280 | BUTTON_FIVE_NAME = None 281 | BUTTON_FIVE_URL = None 282 | try: 283 | BUTTON_SIX_NAME = getConfig('BUTTON_SIX_NAME') 284 | BUTTON_SIX_URL = getConfig('BUTTON_SIX_URL') 285 | if len(BUTTON_SIX_NAME) == 0 or len(BUTTON_SIX_URL) == 0: 286 | raise KeyError 287 | except KeyError: 288 | BUTTON_SIX_NAME = None 289 | BUTTON_SIX_URL = None 290 | try: 291 | IMAGE_URL = getConfig('IMAGE_URL') 292 | if len(IMAGE_URL) == 0: 293 | IMAGE_URL = 'https://telegra.ph/file/019996f816db9ed576cff.jpg' 294 | except KeyError: 295 | IMAGE_URL = 'https://telegra.ph/file/019996f816db9ed576cff.jpg' 296 | try: 297 | STOP_DUPLICATE = getConfig('STOP_DUPLICATE') 298 | if STOP_DUPLICATE.lower() == 'true': 299 | STOP_DUPLICATE = True 300 | else: 301 | STOP_DUPLICATE = False 302 | except KeyError: 303 | STOP_DUPLICATE = False 304 | try: 305 | VIEW_LINK = getConfig('VIEW_LINK') 306 | if VIEW_LINK.lower() == 'true': 307 | VIEW_LINK = True 308 | else: 309 | VIEW_LINK = False 310 | except KeyError: 311 | VIEW_LINK = False 312 | try: 313 | IS_TEAM_DRIVE = getConfig('IS_TEAM_DRIVE') 314 | if IS_TEAM_DRIVE.lower() == 'true': 315 | IS_TEAM_DRIVE = True 316 | else: 317 | IS_TEAM_DRIVE = False 318 | except KeyError: 319 | IS_TEAM_DRIVE = False 320 | try: 321 | USE_SERVICE_ACCOUNTS = getConfig('USE_SERVICE_ACCOUNTS') 322 | if USE_SERVICE_ACCOUNTS.lower() == 'true': 323 | USE_SERVICE_ACCOUNTS = True 324 | else: 325 | USE_SERVICE_ACCOUNTS = False 326 | except KeyError: 327 | USE_SERVICE_ACCOUNTS = False 328 | try: 329 | BLOCK_MEGA_FOLDER = getConfig('BLOCK_MEGA_FOLDER') 330 | if BLOCK_MEGA_FOLDER.lower() == 'true': 331 | BLOCK_MEGA_FOLDER = True 332 | else: 333 | BLOCK_MEGA_FOLDER = False 334 | except KeyError: 335 | BLOCK_MEGA_FOLDER = False 336 | try: 337 | BLOCK_MEGA_LINKS = getConfig('BLOCK_MEGA_LINKS') 338 | if BLOCK_MEGA_LINKS.lower() == 'true': 339 | BLOCK_MEGA_LINKS = True 340 | else: 341 | BLOCK_MEGA_LINKS = False 342 | except KeyError: 343 | BLOCK_MEGA_LINKS = False 344 | try: 345 | SHORTENER = getConfig('SHORTENER') 346 | SHORTENER_API = getConfig('SHORTENER_API') 347 | if len(SHORTENER) == 0 or len(SHORTENER_API) == 0: 348 | raise KeyError 349 | except KeyError: 350 | SHORTENER = None 351 | SHORTENER_API = None 352 | 353 | IGNORE_PENDING_REQUESTS = False 354 | try: 355 | if getConfig("IGNORE_PENDING_REQUESTS").lower() == "true": 356 | IGNORE_PENDING_REQUESTS = True 357 | except KeyError: 358 | pass 359 | 360 | try: 361 | BASE_URL = getConfig('BASE_URL_OF_BOT') 362 | if len(BASE_URL) == 0: 363 | BASE_URL = None 364 | except KeyError: 365 | logging.warning('BASE_URL_OF_BOT not provided! 
Bot will go down soon!') 366 | BASE_URL = None 367 | 368 | try: 369 | IS_VPS = getConfig('IS_VPS') 370 | if IS_VPS.lower() == 'true': 371 | IS_VPS = True 372 | else: 373 | IS_VPS = False 374 | except KeyError: 375 | IS_VPS = False 376 | 377 | try: 378 | SERVER_PORT = getConfig('SERVER_PORT') 379 | if len(SERVER_PORT) == 0: 380 | SERVER_PORT = None 381 | except KeyError: 382 | logging.warning('SERVER_PORT not provided!') 383 | SERVER_PORT = None 384 | 385 | try: 386 | TOKEN_PICKLE_URL = getConfig('TOKEN_PICKLE_URL') 387 | if len(TOKEN_PICKLE_URL) == 0: 388 | TOKEN_PICKLE_URL = None 389 | else: 390 | out = subprocess.run(["wget", "-q", "-O", "token.pickle", TOKEN_PICKLE_URL]) 391 | if out.returncode != 0: 392 | logging.error(out) 393 | except KeyError: 394 | TOKEN_PICKLE_URL = None 395 | 396 | try: 397 | GDTOT_COOKIES = getConfig('GDTOT_COOKIES') 398 | except KeyError: 399 | logging.warning('GDTOT_COOKIES not provided!') 400 | GDTOT_COOKIES = None 401 | 402 | try: 403 | ACCOUNTS_ZIP_URL = getConfig('ACCOUNTS_ZIP_URL') 404 | if len(ACCOUNTS_ZIP_URL) == 0: 405 | ACCOUNTS_ZIP_URL = None 406 | else: 407 | out = subprocess.run(["wget", "-q", "-O", "accounts.zip", ACCOUNTS_ZIP_URL]) 408 | if out.returncode != 0: 409 | logging.error(out) 410 | raise KeyError 411 | subprocess.run(["unzip", "-q", "-o", "accounts.zip"]) 412 | os.remove("accounts.zip") 413 | except KeyError: 414 | ACCOUNTS_ZIP_URL = None 415 | 416 | updater = tg.Updater(token=BOT_TOKEN) 417 | bot = updater.bot 418 | dispatcher = updater.dispatcher 419 | --------------------------------------------------------------------------------
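A note on the config loading in bot/__init__.py above: every optional variable repeats the same try/except block and the same 'true' string comparison. Below is a minimal sketch of how those reads could be consolidated, assuming the same getConfig accessor defined in that file; the helper names get_config_bool and get_config_str are illustrative only and are not part of the repository.
```
import os

def getConfig(name: str):
    # same accessor as in bot/__init__.py
    return os.environ[name]

def get_config_bool(name: str, default: bool = False) -> bool:
    # Missing keys and anything other than the string 'true' fall back to the default.
    try:
        return getConfig(name).lower() == 'true'
    except KeyError:
        return default

def get_config_str(name: str, default=None):
    # Missing keys and empty strings fall back to the default.
    try:
        value = getConfig(name)
        return value if len(value) != 0 else default
    except KeyError:
        return default

# Example usage mirroring a few of the variables parsed above
STOP_DUPLICATE = get_config_bool('STOP_DUPLICATE')
IS_VPS = get_config_bool('IS_VPS')
INDEX_URL = get_config_str('INDEX_URL')
```
This keeps the fallback behaviour shown above (False for missing booleans, None for empty or missing strings) in one place instead of one try/except per variable.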