├── web
│   ├── __init__.py
│   └── nodes.py
├── bot
│   ├── helper
│   │   ├── __init__.py
│   │   ├── ext_utils
│   │   │   ├── __init__.py
│   │   │   ├── exceptions.py
│   │   │   ├── telegraph_helper.py
│   │   │   ├── html_helper.py
│   │   │   ├── fs_utils.py
│   │   │   └── db_handler.py
│   │   ├── mirror_utils
│   │   │   ├── __init__.py
│   │   │   ├── download_utils
│   │   │   │   ├── __init__.py
│   │   │   │   ├── gd_downloader.py
│   │   │   │   ├── direct_link_generator_license.md
│   │   │   │   ├── telegram_downloader.py
│   │   │   │   ├── yt_dlp_download_helper.py
│   │   │   │   ├── mega_downloader.py
│   │   │   │   ├── aria2_download.py
│   │   │   │   └── qbit_downloader.py
│   │   │   ├── status_utils
│   │   │   │   ├── __init__.py
│   │   │   │   ├── split_status.py
│   │   │   │   ├── telegram_download_status.py
│   │   │   │   ├── clone_status.py
│   │   │   │   ├── gd_download_status.py
│   │   │   │   ├── tg_upload_status.py
│   │   │   │   ├── upload_status.py
│   │   │   │   ├── mega_download_status.py
│   │   │   │   ├── youtube_dl_download_status.py
│   │   │   │   ├── zip_status.py
│   │   │   │   ├── extract_status.py
│   │   │   │   ├── qbit_download_status.py
│   │   │   │   └── aria_download_status.py
│   │   │   └── upload_utils
│   │   │       ├── __init__.py
│   │   │       └── pyrogramEngine.py
│   │   └── telegram_helper
│   │       ├── __init__.py
│   │       ├── button_build.py
│   │       ├── filters.py
│   │       ├── bot_commands.py
│   │       └── message_utils.py
│   └── modules
│       ├── __init__.py
│       ├── delete.py
│       ├── shell.py
│       ├── count.py
│       ├── list.py
│       ├── mirror_status.py
│       ├── eval.py
│       ├── cancel_mirror.py
│       ├── bt_select.py
│       ├── leech_settings.py
│       ├── authorize.py
│       ├── clone.py
│       └── search.py
├── _config.yml
├── start.sh
├── captain-definition
├── requirements-cli.txt
├── docker-compose.yml
├── .gitignore
├── Dockerfile
├── generate_string_session.py
├── requirements.txt
├── .github
│   ├── workflows
│   │   └── deploy.yml
│   └── ISSUE_TEMPLATE
│       ├── bug_report.md
│       └── feature_request.md
├── generate_drive_token.py
├── aria.sh
├── qBittorrent
│   └── config
│       └── qBittorrent.conf
├── driveid.py
├── update.py
├── add_to_team_drive.py
└── config_sample.env
/web/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/helper/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/modules/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-time-machine
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/start.sh:
--------------------------------------------------------------------------------
1 | python3 update.py && python3 -m bot
2 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/upload_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/captain-definition:
--------------------------------------------------------------------------------
1 | {
2 | "schemaVersion": 2,
3 | "dockerfilePath": "./Dockerfile"
4 | }
5 |
--------------------------------------------------------------------------------
/requirements-cli.txt:
--------------------------------------------------------------------------------
1 | oauth2client
2 | google-api-python-client
3 | progress
4 | progressbar2
5 | httplib2shim
6 | google_auth_oauthlib
7 | pyrogram>=2
8 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.3"
2 |
3 | services:
4 | app:
5 | build: .
6 | command: bash start.sh
7 | restart: on-failure
8 | ports:
9 | - "80:80"
10 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | config.env
2 | *auth_token.txt
3 | *.pyc
4 | data*
5 | .vscode
6 | .idea
7 | *.json
8 | *.pickle
9 | .netrc
10 | log.txt
11 | accounts/*
12 | Thumbnails/*
13 | drive_folder
14 | cookies.txt
15 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM anasty17/mltb:latest
2 |
3 | WORKDIR /usr/src/app
4 | RUN chmod 777 /usr/src/app
5 |
6 | COPY requirements.txt .
7 | RUN pip3 install --no-cache-dir -r requirements.txt
8 |
9 | COPY . .
10 |
11 | CMD ["bash", "start.sh"]
12 |
--------------------------------------------------------------------------------
/generate_string_session.py:
--------------------------------------------------------------------------------
1 | from pyrogram import Client
2 |
3 | print('Pyrogram v2 or greater is required.')
4 | API_KEY = int(input("Enter API KEY: "))
5 | API_HASH = input("Enter API HASH: ")
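  | # in_memory=True keeps the session out of a local file; the exported session string is printed instead.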
6 | with Client(name='USS', api_id=API_KEY, api_hash=API_HASH, in_memory=True) as app:
7 | print(app.export_session_string())
8 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/exceptions.py:
--------------------------------------------------------------------------------
1 | class DirectDownloadLinkException(Exception):
2 |     """No method was found for extracting a direct download link from the HTTP link"""
3 | pass
4 |
5 |
6 | class NotSupportedExtractionArchive(Exception):
7 |     """The archive format the user is trying to extract is not supported"""
8 | pass
9 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | anytree
2 | aria2p
3 | asyncio
4 | beautifulsoup4
5 | bencoding
6 | cfscrape
7 | feedparser
8 | flask
9 | google-api-python-client
10 | google-auth-httplib2
11 | google-auth-oauthlib
12 | gunicorn
13 | lk21
14 | lxml
15 | pillow
16 | psutil
17 | psycopg2-binary
18 | pybase64
19 | pycryptodomex
20 | pyrogram
21 | python-dotenv
22 | python-magic
23 | python-telegram-bot
24 | qbittorrent-api
25 | requests
26 | SecretStorage
27 | tenacity
28 | tgCrypto
29 | xattr
30 | yt-dlp
31 | telegraph
--------------------------------------------------------------------------------
/.github/workflows/deploy.yml:
--------------------------------------------------------------------------------
1 | name: Manually Deploy to Heroku
2 |
3 | on: workflow_dispatch
4 |
5 | jobs:
6 | deploy:
7 | runs-on: ubuntu-latest
8 | steps:
9 | - uses: actions/checkout@v2
10 | - uses: akhileshns/heroku-deploy@v3.12.12
11 | with:
12 | heroku_api_key: ${{secrets.HEROKU_API_KEY}}
13 | heroku_app_name: ${{secrets.HEROKU_APP_NAME}}
14 | heroku_email: ${{secrets.HEROKU_EMAIL}}
15 | usedocker: true
16 | docker_heroku_process_type: web
17 | stack: "container"
18 | region: "us"
19 | env:
20 | HD_CONFIG_FILE_URL: ${{secrets.CONFIG_FILE_URL}}
21 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Used this Command with this link or file.
16 | 2. Before/after/while Upload ..
17 | 3. Check logs
18 |
19 | **Expected behavior**
20 | A clear and concise description of what you expected to happen.
21 |
22 | **Screenshots**
23 | If applicable, add screenshots to help explain your problem.
24 |
25 | **Additional context**
26 | Add any other context about the problem here.
27 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/button_build.py:
--------------------------------------------------------------------------------
1 | from telegram import InlineKeyboardButton, InlineKeyboardMarkup
2 |
3 | class ButtonMaker:
4 | def __init__(self):
5 | self.button = []
6 |
7 | def buildbutton(self, key, link):
8 | self.button.append(InlineKeyboardButton(text = key, url = link))
9 |
10 | def sbutton(self, key, data):
11 | self.button.append(InlineKeyboardButton(text = key, callback_data = data))
12 |
13 | def build_menu(self, n_cols, footer_buttons=None, header_buttons=None):
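  |         # Arrange the collected buttons into rows of n_cols; optional header/footer rows go at the top/bottom.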
14 | menu = [self.button[i:i + n_cols] for i in range(0, len(self.button), n_cols)]
15 | if header_buttons:
16 | menu.insert(0, header_buttons)
17 | if footer_buttons:
18 | menu.append(footer_buttons)
19 | return InlineKeyboardMarkup(menu)
20 |
--------------------------------------------------------------------------------
/generate_drive_token.py:
--------------------------------------------------------------------------------
1 | import pickle
2 | import os
3 | from google_auth_oauthlib.flow import InstalledAppFlow
4 | from google.auth.transport.requests import Request
5 |
6 | credentials = None
7 | __G_DRIVE_TOKEN_FILE = "token.pickle"
8 | __OAUTH_SCOPE = ["https://www.googleapis.com/auth/drive"]
9 | if os.path.exists(__G_DRIVE_TOKEN_FILE):
10 | with open(__G_DRIVE_TOKEN_FILE, 'rb') as f:
11 | credentials = pickle.load(f)
12 | if credentials is None or not credentials.valid:
13 |     if (
14 |         credentials
15 |         and credentials.expired
16 |         and credentials.refresh_token
17 |     ):
18 |         credentials.refresh(Request())
19 |     else:
20 |         flow = InstalledAppFlow.from_client_secrets_file(
21 |             'credentials.json', __OAUTH_SCOPE)
22 |         credentials = flow.run_local_server(port=0, open_browser=False)
23 |
24 | # Save the credentials for the next run
25 | with open(__G_DRIVE_TOKEN_FILE, 'wb') as token:
26 | pickle.dump(credentials, token)
27 |
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/filters.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import MessageFilter
2 | from telegram import Message
3 | from bot import AUTHORIZED_CHATS, SUDO_USERS, OWNER_ID
4 |
5 |
6 | class CustomFilters:
7 | class __OwnerFilter(MessageFilter):
8 | def filter(self, message: Message):
9 | return message.from_user.id == OWNER_ID
10 |
11 | owner_filter = __OwnerFilter()
12 |
13 | class __AuthorizedUserFilter(MessageFilter):
14 | def filter(self, message: Message):
15 | uid = message.from_user.id
16 | return uid in AUTHORIZED_CHATS or uid in SUDO_USERS or uid == OWNER_ID
17 |
18 | authorized_user = __AuthorizedUserFilter()
19 |
20 | class __AuthorizedChat(MessageFilter):
21 | def filter(self, message: Message):
22 | return message.chat.id in AUTHORIZED_CHATS
23 |
24 | authorized_chat = __AuthorizedChat()
25 |
26 | class __SudoUser(MessageFilter):
27 | def filter(self, message: Message):
28 | return message.from_user.id in SUDO_USERS
29 |
30 | sudo_user = __SudoUser()
31 |
32 | @staticmethod
33 | def _owner_query(user_id):
34 | return user_id == OWNER_ID or user_id in SUDO_USERS
35 |
--------------------------------------------------------------------------------
/aria.sh:
--------------------------------------------------------------------------------
1 | if [ -z "$TORRENT_TIMEOUT" ]
2 | then
3 | TORRENT_TIMEOUT=0
4 | fi
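  | # Build a comma-separated tracker list from several public tracker sources for aria2's --bt-tracker option.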
5 | tracker_list=$(curl -Ns https://raw.githubusercontent.com/XIU2/TrackersListCollection/master/all.txt https://ngosang.github.io/trackerslist/trackers_all_http.txt https://newtrackon.com/api/all https://raw.githubusercontent.com/hezhijie0327/Trackerslist/main/trackerslist_tracker.txt | awk '$0' | tr '\n\n' ',')
6 | aria2c --allow-overwrite=true --auto-file-renaming=true --bt-enable-lpd=true --bt-detach-seed-only=true \
7 | --bt-remove-unselected-file=true --bt-stop-timeout=$TORRENT_TIMEOUT --bt-tracker="[$tracker_list]" \
8 | --check-certificate=false --check-integrity=true --continue=true --content-disposition-default-utf8=true \
9 | --daemon=true --disk-cache=40M --enable-rpc=true --follow-torrent=mem --force-save=true --http-accept-gzip=true \
10 | --max-connection-per-server=10 --max-concurrent-downloads=10 --max-file-not-found=0 --max-tries=20 \
11 | --min-split-size=10M --netrc-path=/usr/src/app/.netrc --optimize-concurrent-downloads=true \
12 | --peer-id-prefix=-qB4430- --peer-agent=qBittorrent/4.4.3 --quiet=true --reuse-uri=true \
13 | --rpc-max-request-size=1024M --seed-ratio=0 --split=10 --summary-interval=0 --user-agent=Wget/1.12
14 |
--------------------------------------------------------------------------------
/qBittorrent/config/qBittorrent.conf:
--------------------------------------------------------------------------------
1 | [LegalNotice]
2 | Accepted=true
3 |
4 | [BitTorrent]
5 | Session\AsyncIOThreadsCount=16
6 | Session\MultiConnectionsPerIp=true
7 | Session\SlowTorrentsDownloadRate=2
8 | Session\SlowTorrentsUploadRate=2
9 | Session\SlowTorrentsInactivityTimer=600
10 | Session\GlobalMaxSeedingMinutes=-1
11 |
12 | [Preferences]
13 | Advanced\AnnounceToAllTrackers=true
14 | Advanced\AnonymousMode=false
15 | Advanced\IgnoreLimitsLAN=false
16 | Advanced\LtTrackerExchange=true
17 | Advanced\RecheckOnCompletion=true
18 | Bittorrent\AddTrackers=false
19 | Bittorrent\MaxRatio=-1
20 | Bittorrent\MaxRatioAction=0
21 | Bittorrent\MaxConnecs=-1
22 | Bittorrent\MaxConnecsPerTorrent=-1
23 | Bittorrent\MaxUploads=-1
24 | Bittorrent\MaxUploadsPerTorrent=-1
25 | Bittorrent\DHT=true
26 | Bittorrent\PeX=true
27 | Bittorrent\LSD=true
28 | Downloads\PreAllocation=true
29 | Downloads\UseIncompleteExtension=true
30 | Downloads\DiskWriteCacheSize=-1
31 | General\PreventFromSuspendWhenDownloading=true
32 | Queueing\IgnoreSlowTorrents=true
33 | Queueing\MaxActiveDownloads=100
34 | Queueing\MaxActiveTorrents=50
35 | Queueing\MaxActiveUploads=50
36 | Queueing\QueueingEnabled=false
37 | Search\SearchEnabled=true
38 | WebUI\Enabled=true
39 | WebUI\Port=8090
40 | WebUI\LocalHostAuth=false
41 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/split_status.py:
--------------------------------------------------------------------------------
1 | from bot import LOGGER
2 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus, EngineStatus
3 |
4 |
5 | class SplitStatus:
6 | def __init__(self, name, size, gid, listener):
7 | self.__name = name
8 | self.__gid = gid
9 | self.__size = size
10 | self.__listener = listener
11 | self.message = listener.message
12 |
13 | def gid(self):
14 | return self.__gid
15 |
16 | def progress(self):
17 | return '0'
18 |
19 | def speed(self):
20 | return '0'
21 |
22 | def name(self):
23 | return self.__name
24 |
25 | def size(self):
26 | return get_readable_file_size(self.__size)
27 |
28 | def eta(self):
29 | return '0s'
30 |
31 | def status(self):
32 | return MirrorStatus.STATUS_SPLITTING
33 |
34 | def processed_bytes(self):
35 | return 0
36 |
37 | def download(self):
38 | return self
39 |
40 | def cancel_download(self):
41 | LOGGER.info(f'Cancelling Split: {self.__name}')
42 | if self.__listener.suproc is not None:
43 | self.__listener.suproc.kill()
44 | self.__listener.onUploadError('splitting stopped by user!')
45 |
46 | def eng(self):
47 | return EngineStatus.STATUS_SPLIT
48 |
--------------------------------------------------------------------------------
/bot/modules/delete.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 | from telegram.ext import CommandHandler
3 |
4 | from bot import dispatcher, LOGGER
5 | from bot.helper.telegram_helper.message_utils import auto_delete_message, sendMessage
6 | from bot.helper.telegram_helper.filters import CustomFilters
7 | from bot.helper.telegram_helper.bot_commands import BotCommands
8 | from bot.helper.mirror_utils.upload_utils import gdriveTools
9 | from bot.helper.ext_utils.bot_utils import is_gdrive_link
10 |
11 |
12 | def deletefile(update, context):
13 | reply_to = update.message.reply_to_message
14 | if len(context.args) == 1:
15 | link = context.args[0]
16 | elif reply_to:
17 | link = reply_to.text.split(maxsplit=1)[0].strip()
18 | else:
19 | link = ''
20 | if is_gdrive_link(link):
21 | LOGGER.info(link)
22 | drive = gdriveTools.GoogleDriveHelper()
23 | msg = drive.deletefile(link)
24 | else:
25 |         msg = 'Send a Gdrive link along with the command or reply to a Gdrive link with the command'
26 | reply_message = sendMessage(msg, context.bot, update.message)
27 | Thread(target=auto_delete_message, args=(context.bot, update.message, reply_message)).start()
28 |
29 | delete_handler = CommandHandler(command=BotCommands.DeleteCommand, callback=deletefile, filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
30 | dispatcher.add_handler(delete_handler)
31 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/telegram_download_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time, EngineStatus
2 |
3 |
4 | class TelegramDownloadStatus:
5 | def __init__(self, obj, listener, gid):
6 | self.__obj = obj
7 | self.__gid = gid
8 | self.message = listener.message
9 |
10 | def gid(self):
11 | return self.__gid
12 |
13 | def processed_bytes(self):
14 | return self.__obj.downloaded_bytes
15 |
16 | def size_raw(self):
17 | return self.__obj.size
18 |
19 | def size(self):
20 | return get_readable_file_size(self.size_raw())
21 |
22 | def status(self):
23 | return MirrorStatus.STATUS_DOWNLOADING
24 |
25 | def name(self):
26 | return self.__obj.name
27 |
28 | def progress_raw(self):
29 | return self.__obj.progress
30 |
31 | def progress(self):
32 | return f'{round(self.progress_raw(), 2)}%'
33 |
34 | def speed_raw(self):
35 | """
36 | :return: Download speed in Bytes/Seconds
37 | """
38 | return self.__obj.download_speed
39 |
40 | def speed(self):
41 | return f'{get_readable_file_size(self.speed_raw())}/s'
42 |
43 | def eta(self):
44 | try:
45 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
46 | return f'{get_readable_time(seconds)}'
47 | except:
48 | return '-'
49 |
50 | def download(self):
51 | return self.__obj
52 |
53 | def eng(self):
54 | return EngineStatus.STATUS_TG
55 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/clone_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import MirrorStatus, EngineStatus, get_readable_file_size, get_readable_time
2 |
3 |
4 | class CloneStatus:
5 | def __init__(self, obj, size, message, gid):
6 | self.__obj = obj
7 | self.__size = size
8 | self.__gid = gid
9 | self.message = message
10 |
11 | def processed_bytes(self):
12 | return self.__obj.transferred_size
13 |
14 | def size_raw(self):
15 | return self.__size
16 |
17 | def size(self):
18 | return get_readable_file_size(self.__size)
19 |
20 | def status(self):
21 | return MirrorStatus.STATUS_CLONING
22 |
23 | def name(self):
24 | return self.__obj.name
25 |
26 | def gid(self) -> str:
27 | return self.__gid
28 |
29 | def progress_raw(self):
30 | try:
31 | return self.__obj.transferred_size / self.__size * 100
32 | except:
33 | return 0
34 |
35 | def progress(self):
36 | return f'{round(self.progress_raw(), 2)}%'
37 |
38 | def speed_raw(self):
39 | """
40 | :return: Download speed in Bytes/Seconds
41 | """
42 | return self.__obj.cspeed()
43 |
44 | def speed(self):
45 | return f'{get_readable_file_size(self.speed_raw())}/s'
46 |
47 | def eta(self):
48 | try:
49 | seconds = (self.__size - self.__obj.transferred_size) / self.speed_raw()
50 | return f'{get_readable_time(seconds)}'
51 | except:
52 | return '-'
53 |
54 | def download(self):
55 | return self.__obj
56 |
57 | def eng(self):
58 | return EngineStatus.STATUS_GD
59 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/gd_download_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import MirrorStatus, EngineStatus, get_readable_file_size, get_readable_time
2 |
3 |
4 | class GdDownloadStatus:
5 | def __init__(self, obj, size, listener, gid):
6 | self.__obj = obj
7 | self.__size = size
8 | self.__gid = gid
9 | self.message = listener.message
10 |
11 | def processed_bytes(self):
12 | return self.__obj.processed_bytes
13 |
14 | def size_raw(self):
15 | return self.__size
16 |
17 | def size(self):
18 | return get_readable_file_size(self.__size)
19 |
20 | def status(self):
21 | return MirrorStatus.STATUS_DOWNLOADING
22 |
23 | def name(self):
24 | return self.__obj.name
25 |
26 | def gid(self) -> str:
27 | return self.__gid
28 |
29 | def progress_raw(self):
30 | try:
31 | return self.__obj.processed_bytes / self.__size * 100
32 | except:
33 | return 0
34 |
35 | def progress(self):
36 | return f'{round(self.progress_raw(), 2)}%'
37 |
38 | def speed_raw(self):
39 | """
40 | :return: Download speed in Bytes/Seconds
41 | """
42 | return self.__obj.speed()
43 |
44 | def speed(self):
45 | return f'{get_readable_file_size(self.speed_raw())}/s'
46 |
47 | def eta(self):
48 | try:
49 | seconds = (self.__size - self.__obj.processed_bytes) / self.speed_raw()
50 | return f'{get_readable_time(seconds)}'
51 | except:
52 | return '-'
53 |
54 | def download(self):
55 | return self.__obj
56 |
57 | def eng(self):
58 | return EngineStatus.STATUS_GD
59 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/tg_upload_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time, EngineStatus
2 |
3 |
4 | class TgUploadStatus:
5 | def __init__(self, obj, size, gid, listener):
6 | self.__obj = obj
7 | self.__size = size
8 | self.__gid = gid
9 | self.message = listener.message
10 |
11 | def processed_bytes(self):
12 | return self.__obj.uploaded_bytes
13 |
14 | def size_raw(self):
15 | return self.__size
16 |
17 | def size(self):
18 | return get_readable_file_size(self.__size)
19 |
20 | def status(self):
21 | return MirrorStatus.STATUS_UPLOADING
22 |
23 | def name(self):
24 | return self.__obj.name
25 |
26 | def progress_raw(self):
27 | try:
28 | return self.__obj.uploaded_bytes / self.__size * 100
29 | except ZeroDivisionError:
30 | return 0
31 |
32 | def progress(self):
33 | return f'{round(self.progress_raw(), 2)}%'
34 |
35 | def speed_raw(self):
36 | """
37 | :return: Upload speed in Bytes/Seconds
38 | """
39 | return self.__obj.speed
40 |
41 | def speed(self):
42 | return f'{get_readable_file_size(self.speed_raw())}/s'
43 |
44 | def eta(self):
45 | try:
46 | seconds = (self.__size - self.__obj.uploaded_bytes) / self.speed_raw()
47 | return f'{get_readable_time(seconds)}'
48 | except ZeroDivisionError:
49 | return '-'
50 |
51 | def gid(self) -> str:
52 | return self.__gid
53 |
54 | def download(self):
55 | return self.__obj
56 |
57 | def eng(self):
58 | return EngineStatus.STATUS_TG
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/upload_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time, EngineStatus
2 |
3 |
4 | class UploadStatus:
5 | def __init__(self, obj, size, gid, listener):
6 | self.__obj = obj
7 | self.__size = size
8 | self.__gid = gid
9 | self.message = listener.message
10 |
11 | def processed_bytes(self):
12 | return self.__obj.processed_bytes
13 |
14 | def size_raw(self):
15 | return self.__size
16 |
17 | def size(self):
18 | return get_readable_file_size(self.__size)
19 |
20 | def status(self):
21 | return MirrorStatus.STATUS_UPLOADING
22 |
23 | def name(self):
24 | return self.__obj.name
25 |
26 | def progress_raw(self):
27 | try:
28 | return self.__obj.processed_bytes / self.__size * 100
29 | except ZeroDivisionError:
30 | return 0
31 |
32 | def progress(self):
33 | return f'{round(self.progress_raw(), 2)}%'
34 |
35 | def speed_raw(self):
36 | """
37 | :return: Upload speed in Bytes/Seconds
38 | """
39 | return self.__obj.speed()
40 |
41 | def speed(self):
42 | return f'{get_readable_file_size(self.speed_raw())}/s'
43 |
44 | def eta(self):
45 | try:
46 | seconds = (self.__size - self.__obj.processed_bytes) / self.speed_raw()
47 | return f'{get_readable_time(seconds)}'
48 | except ZeroDivisionError:
49 | return '-'
50 |
51 | def gid(self) -> str:
52 | return self.__gid
53 |
54 | def download(self):
55 | return self.__obj
56 |
57 | def eng(self):
58 | return EngineStatus.STATUS_GD
--------------------------------------------------------------------------------
/bot/modules/shell.py:
--------------------------------------------------------------------------------
1 | from subprocess import Popen, PIPE
2 | from telegram.ext import CommandHandler
3 |
4 | from bot import LOGGER, dispatcher
5 | from bot.helper.telegram_helper.filters import CustomFilters
6 | from bot.helper.telegram_helper.bot_commands import BotCommands
7 |
8 |
9 | def shell(update, context):
10 | message = update.effective_message
11 | cmd = message.text.split(maxsplit=1)
12 | if len(cmd) == 1:
13 | return message.reply_text('No command to execute was given.', parse_mode='HTML')
14 | cmd = cmd[1]
15 | process = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
16 | stdout, stderr = process.communicate()
17 | reply = ''
18 | stderr = stderr.decode()
19 | stdout = stdout.decode()
20 | if len(stdout) != 0:
21 | reply += f"*Stdout*\n`{stdout}`\n"
22 | LOGGER.info(f"Shell - {cmd} - {stdout}")
23 | if len(stderr) != 0:
24 | reply += f"*Stderr*\n`{stderr}`\n"
25 | LOGGER.error(f"Shell - {cmd} - {stderr}")
26 | if len(reply) > 3000:
27 | with open('shell_output.txt', 'w') as file:
28 | file.write(reply)
29 | with open('shell_output.txt', 'rb') as doc:
30 | context.bot.send_document(
31 | document=doc,
32 | filename=doc.name,
33 | reply_to_message_id=message.message_id,
34 | chat_id=message.chat_id)
35 | elif len(reply) != 0:
36 | message.reply_text(reply, parse_mode='Markdown')
37 | else:
38 | message.reply_text('No Reply', parse_mode='Markdown')
39 |
40 |
41 | SHELL_HANDLER = CommandHandler(BotCommands.ShellCommand, shell,
42 | filters=CustomFilters.owner_filter, run_async=True)
43 | dispatcher.add_handler(SHELL_HANDLER)
44 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/mega_download_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus, get_readable_time, EngineStatus
2 |
3 |
4 | class MegaDownloadStatus:
5 |
6 | def __init__(self, obj, listener):
7 | self.__listener = listener
8 | self.__obj = obj
9 | self.message = listener.message
10 |
11 | def name(self) -> str:
12 | return self.__obj.name
13 |
14 | def progress_raw(self):
15 | try:
16 | return round(self.processed_bytes() / self.__obj.size * 100,2)
17 | except:
18 | return 0.0
19 |
20 | def progress(self):
21 | """Progress of download in percentage"""
22 | return f"{self.progress_raw()}%"
23 |
24 | def status(self) -> str:
25 | return MirrorStatus.STATUS_DOWNLOADING
26 |
27 | def processed_bytes(self):
28 | return self.__obj.downloaded_bytes
29 |
30 | def eta(self):
31 | try:
32 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
33 | return f'{get_readable_time(seconds)}'
34 | except ZeroDivisionError:
35 | return '-'
36 |
37 | def size_raw(self):
38 | return self.__obj.size
39 |
40 | def size(self) -> str:
41 | return get_readable_file_size(self.size_raw())
42 |
43 | def downloaded(self) -> str:
44 |         return get_readable_file_size(self.processed_bytes())
45 |
46 | def speed_raw(self):
47 | return self.__obj.speed
48 |
49 | def speed(self) -> str:
50 | return f'{get_readable_file_size(self.speed_raw())}/s'
51 |
52 | def gid(self) -> str:
53 | return self.__obj.gid
54 |
55 | def download(self):
56 | return self.__obj
57 |
58 | def eng(self):
59 | return EngineStatus.STATUS_MEGA
60 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/youtube_dl_download_status.py:
--------------------------------------------------------------------------------
1 | from bot import DOWNLOAD_DIR
2 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time, EngineStatus
3 | from bot.helper.ext_utils.fs_utils import get_path_size
4 |
5 | class YoutubeDLDownloadStatus:
6 | def __init__(self, obj, listener, gid):
7 | self.__obj = obj
8 | self.__uid = listener.uid
9 | self.__gid = gid
10 | self.message = listener.message
11 |
12 | def gid(self):
13 | return self.__gid
14 |
15 | def processed_bytes(self):
16 | if self.__obj.downloaded_bytes != 0:
17 | return self.__obj.downloaded_bytes
18 | else:
19 | return get_path_size(f"{DOWNLOAD_DIR}{self.__uid}")
20 |
21 | def size_raw(self):
22 | return self.__obj.size
23 |
24 | def size(self):
25 | return get_readable_file_size(self.size_raw())
26 |
27 | def status(self):
28 | return MirrorStatus.STATUS_DOWNLOADING
29 |
30 | def name(self):
31 | return self.__obj.name
32 |
33 | def progress_raw(self):
34 | return self.__obj.progress
35 |
36 | def progress(self):
37 | return f'{round(self.progress_raw(), 2)}%'
38 |
39 | def speed_raw(self):
40 | """
41 | :return: Download speed in Bytes/Seconds
42 | """
43 | return self.__obj.download_speed
44 |
45 | def speed(self):
46 | return f'{get_readable_file_size(self.speed_raw())}/s'
47 |
48 | def eta(self):
49 | try:
50 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
51 | return f'{get_readable_time(seconds)}'
52 | except:
53 | return '-'
54 |
55 | def download(self):
56 | return self.__obj
57 |
58 | def eng(self):
59 | return EngineStatus.STATUS_YT
--------------------------------------------------------------------------------
/bot/modules/count.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CommandHandler
2 |
3 | from bot import dispatcher
4 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
5 | from bot.helper.telegram_helper.message_utils import deleteMessage, sendMessage
6 | from bot.helper.telegram_helper.filters import CustomFilters
7 | from bot.helper.telegram_helper.bot_commands import BotCommands
8 | from bot.helper.ext_utils.bot_utils import is_gdrive_link, new_thread
9 |
10 | @new_thread
11 | def countNode(update, context):
12 | reply_to = update.message.reply_to_message
13 | link = ''
14 | if len(context.args) == 1:
15 | link = context.args[0]
16 | if update.message.from_user.username:
17 | tag = f"@{update.message.from_user.username}"
18 | else:
19 | tag = update.message.from_user.mention_html(update.message.from_user.first_name)
20 | if reply_to:
21 | if len(link) == 0:
22 | link = reply_to.text.split(maxsplit=1)[0].strip()
23 | if reply_to.from_user.username:
24 | tag = f"@{reply_to.from_user.username}"
25 | else:
26 | tag = reply_to.from_user.mention_html(reply_to.from_user.first_name)
27 | if is_gdrive_link(link):
28 | msg = sendMessage(f"Counting: {link}", context.bot, update.message)
29 | gd = GoogleDriveHelper()
30 | result = gd.count(link)
31 | deleteMessage(context.bot, msg)
32 | cc = f'\n\ncc: {tag}'
33 | sendMessage(result + cc, context.bot, update.message)
34 | else:
35 |         sendMessage('Send a Gdrive link along with the command or reply to a Gdrive link with the command', context.bot, update.message)
36 |
37 | count_handler = CommandHandler(BotCommands.CountCommand, countNode, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
38 | dispatcher.add_handler(count_handler)
39 |
--------------------------------------------------------------------------------
/driveid.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | print("\n\n"\
4 |       " Bot can search files recursively, but you have to add the list of drives you want to search.\n"\
5 |       " Use the following format: (You can use 'root' as the ID in case you want to use your main drive.)\n"\
6 |       " teamdrive NAME      -->  any name you like\n"\
7 |       " teamdrive ID        -->  ID of the Team Drive you want to search ('root' for the main drive)\n"\
8 |       " teamdrive INDEX URL -->  enter the index URL for this drive;\n" \
9 |       "                          go to the respective drive and copy the URL from the address bar\n")
10 | msg = ''
11 | if os.path.exists('drive_folder'):
12 | with open('drive_folder', 'r+') as f:
13 | lines = f.read()
14 | if not re.match(r'^\s*$', lines):
15 | print(lines)
16 |             print("\n\n"\
17 |                   " DO YOU WISH TO KEEP THE DETAILS THAT YOU PREVIOUSLY ADDED ABOVE? ENTER (y/n)\n"\
18 |                   " IF NOTHING IS SHOWN ABOVE, ENTER n")
19 | while 1:
20 | choice = input()
21 | if choice in ['y', 'Y']:
22 | msg = f'{lines}'
23 | break
24 | elif choice in ['n', 'N']:
25 | break
26 | else:
27 |                     print("\n\n Invalid input. Do you wish to keep the above details? Please enter y or n.")
28 | num = int(input(" How many drives/folders would you like to add : "))
29 | for count in range(1, num + 1):
30 | print(f"\n > DRIVE - {count}\n")
31 | name = input(" Enter Drive NAME (anything) : ")
32 | id = input(" Enter Drive ID : ")
33 | index = input(" Enter Drive INDEX URL (optional) : ")
34 | if not name or not id:
35 |         print("\n\n ERROR: The name and ID fields cannot be left empty.")
36 | exit(1)
37 | name=name.replace(" ", "_")
38 | if index:
39 | if index[-1] == "/":
40 | index = index[:-1]
41 | else:
42 | index = ''
43 | msg += f"{name} {id} {index}\n"
44 | with open('drive_folder', 'w') as file:
45 | file.truncate(0)
46 | file.write(msg)
47 | print("\n\n Done!")
48 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/zip_status.py:
--------------------------------------------------------------------------------
1 | from time import time
2 |
3 | from bot import DOWNLOAD_DIR, LOGGER
4 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus, get_readable_time, EngineStatus
5 | from bot.helper.ext_utils.fs_utils import get_path_size
6 |
7 |
8 | class ZipStatus:
9 | def __init__(self, name, size, gid, listener):
10 | self.__name = name
11 | self.__size = size
12 | self.__gid = gid
13 | self.__listener = listener
14 | self.__uid = listener.uid
15 | self.__start_time = time()
16 | self.message = listener.message
17 |
18 | def gid(self):
19 | return self.__gid
20 |
21 | def speed_raw(self):
22 | return self.processed_bytes() / (time() - self.__start_time)
23 |
24 | def progress_raw(self):
25 | try:
26 | return self.processed_bytes() / self.__size * 100
27 | except:
28 | return 0
29 |
30 | def progress(self):
31 | return f'{round(self.progress_raw(), 2)}%'
32 |
33 | def speed(self):
34 | return f'{get_readable_file_size(self.speed_raw())}/s'
35 |
36 | def name(self):
37 | return self.__name
38 |
39 | def size_raw(self):
40 | return self.__size
41 |
42 | def size(self):
43 | return get_readable_file_size(self.__size)
44 |
45 | def eta(self):
46 | try:
47 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
48 | return f'{get_readable_time(seconds)}'
49 | except:
50 | return '-'
51 |
52 | def status(self):
53 | return MirrorStatus.STATUS_ARCHIVING
54 |
55 | def processed_bytes(self):
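  |         # Progress is measured from disk usage: the size of the new archive directory, or the growth of the download directory.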
56 | if self.__listener.newDir:
57 | return get_path_size(f"{DOWNLOAD_DIR}{self.__uid}10000")
58 | else:
59 | return get_path_size(f"{DOWNLOAD_DIR}{self.__uid}") - self.__size
60 |
61 | def download(self):
62 | return self
63 |
64 | def cancel_download(self):
65 | LOGGER.info(f'Cancelling Archive: {self.__name}')
66 | if self.__listener.suproc is not None:
67 | self.__listener.suproc.kill()
68 | self.__listener.onUploadError('archiving stopped by user!')
69 |
70 | def eng(self):
71 | return EngineStatus.STATUS_ZIP
--------------------------------------------------------------------------------
/update.py:
--------------------------------------------------------------------------------
1 | from logging import FileHandler, StreamHandler, INFO, basicConfig, error as log_error, info as log_info
2 | from os import path as ospath, environ
3 | from subprocess import run as srun
4 | from requests import get as rget
5 | from dotenv import load_dotenv
6 |
7 | if ospath.exists('log.txt'):
8 | with open('log.txt', 'r+') as f:
9 | f.truncate(0)
10 |
11 | basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
12 | handlers=[FileHandler('log.txt'), StreamHandler()],
13 | level=INFO)
14 |
15 | CONFIG_FILE_URL = environ.get('CONFIG_FILE_URL')
16 | try:
17 | if len(CONFIG_FILE_URL) == 0:
18 | raise TypeError
19 | try:
20 | res = rget(CONFIG_FILE_URL)
21 | if res.status_code == 200:
22 | with open('config.env', 'wb+') as f:
23 | f.write(res.content)
24 | else:
25 | log_error(f"Failed to download config.env {res.status_code}")
26 | except Exception as e:
27 | log_error(f"CONFIG_FILE_URL: {e}")
28 | except:
29 | pass
30 |
31 | load_dotenv('config.env', override=True)
32 |
33 | UPSTREAM_REPO = environ.get('UPSTREAM_REPO')
34 | UPSTREAM_BRANCH = environ.get('UPSTREAM_BRANCH')
35 | try:
36 | if len(UPSTREAM_REPO) == 0:
37 | raise TypeError
38 | except:
39 | UPSTREAM_REPO = None
40 | try:
41 | if len(UPSTREAM_BRANCH) == 0:
42 | raise TypeError
43 | except:
44 | UPSTREAM_BRANCH = 'master'
45 |
46 | if UPSTREAM_REPO is not None:
47 | if ospath.exists('.git'):
48 | srun(["rm", "-rf", ".git"])
49 |
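  | # Re-initialize the local repo and hard-reset it to UPSTREAM_BRANCH of UPSTREAM_REPO, discarding local changes.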
50 | update = srun([f"git init -q \
51 | && git config --global user.email arshtwitterbot@gmail.com \
52 | && git config --global user.name helios \
53 | && git add . \
54 | && git commit -sm update -q \
55 | && git remote add origin {UPSTREAM_REPO} \
56 | && git fetch origin -q \
57 | && git reset --hard origin/{UPSTREAM_BRANCH} -q"], shell=True)
58 |
59 | if update.returncode == 0:
60 | log_info('Successfully updated with latest commit from UPSTREAM_REPO')
61 | else:
62 | log_error('Something went wrong while updating, check UPSTREAM_REPO if valid or not!')
63 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/extract_status.py:
--------------------------------------------------------------------------------
1 | from time import time
2 |
3 | from bot import DOWNLOAD_DIR, LOGGER
4 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus, EngineStatus, get_readable_time
5 | from bot.helper.ext_utils.fs_utils import get_path_size
6 |
7 |
8 | class ExtractStatus:
9 | def __init__(self, name, size, gid, listener):
10 | self.__name = name
11 | self.__size = size
12 | self.__gid = gid
13 | self.__listener = listener
14 | self.__uid = listener.uid
15 | self.__start_time = time()
16 | self.message = listener.message
17 |
18 | def gid(self):
19 | return self.__gid
20 |
21 | def speed_raw(self):
22 | return self.processed_bytes() / (time() - self.__start_time)
23 |
24 | def progress_raw(self):
25 | try:
26 | return self.processed_bytes() / self.__size * 100
27 | except:
28 | return 0
29 |
30 | def progress(self):
31 | return f'{round(self.progress_raw(), 2)}%'
32 |
33 | def speed(self):
34 | return f'{get_readable_file_size(self.speed_raw())}/s'
35 |
36 | def name(self):
37 | return self.__name
38 |
39 | def size_raw(self):
40 | return self.__size
41 |
42 | def size(self):
43 | return get_readable_file_size(self.__size)
44 |
45 | def eta(self):
46 | try:
47 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
48 | return f'{get_readable_time(seconds)}'
49 | except:
50 | return '-'
51 |
52 | def status(self):
53 | return MirrorStatus.STATUS_EXTRACTING
54 |
55 | def processed_bytes(self):
56 | if self.__listener.newDir:
57 | return get_path_size(f"{DOWNLOAD_DIR}{self.__uid}10000")
58 | else:
59 | return get_path_size(f"{DOWNLOAD_DIR}{self.__uid}") - self.__size
60 |
61 | def download(self):
62 | return self
63 |
64 | def cancel_download(self):
65 | LOGGER.info(f'Cancelling Extract: {self.__name}')
66 | if self.__listener.suproc is not None:
67 | self.__listener.suproc.kill()
68 | self.__listener.onUploadError('extracting stopped by user!')
69 |
70 | def eng(self):
71 | return EngineStatus.STATUS_EXT
72 |
--------------------------------------------------------------------------------
/bot/modules/list.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 | from telegram.ext import CommandHandler, CallbackQueryHandler
3 |
4 | from bot import LOGGER, dispatcher
5 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
6 | from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, sendMarkup, sendFile, deleteMessage
7 | from bot.helper.telegram_helper.filters import CustomFilters
8 | from bot.helper.telegram_helper.bot_commands import BotCommands
9 | from bot.helper.telegram_helper import button_build
10 |
11 | def list_buttons(update, context):
12 | user_id = update.message.from_user.id
13 | if len(context.args) == 0:
14 | return sendMessage('Send a search key along with command', context.bot, update.message)
15 | buttons = button_build.ButtonMaker()
16 | buttons.sbutton("Folders", f"types {user_id} folders")
17 | buttons.sbutton("Files", f"types {user_id} files")
18 | buttons.sbutton("Both", f"types {user_id} both")
19 | buttons.sbutton("Cancel", f"types {user_id} cancel")
20 | button = buttons.build_menu(2)
21 | sendMarkup('Choose option to list.', context.bot, update.message, button)
22 |
23 | def select_type(update, context):
24 | query = update.callback_query
25 | user_id = query.from_user.id
26 | msg = query.message
27 | key = msg.reply_to_message.text.split(" ", maxsplit=1)[1]
28 | data = query.data
29 | data = data.split()
30 | if user_id != int(data[1]):
31 | return query.answer(text="Not Yours!", show_alert=True)
32 | elif data[2] == 'cancel':
33 | query.answer()
34 | return editMessage("list has been canceled!", msg)
35 | query.answer()
36 | item_type = data[2]
37 | editMessage(f"Searching for {key}", msg)
38 | Thread(target=_list_drive, args=(context.bot, key, msg, item_type)).start()
39 |
40 | def _list_drive(bot, key, bmsg, item_type):
41 | LOGGER.info(f"listing: {key}")
42 | gdrive = GoogleDriveHelper()
43 | msg, button = gdrive.drive_list(key, isRecursive=True, itemType=item_type)
44 | if button:
45 | editMessage(msg, bmsg, button)
46 | else:
47 | editMessage(f'No result found for {key}', bmsg)
48 |
49 | list_handler = CommandHandler(BotCommands.ListCommand, list_buttons, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
50 | list_type_handler = CallbackQueryHandler(select_type, pattern="types", run_async=True)
51 | dispatcher.add_handler(list_handler)
52 | dispatcher.add_handler(list_type_handler)
53 |
--------------------------------------------------------------------------------
/bot/modules/mirror_status.py:
--------------------------------------------------------------------------------
1 | from psutil import cpu_percent, virtual_memory, disk_usage
2 | from time import time
3 | from threading import Thread
4 | from telegram.ext import CommandHandler, CallbackQueryHandler
5 |
6 | from bot import dispatcher, status_reply_dict, status_reply_dict_lock, download_dict, download_dict_lock, botStartTime, DOWNLOAD_DIR, Interval, DOWNLOAD_STATUS_UPDATE_INTERVAL
7 | from bot.helper.telegram_helper.message_utils import sendMessage, deleteMessage, auto_delete_message, sendStatusMessage, update_all_messages
8 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, get_readable_time, turn, setInterval
9 | from bot.helper.telegram_helper.filters import CustomFilters
10 | from bot.helper.telegram_helper.bot_commands import BotCommands
11 |
12 |
13 | def mirror_status(update, context):
14 | with download_dict_lock:
15 | count = len(download_dict)
16 | if count == 0:
17 | currentTime = get_readable_time(time() - botStartTime)
18 | free = get_readable_file_size(disk_usage(DOWNLOAD_DIR).free)
19 | message = 'No Active Downloads !\n___________________________'
20 | message += f"\nCPU: {cpu_percent()}% | FREE: {free}" \
21 | f"\nRAM: {virtual_memory().percent}% | UPTIME: {currentTime}"
22 | reply_message = sendMessage(message, context.bot, update.message)
23 | Thread(target=auto_delete_message, args=(context.bot, update.message, reply_message)).start()
24 | else:
25 | index = update.effective_chat.id
26 | with status_reply_dict_lock:
27 | if index in status_reply_dict:
28 | deleteMessage(context.bot, status_reply_dict[index][0])
29 | del status_reply_dict[index]
30 | try:
31 | if Interval:
32 | Interval[0].cancel()
33 | Interval.clear()
34 | except:
35 | pass
36 | finally:
37 | Interval.append(setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
38 | sendStatusMessage(update.message, context.bot)
39 | deleteMessage(context.bot, update.message)
40 |
41 | def status_pages(update, context):
42 | query = update.callback_query
43 | data = query.data
44 | data = data.split()
45 | query.answer()
46 | done = turn(data)
47 | if done:
48 | update_all_messages(True)
49 | else:
50 | query.message.delete()
51 |
52 |
53 | mirror_status_handler = CommandHandler(BotCommands.StatusCommand, mirror_status,
54 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
55 |
56 | status_pages_handler = CallbackQueryHandler(status_pages, pattern="status", run_async=True)
57 | dispatcher.add_handler(mirror_status_handler)
58 | dispatcher.add_handler(status_pages_handler)
59 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/gd_downloader.py:
--------------------------------------------------------------------------------
1 | from random import SystemRandom
2 | from string import ascii_letters, digits
3 |
4 | from bot import download_dict, download_dict_lock, LOGGER, STOP_DUPLICATE, ZIP_UNZIP_LIMIT, STORAGE_THRESHOLD, TORRENT_DIRECT_LIMIT
5 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
6 | from bot.helper.mirror_utils.status_utils.gd_download_status import GdDownloadStatus
7 | from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage, sendMarkup
8 | from bot.helper.ext_utils.fs_utils import get_base_name, check_storage_threshold
9 | from bot.helper.ext_utils.bot_utils import get_readable_file_size
10 |
11 |
12 | def add_gd_download(link, path, listener, newname):
13 | res, size, name, files = GoogleDriveHelper().helper(link)
14 | if res != "":
15 | return sendMessage(res, listener.bot, listener.message)
16 | if newname:
17 | name = newname
18 | if STOP_DUPLICATE and not listener.isLeech:
19 | LOGGER.info('Checking File/Folder if already in Drive...')
20 |         if listener.isZip:
21 |             gname = f"{name}.zip"
22 |         elif listener.extract:
23 |             try: gname = get_base_name(name)
24 |             except: gname = None
25 |         else:
26 |             gname = name
27 |         if gname is not None:
28 | gmsg, button = GoogleDriveHelper().drive_list(gname, True)
29 | if gmsg:
30 | msg = "File/Folder is already available in Drive.\nHere are the search results:"
31 | return sendMarkup(msg, listener.bot, listener.message, button)
32 | if any([ZIP_UNZIP_LIMIT, STORAGE_THRESHOLD, TORRENT_DIRECT_LIMIT]):
33 | arch = any([listener.extract, listener.isZip])
34 | limit = None
35 | if STORAGE_THRESHOLD is not None:
36 | acpt = check_storage_threshold(size, arch)
37 | if not acpt:
38 | msg = f'You must leave {STORAGE_THRESHOLD}GB free storage.'
39 | msg += f'\nYour File/Folder size is {get_readable_file_size(size)}'
40 | return sendMessage(msg, listener.bot, listener.message)
41 | if ZIP_UNZIP_LIMIT is not None and arch:
42 | mssg = f'Zip/Unzip limit is {ZIP_UNZIP_LIMIT}GB'
43 | limit = ZIP_UNZIP_LIMIT
44 | elif TORRENT_DIRECT_LIMIT is not None:
45 | mssg = f'Torrent/Direct limit is {TORRENT_DIRECT_LIMIT}GB'
46 | limit = TORRENT_DIRECT_LIMIT
47 | if limit is not None:
48 | LOGGER.info('Checking File/Folder Size...')
49 | if size > limit * 1024**3:
50 | msg = f'{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}.'
51 | return sendMessage(msg, listener.bot, listener.message)
52 | LOGGER.info(f"Download Name: {name}")
53 | drive = GoogleDriveHelper(name, path, size, listener)
54 | gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
55 | download_status = GdDownloadStatus(drive, size, listener, gid)
56 | with download_dict_lock:
57 | download_dict[listener.uid] = download_status
58 | listener.onDownloadStart()
59 | sendStatusMessage(listener.message, listener.bot)
60 | drive.download(link)
61 |
--------------------------------------------------------------------------------
/add_to_team_drive.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | from google.oauth2.service_account import Credentials
3 | import googleapiclient.discovery, json, progress.bar, glob, sys, argparse, time
4 | from google_auth_oauthlib.flow import InstalledAppFlow
5 | from google.auth.transport.requests import Request
6 | import os, pickle
7 |
8 | stt = time.time()
9 |
10 | parse = argparse.ArgumentParser(
11 | description='A tool to add service accounts to a shared drive from a folder containing credential files.')
12 | parse.add_argument('--path', '-p', default='accounts',
13 | help='Specify an alternative path to the service accounts folder.')
14 | parse.add_argument('--credentials', '-c', default='./credentials.json',
15 | help='Specify the relative path for the credentials file.')
16 | parse.add_argument('--yes', '-y', default=False, action='store_true', help='Skips the sanity prompt.')
17 | parsereq = parse.add_argument_group('required arguments')
18 | parsereq.add_argument('--drive-id', '-d', help='The ID of the Shared Drive.', required=True)
19 |
20 | args = parse.parse_args()
21 | acc_dir = args.path
22 | did = args.drive_id
23 | credentials = glob.glob(args.credentials)
24 |
25 | try:
26 | open(credentials[0], 'r')
27 | print('>> Found credentials.')
28 | except IndexError:
29 | print('>> No credentials found.')
30 | sys.exit(0)
31 |
32 | if not args.yes:
33 | # input('Make sure the following client id is added to the shared drive as Manager:\n' + json.loads((open(
34 | # credentials[0],'r').read()))['installed']['client_id'])
35 |     input('>> Make sure the **Google account** that generated credentials.json\n is added to your Team Drive '
36 |           '(shared drive) as Manager\n>> (Press Enter to continue)')
37 |
38 | creds = None
39 | if os.path.exists('token_sa.pickle'):
40 | with open('token_sa.pickle', 'rb') as token:
41 | creds = pickle.load(token)
42 | # If there are no (valid) credentials available, let the user log in.
43 | if not creds or not creds.valid:
44 | if creds and creds.expired and creds.refresh_token:
45 | creds.refresh(Request())
46 | else:
47 | flow = InstalledAppFlow.from_client_secrets_file(credentials[0], scopes=[
48 | 'https://www.googleapis.com/auth/admin.directory.group',
49 | 'https://www.googleapis.com/auth/admin.directory.group.member'
50 | ])
51 | # creds = flow.run_local_server(port=0)
52 | creds = flow.run_console()
53 | # Save the credentials for the next run
54 | with open('token_sa.pickle', 'wb') as token:
55 | pickle.dump(creds, token)
56 |
57 | drive = googleapiclient.discovery.build("drive", "v3", credentials=creds)
58 | batch = drive.new_batch_http_request()
59 |
60 | aa = glob.glob('%s/*.json' % acc_dir)
61 | pbar = progress.bar.Bar("Readying accounts", max=len(aa))
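  | # Queue one permission request per service account JSON, granting it 'organizer' access to the shared drive.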
62 | for i in aa:
63 | ce = json.loads(open(i, 'r').read())['client_email']
64 | batch.add(drive.permissions().create(fileId=did, supportsAllDrives=True, body={
65 | "role": "organizer",
66 | "type": "user",
67 | "emailAddress": ce
68 | }))
69 | pbar.next()
70 | pbar.finish()
71 | print('Adding...')
72 | batch.execute()
73 |
74 | print('Complete.')
75 | hours, rem = divmod((time.time() - stt), 3600)
76 | minutes, sec = divmod(rem, 60)
77 | print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), sec))
78 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/telegraph_helper.py:
--------------------------------------------------------------------------------
1 | # Implement By - @VarnaX-279
2 |
3 | from string import ascii_letters
4 | from random import SystemRandom
5 |
6 | from time import sleep
7 | from telegraph import Telegraph
8 | from telegraph.exceptions import RetryAfterError
9 |
10 | from bot import LOGGER, AUTHOR_NAME, AUTHOR_URL
11 |
12 |
13 | class TelegraphHelper:
14 | def __init__(self, author_name=None, author_url=None):
15 | self.telegraph = Telegraph()
16 | self.short_name = ''.join(SystemRandom().choices(ascii_letters, k=8))
17 | self.access_token = None
18 | self.author_name = author_name
19 | self.author_url = author_url
20 | self.create_account()
21 |
22 | def create_account(self):
23 | self.telegraph.create_account(
24 | short_name=self.short_name,
25 | author_name=self.author_name,
26 | author_url=self.author_url
27 | )
28 | self.access_token = self.telegraph.get_access_token()
29 | LOGGER.info("Creating Telegraph Account")
30 |
31 | def create_page(self, title, content):
32 | try:
33 | return self.telegraph.create_page(
34 | title = title,
35 | author_name=self.author_name,
36 | author_url=self.author_url,
37 | html_content=content
38 | )
39 | except RetryAfterError as st:
40 | LOGGER.warning(f'Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds.')
41 | sleep(st.retry_after)
42 | return self.create_page(title, content)
43 |
44 | def edit_page(self, path, title, content):
45 | try:
46 | return self.telegraph.edit_page(
47 | path = path,
48 | title = title,
49 | author_name=self.author_name,
50 | author_url=self.author_url,
51 | html_content=content
52 | )
53 | except RetryAfterError as st:
54 | LOGGER.warning(f'Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds.')
55 | sleep(st.retry_after)
56 | return self.edit_page(path, title, content)
57 |
58 | def edit_telegraph(self, path, telegraph_content):
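  |         # Walk the created pages, append Prev/Next navigation text to each, and edit the pages in place.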
59 | nxt_page = 1
60 | prev_page = 0
61 | num_of_path = len(path)
62 |         for content in telegraph_content:
63 |             if nxt_page == 1:
64 |                 content += f'<b><a href="https://telegra.ph/{path[nxt_page]}">Next</a></b>'
65 |                 nxt_page += 1
66 |             else:
67 |                 if prev_page <= num_of_path:
68 |                     content += f'<b><a href="https://telegra.ph/{path[prev_page]}">Prev</a></b>'
69 |                     prev_page += 1
70 |                 if nxt_page < num_of_path:
71 |                     content += f' | <b><a href="https://telegra.ph/{path[nxt_page]}">Next</a></b>'
72 |                     nxt_page += 1
73 |             self.edit_page(
74 |                 path=path[prev_page],
75 |                 title='Helios Mirror Torrent Search',
76 |                 content=content
77 |             )
78 | return
79 |
80 | try:
81 |     telegraph = TelegraphHelper(AUTHOR_NAME, AUTHOR_URL)
82 | except Exception as err:
83 | LOGGER.warning(f"Can't Create Telegraph Account: {err}")
84 | telegraph = None
85 | pass
86 |
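A minimal usage sketch of the helper above, assuming the module-level telegraph instance was created successfully and that the Telegraph API response exposes the page path (title and HTML below are illustrative):

    # Hypothetical usage sketch, not part of the module.
    if telegraph is not None:
        page = telegraph.create_page(title="Search Results", content="<b>no results yet</b>")
        # the returned path is what edit_page()/edit_telegraph() later use to update the page
        telegraph.edit_page(path=page["path"], title="Search Results", content="<b>updated results</b>")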
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/qbit_download_status.py:
--------------------------------------------------------------------------------
1 | from bot import LOGGER
2 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time, EngineStatus
3 |
4 | def get_download(client, hash_):
5 | try:
6 | return client.torrents_info(torrent_hashes=hash_)[0]
7 | except Exception as e:
8 | LOGGER.error(f'{e}: while getting torrent info')
9 |
10 |
11 | class QbDownloadStatus:
12 |
13 | def __init__(self, listener, obj):
14 | self.__obj = obj
15 | self.__listener = listener
16 | self.__uid = listener.uid
17 | self.__info = get_download(obj.client, obj.ext_hash)
18 | self.message = listener.message
19 |
20 | def __update(self):
21 | self.__info = get_download(self.__obj.client, self.__obj.ext_hash)
22 |
23 | def progress(self):
24 | """
25 | Calculates the progress of the mirror (upload or download)
26 | :return: returns progress in percentage
27 | """
28 | return f'{round(self.__info.progress*100, 2)}%'
29 |
30 | def size_raw(self):
31 | """
32 | Gets total size of the mirror file/folder
33 | :return: total size of mirror
34 | """
35 | return self.__info.size
36 |
37 | def processed_bytes(self):
38 | return self.__info.downloaded
39 |
40 | def speed(self):
41 | self.__update()
42 | return f"{get_readable_file_size(self.__info.dlspeed)}/s"
43 |
44 | def name(self):
45 | self.__update()
46 | if self.__info.state in ["metaDL", "checkingResumeData"]:
47 | return f"[METADATA]{self.__info.name}"
48 | else:
49 | return self.__info.name
50 |
51 | def size(self):
52 | return get_readable_file_size(self.__info.size)
53 |
54 | def eta(self):
55 | return get_readable_time(self.__info.eta)
56 |
57 | def status(self):
58 | download = self.__info.state
59 | if download in ["queuedDL", "queuedUP"]:
60 | return MirrorStatus.STATUS_WAITING
61 | elif download in ["pausedDL", "pausedUP"]:
62 | return MirrorStatus.STATUS_PAUSED
63 | elif download in ["checkingUP", "checkingDL"]:
64 | return MirrorStatus.STATUS_CHECKING
65 | elif download in ["stalledUP", "uploading"] and self.__obj.is_seeding:
66 | return MirrorStatus.STATUS_SEEDING
67 | else:
68 | return MirrorStatus.STATUS_DOWNLOADING
69 |
70 | def seeders_num(self):
71 | return self.__info.num_seeds
72 |
73 | def leechers_num(self):
74 | return self.__info.num_leechs
75 |
76 | def uploaded_bytes(self):
77 | return f"{get_readable_file_size(self.__info.uploaded)}"
78 |
79 | def upload_speed(self):
80 | return f"{get_readable_file_size(self.__info.upspeed)}/s"
81 |
82 | def ratio(self):
83 | return f"{round(self.__info.ratio, 3)}"
84 |
85 | def seeding_time(self):
86 | return f"{get_readable_time(self.__info.seeding_time)}"
87 |
88 | def download(self):
89 | return self.__obj
90 |
91 | def gid(self):
92 | return self.__obj.ext_hash[:12]
93 |
94 | def client(self):
95 | return self.__obj.client
96 |
97 | def listener(self):
98 | return self.__listener
99 |
100 | def eng(self):
101 | return EngineStatus.STATUS_QB
102 |
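A rough sketch of the client/hash pairing that get_download() and QbDownloadStatus expect, assuming a local qBittorrent Web API; host, port and hash are placeholders:

    # Hypothetical sketch only.
    import qbittorrentapi
    client = qbittorrentapi.Client(host="localhost", port=8090)
    info = get_download(client, "0123456789abcdef0123456789abcdef01234567")  # returns None on failure
    if info is not None:
        print(info.state, info.progress)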
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/bot_commands.py:
--------------------------------------------------------------------------------
1 | from bot import CMD_INDEX
2 | import os
3 | def getCommand(name: str, command: str):
4 | try:
5 | if len(os.environ[name]) == 0:
6 | raise KeyError
7 | return os.environ[name]
8 | except KeyError:
9 | return command
10 |
11 |
12 | class _BotCommands:
13 | def __init__(self):
14 |         self.StartCommand = getCommand('START_CMD', f'start{CMD_INDEX}')
15 | self.MirrorCommand = getCommand('MIRROR_CMD', f'mirror{CMD_INDEX}')
16 | self.UnzipMirrorCommand = getCommand('UNZIP_CMD', f'unzipmirror{CMD_INDEX}')
17 | self.ZipMirrorCommand = getCommand('ZIP_CMD', f'zipmirror{CMD_INDEX}')
18 | self.QbMirrorCommand = getCommand('QBMIRROR_CMD', f'qbmirror{CMD_INDEX}')
19 | self.QbUnzipMirrorCommand = getCommand('QBUNZIP_CMD', f'qbunzipmirror{CMD_INDEX}')
20 | self.QbZipMirrorCommand = getCommand('QBZIP_CMD', f'qbzipmirror{CMD_INDEX}')
21 | self.YtdlCommand = getCommand('YTDL_CMD', f'ytdl{CMD_INDEX}')
22 | self.YtdlZipCommand = getCommand('YTDLZIP_CMD', f'ytdlzip{CMD_INDEX}')
23 | self.LeechCommand = getCommand('LEECH_CMD', f'leech{CMD_INDEX}')
24 | self.UnzipLeechCommand = getCommand('UNZIPLEECH_CMD', f'unzipleech{CMD_INDEX}')
25 | self.ZipLeechCommand = getCommand('ZIPLEECH_CMD', f'zipleech{CMD_INDEX}')
26 | self.QbLeechCommand = getCommand('QBLEECH_CMD', f'qbleech{CMD_INDEX}')
27 |         self.QbUnzipLeechCommand = getCommand('QBUNZIPLEECH_CMD', f'qbunzipleech{CMD_INDEX}')
28 |         self.QbZipLeechCommand = getCommand('QBZIPLEECH_CMD', f'qbzipleech{CMD_INDEX}')
29 |         self.YtdlLeechCommand = getCommand('YTDLLEECH_CMD', f'ytdlleech{CMD_INDEX}')
30 | self.YtdlZipLeechCommand = getCommand('YTDLZIPLEECH_CMD', f'ytdlzipleech{CMD_INDEX}')
31 | self.CloneCommand = getCommand('CLONE_CMD', f'clone{CMD_INDEX}')
32 | self.CountCommand = getCommand('COUNT_CMD', f'count{CMD_INDEX}')
33 | self.DeleteCommand = getCommand('DELETE_CMD', f'del{CMD_INDEX}')
34 | self.CancelMirror = getCommand('CANCEL_CMD', f'cancel{CMD_INDEX}')
35 | self.CancelAllCommand = getCommand('CANCEL_ALL_CMD', f'cancelall{CMD_INDEX}')
36 | self.ListCommand = getCommand('LIST_CMD', f'list{CMD_INDEX}')
37 | self.SearchCommand = getCommand('SEARCH_CMD', f'search{CMD_INDEX}')
38 | self.StatusCommand = getCommand('STATUS_CMD', f'status{CMD_INDEX}')
39 | self.AuthorizedUsersCommand = f'users{CMD_INDEX}'
40 | self.AuthorizeCommand = f'authorize{CMD_INDEX}'
41 | self.UnAuthorizeCommand = f'unauthorize{CMD_INDEX}'
42 | self.AddSudoCommand = f'addsudo{CMD_INDEX}'
43 | self.RmSudoCommand = f'rmsudo{CMD_INDEX}'
44 | self.PingCommand = f'ping{CMD_INDEX}'
45 | self.RestartCommand = f'restart{CMD_INDEX}'
46 | self.StatsCommand = f'stats{CMD_INDEX}'
47 | self.HelpCommand = f'help{CMD_INDEX}'
48 | self.LogCommand = f'log{CMD_INDEX}'
49 | self.ShellCommand = f'shell{CMD_INDEX}'
50 | self.EvalCommand = f'eval{CMD_INDEX}'
51 | self.ExecCommand = f'exec{CMD_INDEX}'
52 | self.ClearLocalsCommand = f'clearlocals{CMD_INDEX}'
53 | self.LeechSetCommand = f'leechset{CMD_INDEX}'
54 | self.SetThumbCommand = f'setthumb{CMD_INDEX}'
55 | self.BtSelectCommand = f'btsel{CMD_INDEX}'
56 | self.RssListCommand = getCommand('RSSLIST_CMD', f'rsslist{CMD_INDEX}')
57 | self.RssGetCommand = getCommand('RSSGET_CMD', f'rssget{CMD_INDEX}')
58 | self.RssSubCommand = getCommand('RSSSUB_CMD', f'rsssub{CMD_INDEX}')
59 | self.RssUnSubCommand = getCommand('RSSUNSUB_CMD', f'rssunsub{CMD_INDEX}')
60 | self.RssSettingsCommand = getCommand('RSSSET_CMD', f'rssset{CMD_INDEX}')
61 | self.AddleechlogCommand = getCommand('ADDLEECHLOG_CMD', f'addleechlog{CMD_INDEX}')
62 | self.RmleechlogCommand = getCommand('RMLEECHLOG_CMD', f'rmleechlog{CMD_INDEX}')
63 | BotCommands = _BotCommands()
64 |
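To illustrate the fallback behaviour of getCommand(): an explicit environment override wins, while an empty or missing variable falls back to the built-in default (the values below are made up):

    # Hypothetical check of the env-var override logic above.
    import os
    os.environ["MIRROR_CMD"] = "m1"
    assert getCommand("MIRROR_CMD", "mirror1") == "m1"       # override set
    os.environ["MIRROR_CMD"] = ""
    assert getCommand("MIRROR_CMD", "mirror1") == "mirror1"  # empty string counts as unset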
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/aria_download_status.py:
--------------------------------------------------------------------------------
1 | from time import time
2 |
3 | from bot import aria2, LOGGER
4 | from bot.helper.ext_utils.bot_utils import MirrorStatus, EngineStatus, get_readable_time
5 |
6 | def get_download(gid):
7 | try:
8 | return aria2.get_download(gid)
9 | except Exception as e:
10 | LOGGER.error(f'{e}: while getting torrent info')
11 |
12 |
13 | class AriaDownloadStatus:
14 |
15 | def __init__(self, gid, listener):
16 | self.__gid = gid
17 | self.__download = get_download(gid)
18 | self.__listener = listener
19 | self.start_time = 0
20 | self.message = listener.message
21 |
22 | def __update(self):
23 | self.__download = self.__download.live
24 | if self.__download.followed_by_ids:
25 | self.__gid = self.__download.followed_by_ids[0]
26 | self.__download = get_download(self.__gid)
27 |
28 | def progress(self):
29 | """
30 | Calculates the progress of the mirror (upload or download)
31 | :return: returns progress in percentage
32 | """
33 | return self.__download.progress_string()
34 |
35 | def size_raw(self):
36 | """
37 | Gets total size of the mirror file/folder
38 | :return: total size of mirror
39 | """
40 | return self.__download.total_length
41 |
42 | def processed_bytes(self):
43 | return self.__download.completed_length
44 |
45 | def speed(self):
46 | self.__update()
47 | return self.__download.download_speed_string()
48 |
49 | def name(self):
50 | self.__update()
51 | return self.__download.name
52 |
53 | def size(self):
54 | return self.__download.total_length_string()
55 |
56 | def eta(self):
57 | return self.__download.eta_string()
58 |
59 | def status(self):
60 | download = self.__download
61 | if download.is_waiting:
62 | return MirrorStatus.STATUS_WAITING
63 | elif download.is_paused:
64 | return MirrorStatus.STATUS_PAUSED
65 | elif download.seeder and hasattr(self.__listener, 'uploaded'):
66 | return MirrorStatus.STATUS_SEEDING
67 | else:
68 | return MirrorStatus.STATUS_DOWNLOADING
69 |
70 | def seeders_num(self):
71 | return self.__download.num_seeders
72 |
73 | def leechers_num(self):
74 | return self.__download.connections
75 |
76 | def uploaded_bytes(self):
77 | return self.__download.upload_length_string()
78 |
79 | def upload_speed(self):
80 | return self.__download.upload_speed_string()
81 |
82 | def ratio(self):
83 | return f"{round(self.__download.upload_length / self.__download.completed_length, 3)}"
84 |
85 | def seeding_time(self):
86 | return f"{get_readable_time(time() - self.start_time)}"
87 |
88 | def download(self):
89 | return self
90 |
91 | def listener(self):
92 | return self.__listener
93 |
94 | def gid(self):
95 | self.__update()
96 | return self.__gid
97 |
98 | def cancel_download(self):
99 | self.__update()
100 | if self.__download.seeder:
101 |             LOGGER.info(f"Cancelling Seed: {self.name()}")
102 | self.__listener.onUploadError(f"Seeding stopped with Ratio: {self.ratio()} and Time: {self.seeding_time()}")
103 | aria2.remove([self.__download], force=True, files=True)
104 | elif len(self.__download.followed_by_ids) != 0:
105 | LOGGER.info(f"Cancelling Download: {self.name()}")
106 | downloads = aria2.get_downloads(self.__download.followed_by_ids)
107 | self.__listener.onDownloadError('Download stopped by user!')
108 | aria2.remove(downloads, force=True, files=True)
109 | else:
110 | LOGGER.info(f"Cancelling Download: {self.name()}")
111 | self.__listener.onDownloadError('Download stopped by user!')
112 | aria2.remove([self.__download], force=True, files=True)
113 |
114 | def eng(self):
115 | return EngineStatus.STATUS_ARIA
116 |
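As a quick illustration, a one-line status summary could be assembled from the getters above roughly like this (st stands for a hypothetical AriaDownloadStatus instance):

    # Hypothetical rendering sketch, not part of the module.
    def render_status_line(st):
        return (f"{st.name()} | {st.progress()} of {st.size()} "
                f"@ {st.speed()} | ETA: {st.eta()} | {st.status()}")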
--------------------------------------------------------------------------------
/bot/helper/ext_utils/html_helper.py:
--------------------------------------------------------------------------------
1 | hmtl_content = """
2 | <!-- HTML page template; markup body omitted -->
3 | """
--------------------------------------------------------------------------------
/bot/modules/cancel_mirror.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CommandHandler
2 | from telegram.ext import CallbackQueryHandler
3 | from time import sleep
4 | from threading import Thread
5 |
6 | from bot import download_dict, dispatcher, download_dict_lock, SUDO_USERS, OWNER_ID, AUTO_DELETE_MESSAGE_DURATION
7 | from bot.helper.telegram_helper.bot_commands import BotCommands
8 | from bot.helper.telegram_helper.filters import CustomFilters
9 | from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, auto_delete_message
10 | from bot.helper.telegram_helper import button_build
11 | from bot.helper.ext_utils.bot_utils import getDownloadByGid, getAllDownload, MirrorStatus
12 |
13 |
14 | def cancel_mirror(update, context):
15 |     user_id = update.message.from_user.id
16 |     if len(context.args) == 1:
17 |         gid = context.args[0]
18 |         dl = getDownloadByGid(gid)
19 |         if not dl:
20 |             sendMessage(f"GID: {gid} Not Found.", context.bot, update.message)
21 | return
22 | elif update.message.reply_to_message:
23 | mirror_message = update.message.reply_to_message
24 | with download_dict_lock:
25 | if mirror_message.message_id in download_dict:
26 | dl = download_dict[mirror_message.message_id]
27 | else:
28 | dl = None
29 | if not dl:
30 | sendMessage("This is not an active task!", context.bot, update.message)
31 | return
32 | elif len(context.args) == 0:
33 | msg = f"Reply to an active /{BotCommands.MirrorCommand} message which \
34 | was used to start the download or send /{BotCommands.CancelMirror} GID to cancel it!"
35 | sendMessage(msg, context.bot, update.message)
36 | return
37 |
38 | if OWNER_ID != user_id and dl.message.from_user.id != user_id and user_id not in SUDO_USERS:
39 | sendMessage("This task is not for you!", context.bot, update.message)
40 | return
41 |
42 | dl.download().cancel_download()
43 |
44 | def cancel_all(status):
45 | gid = ''
46 | while dl := getAllDownload(status):
47 | if dl.gid() != gid:
48 | gid = dl.gid()
49 | dl.download().cancel_download()
50 | sleep(1)
51 |
52 | def cancel_all_buttons(update, context):
53 | with download_dict_lock:
54 | count = len(download_dict)
55 | if count == 0:
56 | sendMessage("No active tasks!", context.bot, update.message)
57 | return
58 | buttons = button_build.ButtonMaker()
59 | buttons.sbutton("Downloading", f"canall {MirrorStatus.STATUS_DOWNLOADING}")
60 | buttons.sbutton("Uploading", f"canall {MirrorStatus.STATUS_UPLOADING}")
61 | buttons.sbutton("Seeding", f"canall {MirrorStatus.STATUS_SEEDING}")
62 | buttons.sbutton("Cloning", f"canall {MirrorStatus.STATUS_CLONING}")
63 | buttons.sbutton("Extracting", f"canall {MirrorStatus.STATUS_EXTRACTING}")
64 | buttons.sbutton("Archiving", f"canall {MirrorStatus.STATUS_ARCHIVING}")
65 | buttons.sbutton("Queued", f"canall {MirrorStatus.STATUS_WAITING}")
66 | buttons.sbutton("Paused", f"canall {MirrorStatus.STATUS_PAUSED}")
67 | buttons.sbutton("All", "canall all")
68 | if AUTO_DELETE_MESSAGE_DURATION == -1:
69 | buttons.sbutton("Close", "canall close")
70 | button = buttons.build_menu(2)
71 | can_msg = sendMarkup('Choose tasks to cancel.', context.bot, update.message, button)
72 | Thread(target=auto_delete_message, args=(context.bot, update.message, can_msg)).start()
73 |
74 | def cancel_all_update(update, context):
75 | query = update.callback_query
76 | user_id = query.from_user.id
77 | data = query.data
78 | data = data.split()
79 | if CustomFilters._owner_query(user_id):
80 | query.answer()
81 | query.message.delete()
82 | if data[1] == 'close':
83 | return
84 | cancel_all(data[1])
85 | else:
86 | query.answer(text="You don't have permission to use these buttons!", show_alert=True)
87 |
88 |
89 |
90 | cancel_mirror_handler = CommandHandler(BotCommands.CancelMirror, cancel_mirror,
91 | filters=(CustomFilters.authorized_chat | CustomFilters.authorized_user), run_async=True)
92 |
93 | cancel_all_handler = CommandHandler(BotCommands.CancelAllCommand, cancel_all_buttons,
94 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
95 |
96 | cancel_all_buttons_handler = CallbackQueryHandler(cancel_all_update, pattern="canall", run_async=True)
97 |
98 | dispatcher.add_handler(cancel_all_handler)
99 | dispatcher.add_handler(cancel_mirror_handler)
100 | dispatcher.add_handler(cancel_all_buttons_handler)
101 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/direct_link_generator_license.md:
--------------------------------------------------------------------------------
1 | RAPHIELSCAPE PUBLIC LICENSE
2 | Version 1.c, June 2019
3 |
4 | Copyright (C) 2019 Raphielscape LLC.
5 | Copyright (C) 2019 Devscapes Open Source Holding GmbH.
6 |
7 | Everyone is permitted to copy and distribute verbatim or modified
8 | copies of this license document, and changing it is allowed as long
9 | as the name is changed.
10 |
11 | RAPHIELSCAPE PUBLIC LICENSE
12 | A-1. DEFINITIONS
13 |
14 | 0. “This License” refers to version 1.c of the Raphielscape Public License.
15 |
16 | 1. “Copyright” also means copyright-like laws that apply to other kinds of works.
17 |
18 | 2. “The Work" refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”.
19 | “Licensees” and “recipients” may be individuals or organizations.
20 |
21 | 3. To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission,
22 | other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work
23 | or a work “based on” the earlier work.
24 |
25 | 4. Source Form. The “source form” for a work means the preferred form of the work for making modifications to it.
26 | “Object code” means any non-source form of a work.
27 |
28 | The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and
29 | (for an executable work) run the object code and to modify the work, including scripts to control those activities.
30 |
31 | The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source.
32 | The Corresponding Source for a work in source code form is that same work.
33 |
34 | 5. "The author" refers to "author" of the code, which is the one that made the particular code which exists inside of
35 | the Corresponding Source.
36 |
37 | 6. "Owner" refers to any parties which is made the early form of the Corresponding Source.
38 |
39 | A-2. TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
40 |
41 | 0. You must give any other recipients of the Work or Derivative Works a copy of this License; and
42 |
43 | 1. You must cause any modified files to carry prominent notices stating that You changed the files; and
44 |
45 | 2. You must retain, in the Source form of any Derivative Works that You distribute,
46 | this license, all copyright, patent, trademark, authorships and attribution notices
47 | from the Source form of the Work; and
48 |
49 | 3. Respecting the author and owner of works that are distributed in any way.
50 |
51 | You may add Your own copyright statement to Your modifications and may provide
52 | additional or different license terms and conditions for use, reproduction,
53 | or distribution of Your modifications, or for any such Derivative Works as a whole,
54 | provided Your use, reproduction, and distribution of the Work otherwise complies
55 | with the conditions stated in this License.
56 |
57 | B. DISCLAIMER OF WARRANTY
58 |
59 | THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR
60 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
61 | FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS
62 | BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
63 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
64 | OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
65 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
66 | OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
67 |
68 |
69 | C. REVISED VERSION OF THIS LICENSE
70 |
71 | The Devscapes Open Source Holding GmbH. may publish revised and/or new versions of the
72 | Raphielscape Public License from time to time. Such new versions will be similar in spirit
73 | to the present version, but may differ in detail to address new problems or concerns.
74 |
75 | Each version is given a distinguishing version number. If the Program specifies that a
76 | certain numbered version of the Raphielscape Public License "or any later version" applies to it,
77 | you have the option of following the terms and conditions either of that numbered version or of
78 | any later version published by the Devscapes Open Source Holding GmbH. If the Program does not specify a
79 | version number of the Raphielscape Public License, you may choose any version ever published
80 | by the Devscapes Open Source Holding GmbH.
81 |
82 | END OF LICENSE
--------------------------------------------------------------------------------
/bot/modules/bt_select.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CommandHandler, CallbackQueryHandler
2 | from os import remove, path as ospath
3 |
4 | from bot import aria2, BASE_URL, download_dict, dispatcher, download_dict_lock, SUDO_USERS, OWNER_ID
5 | from bot.helper.telegram_helper.bot_commands import BotCommands
6 | from bot.helper.telegram_helper.filters import CustomFilters
7 | from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, sendStatusMessage
8 | from bot.helper.ext_utils.bot_utils import getDownloadByGid, MirrorStatus, bt_selection_buttons
9 |
10 | def select(update, context):
11 | user_id = update.message.from_user.id
12 | if len(context.args) == 1:
13 | gid = context.args[0]
14 | dl = getDownloadByGid(gid)
15 | if not dl:
16 | sendMessage(f"GID: {gid} Not Found.", context.bot, update.message)
17 | return
18 | elif update.message.reply_to_message:
19 | mirror_message = update.message.reply_to_message
20 | with download_dict_lock:
21 | if mirror_message.message_id in download_dict:
22 | dl = download_dict[mirror_message.message_id]
23 | else:
24 | dl = None
25 | if not dl:
26 | sendMessage("This is not an active task!", context.bot, update.message)
27 | return
28 | elif len(context.args) == 0:
29 |         msg = "Reply to an active /cmd that was used to start the qb-download, or send the GID along with the command.\n\n"
30 |         msg += "This command is mainly for selecting files from an already added torrent. "
31 |         msg += "You can always pass the `s` arg with /cmd to select files before the download starts."
32 | sendMessage(msg, context.bot, update.message)
33 | return
34 |
35 | if OWNER_ID != user_id and dl.message.from_user.id != user_id and user_id not in SUDO_USERS:
36 | sendMessage("This task is not for you!", context.bot, update.message)
37 | return
38 | if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_WAITING]:
39 |         sendMessage('Task should be in download state, paused state (in case the selection message was deleted by mistake), or queued state (in case a torrent file was used)!', context.bot, update.message)
40 | return
41 | if dl.name().startswith('[METADATA]'):
42 | sendMessage('Try after downloading metadata finished!', context.bot, update.message)
43 | return
44 |
45 | try:
46 | if dl.listener().isQbit:
47 | id_ = dl.download().ext_hash
48 | client = dl.client()
49 | client.torrents_pause(torrent_hashes=id_)
50 | else:
51 | id_ = dl.gid()
52 | aria2.client.force_pause(id_)
53 | except:
54 | sendMessage("This is not a bittorrent task!", context.bot, update.message)
55 | return
56 |
57 | SBUTTONS = bt_selection_buttons(id_)
58 |     msg = "Your download is paused. Choose the files you want, then press the Done Selecting button to resume downloading."
59 | sendMarkup(msg, context.bot, update.message, SBUTTONS)
60 |
61 | def get_confirm(update, context):
62 | query = update.callback_query
63 | user_id = query.from_user.id
64 | data = query.data
65 | data = data.split()
66 | dl = getDownloadByGid(data[2])
67 | if not dl:
68 | query.answer(text="This task has been cancelled!", show_alert=True)
69 | query.message.delete()
70 | return
71 | listener = dl.listener()
72 | if user_id != listener.message.from_user.id:
73 | query.answer(text="This task is not for you!", show_alert=True)
74 | elif data[1] == "pin":
75 | query.answer(text=data[3], show_alert=True)
76 | elif data[1] == "done":
77 | query.answer()
78 | id_ = data[3]
79 | if len(id_) > 20:
80 | client = dl.client()
81 |             tor_info = client.torrents_info(torrent_hashes=id_)[0]
82 | path = tor_info.content_path.rsplit('/', 1)[0]
83 | res = client.torrents_files(torrent_hash=id_)
84 | for f in res:
85 | if f.priority == 0:
86 | f_paths = [f"{path}/{f.name}", f"{path}/{f.name}.!qB"]
87 | for f_path in f_paths:
88 | if ospath.exists(f_path):
89 | try:
90 | remove(f_path)
91 | except:
92 | pass
93 | client.torrents_resume(torrent_hashes=id_)
94 | else:
95 | res = aria2.client.get_files(id_)
96 | for f in res:
97 | if f['selected'] == 'false' and ospath.exists(f['path']):
98 | try:
99 | remove(f['path'])
100 | except:
101 | pass
102 | aria2.client.unpause(id_)
103 | sendStatusMessage(listener.message, listener.bot)
104 | query.message.delete()
105 |
106 |
107 | select_handler = CommandHandler(BotCommands.BtSelectCommand, select,
108 | filters=(CustomFilters.authorized_chat | CustomFilters.authorized_user), run_async=True)
109 | bts_handler = CallbackQueryHandler(get_confirm, pattern="btsel", run_async=True)
110 | dispatcher.add_handler(select_handler)
111 | dispatcher.add_handler(bts_handler)
112 |
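For clarity, the callback payload that get_confirm() splits looks roughly like this; the GID and hash are placeholders, and the exact layout is produced by bt_selection_buttons(), which is not shown here:

    # Hypothetical payload layout consumed by get_confirm().
    data = "btsel done 123456789abc 0123456789abcdef0123456789abcdef01234567".split()
    action, gid, id_ = data[1], data[2], data[3]  # "done" resumes the task, "pin" only shows the pincode
    assert len(id_) > 20                          # >20 chars => qBittorrent hash, otherwise an aria2 GID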
--------------------------------------------------------------------------------
/bot/modules/leech_settings.py:
--------------------------------------------------------------------------------
1 | from os import remove as osremove, path as ospath, mkdir
2 | from threading import Thread
3 | from PIL import Image
4 | from telegram.ext import CommandHandler, CallbackQueryHandler
5 |
6 | from bot import AS_DOC_USERS, AS_MEDIA_USERS, dispatcher, AS_DOCUMENT, AUTO_DELETE_MESSAGE_DURATION, DB_URI
7 | from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, editMessage, auto_delete_message
8 | from bot.helper.telegram_helper.filters import CustomFilters
9 | from bot.helper.telegram_helper.bot_commands import BotCommands
10 | from bot.helper.telegram_helper import button_build
11 | from bot.helper.ext_utils.db_handler import DbManger
12 |
13 |
14 | def getleechinfo(from_user):
15 | user_id = from_user.id
16 | name = from_user.full_name
17 | buttons = button_build.ButtonMaker()
18 | thumbpath = f"Thumbnails/{user_id}.jpg"
19 | if (
20 | user_id in AS_DOC_USERS
21 | or user_id not in AS_MEDIA_USERS
22 | and AS_DOCUMENT
23 | ):
24 | ltype = "DOCUMENT"
25 | buttons.sbutton("Send As Media", f"leechset {user_id} med")
26 | else:
27 | ltype = "MEDIA"
28 | buttons.sbutton("Send As Document", f"leechset {user_id} doc")
29 |
30 | if ospath.exists(thumbpath):
31 | thumbmsg = "Exists"
32 | buttons.sbutton("Delete Thumbnail", f"leechset {user_id} thumb")
33 | else:
34 | thumbmsg = "Not Exists"
35 |
36 | if AUTO_DELETE_MESSAGE_DURATION == -1:
37 | buttons.sbutton("Close", f"leechset {user_id} close")
38 |
39 | button = buttons.build_menu(1)
40 |
41 | text = f"Leech Settings for {name}\n"\
42 | f"Leech Type {ltype}\n"\
43 | f"Custom Thumbnail {thumbmsg}"
44 | return text, button
45 |
46 | def editLeechType(message, query):
47 | msg, button = getleechinfo(query.from_user)
48 | editMessage(msg, message, button)
49 |
50 | def leechSet(update, context):
51 | msg, button = getleechinfo(update.message.from_user)
52 | choose_msg = sendMarkup(msg, context.bot, update.message, button)
53 | Thread(target=auto_delete_message, args=(context.bot, update.message, choose_msg)).start()
54 |
55 | def setLeechType(update, context):
56 | query = update.callback_query
57 | message = query.message
58 | user_id = query.from_user.id
59 | data = query.data
60 | data = data.split()
61 | if user_id != int(data[1]):
62 | query.answer(text="Not Yours!", show_alert=True)
63 | elif data[2] == "doc":
64 | if user_id in AS_MEDIA_USERS:
65 | AS_MEDIA_USERS.remove(user_id)
66 | AS_DOC_USERS.add(user_id)
67 | if DB_URI is not None:
68 | DbManger().user_doc(user_id)
69 | query.answer(text="Your File Will Deliver As Document!", show_alert=True)
70 | editLeechType(message, query)
71 | elif data[2] == "med":
72 | if user_id in AS_DOC_USERS:
73 | AS_DOC_USERS.remove(user_id)
74 | AS_MEDIA_USERS.add(user_id)
75 | if DB_URI is not None:
76 | DbManger().user_media(user_id)
77 | query.answer(text="Your File Will Deliver As Media!", show_alert=True)
78 | editLeechType(message, query)
79 | elif data[2] == "thumb":
80 | path = f"Thumbnails/{user_id}.jpg"
81 | if ospath.lexists(path):
82 | osremove(path)
83 | if DB_URI is not None:
84 | DbManger().user_rm_thumb(user_id, path)
85 | query.answer(text="Thumbnail Removed!", show_alert=True)
86 | editLeechType(message, query)
87 | else:
88 | query.answer(text="Old Settings", show_alert=True)
89 | else:
90 | query.answer()
91 | try:
92 | query.message.delete()
93 | query.message.reply_to_message.delete()
94 | except:
95 | pass
96 |
97 | def setThumb(update, context):
98 | user_id = update.message.from_user.id
99 | reply_to = update.message.reply_to_message
100 | if reply_to is not None and reply_to.photo:
101 | path = "Thumbnails/"
102 | if not ospath.isdir(path):
103 | mkdir(path)
104 | photo_dir = reply_to.photo[-1].get_file().download()
105 | des_dir = ospath.join(path, f'{user_id}.jpg')
106 | Image.open(photo_dir).convert("RGB").save(des_dir, "JPEG")
107 | osremove(photo_dir)
108 | if DB_URI is not None:
109 | DbManger().user_save_thumb(user_id, des_dir)
110 | msg = f"Custom thumbnail saved for {update.message.from_user.mention_html(update.message.from_user.first_name)}."
111 | sendMessage(msg, context.bot, update.message)
112 | else:
113 | sendMessage("Reply to a photo to save custom thumbnail.", context.bot, update.message)
114 |
115 | leech_set_handler = CommandHandler(BotCommands.LeechSetCommand, leechSet, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
116 | set_thumbnail_handler = CommandHandler(BotCommands.SetThumbCommand, setThumb, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
117 | but_set_handler = CallbackQueryHandler(setLeechType, pattern="leechset", run_async=True)
118 |
119 | dispatcher.add_handler(leech_set_handler)
120 | dispatcher.add_handler(but_set_handler)
121 | dispatcher.add_handler(set_thumbnail_handler)
122 |
123 |
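Distilled from getleechinfo(), this is how the leech type is resolved for a user, written here as a standalone sketch with explicit parameters instead of the module-level sets:

    # Sketch mirroring AS_DOC_USERS / AS_MEDIA_USERS / AS_DOCUMENT in getleechinfo().
    def resolve_leech_type(user_id, doc_users, media_users, as_document_default):
        if user_id in doc_users or (user_id not in media_users and as_document_default):
            return "DOCUMENT"
        return "MEDIA"

    assert resolve_leech_type(1, set(), set(), True) == "DOCUMENT"
    assert resolve_leech_type(1, set(), {1}, True) == "MEDIA"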
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/telegram_downloader.py:
--------------------------------------------------------------------------------
1 | from logging import getLogger, WARNING
2 | from time import time
3 | from threading import RLock, Lock
4 |
5 | from bot import LOGGER, download_dict, download_dict_lock, STOP_DUPLICATE, app, STORAGE_THRESHOLD
6 | from ..status_utils.telegram_download_status import TelegramDownloadStatus
7 | from bot.helper.telegram_helper.message_utils import sendStatusMessage, sendMarkup, sendMessage
8 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
9 | from bot.helper.ext_utils.fs_utils import check_storage_threshold
10 | from bot.helper.ext_utils.bot_utils import get_readable_file_size
11 |
12 | global_lock = Lock()
13 | GLOBAL_GID = set()
14 | getLogger("pyrogram").setLevel(WARNING)
15 |
16 |
17 | class TelegramDownloadHelper:
18 |
19 | def __init__(self, listener):
20 | self.name = ""
21 | self.size = 0
22 | self.progress = 0
23 | self.downloaded_bytes = 0
24 | self.__start_time = time()
25 | self.__listener = listener
26 | self.__id = ""
27 | self.__is_cancelled = False
28 | self.__resource_lock = RLock()
29 |
30 | @property
31 | def download_speed(self):
32 | with self.__resource_lock:
33 | return self.downloaded_bytes / (time() - self.__start_time)
34 |
35 | def __onDownloadStart(self, name, size, file_id):
36 | with global_lock:
37 | GLOBAL_GID.add(file_id)
38 | with self.__resource_lock:
39 | self.name = name
40 | self.size = size
41 | self.__id = file_id
42 | with download_dict_lock:
43 | download_dict[self.__listener.uid] = TelegramDownloadStatus(self, self.__listener, self.__id)
44 | self.__listener.onDownloadStart()
45 | sendStatusMessage(self.__listener.message, self.__listener.bot)
46 |
47 | def __onDownloadProgress(self, current, total):
48 | if self.__is_cancelled:
49 | app.stop_transmission()
50 | return
51 | with self.__resource_lock:
52 | self.downloaded_bytes = current
53 | try:
54 | self.progress = current / self.size * 100
55 | except ZeroDivisionError:
56 | pass
57 |
58 | def __onDownloadError(self, error):
59 | with global_lock:
60 | try:
61 | GLOBAL_GID.remove(self.__id)
62 | except:
63 | pass
64 | self.__listener.onDownloadError(error)
65 |
66 | def __onDownloadComplete(self):
67 | with global_lock:
68 | GLOBAL_GID.remove(self.__id)
69 | self.__listener.onDownloadComplete()
70 |
71 | def __download(self, message, path):
72 | try:
73 | download = message.download(file_name=path, progress=self.__onDownloadProgress)
74 | except Exception as e:
75 | LOGGER.error(str(e))
76 | return self.__onDownloadError(str(e))
77 | if download is not None:
78 | self.__onDownloadComplete()
79 | elif not self.__is_cancelled:
80 | self.__onDownloadError('Internal error occurred')
81 |
82 | def add_download(self, message, path, filename):
83 | _dmsg = app.get_messages(message.chat.id, reply_to_message_ids=message.message_id)
84 | media = _dmsg.document or _dmsg.video or _dmsg.audio or None
85 | if media is not None:
86 | with global_lock:
87 | # For avoiding locking the thread lock for long time unnecessarily
88 | download = media.file_unique_id not in GLOBAL_GID
89 | if filename == "":
90 | name = media.file_name
91 | else:
92 | name = filename
93 | path = path + name
94 |
95 | if download:
96 | size = media.file_size
97 | if STOP_DUPLICATE and not self.__listener.isLeech:
98 | LOGGER.info('Checking File/Folder if already in Drive...')
99 | smsg, button = GoogleDriveHelper().drive_list(name, True, True)
100 | if smsg:
101 | msg = "File/Folder is already available in Drive.\nHere are the search results:"
102 | return sendMarkup(msg, self.__listener.bot, self.__listener.message, button)
103 | if STORAGE_THRESHOLD is not None:
104 | arch = any([self.__listener.isZip, self.__listener.extract])
105 | acpt = check_storage_threshold(size, arch)
106 | if not acpt:
107 | msg = f'You must leave {STORAGE_THRESHOLD}GB free storage.'
108 | msg += f'\nYour File/Folder size is {get_readable_file_size(size)}'
109 | return sendMessage(msg, self.__listener.bot, self.__listener.message)
110 | self.__onDownloadStart(name, size, media.file_unique_id)
111 | LOGGER.info(f'Downloading Telegram file with id: {media.file_unique_id}')
112 | self.__download(_dmsg, path)
113 | else:
114 | self.__onDownloadError('File already being downloaded!')
115 | else:
116 | self.__onDownloadError('No document in the replied message')
117 |
118 | def cancel_download(self):
119 | LOGGER.info(f'Cancelling download on user request: {self.__id}')
120 | self.__is_cancelled = True
121 | self.__onDownloadError('Cancelled by user!')
122 |
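The GLOBAL_GID set is what stops the same Telegram file from being downloaded twice at once; stripped of the Telegram plumbing, the idea reduces to this sketch (file IDs are made up):

    # Distilled sketch of the de-duplication in add_download().
    seen = set()

    def try_start(file_unique_id):
        if file_unique_id in seen:
            return False       # corresponds to the "File already being downloaded!" branch
        seen.add(file_unique_id)
        return True

    assert try_start("AgADBAAD") is True
    assert try_start("AgADBAAD") is False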
--------------------------------------------------------------------------------
/config_sample.env:
--------------------------------------------------------------------------------
1 | # Remove this line before deploying
2 | _____REMOVE_THIS_LINE_____=True
3 | # REQUIRED CONFIG
4 | BOT_TOKEN = ""
5 | GDRIVE_FOLDER_ID = ""
6 | OWNER_ID =
7 | DOWNLOAD_DIR = "/usr/src/app/downloads"
8 | DOWNLOAD_STATUS_UPDATE_INTERVAL = 10
9 | AUTO_DELETE_MESSAGE_DURATION = 20
10 | TELEGRAM_API =
11 | TELEGRAM_HASH = ""
12 | # OPTIONAL CONFIG
13 | AUTO_DELETE_UPLOAD_MESSAGE_DURATION = -1
14 | IS_TEAM_DRIVE = ""
15 | DATABASE_URL = ""
16 | AUTHORIZED_CHATS = ""
17 | SUDO_USERS = ""
18 | IGNORE_PENDING_REQUESTS = ""
19 | USE_SERVICE_ACCOUNTS = ""
20 | INDEX_URL = ""
21 | STATUS_LIMIT = ""
22 | STOP_DUPLICATE = ""
23 | CMD_INDEX = ""
24 | UPTOBOX_TOKEN = ""
25 | TORRENT_TIMEOUT = ""
26 | EXTENSION_FILTER = ""
27 | INCOMPLETE_TASK_NOTIFIER = ""
28 | # Mirror
29 | MIRROR_LOGS = ""
30 | BOT_PM = ""
31 | FORCE_BOT_PM = ""
32 | # Leech
33 | LEECH_LOG = ""
34 | USER_SESSION_STRING = ""
35 | LEECH_SPLIT_SIZE = ""
36 | AS_DOCUMENT = ""
37 | EQUAL_SPLITS = ""
38 | CUSTOM_FILENAME = ""
39 | # Update
40 | UPSTREAM_REPO = ""
41 | UPSTREAM_BRANCH = ""
42 | # qBittorrent/Aria2c
43 | BASE_URL_OF_BOT = ""
44 | SERVER_PORT = ""
45 | WEB_PINCODE = ""
46 | # RSS
47 | RSS_DELAY = ""
48 | RSS_COMMAND = ""
49 | RSS_CHAT_ID = ""
50 | RSS_USER_SESSION_STRING = ""
51 | # Private Files
52 | ACCOUNTS_ZIP_URL = ""
53 | TOKEN_PICKLE_URL = ""
54 | MULTI_SEARCH_URL = ""
55 | YT_COOKIES_URL = ""
56 | NETRC_URL = ""
57 | # Mega
58 | MEGA_API_KEY = ""
59 | MEGA_EMAIL_ID = ""
60 | MEGA_PASSWORD = ""
61 | # Gdtot
62 | CRYPT = ""
63 | # Appdrive
64 | APPDRIVE_EMAIL = ""
65 | APPDRIVE_PASS = ""
66 | # Size Limits
67 | TORRENT_DIRECT_LIMIT = ""
68 | ZIP_UNZIP_LIMIT = ""
69 | CLONE_LIMIT = ""
70 | MEGA_LIMIT = ""
71 | STORAGE_THRESHOLD = ""
72 | # Telegraph ui
73 | TITLE_NAME = "Helios-Mirror-Search"
74 | AUTHOR_NAME = "Helios-mirror"
75 | AUTHOR_URL = "https://t.me/heliosmirror"
76 | # Buttons
77 | VIEW_LINK = ""
78 | SOURCE_LINK = ""
79 | START_BTN1_NAME = ""
80 | START_BTN1_URL = ""
81 | START_BTN2_NAME = ""
82 | START_BTN2_URL = ""
83 | # Torrent Search
84 | SEARCH_API_LINK = ""
85 | SEARCH_LIMIT = ""
86 | SEARCH_PLUGINS = '["https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/rarbg.py",
87 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/piratebay.py",
88 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/legittorrents.py",
89 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/limetorrents.py",
90 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentscsv.py",
91 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/zooqle.py",
92 | "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/eztv.py",
93 | "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/kickass_torrent.py",
94 | "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/yts_am.py",
95 | "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/linuxtracker.py",
96 | "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/nyaasi.py",
97 | "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/ettv.py",
98 | "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/glotorrents.py",
99 | "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/thepiratebay.py",
100 | "https://raw.githubusercontent.com/nindogo/qbtSearchScripts/master/magnetdl.py",
101 | "https://raw.githubusercontent.com/khensolomon/leyts/master/yts.py"]'
102 | # Custom bot commands
103 | # Mirror Related Commands
104 | START_CMD = "" #Default is "start"
105 | MIRROR_CMD = "" #Default is "mirror"
106 | ZIP_CMD = "" #Default is "zipmirror"
107 | UNZIP_CMD = "" #Default is "unzipmirror"
108 | CANCEL_CMD = "" #Default is "cancel"
109 | LIST_CMD = "" #Default is "list"
110 | SEARCH_CMD = "" #Default is "search"
111 | STATUS_CMD = "" #Default is "status"
112 | STATS_CMD = "" #Default is "stats"
113 | HELP_CMD = "" #Default is "help"
114 | CLONE_CMD = "" #Default is "clone"
115 | COUNT_CMD = "" #Default is "count"
116 | YTDL_CMD = "" #Default is "ytdl"
117 | YTDLZIP_CMD = "" #Default is "ytdlzip"
118 | QBMIRROR_CMD = "" #Default is "qbmirror"
119 | QBZIP_CMD = "" #Default is "qbzipmirror"
120 | QBUNZIP_CMD = "" #Default is "qbunzipmirror"
121 |
122 | #Leech Related CMDs
123 | LEECH_CMD = "" #Default is "leech"
124 | LEECHSET_CMD = "" #Default is "leechset"
125 | SETTHUMB_CMD = "" #Default is "setthumb"
126 | UNZIPLEECH_CMD = "" #Default is "unzipleech"
127 | ZIPLEECH_CMD = "" #Default is "zipleech"
128 | QBLEECH_CMD = "" #Default is "qbleech"
129 | QBUNZIPLEECH_CMD = "" #Default is "qbunzipleech"
130 | QBZIPLEECH_CMD = "" #Default is "qbzipleech"
131 | YTDLLEECH_CMD = "" #Default is "ytdlleech"
132 | YTDLZIPLEECH_CMD = "" #Default is "ytdlzipleech"
133 | CANCEL_ALL_CMD = "" #Default is "cancelall"
134 | DELETE_CMD = "" #Default is "del"
135 |
136 | # RSS Related Commands
137 | RSSLIST_CMD = ""
138 | RSSGET_CMD = ""
139 | RSSSUB_CMD = ""
140 | RSSUNSUB_CMD = ""
141 | RSSSET_CMD = ""
142 | ADDLEECHLOG_CMD = ""
143 | RMLEECHLOG_CMD = ""
144 |
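SEARCH_PLUGINS appears to be consumed as a JSON array of raw plugin URLs, so a quick sanity check before deploying could look like this (the value below is shortened):

    # Hypothetical sanity check for a SEARCH_PLUGINS value.
    from json import loads
    search_plugins = '["https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/piratebay.py"]'
    assert isinstance(loads(search_plugins), list)  # every entry should be a raw .py plugin URL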
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/message_utils.py:
--------------------------------------------------------------------------------
1 | from time import sleep, time
2 | from telegram import InlineKeyboardMarkup
3 | from telegram.message import Message
4 | from telegram.error import RetryAfter
5 | from pyrogram.errors import FloodWait
6 | from os import remove
7 |
8 | from bot import AUTO_DELETE_MESSAGE_DURATION, LOGGER, status_reply_dict, status_reply_dict_lock, \
9 | Interval, DOWNLOAD_STATUS_UPDATE_INTERVAL, RSS_CHAT_ID, bot, rss_session, AUTO_DELETE_UPLOAD_MESSAGE_DURATION
10 | from bot.helper.ext_utils.bot_utils import get_readable_message, setInterval
11 |
12 |
13 | def sendMessage(text: str, bot, message: Message):
14 | try:
15 | return bot.sendMessage(message.chat_id,
16 | reply_to_message_id=message.message_id,
17 | text=text, allow_sending_without_reply=True, parse_mode='HTML', disable_web_page_preview=True)
18 | except RetryAfter as r:
19 | LOGGER.warning(str(r))
20 | sleep(r.retry_after * 1.5)
21 | return sendMessage(text, bot, message)
22 | except Exception as e:
23 | LOGGER.error(str(e))
24 | return
25 |
26 | def sendMarkup(text: str, bot, message: Message, reply_markup: InlineKeyboardMarkup):
27 | try:
28 | return bot.sendMessage(message.chat_id,
29 | reply_to_message_id=message.message_id,
30 | text=text, reply_markup=reply_markup, allow_sending_without_reply=True,
31 | parse_mode='HTML', disable_web_page_preview=True)
32 | except RetryAfter as r:
33 | LOGGER.warning(str(r))
34 | sleep(r.retry_after * 1.5)
35 | return sendMarkup(text, bot, message, reply_markup)
36 | except Exception as e:
37 | LOGGER.error(str(e))
38 | return
39 |
40 | def editMessage(text: str, message: Message, reply_markup=None):
41 | try:
42 | bot.editMessageText(text=text, message_id=message.message_id,
43 | chat_id=message.chat.id,reply_markup=reply_markup,
44 | parse_mode='HTML', disable_web_page_preview=True)
45 | except RetryAfter as r:
46 | LOGGER.warning(str(r))
47 | sleep(r.retry_after * 1.5)
48 | return editMessage(text, message, reply_markup)
49 | except Exception as e:
50 | LOGGER.error(str(e))
51 | return str(e)
52 |
53 | def sendRss(text: str, bot):
54 | if rss_session is None:
55 | try:
56 | return bot.sendMessage(RSS_CHAT_ID, text, parse_mode='HTML', disable_web_page_preview=True)
57 | except RetryAfter as r:
58 | LOGGER.warning(str(r))
59 | sleep(r.retry_after * 1.5)
60 | return sendRss(text, bot)
61 | except Exception as e:
62 | LOGGER.error(str(e))
63 | return
64 | else:
65 | try:
66 | with rss_session:
67 | return rss_session.send_message(RSS_CHAT_ID, text, disable_web_page_preview=True)
68 | except FloodWait as e:
69 | LOGGER.warning(str(e))
70 | sleep(e.value * 1.5)
71 | return sendRss(text, bot)
72 | except Exception as e:
73 | LOGGER.error(str(e))
74 | return
75 |
76 | def deleteMessage(bot, message: Message):
77 | try:
78 | bot.deleteMessage(chat_id=message.chat.id,
79 | message_id=message.message_id)
80 | except Exception as e:
81 | LOGGER.error(str(e))
82 |
83 | def sendLogFile(bot, message: Message):
84 | with open('log.txt', 'rb') as f:
85 | bot.sendDocument(document=f, filename=f.name,
86 | reply_to_message_id=message.message_id,
87 | chat_id=message.chat_id)
88 |
89 | def sendFile(bot, message: Message, name: str, caption=""):
90 | try:
91 | with open(name, 'rb') as f:
92 | bot.sendDocument(document=f, filename=f.name, reply_to_message_id=message.message_id,
93 | caption=caption, parse_mode='HTML',chat_id=message.chat_id)
94 | remove(name)
95 | return
96 | except RetryAfter as r:
97 | LOGGER.warning(str(r))
98 | sleep(r.retry_after * 1.5)
99 | return sendFile(bot, message, name, caption)
100 | except Exception as e:
101 | LOGGER.error(str(e))
102 | return
103 |
104 | def auto_delete_message(bot, cmd_message: Message, bot_message: Message):
105 | if AUTO_DELETE_MESSAGE_DURATION != -1:
106 | sleep(AUTO_DELETE_MESSAGE_DURATION)
107 | try:
108 |             # Skip if None is passed meaning we don't want to delete bot or cmd message
109 | deleteMessage(bot, cmd_message)
110 | deleteMessage(bot, bot_message)
111 | except AttributeError:
112 | pass
113 | def auto_delete_upload_message(bot, cmd_message: Message, bot_message: Message):
114 | if cmd_message.chat.type == 'private':
115 | pass
116 | elif AUTO_DELETE_UPLOAD_MESSAGE_DURATION != -1:
117 | sleep(AUTO_DELETE_UPLOAD_MESSAGE_DURATION)
118 | try:
119 | # Skip if None is passed meaning we don't want to delete bot or cmd message
120 | deleteMessage(bot, cmd_message)
121 | deleteMessage(bot, bot_message)
122 | except AttributeError:
123 | pass
124 | def delete_all_messages():
125 | with status_reply_dict_lock:
126 | for data in list(status_reply_dict.values()):
127 | try:
128 | deleteMessage(bot, data[0])
129 | del status_reply_dict[data[0].chat.id]
130 | except Exception as e:
131 | LOGGER.error(str(e))
132 |
133 | def update_all_messages(force=False):
134 | with status_reply_dict_lock:
135 | if not force and (not status_reply_dict or not Interval or time() - list(status_reply_dict.values())[0][1] < 3):
136 | return
137 | for chat_id in status_reply_dict:
138 | status_reply_dict[chat_id][1] = time()
139 |
140 | msg, buttons = get_readable_message()
141 | if msg is None:
142 | return
143 | with status_reply_dict_lock:
144 | for chat_id in status_reply_dict:
145 | if status_reply_dict[chat_id] and msg != status_reply_dict[chat_id][0].text:
146 | if buttons == "":
147 | rmsg = editMessage(msg, status_reply_dict[chat_id][0])
148 | else:
149 | rmsg = editMessage(msg, status_reply_dict[chat_id][0], buttons)
150 | if rmsg == "Message to edit not found":
151 | del status_reply_dict[chat_id]
152 | return
153 | status_reply_dict[chat_id][0].text = msg
154 | status_reply_dict[chat_id][1] = time()
155 |
156 | def sendStatusMessage(msg, bot):
157 | progress, buttons = get_readable_message()
158 | if progress is None:
159 | return
160 | with status_reply_dict_lock:
161 | if msg.chat.id in status_reply_dict:
162 | message = status_reply_dict[msg.chat.id][0]
163 | deleteMessage(bot, message)
164 | del status_reply_dict[msg.chat.id]
165 | if buttons == "":
166 | message = sendMessage(progress, bot, msg)
167 | else:
168 | message = sendMarkup(progress, bot, msg, buttons)
169 | status_reply_dict[msg.chat.id] = [message, time()]
170 | if not Interval:
171 | Interval.append(setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
172 |
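For orientation, the status bookkeeping above boils down to a small mapping per chat; the chat id, message placeholder and timestamp here are illustrative:

    # Sketch of the structure maintained by sendStatusMessage()/update_all_messages():
    #   chat_id -> [status reply Message, unix time of the last refresh]
    status_reply_dict_example = {-100123456789: ["<telegram.Message object>", 1660000000.0]}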
--------------------------------------------------------------------------------
/bot/modules/authorize.py:
--------------------------------------------------------------------------------
1 | from bot import AUTHORIZED_CHATS, SUDO_USERS, dispatcher, DB_URI, LEECH_LOG, MIRROR_LOGS
2 | from bot.helper.telegram_helper.message_utils import sendMessage
3 | from telegram.ext import CommandHandler
4 | from bot.helper.telegram_helper.filters import CustomFilters
5 | from bot.helper.telegram_helper.bot_commands import BotCommands
6 | from bot.helper.ext_utils.db_handler import DbManger
7 |
8 |
9 | def authorize(update, context):
10 | user_id = ""
11 | reply_message = update.message.reply_to_message
12 | if len(context.args) == 1:
13 | user_id = int(context.args[0])
14 | elif reply_message:
15 | user_id = reply_message.from_user.id
16 | if user_id:
17 | if user_id in AUTHORIZED_CHATS:
18 | msg = 'User Already Authorized!'
19 | elif DB_URI is not None:
20 | msg = DbManger().user_auth(user_id)
21 | AUTHORIZED_CHATS.add(user_id)
22 | else:
23 | AUTHORIZED_CHATS.add(user_id)
24 | msg = 'User Authorized'
25 | else:
26 | chat_id = update.effective_chat.id
27 | if chat_id in AUTHORIZED_CHATS:
28 | msg = 'Chat Already Authorized!'
29 | elif DB_URI is not None:
30 | msg = DbManger().user_auth(chat_id)
31 | AUTHORIZED_CHATS.add(chat_id)
32 | else:
33 | AUTHORIZED_CHATS.add(chat_id)
34 | msg = 'Chat Authorized'
35 | sendMessage(msg, context.bot, update.message)
36 |
37 | def addleechlog(update, context):
38 | user_id = ""
39 | reply_message = update.message.reply_to_message
40 | if len(context.args) == 1:
41 | user_id = int(context.args[0])
42 | elif reply_message:
43 | user_id = reply_message.from_user.id
44 | if user_id:
45 | if user_id in LEECH_LOG:
46 | msg = 'Chat Already in Leech Logs'
47 | elif DB_URI is not None:
48 | msg = DbManger().addleech_log(user_id)
49 | LEECH_LOG.add(user_id)
50 | else:
51 | LEECH_LOG.add(user_id)
52 |             msg = 'Chat Added to Leech Logs'
53 | else:
54 | chat_id = update.effective_chat.id
55 | if chat_id in LEECH_LOG:
56 | msg = 'Chat Already in Leech Logs'
57 | elif DB_URI is not None:
58 | msg = DbManger().addleech_log(chat_id)
59 | LEECH_LOG.add(chat_id)
60 | else:
61 | LEECH_LOG.add(chat_id)
62 | msg = 'Chat Added to Leech Logs'
63 | sendMessage(msg, context.bot, update.message)
64 |
65 | def rmleechlog(update, context):
66 | user_id = ""
67 | reply_message = update.message.reply_to_message
68 | if len(context.args) == 1:
69 | user_id = int(context.args[0])
70 | elif reply_message:
71 | user_id = reply_message.from_user.id
72 | if user_id:
73 | if user_id in LEECH_LOG:
74 | if DB_URI is not None:
75 | msg = DbManger().rmleech_log(user_id)
76 | else:
77 | msg = 'User removed from leech logs'
78 | LEECH_LOG.remove(user_id)
79 | else:
80 | msg = 'User does not exist in leech logs!'
81 | else:
82 | chat_id = update.effective_chat.id
83 | if chat_id in LEECH_LOG:
84 | if DB_URI is not None:
85 | msg = DbManger().rmleech_log(chat_id)
86 | else:
87 | msg = 'Chat removed from leech logs!'
88 | LEECH_LOG.remove(chat_id)
89 | else:
90 | msg = 'Chat does not exist in leech logs!'
91 | sendMessage(msg, context.bot, update.message)
92 |
93 |
94 | def unauthorize(update, context):
95 | user_id = ""
96 | reply_message = update.message.reply_to_message
97 | if len(context.args) == 1:
98 | user_id = int(context.args[0])
99 | elif reply_message:
100 | user_id = reply_message.from_user.id
101 | if user_id:
102 | if user_id in AUTHORIZED_CHATS:
103 | if DB_URI is not None:
104 | msg = DbManger().user_unauth(user_id)
105 | else:
106 | msg = 'User Unauthorized'
107 | AUTHORIZED_CHATS.remove(user_id)
108 | else:
109 | msg = 'User Already Unauthorized!'
110 | else:
111 | chat_id = update.effective_chat.id
112 | if chat_id in AUTHORIZED_CHATS:
113 | if DB_URI is not None:
114 | msg = DbManger().user_unauth(chat_id)
115 | else:
116 | msg = 'Chat Unauthorized'
117 | AUTHORIZED_CHATS.remove(chat_id)
118 | else:
119 | msg = 'Chat Already Unauthorized!'
120 | sendMessage(msg, context.bot, update.message)
121 |
122 | def addSudo(update, context):
123 | user_id = ""
124 | reply_message = update.message.reply_to_message
125 | if len(context.args) == 1:
126 | user_id = int(context.args[0])
127 | elif reply_message:
128 | user_id = reply_message.from_user.id
129 | if user_id:
130 | if user_id in SUDO_USERS:
131 | msg = 'Already Sudo!'
132 | elif DB_URI is not None:
133 | msg = DbManger().user_addsudo(user_id)
134 | SUDO_USERS.add(user_id)
135 | else:
136 | SUDO_USERS.add(user_id)
137 | msg = 'Promoted as Sudo'
138 | else:
139 | msg = "Give ID or Reply To message of whom you want to Promote."
140 | sendMessage(msg, context.bot, update.message)
141 |
142 | def removeSudo(update, context):
143 | user_id = ""
144 | reply_message = update.message.reply_to_message
145 | if len(context.args) == 1:
146 | user_id = int(context.args[0])
147 | elif reply_message:
148 | user_id = reply_message.from_user.id
149 | if user_id and user_id in SUDO_USERS:
150 | msg = DbManger().user_rmsudo(user_id) if DB_URI is not None else 'Demoted'
151 | SUDO_USERS.remove(user_id)
152 | else:
153 | msg = "Give ID or Reply To message of whom you want to remove from Sudo"
154 | sendMessage(msg, context.bot, update.message)
155 |
156 | def sendAuthChats(update, context):
157 | user = sudo = leechlog = ''
158 | user += '\n'.join(f"{uid}" for uid in AUTHORIZED_CHATS)
159 | sudo += '\n'.join(f"{uid}" for uid in SUDO_USERS)
160 | leechlog += '\n'.join(f"{uid}" for uid in LEECH_LOG)
161 | sendMessage(f'Authorized Chats:\n{user}\nSudo Users:\n{sudo}\nLeech Log:\n{leechlog}\n', context.bot, update.message)
162 |
163 |
164 | send_auth_handler = CommandHandler(command=BotCommands.AuthorizedUsersCommand, callback=sendAuthChats,
165 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
166 | authorize_handler = CommandHandler(command=BotCommands.AuthorizeCommand, callback=authorize,
167 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
168 | unauthorize_handler = CommandHandler(command=BotCommands.UnAuthorizeCommand, callback=unauthorize,
169 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
170 | addsudo_handler = CommandHandler(command=BotCommands.AddSudoCommand, callback=addSudo,
171 | filters=CustomFilters.owner_filter, run_async=True)
172 | removesudo_handler = CommandHandler(command=BotCommands.RmSudoCommand, callback=removeSudo,
173 | filters=CustomFilters.owner_filter, run_async=True)
174 | addleechlog_handler = CommandHandler(command=BotCommands.AddleechlogCommand, callback=addleechlog,
175 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
176 | rmleechlog_handler = CommandHandler(command=BotCommands.RmleechlogCommand, callback=rmleechlog,
177 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
178 | dispatcher.add_handler(send_auth_handler)
179 | dispatcher.add_handler(authorize_handler)
180 | dispatcher.add_handler(unauthorize_handler)
181 | dispatcher.add_handler(addsudo_handler)
182 | dispatcher.add_handler(removesudo_handler)
183 | dispatcher.add_handler(addleechlog_handler)
184 | dispatcher.add_handler(rmleechlog_handler)
185 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/yt_dlp_download_helper.py:
--------------------------------------------------------------------------------
1 | from random import SystemRandom
2 | from string import ascii_letters, digits
3 | from logging import getLogger
4 | from yt_dlp import YoutubeDL, DownloadError
5 | from threading import RLock
6 | from time import time
7 | from re import search as re_search
8 | from json import loads as jsonloads
9 |
10 | from bot import download_dict_lock, download_dict
11 | from bot.helper.telegram_helper.message_utils import sendStatusMessage
12 | from ..status_utils.youtube_dl_download_status import YoutubeDLDownloadStatus
13 |
14 | LOGGER = getLogger(__name__)
15 |
16 |
17 | class MyLogger:
18 | def __init__(self, obj):
19 | self.obj = obj
20 |
21 | def debug(self, msg):
22 | # Hack to fix changing extension
23 | if not self.obj.is_playlist:
24 | if match := re_search(r'.Merger..Merging formats into..(.*?).$', msg) or \
25 | re_search(r'.ExtractAudio..Destination..(.*?)$', msg):
26 | LOGGER.info(msg)
27 | newname = match.group(1)
28 | newname = newname.rsplit("/", 1)[-1]
29 | self.obj.name = newname
30 |
31 | @staticmethod
32 | def warning(msg):
33 | LOGGER.warning(msg)
34 |
35 | @staticmethod
36 | def error(msg):
37 | if msg != "ERROR: Cancelling...":
38 | LOGGER.error(msg)
39 |
40 |
41 | class YoutubeDLHelper:
42 | def __init__(self, listener):
43 | self.name = ""
44 | self.is_playlist = False
45 | self.size = 0
46 | self.progress = 0
47 | self.downloaded_bytes = 0
48 | self._last_downloaded = 0
49 | self.__download_speed = 0
50 | self.__start_time = time()
51 | self.__listener = listener
52 | self.__gid = ""
53 | self.__is_cancelled = False
54 | self.__downloading = False
55 | self.__resource_lock = RLock()
56 | self.opts = {'progress_hooks': [self.__onDownloadProgress],
57 | 'logger': MyLogger(self),
58 | 'usenetrc': True,
59 | 'cookiefile': 'cookies.txt',
60 | 'allow_multiple_video_streams': True,
61 | 'allow_multiple_audio_streams': True,
62 | 'noprogress': True,
63 | 'allow_playlist_files': True,
64 | 'overwrites': True,
65 | 'trim_file_name': 200}
66 |
67 | @property
68 | def download_speed(self):
69 | with self.__resource_lock:
70 | return self.__download_speed
71 |
72 | def __onDownloadProgress(self, d):
73 | self.__downloading = True
74 | if self.__is_cancelled:
75 | raise ValueError("Cancelling...")
76 | if d['status'] == "finished":
77 | if self.is_playlist:
78 | self._last_downloaded = 0
79 | elif d['status'] == "downloading":
80 | with self.__resource_lock:
81 | self.__download_speed = d['speed']
82 | if self.is_playlist:
83 | downloadedBytes = d['downloaded_bytes']
84 | chunk_size = downloadedBytes - self._last_downloaded
85 | self._last_downloaded = downloadedBytes
86 | self.downloaded_bytes += chunk_size
87 | else:
88 | if d.get('total_bytes'):
89 | self.size = d['total_bytes']
90 | elif d.get('total_bytes_estimate'):
91 | self.size = d['total_bytes_estimate']
92 | self.downloaded_bytes = d['downloaded_bytes']
93 | try:
94 | self.progress = (self.downloaded_bytes / self.size) * 100
95 | except ZeroDivisionError:
96 | pass
97 |
98 | def __onDownloadStart(self):
99 | with download_dict_lock:
100 | download_dict[self.__listener.uid] = YoutubeDLDownloadStatus(self, self.__listener, self.__gid)
101 | self.__listener.onDownloadStart()
102 | sendStatusMessage(self.__listener.message, self.__listener.bot)
103 |
104 | def __onDownloadComplete(self):
105 | self.__listener.onDownloadComplete()
106 |
107 | def __onDownloadError(self, error):
108 | self.__is_cancelled = True
109 | self.__listener.onDownloadError(error)
110 |
111 | def extractMetaData(self, link, name, args, get_info=False):
112 | if args is not None:
113 | self.__set_args(args)
114 | if get_info:
115 | self.opts['playlist_items'] = '0'
116 | if link.startswith(('rtmp', 'mms', 'rtsp')):
117 | self.opts['external_downloader'] = 'ffmpeg'
118 | with YoutubeDL(self.opts) as ydl:
119 | try:
120 | result = ydl.extract_info(link, download=False)
121 | if get_info:
122 | return result
123 | elif result is None:
124 | raise ValueError('Info result is None')
125 | realName = ydl.prepare_filename(result)
126 | except Exception as e:
127 | if get_info:
128 | raise e
129 | return self.__onDownloadError(str(e))
130 | if 'entries' in result:
131 | for v in result['entries']:
132 | if not v:
133 | continue
134 | elif 'filesize_approx' in v:
135 | self.size += v['filesize_approx']
136 | elif 'filesize' in v:
137 | self.size += v['filesize']
138 | if name == "":
139 | self.name = realName.split(f" [{result['id'].replace('*', '_')}]")[0]
140 | else:
141 | self.name = name
142 | else:
143 | ext = realName.split('.')[-1]
144 | if name == "":
145 | newname = realName.split(f" [{result['id'].replace('*', '_')}]")
146 | self.name = newname[0] + '.' + ext if len(newname) > 1 else newname[0]
147 | else:
148 | self.name = f"{name}.{ext}"
149 |
150 | def __download(self, link):
151 | try:
152 | with YoutubeDL(self.opts) as ydl:
153 | try:
154 | ydl.download([link])
155 | except DownloadError as e:
156 | if not self.__is_cancelled:
157 | self.__onDownloadError(str(e))
158 | return
159 | if self.__is_cancelled:
160 | raise ValueError
161 | self.__onDownloadComplete()
162 | except ValueError:
163 | self.__onDownloadError("Download Stopped by User!")
164 |
165 | def add_download(self, link, path, name, qual, playlist, args):
166 | if playlist:
167 | self.opts['ignoreerrors'] = True
168 | self.is_playlist = True
169 | self.__gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=10))
170 | self.__onDownloadStart()
171 | if qual.startswith('ba/b-'):
172 | mp3_info = qual.split('-')
173 | qual = mp3_info[0]
174 | rate = mp3_info[1]
175 | self.opts['postprocessors'] = [{'key': 'FFmpegExtractAudio', 'preferredcodec': 'mp3', 'preferredquality': rate}]
176 | self.opts['format'] = qual
177 | LOGGER.info(f"Downloading with YT-DLP: {link}")
178 | self.extractMetaData(link, name, args)
179 | if self.__is_cancelled:
180 | return
181 | if self.is_playlist:
182 | self.opts['outtmpl'] = f"{path}/{self.name}/%(title)s.%(ext)s"
183 | elif args is None:
184 | self.opts['outtmpl'] = f"{path}/{self.name}"
185 | else:
186 | folder_name = self.name.rsplit('.', 1)[0]
187 | self.opts['outtmpl'] = f"{path}/{folder_name}/{self.name}"
188 | self.name = folder_name
189 | self.__download(link)
190 |
191 | def cancel_download(self):
192 | self.__is_cancelled = True
193 | LOGGER.info(f"Cancelling Download: {self.name}")
194 | if not self.__downloading:
195 | self.__onDownloadError("Download Cancelled by User!")
196 |
197 | def __set_args(self, args):
198 | args = args.split('|')
199 | for arg in args:
200 | xy = arg.split(':', 1)
201 | karg = xy[0].strip()
202 | varg = xy[1].strip()
203 | if varg.startswith('^'):
204 | varg = int(varg.split('^')[1])
205 | elif varg.lower() == 'true':
206 | varg = True
207 | elif varg.lower() == 'false':
208 | varg = False
209 | elif varg.startswith('(') and varg.endswith(')'):
210 | varg = varg.replace('(', '').replace(')', '')
211 | varg = tuple(map(int, varg.split(',')))
212 | elif varg.startswith('{') and varg.endswith('}'):
213 | varg = jsonloads(varg)
214 | self.opts[karg] = varg
215 |
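Note: `__set_args` above implements a small option mini-language: pipe-separated `key: value` pairs, where a leading `^` marks an integer, `true`/`false` become booleans, parentheses become a tuple of ints, and braces are parsed as JSON. Below is a minimal standalone sketch of the same parsing; the option names in the usage comment are only illustrative, not taken from this file.

from json import loads as jsonloads

def parse_ydl_args(args: str) -> dict:
    # Same rules as __set_args: "key: value" pairs separated by "|".
    opts = {}
    for arg in args.split('|'):
        key, value = (part.strip() for part in arg.split(':', 1))
        if value.startswith('^'):                            # "^10" -> 10
            value = int(value[1:])
        elif value.lower() == 'true':                        # "true" -> True
            value = True
        elif value.lower() == 'false':                       # "false" -> False
            value = False
        elif value.startswith('(') and value.endswith(')'):
            value = tuple(map(int, value[1:-1].split(',')))  # "(1,5)" -> (1, 5)
        elif value.startswith('{') and value.endswith('}'):
            value = jsonloads(value)                         # JSON object -> dict
        opts[key] = value
    return opts

# Hypothetical usage:
# parse_ydl_args('retries: ^10 | writesubtitles: true') -> {'retries': 10, 'writesubtitles': True}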
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/mega_downloader.py:
--------------------------------------------------------------------------------
1 | from random import SystemRandom
2 | from string import ascii_letters, digits
3 | from os import makedirs
4 | from threading import Event
5 | from mega import (MegaApi, MegaListener, MegaRequest, MegaTransfer, MegaError)
6 |
7 | from bot import LOGGER, MEGA_API_KEY, download_dict_lock, download_dict, MEGA_EMAIL_ID, MEGA_PASSWORD, STOP_DUPLICATE, MEGA_LIMIT, ZIP_UNZIP_LIMIT, STORAGE_THRESHOLD
8 | from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage, sendMarkup
9 | from bot.helper.ext_utils.bot_utils import get_mega_link_type, get_readable_file_size
10 | from bot.helper.mirror_utils.status_utils.mega_download_status import MegaDownloadStatus
11 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
12 | from bot.helper.ext_utils.fs_utils import get_base_name, check_storage_threshold
13 |
14 |
15 | class MegaAppListener(MegaListener):
16 | _NO_EVENT_ON = (MegaRequest.TYPE_LOGIN, MegaRequest.TYPE_FETCH_NODES)
17 | NO_ERROR = "no error"
18 |
19 | def __init__(self, continue_event: Event, listener):
20 | self.continue_event = continue_event
21 | self.node = None
22 | self.public_node = None
23 | self.listener = listener
24 | self.__bytes_transferred = 0
25 | self.is_cancelled = False
26 | self.__speed = 0
27 | self.__name = ''
28 | self.__size = 0
29 | self.error = None
30 | self.gid = ""
31 | super(MegaAppListener, self).__init__()
32 |
33 | @property
34 | def speed(self):
35 | """Returns speed of the download in bytes/second"""
36 | return self.__speed
37 |
38 | @property
39 | def name(self):
40 | """Returns name of the download"""
41 | return self.__name
42 |
43 | def setValues(self, name, size, gid):
44 | self.__name = name
45 | self.__size = size
46 | self.gid = gid
47 |
48 | @property
49 | def size(self):
50 | """Size of download in bytes"""
51 | return self.__size
52 |
53 | @property
54 | def downloaded_bytes(self):
55 | return self.__bytes_transferred
56 |
57 | def onRequestFinish(self, api, request, error):
58 | if str(error).lower() != "no error":
59 | self.error = error.copy()
60 | LOGGER.error(self.error)
61 | self.continue_event.set()
62 | return
63 | request_type = request.getType()
64 | if request_type == MegaRequest.TYPE_LOGIN:
65 | api.fetchNodes()
66 | elif request_type == MegaRequest.TYPE_GET_PUBLIC_NODE:
67 | self.public_node = request.getPublicMegaNode()
68 | elif request_type == MegaRequest.TYPE_FETCH_NODES:
69 | LOGGER.info("Fetching Root Node.")
70 | self.node = api.getRootNode()
71 | LOGGER.info(f"Node Name: {self.node.getName()}")
72 | if request_type not in self._NO_EVENT_ON or (self.node and "cloud drive" not in self.node.getName().lower()):
73 | self.continue_event.set()
74 |
75 | def onRequestTemporaryError(self, api, request, error: MegaError):
76 | LOGGER.error(f'Mega request error: {error}')
77 | if not self.is_cancelled:
78 | self.is_cancelled = True
79 | self.listener.onDownloadError(f"RequestTempError: {error.toString()}")
80 | self.error = error.toString()
81 | self.continue_event.set()
82 |
83 | def onTransferUpdate(self, api: MegaApi, transfer: MegaTransfer):
84 | if self.is_cancelled:
85 | api.cancelTransfer(transfer, None)
86 | self.continue_event.set()
87 | return
88 | self.__speed = transfer.getSpeed()
89 | self.__bytes_transferred = transfer.getTransferredBytes()
90 |
91 | def onTransferFinish(self, api: MegaApi, transfer: MegaTransfer, error):
92 | try:
93 | if self.is_cancelled:
94 | self.continue_event.set()
95 | elif transfer.isFinished() and (transfer.isFolderTransfer() or transfer.getFileName() == self.name):
96 | self.listener.onDownloadComplete()
97 | self.continue_event.set()
98 | except Exception as e:
99 | LOGGER.error(e)
100 |
101 | def onTransferTemporaryError(self, api, transfer, error):
102 | filen = transfer.getFileName()
103 | state = transfer.getState()
104 | errStr = error.toString()
105 | LOGGER.error(f'Mega download error for file {filen} (transfer {transfer}): {error}')
106 | if state in [1, 4]:
107 | # Sometimes MEGA (official client) can't stream a node either and raises a temporary failure.
108 | # Don't break the transfer queue if the transfer is in queued (1) or retrying (4) state [causes seg fault]
109 | return
110 |
111 | self.error = errStr
112 | if not self.is_cancelled:
113 | self.is_cancelled = True
114 | self.listener.onDownloadError(f"TransferTempError: {errStr} ({filen})")
115 | self.continue_event.set()
116 |
117 | def cancel_download(self):
118 | self.is_cancelled = True
119 | self.listener.onDownloadError("Download Canceled by user")
120 |
121 |
122 | class AsyncExecutor:
123 |
124 | def __init__(self):
125 | self.continue_event = Event()
126 |
127 | def do(self, function, args):
128 | self.continue_event.clear()
129 | function(*args)
130 | self.continue_event.wait()
131 |
132 |
133 | def add_mega_download(mega_link: str, path: str, listener, name: str):
134 | executor = AsyncExecutor()
135 | api = MegaApi(MEGA_API_KEY, None, None, 'mirror-leech-telegram-bot')
136 | folder_api = None
137 | mega_listener = MegaAppListener(executor.continue_event, listener)
138 | api.addListener(mega_listener)
139 | if MEGA_EMAIL_ID is not None and MEGA_PASSWORD is not None:
140 | executor.do(api.login, (MEGA_EMAIL_ID, MEGA_PASSWORD))
141 | if get_mega_link_type(mega_link) == "file":
142 | executor.do(api.getPublicNode, (mega_link,))
143 | node = mega_listener.public_node
144 | else:
145 | folder_api = MegaApi(MEGA_API_KEY, None, None, 'mltb')
146 | folder_api.addListener(mega_listener)
147 | executor.do(folder_api.loginToFolder, (mega_link,))
148 | node = folder_api.authorizeNode(mega_listener.node)
149 | if mega_listener.error is not None:
150 | sendMessage(str(mega_listener.error), listener.bot, listener.message)
151 | api.removeListener(mega_listener)
152 | if folder_api is not None:
153 | folder_api.removeListener(mega_listener)
154 | return
155 | mname = name or node.getName()
156 | if STOP_DUPLICATE and not listener.isLeech:
157 | LOGGER.info('Checking if File/Folder is already in Drive')
158 | if listener.isZip:
159 | mname = f"{mname}.zip"
160 | elif listener.extract:
161 | try:
162 | mname = get_base_name(mname)
163 | except:
164 | mname = None
165 | if mname is not None:
166 | smsg, button = GoogleDriveHelper().drive_list(mname, True)
167 | if smsg:
168 | msg1 = "File/Folder is already available in Drive.\nHere are the search results:"
169 | sendMarkup(msg1, listener.bot, listener.message, button)
170 | api.removeListener(mega_listener)
171 | if folder_api is not None:
172 | folder_api.removeListener(mega_listener)
173 | return
174 | if any([STORAGE_THRESHOLD, MEGA_LIMIT]):
175 | size = api.getSize(node)
176 | arch = any([listener.isZip, listener.isLeech, listener.extract])
177 | if STORAGE_THRESHOLD is not None:
178 | acpt = check_storage_threshold(size, arch)
179 | if not acpt:
180 | msg = f'You must leave {STORAGE_THRESHOLD}GB free storage.'
181 | msg += f'\nYour File/Folder size is {get_readable_file_size(size)}'
182 | return sendMessage(msg, listener.bot, listener.message)
183 | limit = None
184 | if MEGA_LIMIT is not None:
185 | msg3 = f'Failed, Mega limit is {MEGA_LIMIT}GB.\nYour File/Folder size is {get_readable_file_size(api.getSize(node))}.'
186 | limit = MEGA_LIMIT
187 | if limit is not None:
188 | LOGGER.info('Checking File/Folder Size...')
189 | if size > limit * 1024**3:
190 | return sendMessage(msg3, listener.bot, listener.message)
191 | with download_dict_lock:
192 | download_dict[listener.uid] = MegaDownloadStatus(mega_listener, listener)
193 | listener.onDownloadStart()
194 | makedirs(path)
195 | gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=8))
196 | mname = name or node.getName()
197 | mega_listener.setValues(mname, api.getSize(node), gid)
198 | sendStatusMessage(listener.message, listener.bot)
199 | executor.do(api.startDownload, (node, path, name, None, False, None))
200 | api.removeListener(mega_listener)
201 | if folder_api is not None:
202 | folder_api.removeListener(mega_listener)
203 |
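Note: the MEGA SDK calls above are asynchronous; `AsyncExecutor.do()` makes them blocking by clearing a `threading.Event`, firing the SDK call, and waiting until a listener callback (e.g. `onRequestFinish`) sets the event. A minimal, library-free sketch of that pattern follows, with `fake_login` standing in for a real MegaApi call.

from threading import Event, Thread
from time import sleep

class SyncExecutor:
    """Blocks the caller until an asynchronous operation signals completion."""
    def __init__(self):
        self.continue_event = Event()

    def do(self, function, args):
        self.continue_event.clear()    # arm the gate
        function(*args)                # start the asynchronous call
        self.continue_event.wait()     # block until a callback sets the event

def fake_login(executor, user):
    # Stand-in for an async API call: does its work on another thread,
    # then "calls back" by setting the executor's event.
    def work():
        sleep(0.1)                     # pretend network latency
        print(f"logged in as {user}")
        executor.continue_event.set()  # what onRequestFinish() does in the bot
    Thread(target=work).start()

if __name__ == '__main__':
    ex = SyncExecutor()
    ex.do(fake_login, (ex, 'demo@example.com'))
    print("login finished, safe to continue")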
--------------------------------------------------------------------------------
/bot/modules/clone.py:
--------------------------------------------------------------------------------
1 | from random import SystemRandom
2 | from string import ascii_letters, digits
3 | from telegram.ext import CommandHandler
4 | from threading import Thread
5 | from time import sleep
6 |
7 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
8 | from bot.helper.telegram_helper.message_utils import sendMessage, deleteMessage, delete_all_messages, update_all_messages, sendStatusMessage, sendMarkup, auto_delete_message, auto_delete_upload_message
9 | from bot.helper.telegram_helper.filters import CustomFilters
10 | from bot.helper.telegram_helper.bot_commands import BotCommands
11 | from bot.helper.mirror_utils.status_utils.clone_status import CloneStatus
12 | from bot import bot, dispatcher, LOGGER, STOP_DUPLICATE, download_dict, download_dict_lock, Interval, MIRROR_LOGS, BOT_PM, AUTO_DELETE_UPLOAD_MESSAGE_DURATION, CLONE_LIMIT, FORCE_BOT_PM
13 | from bot.helper.ext_utils.bot_utils import is_gdrive_link, new_thread, is_appdrive_link, is_gdtot_link, get_readable_file_size
14 | from bot.helper.mirror_utils.download_utils.direct_link_generator import appdrive, gdtot
15 | from bot.helper.ext_utils.exceptions import DirectDownloadLinkException
16 | from bot.helper.telegram_helper.button_build import ButtonMaker
17 | def _clone(message, bot):
18 | buttons = ButtonMaker()
19 | if AUTO_DELETE_UPLOAD_MESSAGE_DURATION != -1:
20 | reply_to = message.reply_to_message
21 | if reply_to is not None:
22 | reply_to.delete()
23 | if BOT_PM and message.chat.type != 'private':
24 | try:
25 | msg1 = 'Added your requested link to download\n'
26 | send = bot.sendMessage(message.from_user.id, text=msg1)
27 | send.delete()
28 | except Exception as e:
29 | LOGGER.warning(e)
30 | bot_d = bot.get_me()
31 | b_uname = bot_d.username
32 | uname = message.from_user.mention_html(message.from_user.first_name)
33 | botstart = f"http://t.me/{b_uname}"
34 | buttons.buildbutton("Click Here to Start Me", f"{botstart}")
35 | startwarn = f"Dear {uname}, Start me in PM to use me."
36 | mesg = sendMarkup(startwarn, bot, message, buttons.build_menu(2))
37 | sleep(15)
38 | mesg.delete()
39 | message.delete()
40 | return
41 | args = message.text.split()
42 | reply_to = message.reply_to_message
43 | link = ''
44 | multi = 1
45 | if len(args) > 1:
46 | link = args[1].strip()
47 | if link.isdigit():
48 | multi = int(link)
49 | link = ''
50 | if message.from_user.username:
51 | tag = f"@{message.from_user.username}"
52 | else:
53 | tag = message.from_user.mention_html(message.from_user.first_name)
54 | if reply_to:
55 | if len(link) == 0:
56 | link = reply_to.text.split(maxsplit=1)[0].strip()
57 | if reply_to.from_user.username:
58 | tag = f"@{reply_to.from_user.username}"
59 | else:
60 | tag = reply_to.from_user.mention_html(reply_to.from_user.first_name)
61 | is_appdrive = is_appdrive_link(link)
62 | is_gdtot = is_gdtot_link(link)
63 | if is_appdrive:
64 | msg = sendMessage(f"Processing: {link}", bot, message)
65 | try:
66 | link = appdrive(link)
67 | deleteMessage(bot, msg)
68 | except DirectDownloadLinkException as e:
69 | deleteMessage(bot, msg)
70 | return sendMessage(str(e), bot, message)
71 | if is_gdtot:
72 | try:
73 | msg = sendMessage(f"Processing: {link}", bot, message)
74 | link = gdtot(link)
75 | deleteMessage(bot, msg)
76 | except DirectDownloadLinkException as e:
77 | deleteMessage(bot, msg)
78 | return sendMessage(str(e), bot, message)
79 | if is_gdrive_link(link):
80 | gd = GoogleDriveHelper()
81 | res, size, name, files = gd.helper(link)
82 | if res != "":
83 | return sendMessage(res, bot, message)
84 | if STOP_DUPLICATE:
85 | LOGGER.info('Checking if File/Folder is already in Drive...')
86 | smsg, button = gd.drive_list(name, True, True)
87 | if smsg:
88 | msg3 = "File/Folder is already available in Drive.\nHere are the search results:"
89 | return sendMarkup(msg3, bot, message, button)
90 | if CLONE_LIMIT is not None:
91 | LOGGER.info('Checking File/Folder Size...')
92 | if size > CLONE_LIMIT * 1024**3:
93 | msg2 = f'Failed, Clone limit is {CLONE_LIMIT}GB.\nYour File/Folder size is {get_readable_file_size(size)}.'
94 | return sendMessage(msg2, bot, message)
95 | if multi > 1:
96 | sleep(4)
97 | nextmsg = type('nextmsg', (object, ), {'chat_id': message.chat_id, 'message_id': message.reply_to_message.message_id + 1})
98 | nextmsg = sendMessage(message.text.replace(str(multi), str(multi - 1), 1), bot, nextmsg)
99 | nextmsg.from_user.id = message.from_user.id
100 | sleep(4)
101 | Thread(target=_clone, args=(nextmsg, bot)).start()
102 | if files <= 20:
103 | msg = sendMessage(f"Cloning: {link}", bot, message)
104 | result, button = gd.clone(link)
105 | deleteMessage(bot, msg)
106 | if BOT_PM and FORCE_BOT_PM:
107 | botpm = f"\n\nHey {tag}! I have sent your cloned links in PM.\n"
108 | buttons = ButtonMaker()
109 | b_uname = bot.get_me().username
110 | botstart = f"http://t.me/{b_uname}"
111 | buttons.buildbutton("View links in PM", f"{botstart}")
112 | sendMarkup(result + botpm, bot, message, buttons.build_menu(2))
113 | message.delete()
114 | reply_to = message.reply_to_message
115 | if reply_to is not None and AUTO_DELETE_UPLOAD_MESSAGE_DURATION == -1:
116 | reply_to.delete()
117 |
118 |
119 | else:
120 | drive = GoogleDriveHelper(name)
121 | gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
122 | clone_status = CloneStatus(drive, size, message, gid)
123 | with download_dict_lock:
124 | download_dict[message.message_id] = clone_status
125 | sendStatusMessage(message, bot)
126 | result, button = drive.clone(link)
127 | with download_dict_lock:
128 | del download_dict[message.message_id]
129 | count = len(download_dict)
130 | try:
131 | if count == 0:
132 | Interval[0].cancel()
133 | del Interval[0]
134 | delete_all_messages()
135 | if BOT_PM and FORCE_BOT_PM:
136 | botpm = f"\n\nHey {tag}! I have sent your cloned links in PM.\n"
137 | buttons = ButtonMaker()
138 | b_uname = bot.get_me().username
139 | botstart = f"http://t.me/{b_uname}"
140 | buttons.buildbutton("View links in PM", f"{botstart}")
141 | sendMarkup(result + botpm, bot, message, buttons.build_menu(2))
142 | message.delete()
143 | reply_to = message.reply_to_message
144 | if reply_to is not None and AUTO_DELETE_UPLOAD_MESSAGE_DURATION == -1:
145 | reply_to.delete()
146 |
147 | else:
148 | update_all_messages()
149 | except IndexError:
150 | pass
151 | cc = f'\n\ncc: {tag}'
152 | if button in ["cancelled", ""]:
153 | sendMessage(f"{tag} {result}", bot, message)
154 | else:
155 | LOGGER.info(f'Cloning Done: {name}')
156 | if FORCE_BOT_PM is False:
157 | upldmsg = sendMarkup(result + cc, bot, message, button)
158 | Thread(target=auto_delete_upload_message, args=(bot, message, upldmsg)).start()
159 | if is_gdtot:
160 | LOGGER.info(f"Deleting: {link}")
161 | gd.deletefile(link)
162 | elif is_appdrive:
163 | LOGGER.info(f"Deleting: {link}")
164 | gd.deletefile(link)
165 | if MIRROR_LOGS:
166 | try:
167 | for chatid in MIRROR_LOGS:
168 | bot.sendMessage(chat_id=chatid, text=result + cc, reply_markup=button, parse_mode='HTML')
169 | except Exception as e:
170 | LOGGER.warning(e)
171 | if BOT_PM and message.chat.type != 'private':
172 | try:
173 | bot.sendMessage(message.from_user.id, text=result + cc, reply_markup=button, parse_mode='HTML')
174 | except Exception as e:
175 | LOGGER.warning(e)
176 | return
177 | else:
178 | sendMessage("Send a Gdrive, Gdtot or Appdrive link along with the command, or reply to a link with the command.\n\nMulti links are supported only by replying to the first link/file:\n/cmd 10 (number of links/files)", bot, message)
179 |
180 | @new_thread
181 | def cloneNode(update, context):
182 | _clone(update.message, context.bot)
183 |
184 | clone_handler = CommandHandler(BotCommands.CloneCommand, cloneNode, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
185 | dispatcher.add_handler(clone_handler)
186 |
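Note: the CLONE_LIMIT check above compares the Drive item's size in bytes against a limit configured in GB, scaling the limit by 1024**3. Here is a small standalone sketch of that comparison; the function name and sample values are illustrative only.

def exceeds_clone_limit(size_bytes, clone_limit_gb=None):
    # CLONE_LIMIT is configured in GB (GiB); convert it to bytes before comparing.
    if clone_limit_gb is None:          # no limit configured
        return False
    return size_bytes > clone_limit_gb * 1024 ** 3

# Example: a 12 GiB folder against a 10 GB limit
print(exceeds_clone_limit(12 * 1024 ** 3, 10))   # True
print(exceeds_clone_limit(8 * 1024 ** 3, 10))    # False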
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/aria2_download.py:
--------------------------------------------------------------------------------
1 | from time import sleep, time
2 | from os import remove, path as ospath
3 |
4 | from bot import aria2, download_dict_lock, download_dict, STOP_DUPLICATE, BASE_URL, LOGGER, TORRENT_DIRECT_LIMIT, ZIP_UNZIP_LIMIT, STORAGE_THRESHOLD
5 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
6 | from bot.helper.ext_utils.bot_utils import is_magnet, getDownloadByGid, new_thread, bt_selection_buttons, get_readable_file_size
7 | from bot.helper.mirror_utils.status_utils.aria_download_status import AriaDownloadStatus
8 | from bot.helper.telegram_helper.message_utils import sendMarkup, sendStatusMessage, sendMessage, deleteMessage, update_all_messages
9 | from bot.helper.ext_utils.fs_utils import get_base_name, clean_unwanted, check_storage_threshold
10 |
11 |
12 | @new_thread
13 | def __onDownloadStarted(api, gid):
14 | download = api.get_download(gid)
15 | if download.is_metadata:
16 | LOGGER.info(f'onDownloadStarted: {gid} METADATA')
17 | sleep(1)
18 | if dl := getDownloadByGid(gid):
19 | listener = dl.listener()
20 | if listener.select:
21 | metamsg = "Downloading metadata, please wait; then you can select files. Use a torrent file to avoid this wait."
22 | meta = sendMessage(metamsg, listener.bot, listener.message)
23 | while True:
24 | if download.is_removed or download.followed_by_ids:
25 | deleteMessage(listener.bot, meta)
26 | break
27 | download = download.live
28 | return
29 | else:
30 | LOGGER.info(f'onDownloadStarted: {download.name} - Gid: {gid}')
31 | try:
32 | if any([STOP_DUPLICATE, TORRENT_DIRECT_LIMIT, ZIP_UNZIP_LIMIT, STORAGE_THRESHOLD]):
33 | sleep(1)
34 | if dl := getDownloadByGid(gid):
35 | listener = dl.listener()
36 | if listener.isLeech or listener.select:
37 | return
38 | download = api.get_download(gid)
39 | if not download.is_torrent:
40 | sleep(3)
41 | download = download.live
42 | if STOP_DUPLICATE and not dl.listener().isLeech:
43 | LOGGER.info('Checking if File/Folder is already in Drive...')
44 | sname = download.name
45 | if listener.isZip:
46 | sname = f"{sname}.zip"
47 | elif listener.extract:
48 | try:
49 | sname = get_base_name(sname)
50 | except:
51 | sname = None
52 | if sname is not None:
53 | smsg, button = GoogleDriveHelper().drive_list(sname, True)
54 | if smsg:
55 | listener.onDownloadError('File/Folder already available in Drive.')
56 | api.remove([download], force=True, files=True)
57 | return sendMarkup("Here are the search results:", listener.bot, listener.message, button)
58 | if any([ZIP_UNZIP_LIMIT, TORRENT_DIRECT_LIMIT, STORAGE_THRESHOLD]):
59 | sleep(1)
60 | limit = None
61 | size = download.total_length
62 | arch = any([listener.isZip, listener.isLeech, listener.extract])
63 | if STORAGE_THRESHOLD is not None:
64 | acpt = check_storage_threshold(size, arch, True)
65 | if not acpt:
66 | msg = f'You must leave {STORAGE_THRESHOLD}GB free storage.'
67 | msg += f'\nYour File/Folder size is {get_readable_file_size(size)}'
68 | listener.onDownloadError(msg)
69 | return api.remove([download], force=True, files=True)
70 | if ZIP_UNZIP_LIMIT is not None and arch:
71 | mssg = f'Zip/Unzip limit is {ZIP_UNZIP_LIMIT}GB'
72 | limit = ZIP_UNZIP_LIMIT
73 | elif TORRENT_DIRECT_LIMIT is not None:
74 | mssg = f'Torrent/Direct limit is {TORRENT_DIRECT_LIMIT}GB'
75 | limit = TORRENT_DIRECT_LIMIT
76 | if limit is not None:
77 | LOGGER.info('Checking File/Folder Size...')
78 | if size > limit * 1024**3:
79 | listener.onDownloadError(f'{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}')
80 | return api.remove([download], force=True, files=True)
81 | except Exception as e:
82 | LOGGER.error(f"{e} onDownloadStarted: {gid} duplicate/limit check didn't complete")
83 |
84 | @new_thread
85 | def __onDownloadComplete(api, gid):
86 | try:
87 | download = api.get_download(gid)
88 | except:
89 | return
90 | if download.followed_by_ids:
91 | new_gid = download.followed_by_ids[0]
92 | LOGGER.info(f'Gid changed from {gid} to {new_gid}')
93 | if dl := getDownloadByGid(new_gid):
94 | listener = dl.listener()
95 | if BASE_URL is not None and listener.select:
96 | api.client.force_pause(new_gid)
97 | SBUTTONS = bt_selection_buttons(new_gid)
98 | msg = "Your download is paused. Choose files, then press the Done Selecting button to start downloading."
99 | sendMarkup(msg, listener.bot, listener.message, SBUTTONS)
100 | elif download.is_torrent:
101 | if dl := getDownloadByGid(gid):
102 | if hasattr(dl, 'listener'):
103 | listener = dl.listener()
104 | if hasattr(listener, 'uploaded'):
105 | LOGGER.info(f"Cancelling Seed: {download.name} onDownloadComplete")
106 | listener.onUploadError(f"Seeding stopped with Ratio: {dl.ratio()} and Time: {dl.seeding_time()}")
107 | api.remove([download], force=True, files=True)
108 | else:
109 | LOGGER.info(f"onDownloadComplete: {download.name} - Gid: {gid}")
110 | if dl := getDownloadByGid(gid):
111 | dl.listener().onDownloadComplete()
112 | api.remove([download], force=True, files=True)
113 |
114 | @new_thread
115 | def __onBtDownloadComplete(api, gid):
116 | seed_start_time = time()
117 | sleep(1)
118 | download = api.get_download(gid)
119 | LOGGER.info(f"onBtDownloadComplete: {download.name} - Gid: {gid}")
120 | if dl := getDownloadByGid(gid):
121 | listener = dl.listener()
122 | if listener.select:
123 | res = download.files
124 | for file_o in res:
125 | f_path = file_o.path
126 | if not file_o.selected and ospath.exists(f_path):
127 | try:
128 | remove(f_path)
129 | except:
130 | pass
131 | clean_unwanted(download.dir)
132 | if listener.seed:
133 | try:
134 | api.set_options({'max-upload-limit': '0'}, [download])
135 | except Exception as e:
136 | LOGGER.error(f'{e} Unable to seed because the global option seed-time=0 is set without a specific seed_time for this torrent')
137 | else:
138 | api.client.force_pause(gid)
139 | listener.onDownloadComplete()
140 | if listener.seed:
141 | with download_dict_lock:
142 | if listener.uid not in download_dict:
143 | api.remove([download], force=True, files=True)
144 | return
145 | download_dict[listener.uid] = AriaDownloadStatus(gid, listener)
146 | download_dict[listener.uid].start_time = seed_start_time
147 | LOGGER.info(f"Seeding started: {download.name} - Gid: {gid}")
148 | download = download.live
149 | if download.is_complete:
150 | if dl := getDownloadByGid(gid):
151 | LOGGER.info(f"Cancelling Seed: {download.name}")
152 | listener.onUploadError(f"Seeding stopped with Ratio: {dl.ratio()} and Time: {dl.seeding_time()}")
153 | api.remove([download], force=True, files=True)
154 | else:
155 | listener.uploaded = True
156 | update_all_messages()
157 | else:
158 | api.remove([download], force=True, files=True)
159 |
160 | @new_thread
161 | def __onDownloadStopped(api, gid):
162 | sleep(6)
163 | if dl := getDownloadByGid(gid):
164 | dl.listener().onDownloadError('Dead torrent!')
165 |
166 | @new_thread
167 | def __onDownloadError(api, gid):
168 | LOGGER.info(f"onDownloadError: {gid}")
169 | error = "None"
170 | try:
171 | download = api.get_download(gid)
172 | error = download.error_message
173 | LOGGER.info(f"Download Error: {error}")
174 | except:
175 | pass
176 | if dl := getDownloadByGid(gid):
177 | dl.listener().onDownloadError(error)
178 |
179 | def start_listener():
180 | aria2.listen_to_notifications(threaded=True,
181 | on_download_start=__onDownloadStarted,
182 | on_download_error=__onDownloadError,
183 | on_download_stop=__onDownloadStopped,
184 | on_download_complete=__onDownloadComplete,
185 | on_bt_download_complete=__onBtDownloadComplete,
186 | timeout=60)
187 |
188 | def add_aria2c_download(link: str, path, listener, filename, auth, select, ratio, seed_time):
189 | args = {'dir': path, 'max-upload-limit': '1K'}
190 | if filename:
191 | args['out'] = filename
192 | if auth:
193 | args['header'] = f"authorization: {auth}"
194 | if ratio:
195 | args['seed-ratio'] = ratio
196 | if seed_time:
197 | args['seed-time'] = seed_time
198 | if is_magnet(link):
199 | download = aria2.add_magnet(link, args)
200 | else:
201 | download = aria2.add_uris([link], args)
202 | if download.error_message:
203 | error = str(download.error_message).replace('<', ' ').replace('>', ' ')
204 | LOGGER.info(f"Download Error: {error}")
205 | return sendMessage(error, listener.bot, listener.message)
206 | with download_dict_lock:
207 | download_dict[listener.uid] = AriaDownloadStatus(download.gid, listener)
208 | LOGGER.info(f"Aria2Download started: {download.gid}")
209 | listener.onDownloadStart()
210 | if not select:
211 | sendStatusMessage(listener.message, listener.bot)
212 |
213 | start_listener()
214 |
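Note: `add_aria2c_download` above maps the bot's per-download settings onto plain aria2c option names ('dir', 'out', 'header', 'seed-ratio', 'seed-time'). A minimal sketch of just that mapping, without the aria2 client, is below; the helper name and sample values are illustrative.

def build_aria2_options(path, filename=None, auth=None, ratio=None, seed_time=None):
    # Keys are aria2c option names; values are passed through as strings,
    # exactly as they would appear on an aria2c command line.
    options = {'dir': path, 'max-upload-limit': '1K'}
    if filename:
        options['out'] = filename                      # rename the downloaded file
    if auth:
        options['header'] = f"authorization: {auth}"   # forwarded HTTP header
    if ratio:
        options['seed-ratio'] = ratio                  # stop seeding at this ratio
    if seed_time:
        options['seed-time'] = seed_time               # stop seeding after N minutes
    return options

# Example:
# build_aria2_options('/usr/src/app/downloads/1', filename='file.bin', auth='Bearer x')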
--------------------------------------------------------------------------------
/bot/helper/ext_utils/fs_utils.py:
--------------------------------------------------------------------------------
1 | from os import remove as osremove, path as ospath, mkdir, walk, listdir, rmdir, makedirs
2 | from sys import exit as sysexit
3 | from json import loads as jsonloads
4 | from shutil import rmtree, disk_usage
5 | from PIL import Image
6 | from magic import Magic
7 | from subprocess import run as srun, check_output, Popen
8 | from time import time
9 | from math import ceil
10 | from re import split as re_split, I
11 |
12 | from .exceptions import NotSupportedExtractionArchive
13 | from bot import aria2, app, LOGGER, DOWNLOAD_DIR, get_client, LEECH_SPLIT_SIZE, EQUAL_SPLITS, IS_PREMIUM_USER, MAX_SPLIT_SIZE, STORAGE_THRESHOLD
14 |
15 | ARCH_EXT = [".tar.bz2", ".tar.gz", ".bz2", ".gz", ".tar.xz", ".tar", ".tbz2", ".tgz", ".lzma2",
16 | ".zip", ".7z", ".z", ".rar", ".iso", ".wim", ".cab", ".apm", ".arj", ".chm",
17 | ".cpio", ".cramfs", ".deb", ".dmg", ".fat", ".hfs", ".lzh", ".lzma", ".mbr",
18 | ".msi", ".mslz", ".nsis", ".ntfs", ".rpm", ".squashfs", ".udf", ".vhd", ".xar"]
19 |
20 | def clean_target(path: str):
21 | if ospath.exists(path):
22 | LOGGER.info(f"Cleaning Target: {path}")
23 | if ospath.isdir(path):
24 | try:
25 | rmtree(path)
26 | except:
27 | pass
28 | elif ospath.isfile(path):
29 | try:
30 | osremove(path)
31 | except:
32 | pass
33 |
34 | def clean_download(path: str):
35 | if ospath.exists(path):
36 | LOGGER.info(f"Cleaning Download: {path}")
37 | try:
38 | rmtree(path)
39 | except:
40 | pass
41 |
42 | def start_cleanup():
43 | try:
44 | rmtree(DOWNLOAD_DIR)
45 | except:
46 | pass
47 | makedirs(DOWNLOAD_DIR)
48 |
49 | def clean_all():
50 | aria2.remove_all(True)
51 | get_client().torrents_delete(torrent_hashes="all")
52 | app.stop()
53 | try:
54 | rmtree(DOWNLOAD_DIR)
55 | except:
56 | pass
57 |
58 | def exit_clean_up(signal, frame):
59 | try:
60 | LOGGER.info("Please wait while we clean up and stop the running downloads")
61 | clean_all()
62 | sysexit(0)
63 | except KeyboardInterrupt:
64 | LOGGER.warning("Force Exiting before the cleanup finishes!")
65 | sysexit(1)
66 |
67 | def clean_unwanted(path: str):
68 | LOGGER.info(f"Cleaning unwanted files/folders: {path}")
69 | for dirpath, subdir, files in walk(path, topdown=False):
70 | for filee in files:
71 | if filee.endswith(".!qB") or (filee.endswith('.parts') and filee.startswith('.')):
72 | osremove(ospath.join(dirpath, filee))
73 | if dirpath.endswith((".unwanted", "splited_files_mltb")):
74 | rmtree(dirpath)
75 | for dirpath, subdir, files in walk(path, topdown=False):
76 | if not listdir(dirpath):
77 | rmdir(dirpath)
78 |
79 | def get_path_size(path: str):
80 | if ospath.isfile(path):
81 | return ospath.getsize(path)
82 | total_size = 0
83 | for root, dirs, files in walk(path):
84 | for f in files:
85 | abs_path = ospath.join(root, f)
86 | total_size += ospath.getsize(abs_path)
87 | return total_size
88 |
89 | def get_base_name(orig_path: str):
90 | ext = [ext for ext in ARCH_EXT if orig_path.lower().endswith(ext)]
91 | if len(ext) > 0:
92 | ext = ext[0]
93 | return re_split(ext + '$', orig_path, maxsplit=1, flags=I)[0]
94 | else:
95 | raise NotSupportedExtractionArchive('File format not supported for extraction')
96 |
97 | def get_mime_type(file_path):
98 | mime = Magic(mime=True)
99 | mime_type = mime.from_file(file_path)
100 | mime_type = mime_type or "text/plain"
101 | return mime_type
102 |
103 | def take_ss(video_file, duration):
104 | des_dir = 'Thumbnails'
105 | if not ospath.exists(des_dir):
106 | mkdir(des_dir)
107 | des_dir = ospath.join(des_dir, f"{time()}.jpg")
108 | if duration is None:
109 | duration = get_media_info(video_file)[0]
110 | if duration == 0:
111 | duration = 3
112 | duration = duration // 2
113 |
114 | status = srun(["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(duration),
115 | "-i", video_file, "-frames:v", "1", des_dir])
116 |
117 | if status.returncode != 0 or not ospath.lexists(des_dir):
118 | return None
119 |
120 | with Image.open(des_dir) as img:
121 | img.convert("RGB").save(des_dir, "JPEG")
122 |
123 | return des_dir
124 |
125 | def split_file(path, size, file_, dirpath, split_size, listener, start_time=0, i=1, inLoop=False, noMap=False):
126 | if listener.seed and not listener.newDir:
127 | dirpath = f"{dirpath}/splited_files_mltb"
128 | if not ospath.exists(dirpath):
129 | mkdir(dirpath)
130 | parts = ceil(size/LEECH_SPLIT_SIZE)
131 | if EQUAL_SPLITS and not inLoop:
132 | split_size = ceil(size/parts) + 1000
133 | if get_media_streams(path)[0]:
134 | duration = get_media_info(path)[0]
135 | base_name, extension = ospath.splitext(file_)
136 | split_size = split_size - 5000000
137 | while i <= parts:
138 | parted_name = "{}.part{}{}".format(str(base_name), str(i).zfill(3), str(extension))
139 | out_path = ospath.join(dirpath, parted_name)
140 | if not noMap:
141 | listener.suproc = Popen(["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(start_time),
142 | "-i", path, "-fs", str(split_size), "-map", "0", "-map_chapters", "-1",
143 | "-c", "copy", out_path])
144 | else:
145 | listener.suproc = Popen(["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(start_time),
146 | "-i", path, "-fs", str(split_size), "-map_chapters", "-1", "-c", "copy",
147 | out_path])
148 | listener.suproc.wait()
149 | if listener.suproc.returncode == -9:
150 | return False
151 | elif listener.suproc.returncode != 0 and not noMap:
152 | LOGGER.warning(f"Retrying without map, -map 0 not working in all situations. Path: {path}")
153 | try:
154 | osremove(out_path)
155 | except:
156 | pass
157 | return split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, True)
158 | elif listener.suproc.returncode != 0:
159 | LOGGER.warning(f"Unable to split this video; if its size is less than {MAX_SPLIT_SIZE} it will be uploaded as is. Path: {path}")
160 | try:
161 | osremove(out_path)
162 | except:
163 | pass
164 | return "errored"
165 | out_size = get_path_size(out_path)
166 | if out_size > MAX_SPLIT_SIZE:
167 | dif = out_size - MAX_SPLIT_SIZE
168 | split_size = split_size - dif + 5000000
169 | osremove(out_path)
170 | return split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, noMap)
171 | lpd = get_media_info(out_path)[0]
172 | if lpd == 0:
173 | LOGGER.error(f'Something went wrong while splitting; most likely the file is corrupted. Path: {path}')
174 | break
175 | elif duration == lpd:
176 | LOGGER.warning(f"This file was split using only the default video and audio streams, so you will see a single part smaller than the original because it doesn't include all streams and audio tracks. This happens mostly with MKV videos. noMap={noMap}. Path: {path}")
177 | break
178 | elif lpd <= 4:
179 | osremove(out_path)
180 | break
181 | start_time += lpd - 3
182 | i = i + 1
183 | else:
184 | out_path = ospath.join(dirpath, file_ + ".")
185 | listener.suproc = Popen(["split", "--numeric-suffixes=1", "--suffix-length=3",
186 | f"--bytes={split_size}", path, out_path])
187 | listener.suproc.wait()
188 | if listener.suproc.returncode == -9:
189 | return False
190 | return True
191 |
192 | def get_media_info(path):
193 |
194 | try:
195 | result = check_output(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
196 | "json", "-show_format", "-show_streams", path]).decode('utf-8')
197 | except Exception as e:
198 | LOGGER.error(f'{e}. Most likely the file was not found!')
199 | return 0, None, None
200 |
201 | fields = jsonloads(result).get('format')
202 | if fields is None:
203 | LOGGER.error(f"get_media_info: {result}")
204 | return 0, None, None
205 |
206 | duration = round(float(fields.get('duration', 0)))
207 |
208 | fields = fields.get('tags')
209 | if fields:
210 | artist = fields.get('artist')
211 | if artist is None:
212 | artist = fields.get('ARTIST')
213 | title = fields.get('title')
214 | if title is None:
215 | title = fields.get('TITLE')
216 | else:
217 | title = None
218 | artist = None
219 |
220 | return duration, artist, title
221 |
222 | def get_media_streams(path):
223 |
224 | is_video = False
225 | is_audio = False
226 |
227 | mime_type = get_mime_type(path)
228 | if not mime_type.startswith(('video', 'audio')):
229 | return is_video, is_audio
230 |
231 | try:
232 | result = check_output(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
233 | "json", "-show_streams", path]).decode('utf-8')
234 | except Exception as e:
235 | LOGGER.error(f'{e}. Most likely the file was not found!')
236 | return is_video, is_audio
237 |
238 | fields = jsonloads(result).get('streams')
239 | if fields is None:
240 | LOGGER.error(f"get_media_streams: {result}")
241 | return is_video, is_audio
242 |
243 | for stream in fields:
244 | if stream.get('codec_type') == 'video':
245 | is_video = True
246 | elif stream.get('codec_type') == 'audio':
247 | is_audio = True
248 | return is_video, is_audio
249 |
250 | def check_storage_threshold(size: int, arch=False, alloc=False):
251 | if not alloc:
252 | if not arch:
253 | if disk_usage(DOWNLOAD_DIR).free - size < STORAGE_THRESHOLD * 1024**3:
254 | return False
255 | elif disk_usage(DOWNLOAD_DIR).free - (size * 2) < STORAGE_THRESHOLD * 1024**3:
256 | return False
257 | elif not arch:
258 | if disk_usage(DOWNLOAD_DIR).free < STORAGE_THRESHOLD * 1024**3:
259 | return False
260 | elif disk_usage(DOWNLOAD_DIR).free - size < STORAGE_THRESHOLD * 1024**3:
261 | return False
262 | return True
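Note: with EQUAL_SPLITS enabled, `split_file` above first computes the number of parts from LEECH_SPLIT_SIZE and then re-derives a per-part size so the parts come out roughly equal, keeping a ~5 MB margin because ffmpeg's -fs stops only after crossing the limit. A small worked sketch of that arithmetic follows; the helper name and sample sizes are illustrative.

from math import ceil

def plan_split(size, leech_split_size, equal_splits=True):
    # Number of parts needed at the configured split size.
    parts = ceil(size / leech_split_size)
    if equal_splits:
        # Re-balance so every part is roughly the same size (+1000 bytes slack).
        split_size = ceil(size / parts) + 1000
    else:
        split_size = leech_split_size
    # ffmpeg's -fs stops after crossing the limit, so keep a ~5 MB safety margin.
    return parts, split_size - 5000000

# Example: a 5 GiB file with a 2 GiB split size -> 3 parts of roughly 1.66 GiB each
print(plan_split(5 * 1024**3, 2 * 1024**3))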
--------------------------------------------------------------------------------
/bot/helper/ext_utils/db_handler.py:
--------------------------------------------------------------------------------
1 | from os import path as ospath, makedirs
2 | from psycopg2 import connect, DatabaseError
3 |
4 | from bot import DB_URI, AUTHORIZED_CHATS, SUDO_USERS, AS_DOC_USERS, AS_MEDIA_USERS, rss_dict, LOGGER, botname, LEECH_LOG
5 |
6 | class DbManger:
7 | def __init__(self):
8 | self.err = False
9 | self.connect()
10 |
11 | def connect(self):
12 | try:
13 | self.conn = connect(DB_URI)
14 | self.cur = self.conn.cursor()
15 | except DatabaseError as error:
16 | LOGGER.error(f"Error in DB connection: {error}")
17 | self.err = True
18 |
19 | def disconnect(self):
20 | self.cur.close()
21 | self.conn.close()
22 |
23 | def db_init(self):
24 | if self.err:
25 | return
26 | sql = """CREATE TABLE IF NOT EXISTS users (
27 | uid bigint,
28 | sudo boolean DEFAULT FALSE,
29 | auth boolean DEFAULT FALSE,
30 | media boolean DEFAULT FALSE,
31 | doc boolean DEFAULT FALSE,
32 | thumb bytea DEFAULT NULL,
33 | leechlog boolean DEFAULT FALSE
34 | )
35 | """
36 | self.cur.execute(sql)
37 | sql = """CREATE TABLE IF NOT EXISTS rss (
38 | name text,
39 | link text,
40 | last text,
41 | title text,
42 | filters text
43 | )
44 | """
45 | self.cur.execute(sql)
46 | self.cur.execute("CREATE TABLE IF NOT EXISTS {} (cid bigint, link text, tag text)".format(botname))
47 | self.conn.commit()
48 | LOGGER.info("Database Initiated")
49 | self.db_load()
50 |
51 | def db_load(self):
52 | # User Data
53 | self.cur.execute("SELECT * from users")
54 | rows = self.cur.fetchall() # return a list ==> (uid, sudo, auth, media, doc, thumb, leechlog)
55 | if rows:
56 | for row in rows:
57 | if row[1] and row[0] not in SUDO_USERS:
58 | SUDO_USERS.add(row[0])
59 | elif row[2] and row[0] not in AUTHORIZED_CHATS:
60 | AUTHORIZED_CHATS.add(row[0])
61 | if row[3]:
62 | AS_MEDIA_USERS.add(row[0])
63 | elif row[4]:
64 | AS_DOC_USERS.add(row[0])
65 | path = f"Thumbnails/{row[0]}.jpg"
66 | if row[5] is not None and not ospath.exists(path):
67 | if not ospath.exists('Thumbnails'):
68 | makedirs('Thumbnails')
69 | with open(path, 'wb+') as f:
70 | f.write(row[5])
71 | if row[6] and row[0] not in LEECH_LOG:
72 | LEECH_LOG.add(row[0])
73 | LOGGER.info("Users data has been imported from Database")
74 | # Rss Data
75 | self.cur.execute("SELECT * FROM rss")
76 | rows = self.cur.fetchall() # return a list ==> (name, feed_link, last_link, last_title, filters)
77 | if rows:
78 | for row in rows:
79 | f_lists = []
80 | if row[4] is not None:
81 | filters_list = row[4].split('|')
82 | for x in filters_list:
83 | y = x.split(' or ')
84 | f_lists.append(y)
85 | rss_dict[row[0]] = [row[1], row[2], row[3], f_lists]
86 | LOGGER.info("Rss data has been imported from Database.")
87 | self.disconnect()
88 |
89 | def user_auth(self, chat_id: int):
90 | if self.err:
91 | return "Error in DB connection, check log for details"
92 | elif not self.user_check(chat_id):
93 | sql = 'INSERT INTO users (uid, auth) VALUES ({}, TRUE)'.format(chat_id)
94 | else:
95 | sql = 'UPDATE users SET auth = TRUE WHERE uid = {}'.format(chat_id)
96 | self.cur.execute(sql)
97 | self.conn.commit()
98 | self.disconnect()
99 | return 'Authorized successfully'
100 |
101 | def user_unauth(self, chat_id: int):
102 | if self.err:
103 | return "Error in DB connection, check log for details"
104 | elif self.user_check(chat_id):
105 | sql = 'UPDATE users SET auth = FALSE WHERE uid = {}'.format(chat_id)
106 | self.cur.execute(sql)
107 | self.conn.commit()
108 | self.disconnect()
109 | return 'Unauthorized successfully'
110 |
111 | def user_addsudo(self, user_id: int):
112 | if self.err:
113 | return "Error in DB connection, check log for details"
114 | elif not self.user_check(user_id):
115 | sql = 'INSERT INTO users (uid, sudo) VALUES ({}, TRUE)'.format(user_id)
116 | else:
117 | sql = 'UPDATE users SET sudo = TRUE WHERE uid = {}'.format(user_id)
118 | self.cur.execute(sql)
119 | self.conn.commit()
120 | self.disconnect()
121 | return 'Successfully Promoted as Sudo'
122 |
123 | def user_rmsudo(self, user_id: int):
124 | if self.err:
125 | return "Error in DB connection, check log for details"
126 | elif self.user_check(user_id):
127 | sql = 'UPDATE users SET sudo = FALSE WHERE uid = {}'.format(user_id)
128 | self.cur.execute(sql)
129 | self.conn.commit()
130 | self.disconnect()
131 | return 'Successfully removed from Sudo'
132 |
133 | def user_media(self, user_id: int):
134 | if self.err:
135 | return
136 | elif not self.user_check(user_id):
137 | sql = 'INSERT INTO users (uid, media) VALUES ({}, TRUE)'.format(user_id)
138 | else:
139 | sql = 'UPDATE users SET media = TRUE, doc = FALSE WHERE uid = {}'.format(user_id)
140 | self.cur.execute(sql)
141 | self.conn.commit()
142 | self.disconnect()
143 |
144 | def user_doc(self, user_id: int):
145 | if self.err:
146 | return
147 | elif not self.user_check(user_id):
148 | sql = 'INSERT INTO users (uid, doc) VALUES ({}, TRUE)'.format(user_id)
149 | else:
150 | sql = 'UPDATE users SET media = FALSE, doc = TRUE WHERE uid = {}'.format(user_id)
151 | self.cur.execute(sql)
152 | self.conn.commit()
153 | self.disconnect()
154 |
155 | def user_save_thumb(self, user_id: int, path):
156 | if self.err:
157 | return
158 | image = open(path, 'rb+')
159 | image_bin = image.read()
160 | if not self.user_check(user_id):
161 | sql = 'INSERT INTO users (thumb, uid) VALUES (%s, %s)'
162 | else:
163 | sql = 'UPDATE users SET thumb = %s WHERE uid = %s'
164 | self.cur.execute(sql, (image_bin, user_id))
165 | self.conn.commit()
166 | self.disconnect()
167 |
168 | def user_rm_thumb(self, user_id: int, path):
169 | if self.err:
170 | return
171 | elif self.user_check(user_id):
172 | sql = 'UPDATE users SET thumb = NULL WHERE uid = {}'.format(user_id)
173 | self.cur.execute(sql)
174 | self.conn.commit()
175 | self.disconnect()
176 | # For Leech log
177 | def addleech_log(self, chat_id: int):
178 | if self.err:
179 | return "Error in DB connection, check log for details"
180 | elif not self.user_check(chat_id):
181 | sql = 'INSERT INTO users (uid, leechlog) VALUES ({}, TRUE)'.format(chat_id)
182 | else:
183 | sql = 'UPDATE users SET leechlog = TRUE WHERE uid = {}'.format(chat_id)
184 | self.cur.execute(sql)
185 | self.conn.commit()
186 | self.disconnect()
187 | return 'Successfully added to leech logs'
188 |
189 | def rmleech_log(self, chat_id: int):
190 | if self.err:
191 | return "Error in DB connection, check log for details"
192 | elif self.user_check(chat_id):
193 | sql = 'UPDATE users SET leechlog = FALSE WHERE uid = {}'.format(chat_id)
194 | self.cur.execute(sql)
195 | self.conn.commit()
196 | self.disconnect()
197 | return 'Removed from leech logs successfully'
198 | def user_check(self, uid: int):
199 | self.cur.execute("SELECT * FROM users WHERE uid = {}".format(uid))
200 | res = self.cur.fetchone()
201 | return res
202 |
203 | def rss_add(self, name, link, last, title, filters):
204 | if self.err:
205 | return
206 | q = (name, link, last, title, filters)
207 | self.cur.execute("INSERT INTO rss (name, link, last, title, filters) VALUES (%s, %s, %s, %s, %s)", q)
208 | self.conn.commit()
209 | self.disconnect()
210 |
211 | def rss_update(self, name, last, title):
212 | if self.err:
213 | return
214 | q = (last, title, name)
215 | self.cur.execute("UPDATE rss SET last = %s, title = %s WHERE name = %s", q)
216 | self.conn.commit()
217 | self.disconnect()
218 |
219 | def rss_delete(self, name):
220 | if self.err:
221 | return
222 | self.cur.execute("DELETE FROM rss WHERE name = %s", (name,))
223 | self.conn.commit()
224 | self.disconnect()
225 |
226 | def add_incomplete_task(self, cid: int, link: str, tag: str):
227 | if self.err:
228 | return
229 | q = (cid, link, tag)
230 | self.cur.execute("INSERT INTO {} (cid, link, tag) VALUES (%s, %s, %s)".format(botname), q)
231 | self.conn.commit()
232 | self.disconnect()
233 |
234 | def rm_complete_task(self, link: str):
235 | if self.err:
236 | return
237 | self.cur.execute("DELETE FROM {} WHERE link = %s".format(botname), (link,))
238 | self.conn.commit()
239 | self.disconnect()
240 |
241 | def get_incomplete_tasks(self):
242 | if self.err:
243 | return False
244 | self.cur.execute("SELECT * from {}".format(botname))
245 | rows = self.cur.fetchall() # return a list ==> (cid, link, tag)
246 | notifier_dict = {}
247 | if rows:
248 | for row in rows:
249 | if row[0] in list(notifier_dict.keys()):
250 | if row[2] in list(notifier_dict[row[0]].keys()):
251 | notifier_dict[row[0]][row[2]].append(row[1])
252 | else:
253 | notifier_dict[row[0]][row[2]] = [row[1]]
254 | else:
255 | usr_dict = {}
256 | usr_dict[row[2]] = [row[1]]
257 | notifier_dict[row[0]] = usr_dict
258 | self.cur.execute("TRUNCATE TABLE {}".format(botname))
259 | self.conn.commit()
260 | self.disconnect()
261 | return notifier_dict # return a dict ==> {cid: {tag: [mid, mid, ...]}}
262 |
263 |
264 | def trunc_table(self, name):
265 | if self.err:
266 | return
267 | self.cur.execute("TRUNCATE TABLE {}".format(name))
268 | self.conn.commit()
269 | self.disconnect()
270 |
271 | if DB_URI is not None:
272 | DbManger().db_init()
273 |
274 |
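Note: `get_incomplete_tasks` above folds flat (cid, link, tag) rows into a nested mapping of chat id -> tag -> list of links, which the restart notifier then walks. The same grouping written against plain tuples instead of a database cursor is sketched below; the sample rows are made up.

def group_incomplete_tasks(rows):
    # rows: iterable of (cid, link, tag) tuples, as returned by the SELECT.
    notifier = {}
    for cid, link, tag in rows:
        notifier.setdefault(cid, {}).setdefault(tag, []).append(link)
    return notifier   # {cid: {tag: [link, ...]}}

# Example with made-up rows:
rows = [
    (111, 'https://example.com/a', '@alice'),
    (111, 'https://example.com/b', '@alice'),
    (222, 'magnet:?xt=urn:btih:abc', '@bob'),
]
print(group_incomplete_tasks(rows))
# {111: {'@alice': ['https://example.com/a', 'https://example.com/b']},
#  222: {'@bob': ['magnet:?xt=urn:btih:abc']}}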
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py:
--------------------------------------------------------------------------------
1 | from logging import getLogger, ERROR
2 | from os import remove as osremove, walk, path as ospath, rename as osrename
3 | from time import time, sleep
4 | from pyrogram.errors import FloodWait, RPCError
5 | from PIL import Image
6 | from threading import RLock
7 |
8 | from bot import AS_DOCUMENT, AS_DOC_USERS, AS_MEDIA_USERS, CUSTOM_FILENAME, EXTENSION_FILTER, app, app_session, BOT_PM, LEECH_LOG
9 | from bot.helper.ext_utils.fs_utils import take_ss, get_media_info, get_media_streams, clean_unwanted
10 | from bot.helper.ext_utils.bot_utils import get_readable_file_size
11 |
12 | LOGGER = getLogger(__name__)
13 | getLogger("pyrogram").setLevel(ERROR)
14 |
15 | IMAGE_SUFFIXES = ("JPG", "JPX", "PNG", "CR2", "TIF", "BMP", "JXR", "PSD", "ICO", "HEIC", "JPEG")
16 |
17 |
18 | class TgUploader:
19 |
20 | def __init__(self, name=None, path=None, size=0, listener=None):
21 | self.name = name
22 | self.uploaded_bytes = 0
23 | self._last_uploaded = 0
24 | self.__listener = listener
25 | self.__path = path
26 | self.__start_time = time()
27 | self.__total_files = 0
28 | self.__is_cancelled = False
29 | self.__as_doc = AS_DOCUMENT
30 | self.__thumb = f"Thumbnails/{listener.message.from_user.id}.jpg"
31 | self.__msgs_dict = {}
32 | self.__corrupted = 0
33 | self.__resource_lock = RLock()
34 | self.__is_corrupted = False
35 | self.__sent_msg = app.get_messages(self.__listener.message.chat.id, self.__listener.uid)
36 | self.__size = size
37 | self.__user_settings()
38 | self.isPrivate = listener.message.chat.type in ['private', 'group']
39 | self.__app = app
40 | self.__user_id = listener.message.from_user.id
41 |
42 | def upload(self, o_files):
43 | for dirpath, subdir, files in sorted(walk(self.__path)):
44 | for file_ in sorted(files):
45 | if file_ in o_files:
46 | continue
47 | if not file_.lower().endswith(tuple(EXTENSION_FILTER)):
48 | up_path = ospath.join(dirpath, file_)
49 | self.__total_files += 1
50 | try:
51 | if ospath.getsize(up_path) == 0:
52 | LOGGER.error(f"{up_path} size is zero; Telegram doesn't upload zero-size files")
53 | self.__corrupted += 1
54 | continue
55 | except Exception as e:
56 | if self.__is_cancelled:
57 | return
58 | else:
59 | LOGGER.error(e)
60 | continue
61 | self.__upload_file(up_path, file_, dirpath)
62 | if self.__is_cancelled:
63 | return
64 | if not self.__listener.isPrivate and not self.__is_corrupted:
65 | self.__msgs_dict[self.__sent_msg.link] = file_
66 | self._last_uploaded = 0
67 | sleep(1)
68 | if self.__listener.seed and not self.__listener.newDir:
69 | clean_unwanted(self.__path)
70 | if self.__total_files <= self.__corrupted:
71 | return self.__listener.onUploadError('Files Corrupted. Check logs')
72 | LOGGER.info(f"Leech Completed: {self.name}")
73 | size = get_readable_file_size(self.__size)
74 | self.__listener.onUploadComplete(None, size, self.__msgs_dict, self.__total_files, self.__corrupted, self.name)
75 |
76 | def __upload_file(self, up_path, file_, dirpath):
77 | fsize = ospath.getsize(up_path)
78 | if fsize > 2097152000:
79 | client = app_session
80 | else:
81 | client = app
82 | if LEECH_LOG:
83 | # Send leeched files to the configured leech log chat.
84 | # LEECH_LOG is a set; pick its single configured chat id.
85 | leechchat = next(iter(LEECH_LOG))
86 | else:
87 | leechchat = self.__listener.message.chat.id
88 | if CUSTOM_FILENAME is not None:
89 | cap_mono = f"{CUSTOM_FILENAME} {file_}"
90 | file_ = f"{CUSTOM_FILENAME} {file_}"
91 | new_path = ospath.join(dirpath, file_)
92 | osrename(up_path, new_path)
93 | up_path = new_path
94 | else:
95 | cap_mono = f"{file_}"
96 | notMedia = False
97 | thumb = self.__thumb
98 | self.__is_corrupted = False
99 | try:
100 | is_video, is_audio = get_media_streams(up_path)
101 | if not self.__as_doc:
102 | if is_video:
103 | duration = get_media_info(up_path)[0]
104 | if thumb is None:
105 | thumb = take_ss(up_path, duration)
106 | if self.__is_cancelled:
107 | if self.__thumb is None and thumb is not None and ospath.lexists(thumb):
108 | osremove(thumb)
109 | return
110 | if thumb is not None:
111 | with Image.open(thumb) as img:
112 | width, height = img.size
113 | else:
114 | width = 480
115 | height = 320
116 | if not file_.upper().endswith(("MKV", "MP4")):
117 | file_ = f"{ospath.splitext(file_)[0]}.mp4"
118 | new_path = ospath.join(dirpath, file_)
119 | osrename(up_path, new_path)
120 | up_path = new_path
121 | self.__sent_msg = client.send_video(chat_id=leechchat, video=up_path,
122 | caption=cap_mono,
123 | duration=duration,
124 | width=width,
125 | height=height,
126 | thumb=thumb,
127 | supports_streaming=True,
128 | disable_notification=True,
129 | progress=self.__upload_progress)
130 | if not self.isPrivate and BOT_PM:
131 | try:
132 | app.copy_message(chat_id=self.__user_id, from_chat_id=self.__sent_msg.chat.id, message_id=self.__sent_msg.id)
133 | except Exception as err:
134 | LOGGER.error(f"Failed To Send Video in PM:\n{err}")
135 | elif is_audio:
136 | duration, artist, title = get_media_info(up_path)
137 | self.__sent_msg = client.send_audio(chat_id=leechchat, audio=up_path,
138 | caption=cap_mono,
139 | duration=duration,
140 | performer=artist,
141 | title=title,
142 | thumb=thumb,
143 | disable_notification=True,
144 | progress=self.__upload_progress)
145 | if not self.isPrivate and BOT_PM:
146 | try:
147 | app.copy_message(chat_id=self.__user_id, from_chat_id=self.__sent_msg.chat.id, message_id=self.__sent_msg.id)
148 | except Exception as err:
149 | LOGGER.error(f"Failed To Send Audio in PM:\n{err}")
150 | elif file_.upper().endswith(IMAGE_SUFFIXES):
151 | self.__sent_msg = self.__app.send_photo(chat_id=leechchat, photo=up_path,
152 | caption=cap_mono,
153 | disable_notification=True,
154 | progress=self.__upload_progress)
155 | if not self.isPrivate and BOT_PM:
156 | try:
157 | app.copy_message(chat_id=self.__user_id, from_chat_id=self.__sent_msg.chat.id, message_id=self.__sent_msg.id)
158 | except Exception as err:
159 | LOGGER.error(f"Failed To Send Image in PM:\n{err}")
160 | else:
161 | notMedia = True
162 | if self.__as_doc or notMedia:
163 | if is_video and thumb is None:
164 | thumb = take_ss(up_path, None)
165 | if self.__is_cancelled:
166 | if self.__thumb is None and thumb is not None and ospath.lexists(thumb):
167 | osremove(thumb)
168 | return
169 | self.__sent_msg = client.send_document(chat_id=leechchat, document=up_path,
170 | thumb=thumb,
171 | caption=cap_mono,
172 | disable_notification=True,
173 | progress=self.__upload_progress)
174 | if not self.isPrivate and BOT_PM:
175 | try:
176 | app.copy_message(chat_id=self.__user_id, from_chat_id=self.__sent_msg.chat.id, message_id=self.__sent_msg.id)
177 | except Exception as err:
178 | LOGGER.error(f"Failed To Send Document in PM:\n{err}")
179 | except FloodWait as f:
180 | LOGGER.warning(str(f))
181 | sleep(f.value)
182 | except RPCError as e:
183 | LOGGER.error(f"RPCError: Make sure the leech log chat id is correct and the bot has admin privileges in the leech log channel/group. {e} Path: {up_path}")
184 | self.__corrupted += 1
185 | self.__is_corrupted = True
186 | except Exception as err:
187 | LOGGER.error(f"{err} Path: {up_path}")
188 | self.__corrupted += 1
189 | self.__is_corrupted = True
190 | if self.__thumb is None and thumb is not None and ospath.lexists(thumb):
191 | osremove(thumb)
192 | if not self.__is_cancelled and \
193 | (not self.__listener.seed or self.__listener.newDir or dirpath.endswith("splited_files_mltb")):
194 | try:
195 | osremove(up_path)
196 | except:
197 | pass
198 |
199 | def __upload_progress(self, current, total):
200 | if self.__is_cancelled:
201 | app.stop_transmission()
202 | return
203 | with self.__resource_lock:
204 | chunk_size = current - self._last_uploaded
205 | self._last_uploaded = current
206 | self.uploaded_bytes += chunk_size
207 |
208 | def __user_settings(self):
209 | if self.__listener.message.from_user.id in AS_DOC_USERS:
210 | self.__as_doc = True
211 | elif self.__listener.message.from_user.id in AS_MEDIA_USERS:
212 | self.__as_doc = False
213 | if not ospath.lexists(self.__thumb):
214 | self.__thumb = None
215 |
216 | @property
217 | def speed(self):
218 | with self.__resource_lock:
219 | try:
220 | return self.uploaded_bytes / (time() - self.__start_time)
221 | except:
222 | return 0
223 |
224 | def cancel_download(self):
225 | self.__is_cancelled = True
226 | LOGGER.info(f"Cancelling Upload: {self.name}")
227 | self.__listener.onUploadError('Your upload has been stopped!')
228 |
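Note: Pyrogram's progress callback reports cumulative bytes for the file being uploaded, so `__upload_progress` above adds only the delta since the last call to a running total that spans all files, and `speed` divides that total by elapsed time. A stripped-down sketch of the same bookkeeping, with no Telegram client involved, is below; the class name is illustrative.

from time import time

class ProgressTracker:
    def __init__(self):
        self.uploaded_bytes = 0      # total across all files
        self._last_uploaded = 0      # last cumulative value for the current file
        self._start_time = time()

    def on_progress(self, current, total):
        # The callback passes cumulative bytes for the current file,
        # so only the difference since the last call is new data.
        chunk = current - self._last_uploaded
        self._last_uploaded = current
        self.uploaded_bytes += chunk

    def new_file(self):
        # Reset the per-file counter before the next upload starts.
        self._last_uploaded = 0

    @property
    def speed(self):
        elapsed = time() - self._start_time
        return self.uploaded_bytes / elapsed if elapsed else 0

# Example: two progress callbacks for one file, then a new file.
t = ProgressTracker()
t.on_progress(1024, 4096); t.on_progress(2048, 4096)
t.new_file()
print(t.uploaded_bytes)   # 2048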
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/qbit_downloader.py:
--------------------------------------------------------------------------------
1 | from hashlib import sha1
2 | from base64 import b16encode, b32decode
3 | from bencoding import bencode, bdecode
4 | from time import sleep, time
5 | from re import search as re_search
6 |
7 | from bot import download_dict, download_dict_lock, BASE_URL, get_client, STOP_DUPLICATE, TORRENT_TIMEOUT, LOGGER, TORRENT_DIRECT_LIMIT, ZIP_UNZIP_LIMIT, STORAGE_THRESHOLD
8 | from bot.helper.mirror_utils.status_utils.qbit_download_status import QbDownloadStatus
9 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
10 | from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, deleteMessage, sendStatusMessage, update_all_messages
11 | from bot.helper.ext_utils.bot_utils import get_readable_time, setInterval, bt_selection_buttons, get_readable_file_size
12 | from bot.helper.ext_utils.fs_utils import clean_unwanted, get_base_name, check_storage_threshold
13 |
14 |
15 | class QbDownloader:
16 | POLLING_INTERVAL = 3
17 |
18 | def __init__(self, listener):
19 | self.is_seeding = False
20 | self.ext_hash = ''
21 | self.client = get_client()
22 | self.__listener = listener
23 | self.__path = ''
24 | self.__name = ''
25 | self.__stalled_time = time()
26 | self.__uploaded = False
27 | self.__rechecked = False
28 | self.__stopDup_check = False
29 | self.__select = False
30 | self.__periodic = None
31 | self.__sizeChecked = False
32 |
33 | def add_qb_torrent(self, link, path, select, ratio, seed_time):
34 | self.__path = path
35 | self.__select = select
36 | try:
37 | if link.startswith('magnet:'):
38 | self.ext_hash = _get_hash_magnet(link)
39 | else:
40 | self.ext_hash = _get_hash_file(link)
41 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash)
42 | if len(tor_info) > 0:
43 |                 sendMessage("This torrent is already added!", self.__listener.bot, self.__listener.message)
44 | return self.client.auth_log_out()
45 | if link.startswith('magnet:'):
46 | op = self.client.torrents_add(link, save_path=path, ratio_limit=ratio, seeding_time_limit=seed_time)
47 | else:
48 | op = self.client.torrents_add(torrent_files=[link], save_path=path, ratio_limit=ratio, seeding_time_limit=seed_time)
49 | sleep(0.3)
50 | if op.lower() == "ok.":
51 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash)
52 | if len(tor_info) == 0:
53 | while True:
54 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash)
55 | if len(tor_info) > 0:
56 | break
57 | elif time() - self.__stalled_time >= 30:
58 |                             msg = "Not a torrent. If something is wrong, please report it."
59 | self.client.torrents_delete(torrent_hashes=self.ext_hash, delete_files=True)
60 | sendMessage(msg, self.__listener.bot, self.__listener.message)
61 | return self.client.auth_log_out()
62 | else:
63 | sendMessage("This is an unsupported/invalid link.", self.__listener.bot, self.__listener.message)
64 | return self.client.auth_log_out()
65 | tor_info = tor_info[0]
66 | self.__name = tor_info.name
67 | self.ext_hash = tor_info.hash
68 | with download_dict_lock:
69 | download_dict[self.__listener.uid] = QbDownloadStatus(self.__listener, self)
70 | self.__listener.onDownloadStart()
71 | LOGGER.info(f"QbitDownload started: {self.__name} - Hash: {self.ext_hash}")
72 | self.__periodic = setInterval(self.POLLING_INTERVAL, self.__qb_listener)
73 | if BASE_URL is not None and select:
74 | if link.startswith('magnet:'):
75 |                     metamsg = "Downloading metadata. Wait, then you can select files. Use a torrent file to avoid this wait."
76 | meta = sendMessage(metamsg, self.__listener.bot, self.__listener.message)
77 | while True:
78 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash)
79 | if len(tor_info) == 0:
80 | deleteMessage(self.__listener.bot, meta)
81 | return
82 | try:
83 | tor_info = tor_info[0]
84 | if tor_info.state not in ["metaDL", "checkingResumeData", "pausedDL"]:
85 | deleteMessage(self.__listener.bot, meta)
86 | break
87 | except:
88 | return deleteMessage(self.__listener.bot, meta)
89 | self.client.torrents_pause(torrent_hashes=self.ext_hash)
90 | SBUTTONS = bt_selection_buttons(self.ext_hash)
91 |                 msg = "Your download is paused. Choose files, then press the Done Selecting button to start downloading."
92 | sendMarkup(msg, self.__listener.bot, self.__listener.message, SBUTTONS)
93 | else:
94 | sendStatusMessage(self.__listener.message, self.__listener.bot)
95 | except Exception as e:
96 | sendMessage(str(e), self.__listener.bot, self.__listener.message)
97 | self.client.auth_log_out()
98 |
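    # Called every POLLING_INTERVAL seconds via setInterval (started in add_qb_torrent).
    # It maps qBittorrent states to bot actions: metaDL/downloading reset the stall
    # timer and run the one-time duplicate and size checks, stalledDL either forces a
    # recheck near completion or times out, and the seeding/upload states hand the
    # finished download to the listener and then keep seeding or remove the torrent.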
99 | def __qb_listener(self):
100 | try:
101 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash)
102 | if len(tor_info) == 0:
103 | return
104 | tor_info = tor_info[0]
105 | if tor_info.state == "metaDL":
106 | self.__stalled_time = time()
107 | if TORRENT_TIMEOUT is not None and time() - tor_info.added_on >= TORRENT_TIMEOUT:
108 | self.__onDownloadError("Dead Torrent!")
109 | elif tor_info.state == "downloading":
110 | self.__stalled_time = time()
111 | if not self.__stopDup_check and not self.__select and STOP_DUPLICATE and not self.__listener.isLeech:
112 |                     LOGGER.info('Checking if File/Folder already exists in Drive')
113 | qbname = tor_info.content_path.rsplit('/', 1)[-1].rsplit('.!qB', 1)[0]
114 | if self.__listener.isZip:
115 | qbname = f"{qbname}.zip"
116 | elif self.__listener.extract:
117 | try:
118 | qbname = get_base_name(qbname)
119 | except:
120 | qbname = None
121 | if qbname is not None:
122 | qbmsg, button = GoogleDriveHelper().drive_list(qbname, True)
123 | if qbmsg:
124 | self.__onDownloadError("File/Folder is already available in Drive.")
125 |                             sendMarkup("Here are the search results:", self.__listener.bot, self.__listener.message, button)
126 | self.__stopDup_check = True
127 | if not self.__sizeChecked:
128 | size = tor_info.size
129 | arch = any([self.__listener.isZip, self.__listener.extract])
130 | if STORAGE_THRESHOLD is not None:
131 | acpt = check_storage_threshold(size, arch)
132 | if not acpt:
133 | msg = f'You must leave {STORAGE_THRESHOLD}GB free storage.'
134 | msg += f'\nYour File/Folder size is {get_readable_file_size(size)}'
135 | self.__onDownloadError(msg)
136 | return
137 | limit = None
138 | if ZIP_UNZIP_LIMIT is not None and arch:
139 | mssg = f'Zip/Unzip limit is {ZIP_UNZIP_LIMIT}GB'
140 | limit = ZIP_UNZIP_LIMIT
141 | elif TORRENT_DIRECT_LIMIT is not None:
142 | mssg = f'Torrent limit is {TORRENT_DIRECT_LIMIT}GB'
143 | limit = TORRENT_DIRECT_LIMIT
144 | if limit is not None:
145 | LOGGER.info('Checking File/Folder Size...')
146 | if size > limit * 1024**3:
147 | fmsg = f"{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}"
148 | self.__onDownloadError(fmsg)
149 | self.__sizeChecked = True
150 | elif tor_info.state == "stalledDL":
151 |                 if not self.__rechecked and 0.9999 < tor_info.progress < 1:
152 | msg = f"Force recheck - Name: {self.__name} Hash: "
153 | msg += f"{self.ext_hash} Downloaded Bytes: {tor_info.downloaded} "
154 | msg += f"Size: {tor_info.size} Total Size: {tor_info.total_size}"
155 | LOGGER.info(msg)
156 | self.client.torrents_recheck(torrent_hashes=self.ext_hash)
157 | self.__rechecked = True
158 | elif TORRENT_TIMEOUT is not None and time() - self.__stalled_time >= TORRENT_TIMEOUT:
159 | self.__onDownloadError("Dead Torrent!")
160 | elif tor_info.state == "missingFiles":
161 | self.client.torrents_recheck(torrent_hashes=self.ext_hash)
162 | elif tor_info.state == "error":
163 |                 self.__onDownloadError("Not enough space for this torrent on the device")
164 | elif (tor_info.state.lower().endswith("up") or tor_info.state == "uploading") and not self.__uploaded:
165 | self.__uploaded = True
166 | if not self.__listener.seed:
167 | self.client.torrents_pause(torrent_hashes=self.ext_hash)
168 | if self.__select:
169 | clean_unwanted(self.__path)
170 | self.__listener.onDownloadComplete()
171 | if self.__listener.seed:
172 | with download_dict_lock:
173 | if self.__listener.uid not in download_dict:
174 | self.__remove_torrent()
175 | return
176 | download_dict[self.__listener.uid] = QbDownloadStatus(self.__listener, self)
177 | self.is_seeding = True
178 | update_all_messages()
179 | LOGGER.info(f"Seeding started: {self.__name} - Hash: {self.ext_hash}")
180 | else:
181 | self.__remove_torrent()
182 | elif tor_info.state == 'pausedUP' and self.__listener.seed:
183 | self.__listener.onUploadError(f"Seeding stopped with Ratio: {round(tor_info.ratio, 3)} and Time: {get_readable_time(tor_info.seeding_time)}")
184 | self.__remove_torrent()
185 | elif tor_info.state == 'pausedDL' and tor_info.completion_on != 0:
186 |                 # Recheck the torrent in case one of the seed limits was reached;
187 |                 # sometimes it gets stuck in pausedDL by maxRatioAction when it should be pausedUP.
188 |                 LOGGER.info("Rechecking completed torrent manually (pausedDL)")
189 | self.client.torrents_recheck(torrent_hashes=self.ext_hash)
190 | except Exception as e:
191 | LOGGER.error(str(e))
192 |
193 | def __onDownloadError(self, err):
194 | LOGGER.info(f"Cancelling Download: {self.__name}")
195 | self.client.torrents_pause(torrent_hashes=self.ext_hash)
196 | sleep(0.3)
197 | self.__listener.onDownloadError(err)
198 | self.__remove_torrent()
199 |
200 | def __remove_torrent(self):
201 | self.client.torrents_delete(torrent_hashes=self.ext_hash, delete_files=True)
202 | self.client.auth_log_out()
203 | self.__periodic.cancel()
204 |
205 | def cancel_download(self):
206 | if self.is_seeding:
207 | LOGGER.info(f"Cancelling Seed: {self.__name}")
208 | self.client.torrents_pause(torrent_hashes=self.ext_hash)
209 | else:
210 | self.__onDownloadError('Download stopped by user!')
211 |
212 | def _get_hash_magnet(mgt: str):
213 | hash_ = re_search(r'(?<=xt=urn:btih:)[a-zA-Z0-9]+', mgt).group(0)
214 | if len(hash_) == 32:
215 | hash_ = b16encode(b32decode(str(hash_))).decode()
216 | return str(hash_)
217 |
218 | def _get_hash_file(path):
219 | with open(path, "rb") as f:
220 | decodedDict = bdecode(f.read())
221 | hash_ = sha1(bencode(decodedDict[b'info'])).hexdigest()
222 | return str(hash_)
223 |
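For reference, the two module-level helpers above can be exercised on their own; a short illustrative sketch (the magnet URI is a placeholder and the .torrent path is hypothetical, not files shipped with this repo):

    # A 40-character hex btih is returned as-is; a 32-character base32 hash is
    # converted to hex via b16encode(b32decode(...)).
    magnet = "magnet:?xt=urn:btih:c12fe1c06bba254a9dc9f519b335aa7c1367a88a"
    print(_get_hash_magnet(magnet))  # -> 'c12fe1c06bba254a9dc9f519b335aa7c1367a88a'

    # For a .torrent file, the infohash is the SHA-1 of the bencoded 'info' dict:
    # print(_get_hash_file("/path/to/some.torrent"))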
--------------------------------------------------------------------------------
/bot/modules/search.py:
--------------------------------------------------------------------------------
1 | from requests import get as rget
2 | from threading import Thread
3 | from html import escape
4 | from urllib.parse import quote
5 | from telegram.ext import CommandHandler, CallbackQueryHandler
6 |
7 | from bot import dispatcher, LOGGER, SEARCH_API_LINK, SEARCH_PLUGINS, get_client, SEARCH_LIMIT
8 | from bot.helper.telegram_helper.message_utils import editMessage, sendMessage, sendMarkup, deleteMessage, sendFile
9 | from bot.helper.telegram_helper.filters import CustomFilters
10 | from bot.helper.telegram_helper.bot_commands import BotCommands
11 | from bot.helper.ext_utils.bot_utils import get_readable_file_size
12 | from bot.helper.telegram_helper import button_build
13 | from bot.helper.ext_utils.html_helper import html_template
14 |
15 | if SEARCH_PLUGINS is not None:
16 | PLUGINS = []
17 | qbclient = get_client()
18 | qb_plugins = qbclient.search_plugins()
19 | if qb_plugins:
20 | for plugin in qb_plugins:
21 | qbclient.search_uninstall_plugin(names=plugin['name'])
22 | qbclient.search_install_plugin(SEARCH_PLUGINS)
23 | qbclient.auth_log_out()
24 |
25 | if SEARCH_API_LINK:
26 | try:
27 | res = rget(f'{SEARCH_API_LINK}/api/v1/sites').json()
28 | SITES = {str(site): str(site).capitalize() for site in res['supported_sites']}
29 | SITES['all'] = 'All'
30 | except Exception as e:
31 |         LOGGER.error(f"Can't fetch sites from SEARCH_API_LINK, make sure you are using the latest version of the API. {e}")
32 | SITES = None
33 | else:
34 | SITES = None
35 |
36 | def torser(update, context):
37 | user_id = update.message.from_user.id
38 | buttons = button_build.ButtonMaker()
39 | if SITES is None and SEARCH_PLUGINS is None:
40 | sendMessage("No API link or search PLUGINS added for this function", context.bot, update.message)
41 | elif len(context.args) == 0 and SITES is None:
42 |         sendMessage("Send a search key along with the command", context.bot, update.message)
43 | elif len(context.args) == 0:
44 | buttons.sbutton('Trending', f"torser {user_id} apitrend")
45 | buttons.sbutton('Recent', f"torser {user_id} apirecent")
46 | buttons.sbutton("Cancel", f"torser {user_id} cancel")
47 | button = buttons.build_menu(2)
48 |         sendMarkup("Send a search key along with the command", context.bot, update.message, button)
49 | elif SITES is not None and SEARCH_PLUGINS is not None:
50 | buttons.sbutton('Api', f"torser {user_id} apisearch")
51 | buttons.sbutton('Plugins', f"torser {user_id} plugin")
52 | buttons.sbutton("Cancel", f"torser {user_id} cancel")
53 | button = buttons.build_menu(2)
54 | sendMarkup('Choose tool to search:', context.bot, update.message, button)
55 | elif SITES is not None:
56 | button = _api_buttons(user_id, "apisearch")
57 | sendMarkup('Choose site to search:', context.bot, update.message, button)
58 | else:
59 | button = _plugin_buttons(user_id)
60 | sendMarkup('Choose site to search:', context.bot, update.message, button)
61 |
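# Callback data layout handled below (set by torser, _api_buttons and _plugin_buttons):
# "torser <user_id> <action-or-site> [<method>]", where the third field is one of
# 'apisearch'/'apitrend'/'apirecent', 'plugin', 'cancel', or a site name, and the
# optional fourth field tells _search whether to use the API or a search plugin.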
62 | def torserbut(update, context):
63 | query = update.callback_query
64 | user_id = query.from_user.id
65 | message = query.message
66 | key = message.reply_to_message.text.split(maxsplit=1)
67 | key = key[1].strip() if len(key) > 1 else None
68 | data = query.data
69 | data = data.split()
70 | if user_id != int(data[1]):
71 | query.answer(text="Not Yours!", show_alert=True)
72 | elif data[2].startswith('api'):
73 | query.answer()
74 | button = _api_buttons(user_id, data[2])
75 | editMessage('Choose site:', message, button)
76 | elif data[2] == 'plugin':
77 | query.answer()
78 | button = _plugin_buttons(user_id)
79 | editMessage('Choose site:', message, button)
80 | elif data[2] != "cancel":
81 | query.answer()
82 | site = data[2]
83 | method = data[3]
84 | if method.startswith('api'):
85 | if key is None:
86 | if method == 'apirecent':
87 | endpoint = 'Recent'
88 | elif method == 'apitrend':
89 | endpoint = 'Trending'
90 | editMessage(f"Listing {endpoint} Items...\nTorrent Site:- {SITES.get(site)}", message)
91 | else:
92 | editMessage(f"Searching for {key}\nTorrent Site:- {SITES.get(site)}", message)
93 | else:
94 | editMessage(f"Searching for {key}\nTorrent Site:- {site.capitalize()}", message)
95 | Thread(target=_search, args=(context.bot, key, site, message, method)).start()
96 | else:
97 | query.answer()
98 | editMessage("Search has been canceled!", message)
99 |
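# Runs in a worker thread (started from torserbut). For 'api*' methods it queries the
# SEARCH_API_LINK endpoints (search/trending/recent, either 'all' sites or one site);
# otherwise it drives qBittorrent's search plugins: search_start() launches the job,
# search_status() is polled until it leaves the 'Running' state, search_results()
# collects the hits, and search_delete() frees the job after the HTML report is sent.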
100 | def _search(bot, key, site, message, method):
101 | if method.startswith('api'):
102 | if method == 'apisearch':
103 | LOGGER.info(f"API Searching: {key} from {site}")
104 | if site == 'all':
105 |                 api = f"{SEARCH_API_LINK}/api/v1/all/search?query={quote(key)}&limit={SEARCH_LIMIT}"
106 |             else:
107 |                 api = f"{SEARCH_API_LINK}/api/v1/search?site={site}&query={quote(key)}&limit={SEARCH_LIMIT}"
108 | elif method == 'apitrend':
109 | LOGGER.info(f"API Trending from {site}")
110 | if site == 'all':
111 | api = f"{SEARCH_API_LINK}/api/v1/all/trending?limit={SEARCH_LIMIT}"
112 | else:
113 | api = f"{SEARCH_API_LINK}/api/v1/trending?site={site}&limit={SEARCH_LIMIT}"
114 | elif method == 'apirecent':
115 | LOGGER.info(f"API Recent from {site}")
116 | if site == 'all':
117 | api = f"{SEARCH_API_LINK}/api/v1/all/recent?limit={SEARCH_LIMIT}"
118 | else:
119 | api = f"{SEARCH_API_LINK}/api/v1/recent?site={site}&limit={SEARCH_LIMIT}"
120 | try:
121 | resp = rget(api)
122 | search_results = resp.json()
123 | if "error" in search_results.keys():
124 | return editMessage(f"No result found for {key}\nTorrent Site:- {SITES.get(site)}", message)
125 | cap = f"Found {search_results['total']}"
126 | if method == 'apitrend':
127 | cap += f" trending results\nTorrent Site:- {SITES.get(site)}"
128 | elif method == 'apirecent':
129 | cap += f" recent results\nTorrent Site:- {SITES.get(site)}"
130 | else:
131 | cap += f" results for {key}\nTorrent Site:- {SITES.get(site)}"
132 | search_results = search_results['data']
133 | except Exception as e:
134 | return editMessage(str(e), message)
135 | else:
136 | LOGGER.info(f"PLUGINS Searching: {key} from {site}")
137 | client = get_client()
138 | search = client.search_start(pattern=key, plugins=site, category='all')
139 | search_id = search.id
140 | while True:
141 | result_status = client.search_status(search_id=search_id)
142 | status = result_status[0].status
143 | if status != 'Running':
144 | break
145 | dict_search_results = client.search_results(search_id=search_id)
146 | search_results = dict_search_results.results
147 | total_results = dict_search_results.total
148 | if total_results == 0:
149 | return editMessage(f"No result found for {key}\nTorrent Site:- {site.capitalize()}", message)
150 | cap = f"Found {total_results}"
151 | cap += f" results for {key}\nTorrent Site:- {site.capitalize()}"
152 | hmsg = _getResult(search_results, key, method)
153 | name = f"{method}_{key}_{site}_{message.message_id}.html"
154 | with open(name, "w", encoding='utf-8') as f:
155 | f.write(html_template.replace('{msg}', hmsg).replace('{title}', f'{method}_{key}_{site}'))
156 | deleteMessage(bot, message)
157 | sendFile(bot, message.reply_to_message, name, cap)
158 | if not method.startswith('api'):
159 | client.search_delete(search_id=search_id)
160 |
161 | def _getResult(search_results, key, method):
162 | if method == 'apirecent':
163 | msg = '