├── .github
│   └── workflows
│       └── deploy.yml
├── .gitignore
├── .netrc
├── Dockerfile
├── LICENSE
├── README.md
├── add_to_team_drive.py
├── alive.py
├── aria.bat
├── aria.sh
├── bot
│   ├── __init__.py
│   ├── __main__.py
│   ├── helper
│   │   ├── __init__.py
│   │   ├── ext_utils
│   │   │   ├── __init__.py
│   │   │   ├── bot_utils.py
│   │   │   ├── custom_filters.py
│   │   │   ├── db_handler.py
│   │   │   ├── exceptions.py
│   │   │   ├── fs_utils.py
│   │   │   └── shortenurl.py
│   │   ├── mirror_utils
│   │   │   ├── __init__.py
│   │   │   ├── download_utils
│   │   │   │   ├── __init__.py
│   │   │   │   ├── aria2_download.py
│   │   │   │   ├── direct_link_generator.py
│   │   │   │   ├── direct_link_generator_license.md
│   │   │   │   ├── download_helper.py
│   │   │   │   ├── mega_downloader.py
│   │   │   │   ├── qbit_downloader.py
│   │   │   │   ├── telegram_downloader.py
│   │   │   │   └── youtube_dl_download_helper.py
│   │   │   ├── status_utils
│   │   │   │   ├── __init__.py
│   │   │   │   ├── aria_download_status.py
│   │   │   │   ├── clone_status.py
│   │   │   │   ├── extract_status.py
│   │   │   │   ├── gdownload_status.py
│   │   │   │   ├── listeners.py
│   │   │   │   ├── mega_download_status.py
│   │   │   │   ├── qbit_download_status.py
│   │   │   │   ├── split_status.py
│   │   │   │   ├── status.py
│   │   │   │   ├── tar_status.py
│   │   │   │   ├── telegram_download_status.py
│   │   │   │   ├── tg_upload_status.py
│   │   │   │   ├── upload_status.py
│   │   │   │   └── youtube_dl_download_status.py
│   │   │   └── upload_utils
│   │   │       ├── __init__.py
│   │   │       ├── gdriveTools.py
│   │   │       └── pyrogramEngine.py
│   │   └── telegram_helper
│   │       ├── __init__.py
│   │       ├── bot_commands.py
│   │       ├── button_build.py
│   │       ├── filters.py
│   │       └── message_utils.py
│   └── modules
│       ├── __init__.py
│       ├── authorize.py
│       ├── cancel_mirror.py
│       ├── clone.py
│       ├── count.py
│       ├── delete.py
│       ├── eval.py
│       ├── leech_settings.py
│       ├── list.py
│       ├── mirror.py
│       ├── mirror_status.py
│       ├── shell.py
│       ├── speedtest.py
│       ├── torrent_search.py
│       └── watch.py
├── captain-definition
├── config_sample.env
├── docker-compose.yml
├── driveid.py
├── extract
├── gen_sa_accounts.py
├── generate_drive_token.py
├── heroku.yml
├── nodes.py
├── pextract
├── qBittorrent.conf
├── requirements-cli.txt
├── requirements.txt
├── start.sh
└── wserver.py
/.github/workflows/deploy.yml:
--------------------------------------------------------------------------------
1 | name: Manually Deploy to Heroku
2 |
3 | on: workflow_dispatch
4 |
5 | jobs:
6 | deploy:
7 | runs-on: ubuntu-latest
8 | steps:
9 | - uses: actions/checkout@v2
10 | - uses: akhileshns/heroku-deploy@v3.12.12
11 | with:
12 | heroku_api_key: ${{secrets.HEROKU_API_KEY}}
13 | heroku_app_name: ${{secrets.HEROKU_APP_NAME}}
14 | heroku_email: ${{secrets.HEROKU_EMAIL}}
15 | usedocker: true
16 | docker_heroku_process_type: web
17 | stack: "container"
18 | region: "eu"
19 | env:
20 | HD_CONFIG_FILE_URL: ${{secrets.CONFIG_FILE_URL}}
21 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | config.env
2 | *auth_token.txt
3 | *.pyc
4 | downloads/*
5 | download/*
6 | data*
7 | .vscode
8 | .idea
9 | *.json
10 | *.pickle
11 | authorized_chats.txt
12 | log.txt
13 | accounts/*
14 |
--------------------------------------------------------------------------------
/.netrc:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM breakdowns/mega-sdk-python:latest
2 |
3 | WORKDIR /usr/src/app
4 | RUN chmod 777 /usr/src/app
5 |
6 | COPY requirements.txt .
7 | RUN pip3 install --no-cache-dir -r requirements.txt
8 |
9 | COPY extract /usr/local/bin
10 | COPY pextract /usr/local/bin
11 | RUN chmod +x /usr/local/bin/extract && chmod +x /usr/local/bin/pextract
12 | COPY . .
13 | COPY .netrc /root/.netrc
14 | RUN chmod 600 /usr/src/app/.netrc
15 | RUN chmod +x aria.sh
16 |
17 | CMD ["bash","start.sh"]
18 |
--------------------------------------------------------------------------------
/add_to_team_drive.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | from google.oauth2.service_account import Credentials
3 | import googleapiclient.discovery, json, progress.bar, glob, sys, argparse, time
4 | from google_auth_oauthlib.flow import InstalledAppFlow
5 | from google.auth.transport.requests import Request
6 | import os, pickle
7 |
8 | stt = time.time()
9 |
10 | parse = argparse.ArgumentParser(
11 | description='A tool to add service accounts to a shared drive from a folder containing credential files.')
12 | parse.add_argument('--path', '-p', default='accounts',
13 | help='Specify an alternative path to the service accounts folder.')
14 | parse.add_argument('--credentials', '-c', default='./credentials.json',
15 | help='Specify the relative path for the credentials file.')
16 | parse.add_argument('--yes', '-y', default=False, action='store_true', help='Skips the sanity prompt.')
17 | parsereq = parse.add_argument_group('required arguments')
18 | parsereq.add_argument('--drive-id', '-d', help='The ID of the Shared Drive.', required=True)
19 |
20 | args = parse.parse_args()
21 | acc_dir = args.path
22 | did = args.drive_id
23 | credentials = glob.glob(args.credentials)
24 |
25 | try:
26 | open(credentials[0], 'r')
27 | print('>> Found credentials.')
28 | except IndexError:
29 | print('>> No credentials found.')
30 | sys.exit(0)
31 |
32 | if not args.yes:
33 | # input('Make sure the following client id is added to the shared drive as Manager:\n' + json.loads((open(
34 | # credentials[0],'r').read()))['installed']['client_id'])
35 | input('>> Make sure the **Google account** that has generated credentials.json\n is added into your Team Drive '
36 | '(shared drive) as Manager\n>> (Press any key to continue)')
37 |
38 | creds = None
39 | if os.path.exists('token_sa.pickle'):
40 | with open('token_sa.pickle', 'rb') as token:
41 | creds = pickle.load(token)
42 | # If there are no (valid) credentials available, let the user log in.
43 | if not creds or not creds.valid:
44 | if creds and creds.expired and creds.refresh_token:
45 | creds.refresh(Request())
46 | else:
47 | flow = InstalledAppFlow.from_client_secrets_file(credentials[0], scopes=[
48 | 'https://www.googleapis.com/auth/admin.directory.group',
49 | 'https://www.googleapis.com/auth/admin.directory.group.member'
50 | ])
51 | # creds = flow.run_local_server(port=0)
52 | creds = flow.run_console()
53 | # Save the credentials for the next run
54 | with open('token_sa.pickle', 'wb') as token:
55 | pickle.dump(creds, token)
56 |
57 | drive = googleapiclient.discovery.build("drive", "v3", credentials=creds)
58 | batch = drive.new_batch_http_request()
59 |
60 | aa = glob.glob('%s/*.json' % acc_dir)
61 | pbar = progress.bar.Bar("Readying accounts", max=len(aa))
62 | for i in aa:
63 | ce = json.loads(open(i, 'r').read())['client_email']
64 | batch.add(drive.permissions().create(fileId=did, supportsAllDrives=True, body={
65 | "role": "organizer",
66 | "type": "user",
67 | "emailAddress": ce
68 | }))
69 | pbar.next()
70 | pbar.finish()
71 | print('Adding...')
72 | batch.execute()
73 |
74 | print('Complete.')
75 | hours, rem = divmod((time.time() - stt), 3600)
76 | minutes, sec = divmod(rem, 60)
77 | print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), sec))
78 |
--------------------------------------------------------------------------------
/alive.py:
--------------------------------------------------------------------------------
1 | # Implement By - @anasty17 (https://github.com/SlamDevs/slam-mirrorbot/commit/0bfba523f095ab1dccad431d72561e0e002e7a59)
2 | # (c) https://github.com/SlamDevs/slam-mirrorbot
3 | # All rights reserved
4 |
5 | import time
6 | import requests
7 | import os
8 |
9 | BASE_URL = os.environ.get('BASE_URL_OF_BOT', None)
10 | try:
11 | if len(BASE_URL) == 0:
12 | BASE_URL = None
13 | except:
14 | BASE_URL = None
15 | PORT = os.environ.get('PORT', None)
16 | if PORT is not None and BASE_URL is not None:
17 | while True:
18 | time.sleep(600)
19 | status = requests.get(BASE_URL).status_code
20 |
--------------------------------------------------------------------------------
/aria.bat:
--------------------------------------------------------------------------------
1 | aria2c --enable-rpc --rpc-listen-all=false --rpc-listen-port 6800 --max-connection-per-server=10 --rpc-max-request-size=1024M --seed-time=0.01 --min-split-size=10M --follow-torrent=mem --split=10 --daemon=true --allow-overwrite=true
2 |
--------------------------------------------------------------------------------
/aria.sh:
--------------------------------------------------------------------------------
1 | tracker_list=$(curl -Ns https://raw.githubusercontent.com/XIU2/TrackersListCollection/master/all.txt https://ngosang.github.io/trackerslist/trackers_all_http.txt https://newtrackon.com/api/all https://raw.githubusercontent.com/DeSireFire/animeTrackerList/master/AT_all.txt https://raw.githubusercontent.com/hezhijie0327/Trackerslist/main/trackerslist_tracker.txt https://raw.githubusercontent.com/hezhijie0327/Trackerslist/main/trackerslist_exclude.txt | awk '$0' | tr '\n\n' ',')
2 | aria2c --enable-rpc --check-certificate=false \
3 | --max-connection-per-server=10 --rpc-max-request-size=1024M --bt-max-peers=0 \
4 | --bt-stop-timeout=0 --min-split-size=10M --follow-torrent=mem --split=10 \
5 | --daemon=true --allow-overwrite=true --max-overall-download-limit=0 --bt-tracker="[$tracker_list]"\
6 | --max-overall-upload-limit=1K --max-concurrent-downloads=15 --continue=true \
7 | --peer-id-prefix=-qB4380- --user-agent=qBittorrent/4.3.8 --peer-agent=qBittorrent/4.3.8 \
8 | --disk-cache=32M --bt-enable-lpd=true --seed-time=0 --max-file-not-found=0 \
9 | --max-tries=20 --auto-file-renaming=true --reuse-uri=true --http-accept-gzip=true \
10 | --content-disposition-default-utf8=true --netrc-path=/usr/src/app/.netrc
11 |
--------------------------------------------------------------------------------
/bot/__main__.py:
--------------------------------------------------------------------------------
1 | import shutil, psutil
2 | import signal
3 | import os
4 | import asyncio
5 |
6 | from pyrogram import idle
7 | from sys import executable
8 |
9 | from telegram import ParseMode
10 | from telegram.ext import CommandHandler
11 | from telegraph import Telegraph
12 | from wserver import start_server_async
13 | from bot import bot, app, dispatcher, updater, botStartTime, IGNORE_PENDING_REQUESTS, IS_VPS, PORT, alive, web, OWNER_ID, AUTHORIZED_CHATS, telegraph_token
14 | from bot.helper.ext_utils import fs_utils
15 | from bot.helper.telegram_helper.bot_commands import BotCommands
16 | from bot.helper.telegram_helper.message_utils import *
17 | from .helper.ext_utils.bot_utils import get_readable_file_size, get_readable_time
18 | from .helper.telegram_helper.filters import CustomFilters
19 | from bot.helper.telegram_helper import button_build
20 | from .modules import authorize, list, cancel_mirror, mirror_status, mirror, clone, watch, shell, eval, torrent_search, delete, speedtest, count, leech_settings
21 |
22 |
23 | def stats(update, context):
24 | currentTime = get_readable_time(time.time() - botStartTime)
25 | total, used, free = shutil.disk_usage('.')
26 | total = get_readable_file_size(total)
27 | used = get_readable_file_size(used)
28 | free = get_readable_file_size(free)
29 | sent = get_readable_file_size(psutil.net_io_counters().bytes_sent)
30 | recv = get_readable_file_size(psutil.net_io_counters().bytes_recv)
31 | cpuUsage = psutil.cpu_percent(interval=0.5)
32 | memory = psutil.virtual_memory().percent
33 | disk = psutil.disk_usage('/').percent
34 | stats = f'Bot Uptime: {currentTime}\n' \
35 | f'Total Disk Space: {total}\n' \
36 | f'Used: {used} ' \
37 | f'Free: {free}\n\n' \
38 | f'Upload: {sent}\n' \
39 | f'Download: {recv}\n\n' \
40 | f'CPU: {cpuUsage}% ' \
41 | f'RAM: {memory}% ' \
42 | f'DISK: {disk}%'
43 | sendMessage(stats, context.bot, update)
44 |
45 |
46 | def start(update, context):
47 | buttons = button_build.ButtonMaker()
48 | buttons.buildbutton("Repo", "https://github.com/SlamDevs/slam-mirrorbot")
49 | buttons.buildbutton("Channel", "https://t.me/SlamMirrorUpdates")
50 | reply_markup = InlineKeyboardMarkup(buttons.build_menu(2))
51 | if CustomFilters.authorized_user(update) or CustomFilters.authorized_chat(update):
52 | start_string = f'''
53 | This bot can mirror all your links to Google Drive!
54 | Type /{BotCommands.HelpCommand} to get a list of available commands
55 | '''
56 | sendMarkup(start_string, context.bot, update, reply_markup)
57 | else:
58 | sendMarkup(
59 | 'Oops! Not an Authorized user.\nPlease deploy your own slam-mirrorbot.',
60 | context.bot,
61 | update,
62 | reply_markup,
63 | )
64 |
65 |
66 | def restart(update, context):
67 | restart_message = sendMessage("Restarting, Please wait!", context.bot, update)
68 | # Save restart message object in order to reply to it after restarting
69 | with open(".restartmsg", "w") as f:
70 | f.truncate(0)
71 | f.write(f"{restart_message.chat.id}\n{restart_message.message_id}\n")
72 | fs_utils.clean_all()
73 | alive.terminate()
74 | web.terminate()
75 | os.execl(executable, executable, "-m", "bot")
76 |
77 |
78 | def ping(update, context):
79 | start_time = int(round(time.time() * 1000))
80 | reply = sendMessage("Starting Ping", context.bot, update)
81 | end_time = int(round(time.time() * 1000))
82 | editMessage(f'{end_time - start_time} ms', reply)
83 |
84 |
85 | def log(update, context):
86 | sendLogFile(context.bot, update)
87 |
88 |
89 | help_string_telegraph = f'''
90 | /{BotCommands.HelpCommand}: To get this message
91 |
92 | /{BotCommands.MirrorCommand} [download_url][magnet_link]: Start mirroring the link to Google Drive.
93 |
94 | /{BotCommands.TarMirrorCommand} [download_url][magnet_link]: Start mirroring and upload the archived (.tar) version of the download
95 |
96 | /{BotCommands.ZipMirrorCommand} [download_url][magnet_link]: Start mirroring and upload the archived (.zip) version of the download
97 |
98 | /{BotCommands.UnzipMirrorCommand} [download_url][magnet_link]: Starts mirroring and if downloaded file is any archive, extracts it to Google Drive
99 |
100 | /{BotCommands.QbMirrorCommand} [magnet_link]: Start Mirroring using qBittorrent, Use /{BotCommands.QbMirrorCommand} s to select files before downloading
101 |
102 | /{BotCommands.QbTarMirrorCommand} [magnet_link]: Start mirroring using qBittorrent and upload the archived (.tar) version of the download
103 |
104 | /{BotCommands.QbZipMirrorCommand} [magnet_link]: Start mirroring using qBittorrent and upload the archived (.zip) version of the download
105 |
106 | /{BotCommands.QbUnzipMirrorCommand} [magnet_link]: Starts mirroring using qBittorrent and if downloaded file is any archive, extracts it to Google Drive
107 |
108 | /{BotCommands.LeechCommand} [download_url][magnet_link]: Start leeching to Telegram, Use /{BotCommands.LeechCommand} s to select files before leeching
109 |
110 | /{BotCommands.TarLeechCommand} [download_url][magnet_link]: Start leeching to Telegram and upload it as (.tar)
111 |
112 | /{BotCommands.ZipLeechCommand} [download_url][magnet_link]: Start leeching to Telegram and upload it as (.zip)
113 |
114 | /{BotCommands.UnzipLeechCommand} [download_url][magnet_link]: Start leeching to Telegram and if downloaded file is any archive, extracts it to Telegram
115 |
116 | /{BotCommands.QbLeechCommand} [magnet_link]: Start leeching to Telegram using qBittorrent, Use /{BotCommands.QbLeechCommand} s to select files before leeching
117 |
118 | /{BotCommands.QbTarLeechCommand} [magnet_link]: Start leeching to Telegram using qBittorrent and upload it as (.tar)
119 |
120 | /{BotCommands.QbZipLeechCommand} [magnet_link]: Start leeching to Telegram using qBittorrent and upload it as (.zip)
121 |
122 | /{BotCommands.QbUnzipLeechCommand} [magnet_link]: Start leeching to Telegram using qBittorrent and if downloaded file is any archive, extracts it to Telegram
123 |
124 | /{BotCommands.CloneCommand} [drive_url]: Copy file/folder to Google Drive
125 |
126 | /{BotCommands.CountCommand} [drive_url]: Count file/folder of Google Drive Links
127 |
128 | /{BotCommands.DeleteCommand} [drive_url]: Delete file from Google Drive (Only Owner & Sudo)
129 |
130 | /{BotCommands.WatchCommand} [youtube-dl supported link]: Mirror through youtube-dl. Click /{BotCommands.WatchCommand} for more help
131 |
132 | /{BotCommands.TarWatchCommand} [youtube-dl supported link]: Mirror through youtube-dl and tar before uploading
133 |
134 | /{BotCommands.ZipWatchCommand} [youtube-dl supported link]: Mirror through youtube-dl and zip before uploading
135 |
136 | /{BotCommands.LeechWatchCommand} [youtube-dl supported link]: Leech through youtube-dl
137 |
138 | /{BotCommands.LeechTarWatchCommand} [youtube-dl supported link]: Leech through youtube-dl and tar before uploading
139 |
140 | /{BotCommands.LeechZipWatchCommand} [youtube-dl supported link]: Leech through youtube-dl and zip before uploading
141 |
142 | /{BotCommands.LeechSetCommand}: Leech Settings
143 |
144 | /{BotCommands.SetThumbCommand}: Reply photo to set it as Thumbnail
145 |
146 | /{BotCommands.CancelMirror}: Reply to the message by which the download was initiated and that download will be cancelled
147 |
148 | /{BotCommands.CancelAllCommand}: Cancel all running tasks
149 |
150 | /{BotCommands.ListCommand} [search term]: Searches the search term in the Google Drive, If found replies with the link
151 |
152 | /{BotCommands.StatusCommand}: Shows a status of all the downloads
153 |
154 | /{BotCommands.StatsCommand}: Show Stats of the machine the bot is hosted on
155 | '''
156 | help = Telegraph(access_token=telegraph_token).create_page(
157 | title='Slam Mirrorbot Help',
158 | author_name='Slam Mirrorbot',
159 | author_url='https://github.com/SlamDevs/slam-mirrorbot',
160 | html_content=help_string_telegraph,
161 | )["path"]
162 |
163 | help_string = f'''
164 | /{BotCommands.PingCommand}: Check how long it takes to Ping the Bot
165 |
166 | /{BotCommands.AuthorizeCommand}: Authorize a chat or a user to use the bot (Can only be invoked by Owner & Sudo of the bot)
167 |
168 | /{BotCommands.UnAuthorizeCommand}: Unauthorize a chat or a user to use the bot (Can only be invoked by Owner & Sudo of the bot)
169 |
170 | /{BotCommands.AuthorizedUsersCommand}: Show authorized users (Only Owner & Sudo)
171 |
172 | /{BotCommands.AddSudoCommand}: Add sudo user (Only Owner)
173 |
174 | /{BotCommands.RmSudoCommand}: Remove sudo users (Only Owner)
175 |
176 | /{BotCommands.RestartCommand}: Restart the bot
177 |
178 | /{BotCommands.LogCommand}: Get a log file of the bot. Handy for getting crash reports
179 |
180 | /{BotCommands.SpeedCommand}: Check Internet Speed of the Host
181 |
182 | /{BotCommands.ShellCommand}: Run commands in Shell (Only Owner)
183 |
184 | /{BotCommands.ExecHelpCommand}: Get help for Executor module (Only Owner)
185 |
186 | /{BotCommands.TsHelpCommand}: Get help for Torrent search module
187 | '''
188 |
189 | def bot_help(update, context):
190 | button = button_build.ButtonMaker()
191 | button.buildbutton("Other Commands", f"https://telegra.ph/{help}")
192 | reply_markup = InlineKeyboardMarkup(button.build_menu(1))
193 | sendMarkup(help_string, context.bot, update, reply_markup)
194 |
195 | '''
196 | botcmds = [
197 | (f'{BotCommands.HelpCommand}','Get Detailed Help'),
198 | (f'{BotCommands.MirrorCommand}', 'Start Mirroring'),
199 | (f'{BotCommands.TarMirrorCommand}','Start mirroring and upload as .tar'),
200 | (f'{BotCommands.ZipMirrorCommand}','Start mirroring and upload as .zip'),
201 | (f'{BotCommands.UnzipMirrorCommand}','Extract files'),
202 | (f'{BotCommands.QbMirrorCommand}','Start Mirroring using qBittorrent'),
203 | (f'{BotCommands.QbTarMirrorCommand}','Start mirroring and upload as .tar using qb'),
204 | (f'{BotCommands.QbZipMirrorCommand}','Start mirroring and upload as .zip using qb'),
205 | (f'{BotCommands.QbUnzipMirrorCommand}','Extract files using qBitorrent'),
206 | (f'{BotCommands.CloneCommand}','Copy file/folder to Drive'),
207 | (f'{BotCommands.CountCommand}','Count file/folder of Drive link'),
208 | (f'{BotCommands.DeleteCommand}','Delete file from Drive'),
209 | (f'{BotCommands.WatchCommand}','Mirror Youtube-dl support link'),
210 | (f'{BotCommands.TarWatchCommand}','Mirror Youtube playlist link as .tar'),
211 | (f'{BotCommands.ZipWatchCommand}','Mirror Youtube playlist link as .zip'),
212 | (f'{BotCommands.CancelMirror}','Cancel a task'),
213 | (f'{BotCommands.CancelAllCommand}','Cancel all tasks'),
214 | (f'{BotCommands.ListCommand}','Searches files in Drive'),
215 | (f'{BotCommands.StatusCommand}','Get Mirror Status message'),
216 | (f'{BotCommands.StatsCommand}','Bot Usage Stats'),
217 | (f'{BotCommands.PingCommand}','Ping the Bot'),
218 | (f'{BotCommands.RestartCommand}','Restart the bot [owner/sudo only]'),
219 | (f'{BotCommands.LogCommand}','Get the Bot Log [owner/sudo only]'),
220 | (f'{BotCommands.TsHelpCommand}','Get help for Torrent search module')
221 | ]
222 | '''
223 |
224 | def main():
225 | fs_utils.start_cleanup()
226 | if IS_VPS:
227 | asyncio.get_event_loop().run_until_complete(start_server_async(PORT))
228 | # Check if the bot is restarting
229 | if os.path.isfile(".restartmsg"):
230 | with open(".restartmsg") as f:
231 | chat_id, msg_id = map(int, f)
232 | bot.edit_message_text("Restarted successfully!", chat_id, msg_id)
233 | os.remove(".restartmsg")
234 | elif OWNER_ID:
235 | try:
236 | text = "Bot Restarted!"
237 | bot.sendMessage(chat_id=OWNER_ID, text=text, parse_mode=ParseMode.HTML)
238 | if AUTHORIZED_CHATS:
239 | for i in AUTHORIZED_CHATS:
240 | bot.sendMessage(chat_id=i, text=text, parse_mode=ParseMode.HTML)
241 | except Exception as e:
242 | LOGGER.warning(e)
243 | # bot.set_my_commands(botcmds)
244 | start_handler = CommandHandler(BotCommands.StartCommand, start, run_async=True)
245 | ping_handler = CommandHandler(BotCommands.PingCommand, ping,
246 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
247 | restart_handler = CommandHandler(BotCommands.RestartCommand, restart,
248 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
249 | help_handler = CommandHandler(BotCommands.HelpCommand,
250 | bot_help, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
251 | stats_handler = CommandHandler(BotCommands.StatsCommand,
252 | stats, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
253 | log_handler = CommandHandler(BotCommands.LogCommand, log, filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
254 | dispatcher.add_handler(start_handler)
255 | dispatcher.add_handler(ping_handler)
256 | dispatcher.add_handler(restart_handler)
257 | dispatcher.add_handler(help_handler)
258 | dispatcher.add_handler(stats_handler)
259 | dispatcher.add_handler(log_handler)
260 | updater.start_polling(drop_pending_updates=IGNORE_PENDING_REQUESTS)
261 | LOGGER.info("Bot Started!")
262 | signal.signal(signal.SIGINT, fs_utils.exit_clean_up)
263 |
264 | app.start()
265 | main()
266 | idle()
267 |
--------------------------------------------------------------------------------
/bot/helper/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/breakdowns/slam-mirrorbot/8c95f7b261c4be9db97c94bc69bfaa2039f5d853/bot/helper/__init__.py
--------------------------------------------------------------------------------
/bot/helper/ext_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/breakdowns/slam-mirrorbot/8c95f7b261c4be9db97c94bc69bfaa2039f5d853/bot/helper/ext_utils/__init__.py
--------------------------------------------------------------------------------
/bot/helper/ext_utils/bot_utils.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import re
3 | import threading
4 | import time
5 | import math
6 |
7 | from bot.helper.telegram_helper.bot_commands import BotCommands
8 | from bot import dispatcher, download_dict, download_dict_lock, STATUS_LIMIT
9 | from telegram import InlineKeyboardMarkup
10 | from telegram.ext import CallbackQueryHandler
11 | from bot.helper.telegram_helper import button_build, message_utils
12 |
13 | LOGGER = logging.getLogger(__name__)
14 |
15 | MAGNET_REGEX = r"magnet:\?xt=urn:btih:[a-zA-Z0-9]*"
16 |
17 | URL_REGEX = r"(?:(?:https?|ftp):\/\/)?[\w/\-?=%.]+\.[\w/\-?=%.]+"
18 |
19 | COUNT = 0
20 | PAGE_NO = 1
21 |
22 |
23 | class MirrorStatus:
24 | STATUS_UPLOADING = "Uploading...📤"
25 | STATUS_DOWNLOADING = "Downloading...📥"
26 | STATUS_CLONING = "Cloning...♻️"
27 | STATUS_WAITING = "Queued...📝"
28 | STATUS_FAILED = "Failed 🚫. Cleaning Download..."
29 | STATUS_PAUSE = "Paused...⭕️"
30 | STATUS_ARCHIVING = "Archiving...🔐"
31 | STATUS_EXTRACTING = "Extracting...📂"
32 | STATUS_SPLITTING = "Splitting...✂️"
33 |
34 |
35 | PROGRESS_MAX_SIZE = 100 // 8
36 | PROGRESS_INCOMPLETE = ['▏', '▎', '▍', '▌', '▋', '▊', '▉']
37 |
38 | SIZE_UNITS = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
39 |
40 |
41 | class setInterval:
42 | def __init__(self, interval, action):
43 | self.interval = interval
44 | self.action = action
45 | self.stopEvent = threading.Event()
46 | thread = threading.Thread(target=self.__setInterval)
47 | thread.start()
48 |
49 | def __setInterval(self):
50 | nextTime = time.time() + self.interval
51 | while not self.stopEvent.wait(nextTime - time.time()):
52 | nextTime += self.interval
53 | self.action()
54 |
55 | def cancel(self):
56 | self.stopEvent.set()
57 |
58 | def get_readable_file_size(size_in_bytes) -> str:
59 | if size_in_bytes is None:
60 | return '0B'
61 | index = 0
62 | while size_in_bytes >= 1024:
63 | size_in_bytes /= 1024
64 | index += 1
65 | try:
66 | return f'{round(size_in_bytes, 2)}{SIZE_UNITS[index]}'
67 | except IndexError:
68 | return 'File too large'
69 |
70 | def getDownloadByGid(gid):
71 | with download_dict_lock:
72 | for dl in download_dict.values():
73 | status = dl.status()
74 | if (
75 | status
76 | not in [
77 | MirrorStatus.STATUS_ARCHIVING,
78 | MirrorStatus.STATUS_EXTRACTING,
79 | MirrorStatus.STATUS_SPLITTING,
80 | ]
81 | and dl.gid() == gid
82 | ):
83 | return dl
84 | return None
85 |
86 | def getAllDownload():
87 | with download_dict_lock:
88 | for dlDetails in download_dict.values():
89 | status = dlDetails.status()
90 | if (
91 | status
92 | not in [
93 | MirrorStatus.STATUS_ARCHIVING,
94 | MirrorStatus.STATUS_EXTRACTING,
95 | MirrorStatus.STATUS_SPLITTING,
96 | MirrorStatus.STATUS_CLONING,
97 | MirrorStatus.STATUS_UPLOADING,
98 | ]
99 | and dlDetails
100 | ):
101 | return dlDetails
102 | return None
103 |
104 | def get_progress_bar_string(status):
105 | completed = status.processed_bytes() / 8
106 | total = status.size_raw() / 8
107 | p = 0 if total == 0 else round(completed * 100 / total)
108 | p = min(max(p, 0), 100)
109 | cFull = p // 8
110 | cPart = p % 8 - 1
111 | p_str = '█' * cFull
112 | if cPart >= 0:
113 | p_str += PROGRESS_INCOMPLETE[cPart]
114 | p_str += ' ' * (PROGRESS_MAX_SIZE - cFull)
115 | p_str = f"[{p_str}]"
116 | return p_str
117 |
118 | def get_readable_message():
119 | with download_dict_lock:
120 | msg = ""
121 | start = 0
122 | if STATUS_LIMIT is not None:
123 | dick_no = len(download_dict)
124 | global pages
125 | pages = math.ceil(dick_no/STATUS_LIMIT)
126 | if PAGE_NO > pages and pages != 0:
127 | globals()['COUNT'] -= STATUS_LIMIT
128 | globals()['PAGE_NO'] -= 1
129 | start = COUNT
130 | for index, download in enumerate(list(download_dict.values())[start:], start=1):
131 | msg += f"Filename: {download.name()}
"
132 | msg += f"\nStatus: {download.status()}"
133 | if download.status() not in [
134 | MirrorStatus.STATUS_ARCHIVING,
135 | MirrorStatus.STATUS_EXTRACTING,
136 | MirrorStatus.STATUS_SPLITTING,
137 | ]:
138 | msg += f"\n{get_progress_bar_string(download)} {download.progress()}
"
139 | if download.status() == MirrorStatus.STATUS_CLONING:
140 | msg += f"\nCloned: {get_readable_file_size(download.processed_bytes())}
of {download.size()}
"
141 | elif download.status() == MirrorStatus.STATUS_UPLOADING:
142 | msg += f"\nUploaded: {get_readable_file_size(download.processed_bytes())}
of {download.size()}
"
143 | else:
144 | msg += f"\nDownloaded: {get_readable_file_size(download.processed_bytes())}
of {download.size()}
"
145 | msg += f"\nSpeed: {download.speed()}
ETA: {download.eta()}
"
146 | try:
147 | msg += f"\nSeeders: {download.aria_download().num_seeders}
" \
148 | f" | Peers: {download.aria_download().connections}
"
149 | except:
150 | pass
151 | try:
152 | msg += f"\nSeeders: {download.torrent_info().num_seeds}
" \
153 | f" | Leechers: {download.torrent_info().num_leechs}
"
154 | except:
155 | pass
156 | msg += f"\nTo Cancel: /{BotCommands.CancelMirror} {download.gid()}
"
157 | msg += "\n\n"
158 | if STATUS_LIMIT is not None and index == STATUS_LIMIT:
159 | break
160 | if STATUS_LIMIT is not None and dick_no > STATUS_LIMIT:
161 | msg += f"Page: {PAGE_NO}
/{pages}
| Tasks: {dick_no}
\n"
162 | buttons = button_build.ButtonMaker()
163 | buttons.sbutton("Previous", "pre")
164 | buttons.sbutton("Next", "nex")
165 | button = InlineKeyboardMarkup(buttons.build_menu(2))
166 | return msg, button
167 | return msg, ""
168 |
169 | def flip(update, context):
170 | query = update.callback_query
171 | query.answer()
172 | global COUNT, PAGE_NO
173 | if query.data == "nex":
174 | if PAGE_NO == pages:
175 | COUNT = 0
176 | PAGE_NO = 1
177 | else:
178 | COUNT += STATUS_LIMIT
179 | PAGE_NO += 1
180 | elif query.data == "pre":
181 | if PAGE_NO == 1:
182 | COUNT = STATUS_LIMIT * (pages - 1)
183 | PAGE_NO = pages
184 | else:
185 | COUNT -= STATUS_LIMIT
186 | PAGE_NO -= 1
187 | message_utils.update_all_messages()
188 |
189 | def check_limit(size, limit, tar_unzip_limit=None, is_tar_ext=False):
190 | LOGGER.info('Checking File/Folder Size...')
191 | if is_tar_ext and tar_unzip_limit is not None:
192 | limit = tar_unzip_limit
193 | if limit is not None:
194 | limit = limit.split(' ', maxsplit=1)
195 | limitint = int(limit[0])
196 | if 'G' in limit[1] or 'g' in limit[1]:
197 | if size > limitint * 1024**3:
198 | return True
199 | elif 'T' in limit[1] or 't' in limit[1]:
200 | if size > limitint * 1024**4:
201 | return True
202 |
203 | def get_readable_time(seconds: int) -> str:
204 | result = ''
205 | (days, remainder) = divmod(seconds, 86400)
206 | days = int(days)
207 | if days != 0:
208 | result += f'{days}d'
209 | (hours, remainder) = divmod(remainder, 3600)
210 | hours = int(hours)
211 | if hours != 0:
212 | result += f'{hours}h'
213 | (minutes, seconds) = divmod(remainder, 60)
214 | minutes = int(minutes)
215 | if minutes != 0:
216 | result += f'{minutes}m'
217 | seconds = int(seconds)
218 | result += f'{seconds}s'
219 | return result
220 |
221 | def is_url(url: str):
222 | url = re.findall(URL_REGEX, url)
223 | return bool(url)
224 |
225 | def is_gdrive_link(url: str):
226 | return "drive.google.com" in url
227 |
228 | def is_mega_link(url: str):
229 | return "mega.nz" in url or "mega.co.nz" in url
230 |
231 | def get_mega_link_type(url: str):
232 | if "folder" in url:
233 | return "folder"
234 | elif "file" in url:
235 | return "file"
236 | elif "/#F!" in url:
237 | return "folder"
238 | return "file"
239 |
240 | def is_magnet(url: str):
241 | magnet = re.findall(MAGNET_REGEX, url)
242 | return bool(magnet)
243 |
244 | def new_thread(fn):
245 | """To use as decorator to make a function call threaded.
246 | Needs import
247 | from threading import Thread"""
248 |
249 | def wrapper(*args, **kwargs):
250 | thread = threading.Thread(target=fn, args=args, kwargs=kwargs)
251 | thread.start()
252 | return thread
253 |
254 | return wrapper
255 |
256 |
257 | next_handler = CallbackQueryHandler(flip, pattern="nex", run_async=True)
258 | previous_handler = CallbackQueryHandler(flip, pattern="pre", run_async=True)
259 | dispatcher.add_handler(next_handler)
260 | dispatcher.add_handler(previous_handler)
261 |
--------------------------------------------------------------------------------
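
Usage sketch (not part of the repository): the new_thread decorator defined in bot_utils.py above simply starts the wrapped call in its own thread and hands back that Thread object. The standalone re-implementation below mirrors that pattern; the slow_task function is invented for illustration, and the real helper is not imported here because importing bot.helper pulls in the bot's runtime configuration.

import threading
import time

def new_thread(fn):
    # same pattern as bot_utils.new_thread: run fn in a thread, return the Thread
    def wrapper(*args, **kwargs):
        thread = threading.Thread(target=fn, args=args, kwargs=kwargs)
        thread.start()
        return thread
    return wrapper

@new_thread
def slow_task(name):
    # illustrative workload only
    time.sleep(1)
    print(f"{name} finished")

t = slow_task("demo")  # returns the already-started threading.Thread
t.join()

setInterval from the same file follows the same idea: it keeps invoking its action on a background thread until cancel() sets its stop event.
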
/bot/helper/ext_utils/custom_filters.py:
--------------------------------------------------------------------------------
1 | from pyrogram import filters
2 |
3 | def callback_data(data):
4 | def func(flt, client, callback_query):
5 | return callback_query.data in flt.data
6 |
7 | data = data if isinstance(data, list) else [data]
8 | return filters.create(
9 | func,
10 | 'CustomCallbackDataFilter',
11 | data=data
12 | )
13 |
14 | def callback_chat(chats):
15 | def func(flt, client, callback_query):
16 | return callback_query.message.chat.id in flt.chats
17 |
18 | chats = chats if isinstance(chats, list) else [chats]
19 | return filters.create(
20 | func,
21 | 'CustomCallbackChatsFilter',
22 | chats=chats
23 | )
24 |
--------------------------------------------------------------------------------
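
Usage sketch (not part of the repository): callback_data and callback_chat above return ordinary pyrogram filters, so they can be combined with & and passed to a CallbackQueryHandler. The app client, the chat id and the on_thumb_button callback below are invented for illustration; a real Client also needs api_id/api_hash.

from pyrogram import Client
from pyrogram.handlers import CallbackQueryHandler
from bot.helper.ext_utils.custom_filters import callback_data, callback_chat

app = Client("example_session")  # placeholder session; real credentials come from the bot's config

def on_thumb_button(client, callback_query):
    # illustrative callback: acknowledge the button press
    callback_query.answer("Thumbnail button pressed")

# Fire only for the "thumb" callback payload and only inside one specific chat.
app.add_handler(
    CallbackQueryHandler(
        on_thumb_button,
        filters=callback_data("thumb") & callback_chat(-100123456789),
    )
)
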
/bot/helper/ext_utils/db_handler.py:
--------------------------------------------------------------------------------
1 | import psycopg2
2 | from psycopg2 import Error
3 | from bot import AUTHORIZED_CHATS, SUDO_USERS, DB_URI, LOGGER
4 |
5 | class DbManger:
6 | def __init__(self):
7 | self.err = False
8 |
9 | def connect(self):
10 | try:
11 | self.conn = psycopg2.connect(DB_URI)
12 | self.cur = self.conn.cursor()
13 | except psycopg2.DatabaseError as error :
14 | LOGGER.error("Error in dbMang : ", error)
15 | self.err = True
16 |
17 | def disconnect(self):
18 | self.cur.close()
19 | self.conn.close()
20 |
21 | def db_auth(self,chat_id: int):
22 | self.connect()
23 | if self.err:
24 | return "There's some error check log for details"
25 | sql = 'INSERT INTO users VALUES ({});'.format(chat_id)
26 | self.cur.execute(sql)
27 | self.conn.commit()
28 | self.disconnect()
29 | AUTHORIZED_CHATS.add(chat_id)
30 | return 'Authorized successfully'
31 |
32 | def db_unauth(self,chat_id: int):
33 | self.connect()
34 | if self.err:
35 | return "There's some error check log for details"
36 | sql = 'DELETE from users where uid = {};'.format(chat_id)
37 | self.cur.execute(sql)
38 | self.conn.commit()
39 | self.disconnect()
40 | AUTHORIZED_CHATS.remove(chat_id)
41 | return 'Unauthorized successfully'
42 |
43 | def db_addsudo(self,chat_id: int):
44 | self.connect()
45 | if self.err:
46 | return "There's some error check log for details"
47 | if chat_id in AUTHORIZED_CHATS:
48 | sql = 'UPDATE users SET sudo = TRUE where uid = {};'.format(chat_id)
49 | self.cur.execute(sql)
50 | self.conn.commit()
51 | self.disconnect()
52 | SUDO_USERS.add(chat_id)
53 | return 'Successfully promoted as Sudo'
54 | else:
55 | sql = 'INSERT INTO users VALUES ({},TRUE);'.format(chat_id)
56 | self.cur.execute(sql)
57 | self.conn.commit()
58 | self.disconnect()
59 | SUDO_USERS.add(chat_id)
60 | return 'Successfully Authorized and promoted as Sudo'
61 |
62 | def db_rmsudo(self,chat_id: int):
63 | self.connect()
64 | if self.err:
65 | return "There's some error check log for details"
66 | sql = 'UPDATE users SET sudo = FALSE where uid = {};'.format(chat_id)
67 | self.cur.execute(sql)
68 | self.conn.commit()
69 | self.disconnect()
70 | SUDO_USERS.remove(chat_id)
71 | return 'Successfully removed from Sudo'
72 |
--------------------------------------------------------------------------------
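
For orientation (not part of the repository): DbManger assumes a Postgres users table already exists. The sketch below creates one whose layout is inferred from the INSERT/UPDATE strings above, a uid column plus a sudo flag defaulting to FALSE; the connection string is a placeholder to be replaced with the bot's DB_URI.

import psycopg2

# placeholder DSN; use the same value the bot reads into DB_URI
conn = psycopg2.connect("postgresql://user:password@localhost:5432/botdb")
cur = conn.cursor()
cur.execute("CREATE TABLE IF NOT EXISTS users (uid BIGINT UNIQUE, sudo BOOLEAN DEFAULT FALSE);")
conn.commit()
cur.close()
conn.close()
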
/bot/helper/ext_utils/exceptions.py:
--------------------------------------------------------------------------------
1 | class DirectDownloadLinkException(Exception):
2 | """Not method found for extracting direct download link from the http link"""
3 | pass
4 |
5 |
6 | class NotSupportedExtractionArchive(Exception):
7 | """The archive format use is trying to extract is not supported"""
8 | pass
9 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/fs_utils.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import shutil
3 | import os
4 | import pathlib
5 | import magic
6 | import tarfile
7 | import subprocess
8 | import time
9 |
10 | from PIL import Image
11 | from hachoir.parser import createParser
12 | from hachoir.metadata import extractMetadata
13 |
14 | from .exceptions import NotSupportedExtractionArchive
15 | from bot import aria2, LOGGER, DOWNLOAD_DIR, get_client, TG_SPLIT_SIZE
16 |
17 | VIDEO_SUFFIXES = ("M4V", "MP4", "MOV", "FLV", "WMV", "3GP", "MPG", "WEBM", "MKV", "AVI")
18 |
19 | def clean_download(path: str):
20 | if os.path.exists(path):
21 | LOGGER.info(f"Cleaning Download: {path}")
22 | shutil.rmtree(path)
23 |
24 | def start_cleanup():
25 | try:
26 | shutil.rmtree(DOWNLOAD_DIR)
27 | except FileNotFoundError:
28 | pass
29 |
30 | def clean_all():
31 | aria2.remove_all(True)
32 | get_client().torrents_delete(torrent_hashes="all", delete_files=True)
33 | get_client().auth_log_out()
34 | try:
35 | shutil.rmtree(DOWNLOAD_DIR)
36 | except FileNotFoundError:
37 | pass
38 |
39 | def exit_clean_up(signal, frame):
40 | try:
41 | LOGGER.info("Please wait, while we clean up the downloads and stop running downloads")
42 | clean_all()
43 | sys.exit(0)
44 | except KeyboardInterrupt:
45 | LOGGER.warning("Force Exiting before the cleanup finishes!")
46 | sys.exit(1)
47 |
48 | def get_path_size(path):
49 | if os.path.isfile(path):
50 | return os.path.getsize(path)
51 | total_size = 0
52 | for root, dirs, files in os.walk(path):
53 | for f in files:
54 | abs_path = os.path.join(root, f)
55 | total_size += os.path.getsize(abs_path)
56 | return total_size
57 |
58 | def tar(org_path):
59 | tar_path = org_path + ".tar"
60 | path = pathlib.PurePath(org_path)
61 | LOGGER.info(f'Tar: orig_path: {org_path}, tar_path: {tar_path}')
62 | tar = tarfile.open(tar_path, "w")
63 | tar.add(org_path, arcname=path.name)
64 | tar.close()
65 | return tar_path
66 |
67 | def get_base_name(orig_path: str):
68 | if orig_path.endswith(".tar.bz2"):
69 | return orig_path.replace(".tar.bz2", "")
70 | elif orig_path.endswith(".tar.gz"):
71 | return orig_path.replace(".tar.gz", "")
72 | elif orig_path.endswith(".bz2"):
73 | return orig_path.replace(".bz2", "")
74 | elif orig_path.endswith(".gz"):
75 | return orig_path.replace(".gz", "")
76 | elif orig_path.endswith(".tar.xz"):
77 | return orig_path.replace(".tar.xz", "")
78 | elif orig_path.endswith(".tar"):
79 | return orig_path.replace(".tar", "")
80 | elif orig_path.endswith(".tbz2"):
81 | return orig_path.replace("tbz2", "")
82 | elif orig_path.endswith(".tgz"):
83 | return orig_path.replace(".tgz", "")
84 | elif orig_path.endswith(".zip"):
85 | return orig_path.replace(".zip", "")
86 | elif orig_path.endswith(".7z"):
87 | return orig_path.replace(".7z", "")
88 | elif orig_path.endswith(".Z"):
89 | return orig_path.replace(".Z", "")
90 | elif orig_path.endswith(".rar"):
91 | return orig_path.replace(".rar", "")
92 | elif orig_path.endswith(".iso"):
93 | return orig_path.replace(".iso", "")
94 | elif orig_path.endswith(".wim"):
95 | return orig_path.replace(".wim", "")
96 | elif orig_path.endswith(".cab"):
97 | return orig_path.replace(".cab", "")
98 | elif orig_path.endswith(".apm"):
99 | return orig_path.replace(".apm", "")
100 | elif orig_path.endswith(".arj"):
101 | return orig_path.replace(".arj", "")
102 | elif orig_path.endswith(".chm"):
103 | return orig_path.replace(".chm", "")
104 | elif orig_path.endswith(".cpio"):
105 | return orig_path.replace(".cpio", "")
106 | elif orig_path.endswith(".cramfs"):
107 | return orig_path.replace(".cramfs", "")
108 | elif orig_path.endswith(".deb"):
109 | return orig_path.replace(".deb", "")
110 | elif orig_path.endswith(".dmg"):
111 | return orig_path.replace(".dmg", "")
112 | elif orig_path.endswith(".fat"):
113 | return orig_path.replace(".fat", "")
114 | elif orig_path.endswith(".hfs"):
115 | return orig_path.replace(".hfs", "")
116 | elif orig_path.endswith(".lzh"):
117 | return orig_path.replace(".lzh", "")
118 | elif orig_path.endswith(".lzma"):
119 | return orig_path.replace(".lzma", "")
120 | elif orig_path.endswith(".lzma2"):
121 | return orig_path.replace(".lzma2", "")
122 | elif orig_path.endswith(".mbr"):
123 | return orig_path.replace(".mbr", "")
124 | elif orig_path.endswith(".msi"):
125 | return orig_path.replace(".msi", "")
126 | elif orig_path.endswith(".mslz"):
127 | return orig_path.replace(".mslz", "")
128 | elif orig_path.endswith(".nsis"):
129 | return orig_path.replace(".nsis", "")
130 | elif orig_path.endswith(".ntfs"):
131 | return orig_path.replace(".ntfs", "")
132 | elif orig_path.endswith(".rpm"):
133 | return orig_path.replace(".rpm", "")
134 | elif orig_path.endswith(".squashfs"):
135 | return orig_path.replace(".squashfs", "")
136 | elif orig_path.endswith(".udf"):
137 | return orig_path.replace(".udf", "")
138 | elif orig_path.endswith(".vhd"):
139 | return orig_path.replace(".vhd", "")
140 | elif orig_path.endswith(".xar"):
141 | return orig_path.replace(".xar", "")
142 | else:
143 | raise NotSupportedExtractionArchive('File format not supported for extraction')
144 |
145 | def get_mime_type(file_path):
146 | mime = magic.Magic(mime=True)
147 | mime_type = mime.from_file(file_path)
148 | mime_type = mime_type or "text/plain"
149 | return mime_type
150 |
151 | def take_ss(video_file):
152 | des_dir = 'Thumbnails'
153 | if not os.path.exists(des_dir):
154 | os.mkdir(des_dir)
155 | des_dir = os.path.join(des_dir, f"{time.time()}.jpg")
156 | metadata = extractMetadata(createParser(video_file))
157 | duration = metadata.get('duration').seconds if metadata.has("duration") else 5
158 | duration = int(duration) / 2
159 | subprocess.run(["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(duration),
160 | "-i", video_file, "-vframes", "1", des_dir])
161 | if not os.path.lexists(des_dir):
162 | return None
163 |
164 | Image.open(des_dir).convert("RGB").save(des_dir)
165 | img = Image.open(des_dir)
166 | img = img.resize((480, 320))
167 | img.save(des_dir, "JPEG")
168 | return des_dir
169 |
170 | def split(path, size, file, dirpath, split_size, start_time=0, i=1):
171 | if file.upper().endswith(VIDEO_SUFFIXES):
172 | base_name, extension = os.path.splitext(file)
173 | metadata = extractMetadata(createParser(path))
174 | total_duration = metadata.get('duration').seconds - 8
175 | split_size = split_size - 3000000
176 | while start_time < total_duration:
177 | parted_name = "{}.part{}{}".format(str(base_name), str(i).zfill(3), str(extension))
178 | out_path = os.path.join(dirpath, parted_name)
179 | subprocess.run(["ffmpeg", "-hide_banner", "-loglevel", "error", "-i",
180 | path, "-ss", str(start_time), "-fs", str(split_size),
181 | "-strict", "-2", "-c", "copy", out_path])
182 | out_size = get_path_size(out_path)
183 | if out_size > TG_SPLIT_SIZE:
184 | dif = out_size - TG_SPLIT_SIZE
185 | split_size = split_size - dif + 2000000
186 | os.remove(out_path)
187 | return split(path, size, file, dirpath, split_size, start_time, i)
188 | metadata = extractMetadata(createParser(out_path))
189 | start_time = start_time + metadata.get('duration').seconds - 5
190 | i = i + 1
191 | else:
192 | out_path = os.path.join(dirpath, file + ".")
193 | subprocess.run(["split", "--numeric-suffixes=1", "--suffix-length=3", f"--bytes={split_size}", path, out_path])
194 |
195 |
--------------------------------------------------------------------------------
/bot/helper/ext_utils/shortenurl.py:
--------------------------------------------------------------------------------
1 | # Implemented by https://github.com/junedkh
2 |
3 | import requests
4 | import random
5 | import base64
6 | import pyshorteners
7 | from urllib.parse import quote
8 | from urllib3 import disable_warnings
9 | from bot import SHORTENER, SHORTENER_API
10 |
11 |
12 | def short_url(longurl):
13 | if "shorte.st" in SHORTENER:
14 | disable_warnings()
15 | return requests.get(f'http://api.shorte.st/stxt/{SHORTENER_API}/{longurl}', verify=False).text
16 | elif "linkvertise" in SHORTENER:
17 | url = quote(base64.b64encode(longurl.encode("utf-8")))
18 | linkvertise = [
19 | f"https://link-to.net/{SHORTENER_API}/{random.random() * 1000}/dynamic?r={url}",
20 | f"https://up-to-down.net/{SHORTENER_API}/{random.random() * 1000}/dynamic?r={url}",
21 | f"https://direct-link.net/{SHORTENER_API}/{random.random() * 1000}/dynamic?r={url}",
22 | f"https://file-link.net/{SHORTENER_API}/{random.random() * 1000}/dynamic?r={url}"]
23 | return random.choice(linkvertise)
24 | elif "bitly.com" in SHORTENER:
25 | s = pyshorteners.Shortener(api_key=SHORTENER_API)
26 | return s.bitly.short(longurl)
27 | elif "ouo.io" in SHORTENER:
28 | disable_warnings()
29 | return requests.get(f'http://ouo.io/api/{SHORTENER_API}?s={longurl}', verify=False).text
30 | else:
31 | return requests.get(f'https://{SHORTENER}/api?api={SHORTENER_API}&url={longurl}&format=text').text
32 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/breakdowns/slam-mirrorbot/8c95f7b261c4be9db97c94bc69bfaa2039f5d853/bot/helper/mirror_utils/__init__.py
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/breakdowns/slam-mirrorbot/8c95f7b261c4be9db97c94bc69bfaa2039f5d853/bot/helper/mirror_utils/download_utils/__init__.py
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/aria2_download.py:
--------------------------------------------------------------------------------
1 | from bot import aria2, download_dict_lock, STOP_DUPLICATE, TORRENT_DIRECT_LIMIT, TAR_UNZIP_LIMIT
2 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
3 | from bot.helper.ext_utils.bot_utils import *
4 | from bot.helper.mirror_utils.status_utils.aria_download_status import AriaDownloadStatus
5 | from bot.helper.telegram_helper.message_utils import *
6 | import threading
7 | from aria2p import API
8 | from time import sleep
9 |
10 |
11 | class AriaDownloadHelper:
12 |
13 | def __init__(self):
14 | super().__init__()
15 |
16 | @new_thread
17 | def __onDownloadStarted(self, api, gid):
18 | if STOP_DUPLICATE or TORRENT_DIRECT_LIMIT is not None or TAR_UNZIP_LIMIT is not None:
19 | sleep(1)
20 | dl = getDownloadByGid(gid)
21 | download = aria2.get_download(gid)
22 | if STOP_DUPLICATE and dl is not None and not dl.getListener().isLeech:
23 | LOGGER.info('Checking File/Folder if already in Drive...')
24 | sname = aria2.get_download(gid).name
25 | if dl.getListener().isTar:
26 | sname = sname + ".zip" if dl.getListener().isZip else sname + ".tar"
27 | if dl.getListener().extract:
28 | smsg = None
29 | else:
30 | gdrive = GoogleDriveHelper()
31 | smsg, button = gdrive.drive_list(sname, True)
32 | if smsg:
33 | dl.getListener().onDownloadError('File/Folder already available in Drive.\n\n')
34 | aria2.remove([download], force=True)
35 | sendMarkup("Here are the search results:", dl.getListener().bot, dl.getListener().update, button)
36 | return
37 | if (TORRENT_DIRECT_LIMIT is not None or TAR_UNZIP_LIMIT is not None) and dl is not None:
38 | sleep(1)
39 | size = aria2.get_download(gid).total_length
40 | if dl.getListener().isTar or dl.getListener().extract:
41 | is_tar_ext = True
42 | mssg = f'Tar/Unzip limit is {TAR_UNZIP_LIMIT}'
43 | else:
44 | is_tar_ext = False
45 | mssg = f'Torrent/Direct limit is {TORRENT_DIRECT_LIMIT}'
46 | result = check_limit(size, TORRENT_DIRECT_LIMIT, TAR_UNZIP_LIMIT, is_tar_ext)
47 | if result:
48 | dl.getListener().onDownloadError(f'{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}')
49 | aria2.remove([download], force=True)
50 | return
51 | update_all_messages()
52 |
53 | def __onDownloadComplete(self, api: API, gid):
54 | dl = getDownloadByGid(gid)
55 | download = aria2.get_download(gid)
56 | if download.followed_by_ids:
57 | new_gid = download.followed_by_ids[0]
58 | new_download = aria2.get_download(new_gid)
59 | if dl is None:
60 | dl = getDownloadByGid(new_gid)
61 | with download_dict_lock:
62 | download_dict[dl.uid()] = AriaDownloadStatus(new_gid, dl.getListener())
63 | if new_download.is_torrent:
64 | download_dict[dl.uid()].is_torrent = True
65 | update_all_messages()
66 | LOGGER.info(f'Changed gid from {gid} to {new_gid}')
67 | elif dl:
68 | threading.Thread(target=dl.getListener().onDownloadComplete).start()
69 |
70 | @new_thread
71 | def __onDownloadStopped(self, api, gid):
72 | sleep(4)
73 | dl = getDownloadByGid(gid)
74 | if dl:
75 | dl.getListener().onDownloadError('Dead torrent!')
76 |
77 | @new_thread
78 | def __onDownloadError(self, api, gid):
79 | LOGGER.info(f"onDownloadError: {gid}")
80 | sleep(0.5) # sleep for split second to ensure proper dl gid update from onDownloadComplete
81 | dl = getDownloadByGid(gid)
82 | download = aria2.get_download(gid)
83 | error = download.error_message
84 | LOGGER.info(f"Download Error: {error}")
85 | if dl:
86 | dl.getListener().onDownloadError(error)
87 |
88 | def start_listener(self):
89 | aria2.listen_to_notifications(threaded=True, on_download_start=self.__onDownloadStarted,
90 | on_download_error=self.__onDownloadError,
91 | on_download_stop=self.__onDownloadStopped,
92 | on_download_complete=self.__onDownloadComplete)
93 |
94 | def add_download(self, link: str, path, listener, filename):
95 | if is_magnet(link):
96 | download = aria2.add_magnet(link, {'dir': path, 'out': filename})
97 | else:
98 | download = aria2.add_uris([link], {'dir': path, 'out': filename})
99 | if download.error_message: # no need to proceed further at this point
100 | listener.onDownloadError(download.error_message)
101 | return
102 | with download_dict_lock:
103 | download_dict[listener.uid] = AriaDownloadStatus(download.gid, listener)
104 | LOGGER.info(f"Started: {download.gid} DIR:{download.dir} ")
105 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/direct_link_generator_license.md:
--------------------------------------------------------------------------------
1 | RAPHIELSCAPE PUBLIC LICENSE
2 | Version 1.c, June 2019
3 |
4 | Copyright (C) 2019 Raphielscape LLC.
5 | Copyright (C) 2019 Devscapes Open Source Holding GmbH.
6 |
7 | Everyone is permitted to copy and distribute verbatim or modified
8 | copies of this license document, and changing it is allowed as long
9 | as the name is changed.
10 |
11 | RAPHIELSCAPE PUBLIC LICENSE
12 | A-1. DEFINITIONS
13 |
14 | 0. “This License” refers to version 1.c of the Raphielscape Public License.
15 |
16 | 1. “Copyright” also means copyright-like laws that apply to other kinds of works.
17 |
18 | 2. “The Work" refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”.
19 | “Licensees” and “recipients” may be individuals or organizations.
20 |
21 | 3. To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission,
22 | other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work
23 | or a work “based on” the earlier work.
24 |
25 | 4. Source Form. The “source form” for a work means the preferred form of the work for making modifications to it.
26 | “Object code” means any non-source form of a work.
27 |
28 | The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and
29 | (for an executable work) run the object code and to modify the work, including scripts to control those activities.
30 |
31 | The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source.
32 | The Corresponding Source for a work in source code form is that same work.
33 |
34 | 5. "The author" refers to "author" of the code, which is the one that made the particular code which exists inside of
35 | the Corresponding Source.
36 |
37 | 6. "Owner" refers to any parties which is made the early form of the Corresponding Source.
38 |
39 | A-2. TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
40 |
41 | 0. You must give any other recipients of the Work or Derivative Works a copy of this License; and
42 |
43 | 1. You must cause any modified files to carry prominent notices stating that You changed the files; and
44 |
45 | 2. You must retain, in the Source form of any Derivative Works that You distribute,
46 | this license, all copyright, patent, trademark, authorships and attribution notices
47 | from the Source form of the Work; and
48 |
49 | 3. Respecting the author and owner of works that are distributed in any way.
50 |
51 | You may add Your own copyright statement to Your modifications and may provide
52 | additional or different license terms and conditions for use, reproduction,
53 | or distribution of Your modifications, or for any such Derivative Works as a whole,
54 | provided Your use, reproduction, and distribution of the Work otherwise complies
55 | with the conditions stated in this License.
56 |
57 | B. DISCLAIMER OF WARRANTY
58 |
59 | THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR
60 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
61 | FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS
62 | BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
63 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
64 | OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
65 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
66 | OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
67 |
68 |
69 | C. REVISED VERSION OF THIS LICENSE
70 |
71 | The Devscapes Open Source Holding GmbH. may publish revised and/or new versions of the
72 | Raphielscape Public License from time to time. Such new versions will be similar in spirit
73 | to the present version, but may differ in detail to address new problems or concerns.
74 |
75 | Each version is given a distinguishing version number. If the Program specifies that a
76 | certain numbered version of the Raphielscape Public License "or any later version" applies to it,
77 | you have the option of following the terms and conditions either of that numbered version or of
78 | any later version published by the Devscapes Open Source Holding GmbH. If the Program does not specify a
79 | version number of the Raphielscape Public License, you may choose any version ever published
80 | by the Devscapes Open Source Holding GmbH.
81 |
82 | END OF LICENSE
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/download_helper.py:
--------------------------------------------------------------------------------
1 | # An abstract class which will be inherited by the tool specific classes like aria2_helper or mega_download_helper
2 | import threading
3 |
4 |
5 | class MethodNotImplementedError(NotImplementedError):
6 | def __init__(self):
7 | super().__init__('Not implemented method')
8 |
9 |
10 | class DownloadHelper:
11 | def __init__(self):
12 | self.name = '' # Name of the download; empty string if no download has been started
13 | self.size = 0.0 # Size of the download
14 | self.downloaded_bytes = 0.0 # Bytes downloaded
15 | self.speed = 0.0 # Download speed in bytes per second
16 | self.progress = 0.0
17 | self.progress_string = '0.00%'
18 | self.eta = 0 # Estimated time of download complete
19 | self.eta_string = '0s' # A listener class which have event callbacks
20 | self._resource_lock = threading.Lock()
21 |
22 | def add_download(self, link: str, path):
23 | raise MethodNotImplementedError
24 |
25 | def cancel_download(self):
26 | # Returns None if successfully cancelled, else error string
27 | raise MethodNotImplementedError
28 |
--------------------------------------------------------------------------------
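
Orientation sketch (not part of the repository): DownloadHelper above is the abstract base that the tool-specific helpers such as AriaDownloadHelper build on. The DummyDownloadHelper below is invented to show the expected contract; it assumes the bot package is importable, i.e. the bot's configuration is present.

from bot.helper.mirror_utils.download_utils.download_helper import DownloadHelper

class DummyDownloadHelper(DownloadHelper):
    """Invented example subclass: real helpers hand the link to aria2, mega, etc."""

    def add_download(self, link: str, path):
        # a real helper would start the transfer here and keep the progress fields updated
        with self._resource_lock:
            self.name = link.rsplit('/', 1)[-1]

    def cancel_download(self):
        # per the base class contract: None on success, otherwise an error string
        return None
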
/bot/helper/mirror_utils/download_utils/mega_downloader.py:
--------------------------------------------------------------------------------
1 | from bot import LOGGER, MEGA_API_KEY, download_dict_lock, download_dict, MEGA_EMAIL_ID, MEGA_PASSWORD
2 | import threading
3 | from mega import (MegaApi, MegaListener, MegaRequest, MegaTransfer, MegaError)
4 | from bot.helper.telegram_helper.message_utils import *
5 | import os
6 | from bot.helper.ext_utils.bot_utils import new_thread, get_mega_link_type, get_readable_file_size, check_limit
7 | from bot.helper.mirror_utils.status_utils.mega_download_status import MegaDownloadStatus
8 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
9 | from bot import MEGA_LIMIT, STOP_DUPLICATE, TAR_UNZIP_LIMIT
10 | import random
11 | import string
12 |
13 | class MegaDownloaderException(Exception):
14 | pass
15 |
16 |
17 | class MegaAppListener(MegaListener):
18 | _NO_EVENT_ON = (MegaRequest.TYPE_LOGIN,MegaRequest.TYPE_FETCH_NODES)
19 | NO_ERROR = "no error"
20 |
21 | def __init__(self, continue_event: threading.Event, listener):
22 | self.continue_event = continue_event
23 | self.node = None
24 | self.public_node = None
25 | self.listener = listener
26 | self.uid = listener.uid
27 | self.__bytes_transferred = 0
28 | self.is_cancelled = False
29 | self.__speed = 0
30 | self.__name = ''
31 | self.__size = 0
32 | self.error = None
33 | self.gid = ""
34 | super(MegaAppListener, self).__init__()
35 |
36 | @property
37 | def speed(self):
38 | """Returns speed of the download in bytes/second"""
39 | return self.__speed
40 |
41 | @property
42 | def name(self):
43 | """Returns name of the download"""
44 | return self.__name
45 |
46 | def setValues(self, name, size, gid):
47 | self.__name = name
48 | self.__size = size
49 | self.gid = gid
50 |
51 | @property
52 | def size(self):
53 | """Size of download in bytes"""
54 | return self.__size
55 |
56 | @property
57 | def downloaded_bytes(self):
58 | return self.__bytes_transferred
59 |
60 | def onRequestStart(self, api, request):
61 | pass
62 |
63 | def onRequestFinish(self, api, request, error):
64 | if str(error).lower() != "no error":
65 | self.error = error.copy()
66 | return
67 | request_type = request.getType()
68 | if request_type == MegaRequest.TYPE_LOGIN:
69 | api.fetchNodes()
70 | elif request_type == MegaRequest.TYPE_GET_PUBLIC_NODE:
71 | self.public_node = request.getPublicMegaNode()
72 | elif request_type == MegaRequest.TYPE_FETCH_NODES:
73 | LOGGER.info("Fetching Root Node.")
74 | self.node = api.getRootNode()
75 | LOGGER.info(f"Node Name: {self.node.getName()}")
76 | if request_type not in self._NO_EVENT_ON or self.node and "cloud drive" not in self.node.getName().lower():
77 | self.continue_event.set()
78 |
79 | def onRequestTemporaryError(self, api, request, error: MegaError):
80 | LOGGER.error(f'Mega request error: {error}')
81 | if not self.is_cancelled:
82 | self.is_cancelled = True
83 | self.listener.onDownloadError("RequestTempError: " + error.toString())
84 | self.error = error.toString()
85 | self.continue_event.set()
86 |
87 | def onTransferStart(self, api: MegaApi, transfer: MegaTransfer):
88 | pass
89 |
90 | def onTransferUpdate(self, api: MegaApi, transfer: MegaTransfer):
91 | if self.is_cancelled:
92 | api.cancelTransfer(transfer, None)
93 | return
94 | self.__speed = transfer.getSpeed()
95 | self.__bytes_transferred = transfer.getTransferredBytes()
96 |
97 | def onTransferFinish(self, api: MegaApi, transfer: MegaTransfer, error):
98 | try:
99 | if self.is_cancelled:
100 | self.continue_event.set()
101 | elif transfer.isFinished() and (transfer.isFolderTransfer() or transfer.getFileName() == self.name):
102 | self.listener.onDownloadComplete()
103 | self.continue_event.set()
104 | except Exception as e:
105 | LOGGER.error(e)
106 |
107 | def onTransferTemporaryError(self, api, transfer, error):
108 | filen = transfer.getFileName()
109 | state = transfer.getState()
110 | errStr = error.toString()
111 | LOGGER.error(f'Mega download error in file {transfer} {filen}: {error}')
112 |
113 | if state in [1, 4]:
114 | # Sometimes even MEGA (the official client) can't stream a node and raises a temporary-failure error.
115 | # Don't break the transfer queue if the transfer is in queued (1) or retrying (4) state [causes a segfault]
116 | return
117 |
118 | self.error = errStr
119 | if not self.is_cancelled:
120 | self.is_cancelled = True
121 | self.listener.onDownloadError(f"TransferTempError: {errStr} ({filen})")
122 |
123 | def cancel_download(self):
124 | self.is_cancelled = True
125 | self.listener.onDownloadError("Download Canceled by user")
126 |
127 |
128 | class AsyncExecutor:
129 |
130 | def __init__(self):
131 | self.continue_event = threading.Event()
132 |
133 | def do(self, function, args):
134 | self.continue_event.clear()  # reset the event before issuing the async call
135 | function(*args)  # fire the asynchronous Mega API call
136 | self.continue_event.wait()  # block until a listener callback sets the event
137 |
138 | listeners = []
139 |
140 | class MegaDownloadHelper:
141 | def __init__(self):
142 | pass
143 |
144 | @staticmethod
145 | @new_thread
146 | def add_download(mega_link: str, path: str, listener):
147 | if MEGA_API_KEY is None:
148 | raise MegaDownloaderException('Mega API KEY not provided! Cannot mirror Mega links')
149 | executor = AsyncExecutor()
150 | api = MegaApi(MEGA_API_KEY, None, None, 'telegram-mirror-bot')
151 | global listeners
152 | mega_listener = MegaAppListener(executor.continue_event, listener)
153 | listeners.append(mega_listener)
154 | api.addListener(mega_listener)
155 | if MEGA_EMAIL_ID is not None and MEGA_PASSWORD is not None:
156 | executor.do(api.login, (MEGA_EMAIL_ID, MEGA_PASSWORD))
157 | link_type = get_mega_link_type(mega_link)
158 | if link_type == "file":
159 | LOGGER.info("File. If your download didn't start, then check your link if it's available to download")
160 | executor.do(api.getPublicNode, (mega_link,))
161 | node = mega_listener.public_node
162 | else:
163 | LOGGER.info("Folder. If your download didn't start, then check your link if it's available to download")
164 | folder_api = MegaApi(MEGA_API_KEY,None,None,'TgBot')
165 | folder_api.addListener(mega_listener)
166 | executor.do(folder_api.loginToFolder, (mega_link,))
167 | node = folder_api.authorizeNode(mega_listener.node)
168 | if mega_listener.error is not None:
169 | return sendMessage(str(mega_listener.error), listener.bot, listener.update)
170 | if STOP_DUPLICATE and not listener.isLeech:
171 | LOGGER.info('Checking File/Folder if already in Drive')
172 | mname = node.getName()
173 | if listener.isTar:
174 | mname = mname + ".zip" if listener.isZip else mname + ".tar"
175 | if listener.extract:
176 | smsg = None
177 | else:
178 | gd = GoogleDriveHelper()
179 | smsg, button = gd.drive_list(mname, True)
180 | if smsg:
181 | msg1 = "File/Folder is already available in Drive.\nHere are the search results:"
182 | sendMarkup(msg1, listener.bot, listener.update, button)
183 | executor.continue_event.set()
184 | return
185 | if MEGA_LIMIT is not None or TAR_UNZIP_LIMIT is not None:
186 | size = api.getSize(node)
187 | if listener.isTar or listener.extract:
188 | is_tar_ext = True
189 | msg3 = f'Failed, Tar/Unzip limit is {TAR_UNZIP_LIMIT}.\nYour File/Folder size is {get_readable_file_size(api.getSize(node))}.'
190 | else:
191 | is_tar_ext = False
192 | msg3 = f'Failed, Mega limit is {MEGA_LIMIT}.\nYour File/Folder size is {get_readable_file_size(api.getSize(node))}.'
193 | result = check_limit(size, MEGA_LIMIT, TAR_UNZIP_LIMIT, is_tar_ext)
194 | if result:
195 | sendMessage(msg3, listener.bot, listener.update)
196 | executor.continue_event.set()
197 | return
198 | with download_dict_lock:
199 | download_dict[listener.uid] = MegaDownloadStatus(mega_listener, listener)
200 | os.makedirs(path)
201 | gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=8))
202 | mega_listener.setValues(node.getName(), api.getSize(node), gid)
203 | sendStatusMessage(listener.update, listener.bot)
204 | executor.do(api.startDownload,(node,path))
205 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/qbit_downloader.py:
--------------------------------------------------------------------------------
1 | # Implement By - @anasty17 (https://github.com/SlamDevs/slam-mirrorbot/commit/0bfba523f095ab1dccad431d72561e0e002e7a59)
2 | # (c) https://github.com/SlamDevs/slam-mirrorbot
3 | # All rights reserved
4 |
5 | import os
6 | import random
7 | import string
8 | import time
9 | import logging
10 | import shutil
11 |
12 | import qbittorrentapi as qba
13 | from fnmatch import fnmatch
14 | from urllib.parse import urlparse, parse_qs
15 | from torrentool.api import Torrent
16 | from telegram import InlineKeyboardMarkup
17 | from telegram.ext import CallbackQueryHandler
18 |
19 | from bot import download_dict, download_dict_lock, BASE_URL, dispatcher, get_client, TORRENT_DIRECT_LIMIT, TAR_UNZIP_LIMIT
20 | from bot.helper.mirror_utils.status_utils.qbit_download_status import QbDownloadStatus
21 | from bot.helper.telegram_helper.message_utils import *
22 | from bot.helper.ext_utils.bot_utils import setInterval, new_thread, MirrorStatus, getDownloadByGid, get_readable_file_size, check_limit
23 | from bot.helper.telegram_helper import button_build
24 |
25 | LOGGER = logging.getLogger(__name__)
26 | logging.getLogger('qbittorrentapi').setLevel(logging.ERROR)
27 | logging.getLogger('requests').setLevel(logging.ERROR)
28 | logging.getLogger('urllib3').setLevel(logging.ERROR)
29 |
30 | class QbitTorrent:
31 |
32 |
33 | def __init__(self):
34 | self.update_interval = 2
35 | self.meta_time = time.time()
36 | self.stalled_time = time.time()
37 | self.checked = False
38 |
39 | @new_thread
40 | def add_torrent(self, link, dire, listener, qbitsel):
41 | self.client = get_client()
42 | self.listener = listener
43 | self.dire = dire
44 | self.qbitsel = qbitsel
45 | is_file = False
46 | count = 0
47 | pincode = ""
48 | try:
49 | if os.path.exists(link):
50 | is_file = True
51 | self.ext_hash = get_hash_file(link)
52 | else:
53 | self.ext_hash = get_hash_magnet(link)
54 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash)
55 | if len(tor_info) > 0:
56 | sendMessage("This Torrent is already in list.", listener.bot, listener.update)
57 | self.client.auth_log_out()
58 | return
59 | if is_file:
60 | op = self.client.torrents_add(torrent_files=[link], save_path=dire)
61 | os.remove(link)
62 | else:
63 | op = self.client.torrents_add(link, save_path=dire)
64 | if op.lower() == "ok.":
65 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash)
66 | if len(tor_info) == 0:
67 | while True:
68 | if time.time() - self.meta_time >= 20:
69 | sendMessage("The Torrent was not added. Report when you see this error", listener.bot, listener.update)
70 | self.client.torrents_delete(torrent_hashes=self.ext_hash, delete_files=True)
71 | self.client.auth_log_out()
72 | return False
73 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash)
74 | if len(tor_info) > 0:
75 | break
76 | else:
77 | sendMessage("This is an unsupported/invalid link.", listener.bot, listener.update)
78 | self.client.torrents_delete(torrent_hashes=self.ext_hash, delete_files=True)
79 | self.client.auth_log_out()
80 | return
81 | gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=14))
82 | with download_dict_lock:
83 | download_dict[listener.uid] = QbDownloadStatus(gid, listener, self.ext_hash, self.client)
84 | tor_info = tor_info[0]
85 | LOGGER.info(f"QbitDownload started: {tor_info.name}")
86 | self.updater = setInterval(self.update_interval, self.update)
87 | if BASE_URL is not None and qbitsel:
88 | if not is_file:
89 | meta = sendMessage("Downloading metadata... Please wait, then you can select files, or mirror the torrent file if it has few seeders", listener.bot, listener.update)
90 | while True:
91 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash)
92 | if len(tor_info) == 0:
93 | deleteMessage(listener.bot, meta)
94 | return False
95 | try:
96 | tor_info = tor_info[0]
97 | if tor_info.state == "metaDL" or tor_info.state == "checkingResumeData":
98 | time.sleep(1)
99 | else:
100 | deleteMessage(listener.bot, meta)
101 | break
102 | except:
103 | deleteMessage(listener.bot, meta)
104 | return False
105 | time.sleep(0.5)
106 | self.client.torrents_pause(torrent_hashes=self.ext_hash)
107 | for n in str(self.ext_hash):
108 | if n.isdigit():
109 | pincode += str(n)
110 | count += 1
111 | if count == 4:
112 | break
113 | URL = f"{BASE_URL}/slam/files/{self.ext_hash}"
114 | pindata = f"pin {gid} {pincode}"
115 | donedata = f"done {gid} {self.ext_hash}"
116 | buttons = button_build.ButtonMaker()
117 | buttons.buildbutton("Select Files", URL)
118 | buttons.sbutton("Pincode", pindata)
119 | buttons.sbutton("Done Selecting", donedata)
120 | QBBUTTONS = InlineKeyboardMarkup(buttons.build_menu(2))
121 | msg = "Your download paused. Choose files then press Done Selecting button to start downloading."
122 | sendMarkup(msg, listener.bot, listener.update, QBBUTTONS)
123 | else:
124 | sendStatusMessage(listener.update, listener.bot)
125 | except qba.UnsupportedMediaType415Error as e:
126 | LOGGER.error(str(e))
127 | sendMessage("This is an unsupported/invalid link: {str(e)}", listener.bot, listener.update)
128 | self.client.torrents_delete(torrent_hashes=self.ext_hash, delete_files=True)
129 | self.client.auth_log_out()
130 | except Exception as e:
131 | LOGGER.error(str(e))
132 | sendMessage(str(e), listener.bot, listener.update)
133 | self.client.torrents_delete(torrent_hashes=self.ext_hash, delete_files=True)
134 | self.client.auth_log_out()
135 |
136 |
137 | def update(self):
138 | tor_info = self.client.torrents_info(torrent_hashes=self.ext_hash)
139 | if len(tor_info) == 0:
140 | self.client.auth_log_out()
141 | self.updater.cancel()
142 | return
143 | try:
144 | tor_info = tor_info[0]
145 | if tor_info.state == "metaDL":
146 | self.stalled_time = time.time()
147 | if time.time() - self.meta_time >= 999999999: # timeout while downloading metadata
148 | self.client.torrents_pause(torrent_hashes=self.ext_hash)
149 | time.sleep(0.3)
150 | self.listener.onDownloadError("Dead Torrent!")
151 | self.client.torrents_delete(torrent_hashes=self.ext_hash)
152 | self.client.auth_log_out()
153 | self.updater.cancel()
154 | elif tor_info.state == "downloading":
155 | self.stalled_time = time.time()
156 | if (TORRENT_DIRECT_LIMIT is not None or TAR_UNZIP_LIMIT is not None) and not self.checked:
157 | if self.listener.isTar or self.listener.extract:
158 | is_tar_ext = True
159 | mssg = f'Tar/Unzip limit is {TAR_UNZIP_LIMIT}'
160 | else:
161 | is_tar_ext = False
162 | mssg = f'Torrent/Direct limit is {TORRENT_DIRECT_LIMIT}'
163 | size = tor_info.size
164 | result = check_limit(size, TORRENT_DIRECT_LIMIT, TAR_UNZIP_LIMIT, is_tar_ext)
165 | self.checked = True
166 | if result:
167 | self.client.torrents_pause(torrent_hashes=self.ext_hash)
168 | time.sleep(0.3)
169 | self.listener.onDownloadError(f"{mssg}.\nYour File/Folder size is {get_readable_file_size(size)}")
170 | self.client.torrents_delete(torrent_hashes=self.ext_hash)
171 | self.client.auth_log_out()
172 | self.updater.cancel()
173 | elif tor_info.state == "stalledDL":
174 | if time.time() - self.stalled_time >= 999999999: # timeout after downloading metadata
175 | self.client.torrents_pause(torrent_hashes=self.ext_hash)
176 | time.sleep(0.3)
177 | self.listener.onDownloadError("Dead Torrent!")
178 | self.client.torrents_delete(torrent_hashes=self.ext_hash)
179 | self.client.auth_log_out()
180 | self.updater.cancel()
181 | elif tor_info.state == "error":
182 | self.client.torrents_pause(torrent_hashes=self.ext_hash)
183 | time.sleep(0.3)
184 | self.listener.onDownloadError("No enough space for this torrent on device")
185 | self.client.torrents_delete(torrent_hashes=self.ext_hash)
186 | self.client.auth_log_out()
187 | self.updater.cancel()
188 | elif tor_info.state == "uploading" or tor_info.state.lower().endswith("up"):
189 | self.client.torrents_pause(torrent_hashes=self.ext_hash)
190 | if self.qbitsel:
191 | for dirpath, subdir, files in os.walk(f"{self.dire}", topdown=False):
192 | for file in files:
193 | if fnmatch(file, "*.!qB"):
194 | os.remove(os.path.join(dirpath, file))
195 | for folder in subdir:
196 | if fnmatch(folder, ".unwanted"):
197 | shutil.rmtree(os.path.join(dirpath, folder))
198 | for dirpath, subdir, files in os.walk(f"{self.dire}", topdown=False):
199 | if not os.listdir(dirpath):
200 | os.rmdir(dirpath)
201 | self.listener.onDownloadComplete()
202 | self.client.torrents_delete(torrent_hashes=self.ext_hash)
203 | self.client.auth_log_out()
204 | self.updater.cancel()
205 | except:
206 | self.client.auth_log_out()
207 | self.updater.cancel()
208 |
209 |
210 | def get_confirm(update, context):
211 | query = update.callback_query
212 | user_id = query.from_user.id
213 | data = query.data
214 | data = data.split(" ")
215 | qdl = getDownloadByGid(data[1])
216 | if qdl is None:
217 | query.answer(text="This task has been cancelled!", show_alert=True)
218 | query.message.delete()
219 |
220 | elif user_id != qdl.listener.message.from_user.id:
221 | query.answer(text="Don't waste your time!", show_alert=True)
222 | elif data[0] == "pin":
223 | query.answer(text=data[2], show_alert=True)
224 | elif data[0] == "done":
225 | query.answer()
226 | qdl.client.torrents_resume(torrent_hashes=data[2])
227 | sendStatusMessage(qdl.listener.update, qdl.listener.bot)
228 | query.message.delete()
229 |
230 |
231 | def get_hash_magnet(mgt):
232 | if mgt.startswith('magnet:'):
233 | _, _, _, _, query, _ = urlparse(mgt)
234 | qs = parse_qs(query)
235 | v = qs.get('xt', None)
236 | if v is None or v == []:
237 | LOGGER.error('Invalid magnet URI: no "xt" query parameter.')
238 | return
239 | v = v[0]
240 | if not v.startswith('urn:btih:'):
241 | LOGGER.error('Invalid magnet URI: "xt" value not valid for BitTorrent.')
242 | return
243 | mgt = v[len('urn:btih:'):]
244 | return mgt.lower()
245 |
246 |
247 | def get_hash_file(path):
248 | tr = Torrent.from_file(path)
249 | mgt = tr.magnet_link
250 | return get_hash_magnet(mgt)
251 |
252 |
253 | pin_handler = CallbackQueryHandler(get_confirm, pattern="pin", run_async=True)
254 | done_handler = CallbackQueryHandler(get_confirm, pattern="done", run_async=True)
255 | dispatcher.add_handler(pin_handler)
256 | dispatcher.add_handler(done_handler)
257 |
--------------------------------------------------------------------------------
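
For reference, a small illustration (not part of the repo) of what the two hash helpers above return; the magnet URI and infohash are made-up values:

# Hypothetical usage of get_hash_magnet; the infohash is illustrative only.
link = "magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567&dn=example"
print(get_hash_magnet(link))  # -> "0123456789abcdef0123456789abcdef01234567"
# get_hash_file(path) builds a magnet link from a .torrent file via torrentool
# and routes it through get_hash_magnet, so both return the lowercase infohash.
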
/bot/helper/mirror_utils/download_utils/telegram_downloader.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import threading
3 | import time
4 | from bot import LOGGER, download_dict, download_dict_lock, app, STOP_DUPLICATE
5 | from .download_helper import DownloadHelper
6 | from ..status_utils.telegram_download_status import TelegramDownloadStatus
7 | from bot.helper.telegram_helper.message_utils import sendMarkup, sendStatusMessage
8 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
9 |
10 | global_lock = threading.Lock()
11 | GLOBAL_GID = set()
12 | logging.getLogger("pyrogram").setLevel(logging.WARNING)
13 |
14 |
15 | class TelegramDownloadHelper(DownloadHelper):
16 | def __init__(self, listener):
17 | super().__init__()
18 | self.__listener = listener
19 | self.__resource_lock = threading.RLock()
20 | self.__name = ""
21 | self.__start_time = time.time()
22 | self.__gid = ""
23 | self._bot = app
24 | self.__is_cancelled = False
25 |
26 | @property
27 | def gid(self):
28 | with self.__resource_lock:
29 | return self.__gid
30 |
31 | @property
32 | def download_speed(self):
33 | with self.__resource_lock:
34 | return self.downloaded_bytes / (time.time() - self.__start_time)
35 |
36 | def __onDownloadStart(self, name, size, file_id):
37 | with download_dict_lock:
38 | download_dict[self.__listener.uid] = TelegramDownloadStatus(self, self.__listener)
39 | with global_lock:
40 | GLOBAL_GID.add(file_id)
41 | with self.__resource_lock:
42 | self.name = name
43 | self.size = size
44 | self.__gid = file_id
45 | self.__listener.onDownloadStarted()
46 |
47 | def __onDownloadProgress(self, current, total):
48 | if self.__is_cancelled:
49 | self.__onDownloadError('Cancelled by user!')
50 | self._bot.stop_transmission()
51 | return
52 | with self.__resource_lock:
53 | self.downloaded_bytes = current
54 | try:
55 | self.progress = current / self.size * 100
56 | except ZeroDivisionError:
57 | self.progress = 0
58 |
59 | def __onDownloadError(self, error):
60 | with global_lock:
61 | try:
62 | GLOBAL_GID.remove(self.gid)
63 | except KeyError:
64 | pass
65 | self.__listener.onDownloadError(error)
66 |
67 | def __onDownloadComplete(self):
68 | with global_lock:
69 | GLOBAL_GID.remove(self.gid)
70 | self.__listener.onDownloadComplete()
71 |
72 | def __download(self, message, path):
73 | download = self._bot.download_media(
74 | message,
75 | progress = self.__onDownloadProgress,
76 | file_name = path
77 | )
78 | if download is not None:
79 | self.__onDownloadComplete()
80 | elif not self.__is_cancelled:
81 | self.__onDownloadError('Internal error occurred')
82 |
83 | def add_download(self, message, path, filename):
84 | _message = self._bot.get_messages(message.chat.id, reply_to_message_ids=message.message_id)
85 | media = None
86 | media_array = [_message.document, _message.video, _message.audio]
87 | for i in media_array:
88 | if i is not None:
89 | media = i
90 | break
91 | if media is not None:
92 | with global_lock:
93 | # Avoid holding the global lock longer than necessary
94 | download = media.file_id not in GLOBAL_GID
95 | if filename == "":
96 | name = media.file_name
97 | else:
98 | name = filename
99 | path = path + name
100 |
101 | if download:
102 | if STOP_DUPLICATE and not self.__listener.isLeech:
103 | LOGGER.info('Checking File/Folder if already in Drive...')
104 | gd = GoogleDriveHelper()
105 | smsg, button = gd.drive_list(name, True, True)
106 | if smsg:
107 | sendMarkup("File/Folder is already available in Drive.\nHere are the search results:", self.__listener.bot, self.__listener.update, button)
108 | return
109 | sendStatusMessage(self.__listener.update, self.__listener.bot)
110 | self.__onDownloadStart(name, media.file_size, media.file_id)
111 | LOGGER.info(f'Downloading Telegram file with id: {media.file_id}')
112 | threading.Thread(target=self.__download, args=(_message, path)).start()
113 | else:
114 | self.__onDownloadError('File already being downloaded!')
115 | else:
116 | self.__onDownloadError('No document in the replied message')
117 |
118 | def cancel_download(self):
119 | LOGGER.info(f'Cancelling download on user request: {self.gid}')
120 | self.__is_cancelled = True
121 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/download_utils/youtube_dl_download_helper.py:
--------------------------------------------------------------------------------
1 | from .download_helper import DownloadHelper
2 | import time
3 | from youtube_dl import YoutubeDL, DownloadError
4 | from bot import download_dict_lock, download_dict
5 | from ..status_utils.youtube_dl_download_status import YoutubeDLDownloadStatus
6 | import logging
7 | import re
8 | import threading
9 |
10 | LOGGER = logging.getLogger(__name__)
11 |
12 |
13 | class MyLogger:
14 | def __init__(self, obj):
15 | self.obj = obj
16 |
17 | def debug(self, msg):
18 | LOGGER.debug(msg)
19 | # Hack to fix the extension changing after ffmpeg merges formats
20 | match = re.search(r'.ffmpeg..Merging formats into..(.*?).$', msg)
21 | if match and not self.obj.is_playlist:
22 | newname = match.group(1)
23 | newname = newname.split("/")
24 | newname = newname[-1]
25 | self.obj.name = newname
26 |
27 | @staticmethod
28 | def warning(msg):
29 | LOGGER.warning(msg)
30 |
31 | @staticmethod
32 | def error(msg):
33 | LOGGER.error(msg)
34 |
35 |
36 | class YoutubeDLHelper(DownloadHelper):
37 | def __init__(self, listener):
38 | super().__init__()
39 | self.name = ""
40 | self.__start_time = time.time()
41 | self.__listener = listener
42 | self.__gid = ""
43 | self.opts = {
44 | 'progress_hooks': [self.__onDownloadProgress],
45 | 'logger': MyLogger(self),
46 | 'usenetrc': True
47 | }
48 | self.__download_speed = 0
49 | self.downloaded_bytes = 0
50 | self.size = 0
51 | self.is_playlist = False
52 | self.last_downloaded = 0
53 | self.is_cancelled = False
54 | self.vid_id = ''
55 | self.__resource_lock = threading.RLock()
56 |
57 | @property
58 | def download_speed(self):
59 | with self.__resource_lock:
60 | return self.__download_speed
61 |
62 | @property
63 | def gid(self):
64 | with self.__resource_lock:
65 | return self.__gid
66 |
67 | def __onDownloadProgress(self, d):
68 | if self.is_cancelled:
69 | raise ValueError("Cancelling Download..")
70 | if d['status'] == "finished":
71 | if self.is_playlist:
72 | self.last_downloaded = 0
73 | elif d['status'] == "downloading":
74 | with self.__resource_lock:
75 | self.__download_speed = d['speed']
76 | try:
77 | tbyte = d['total_bytes']
78 | except KeyError:
79 | tbyte = d['total_bytes_estimate']
80 | if self.is_playlist:
81 | progress = d['downloaded_bytes'] / tbyte
82 | chunk_size = d['downloaded_bytes'] - self.last_downloaded
83 | self.last_downloaded = tbyte * progress
84 | self.downloaded_bytes += chunk_size
85 | else:
86 | self.size = tbyte
87 | self.downloaded_bytes = d['downloaded_bytes']
88 | try:
89 | self.progress = (self.downloaded_bytes / self.size) * 100
90 | except ZeroDivisionError:
91 | pass
92 |
93 | def __onDownloadStart(self):
94 | with download_dict_lock:
95 | download_dict[self.__listener.uid] = YoutubeDLDownloadStatus(self, self.__listener)
96 |
97 | def __onDownloadComplete(self):
98 | self.__listener.onDownloadComplete()
99 |
100 | def onDownloadError(self, error):
101 | self.__listener.onDownloadError(error)
102 |
103 | def extractMetaData(self, link, qual, name):
104 | if "hotstar" in link or "sonyliv" in link:
105 | self.opts['geo_bypass_country'] = 'IN'
106 |
107 | with YoutubeDL(self.opts) as ydl:
108 | try:
109 | result = ydl.extract_info(link, download=False)
110 | name = ydl.prepare_filename(result) if name == "" else name
111 | # Crude hack to change the extension after converting to mp3
112 | if qual == "audio":
113 | name = name.replace(".mp4", ".mp3").replace(".webm", ".mp3")
114 | except DownloadError as e:
115 | self.onDownloadError(str(e))
116 | return
117 | if result.get('direct'):
118 | return None
119 | if 'entries' in result:
120 | video = result['entries'][0]
121 | for v in result['entries']:
122 | if v and v.get('filesize'):
123 | self.size += float(v['filesize'])
124 | # For playlists, ydl.prepare_filename returns names in the format <name>-<id>.NA
125 | self.name = name.split(f"-{result['id']}")[0]
126 | self.vid_id = video.get('id')
127 | self.is_playlist = True
128 | else:
129 | video = result
130 | if video.get('filesize'):
131 | self.size = float(video.get('filesize'))
132 | self.name = name
133 | self.vid_id = video.get('id')
134 | return video
135 |
136 | def __download(self, link):
137 | try:
138 | with YoutubeDL(self.opts) as ydl:
139 | try:
140 | ydl.download([link])
141 | except DownloadError as e:
142 | self.onDownloadError(str(e))
143 | return
144 | self.__onDownloadComplete()
145 | except ValueError:
146 | LOGGER.info("Download Cancelled by User!")
147 | self.onDownloadError("Download Cancelled by User!")
148 |
149 | def add_download(self, link, path, qual, name):
150 | pattern = r'^.*(youtu\.be\/|youtube\.com\/)(playlist?)'
151 | if re.match(pattern, link):
152 | self.opts['ignoreerrors'] = True
153 | self.__onDownloadStart()
154 | self.extractMetaData(link, qual, name)
155 | LOGGER.info(f"Downloading with YT-DL: {link}")
156 | self.__gid = f"{self.vid_id}{self.__listener.uid}"
157 | if qual == "audio":
158 | self.opts['format'] = 'bestaudio/best'
159 | self.opts['postprocessors'] = [{'key': 'FFmpegExtractAudio','preferredcodec': 'mp3','preferredquality': '320',}]
160 | else:
161 | self.opts['format'] = qual
162 | if not self.is_playlist:
163 | self.opts['outtmpl'] = f"{path}/{self.name}"
164 | else:
165 | self.opts['outtmpl'] = f"{path}/{self.name}/%(title)s.%(ext)s"
166 | self.__download(link)
167 |
168 | def cancel_download(self):
169 | self.is_cancelled = True
170 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/breakdowns/slam-mirrorbot/8c95f7b261c4be9db97c94bc69bfaa2039f5d853/bot/helper/mirror_utils/status_utils/__init__.py
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/aria_download_status.py:
--------------------------------------------------------------------------------
1 | from bot import aria2, DOWNLOAD_DIR, LOGGER
2 | from bot.helper.ext_utils.bot_utils import MirrorStatus
3 | from .status import Status
4 |
5 | def get_download(gid):
6 | return aria2.get_download(gid)
7 |
8 |
9 | class AriaDownloadStatus(Status):
10 |
11 | def __init__(self, gid, listener):
12 | super().__init__()
13 | self.upload_name = None
14 | self.__gid = gid
15 | self.__download = get_download(self.__gid)
16 | self.__uid = listener.uid
17 | self.__listener = listener
18 | self.message = listener.message
19 |
20 | def __update(self):
21 | self.__download = get_download(self.__gid)
22 | download = self.__download
23 | if download.followed_by_ids:
24 | self.__gid = download.followed_by_ids[0]
25 |
26 | def progress(self):
27 | """
28 | Calculates the progress of the mirror (upload or download)
29 | :return: returns progress in percentage
30 | """
31 | self.__update()
32 | return self.__download.progress_string()
33 |
34 | def size_raw(self):
35 | """
36 | Gets total size of the mirror file/folder
37 | :return: total size of mirror
38 | """
39 | return self.aria_download().total_length
40 |
41 | def processed_bytes(self):
42 | return self.aria_download().completed_length
43 |
44 | def speed(self):
45 | return self.aria_download().download_speed_string()
46 |
47 | def name(self):
48 | return self.aria_download().name
49 |
50 | def path(self):
51 | return f"{DOWNLOAD_DIR}{self.__uid}"
52 |
53 | def size(self):
54 | return self.aria_download().total_length_string()
55 |
56 | def eta(self):
57 | return self.aria_download().eta_string()
58 |
59 | def status(self):
60 | download = self.aria_download()
61 | if download.is_waiting:
62 | return MirrorStatus.STATUS_WAITING
63 | elif download.has_failed:
64 | return MirrorStatus.STATUS_FAILED
65 | else:
66 | return MirrorStatus.STATUS_DOWNLOADING
67 |
68 | def aria_download(self):
69 | self.__update()
70 | return self.__download
71 |
72 | def download(self):
73 | return self
74 |
75 | def getListener(self):
76 | return self.__listener
77 |
78 | def uid(self):
79 | return self.__uid
80 |
81 | def gid(self):
82 | self.__update()
83 | return self.__gid
84 |
85 | def cancel_download(self):
86 | LOGGER.info(f"Cancelling Download: {self.name()}")
87 | download = self.aria_download()
88 | if download.is_waiting:
89 | self.__listener.onDownloadError("Cancelled by user")
90 | aria2.remove([download], force=True)
91 | return
92 | if len(download.followed_by_ids) != 0:
93 | downloads = aria2.get_downloads(download.followed_by_ids)
94 | self.__listener.onDownloadError('Download stopped by user!')
95 | aria2.remove(downloads, force=True)
96 | aria2.remove([download], force=True)
97 | return
98 | self.__listener.onDownloadError('Download stopped by user!')
99 | aria2.remove([download], force=True)
100 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/clone_status.py:
--------------------------------------------------------------------------------
1 | # Implement By - @anasty17 (https://github.com/SlamDevs/slam-mirrorbot/commit/80d33430715b4296cd253f62cefc089a81937ebf)
2 | # (c) https://github.com/SlamDevs/slam-mirrorbot
3 | # All rights reserved
4 |
5 | from .status import Status
6 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time
7 |
8 |
9 | class CloneStatus(Status):
10 | def __init__(self, obj, size, update, gid):
11 | self.cobj = obj
12 | self.__csize = size
13 | self.message = update.message
14 | self.__cgid = gid
15 |
16 | def processed_bytes(self):
17 | return self.cobj.transferred_size
18 |
19 | def size_raw(self):
20 | return self.__csize
21 |
22 | def size(self):
23 | return get_readable_file_size(self.__csize)
24 |
25 | def status(self):
26 | return MirrorStatus.STATUS_CLONING
27 |
28 | def name(self):
29 | return self.cobj.name
30 |
31 | def gid(self) -> str:
32 | return self.__cgid
33 |
34 | def progress_raw(self):
35 | try:
36 | return self.cobj.transferred_size / self.__csize * 100
37 | except ZeroDivisionError:
38 | return 0
39 |
40 | def progress(self):
41 | return f'{round(self.progress_raw(), 2)}%'
42 |
43 | def speed_raw(self):
44 | """
45 | :return: Download speed in Bytes/Seconds
46 | """
47 | return self.cobj.cspeed()
48 |
49 | def speed(self):
50 | return f'{get_readable_file_size(self.speed_raw())}/s'
51 |
52 | def eta(self):
53 | try:
54 | seconds = (self.__csize - self.cobj.transferred_size) / self.speed_raw()
55 | return f'{get_readable_time(seconds)}'
56 | except ZeroDivisionError:
57 | return '-'
58 |
59 | def download(self):
60 | return self.cobj
61 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/extract_status.py:
--------------------------------------------------------------------------------
1 | from .status import Status
2 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus
3 |
4 |
5 | class ExtractStatus(Status):
6 | def __init__(self, name, path, size):
7 | self.__name = name
8 | self.__path = path
9 | self.__size = size
10 |
11 | # The progress of the extract function cannot be tracked, so we just return dummy values.
12 | # If this becomes possible in the future, we should implement it
13 |
14 | def progress(self):
15 | return '0'
16 |
17 | def speed(self):
18 | return '0'
19 |
20 | def name(self):
21 | return self.__name
22 |
23 | def path(self):
24 | return self.__path
25 |
26 | def size(self):
27 | return get_readable_file_size(self.__size)
28 |
29 | def eta(self):
30 | return '0s'
31 |
32 | def status(self):
33 | return MirrorStatus.STATUS_EXTRACTING
34 |
35 | def processed_bytes(self):
36 | return 0
37 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/gdownload_status.py:
--------------------------------------------------------------------------------
1 | # Implement By - @anasty17 (https://github.com/SlamDevs/slam-mirrorbot/pull/220)
2 | # (c) https://github.com/SlamDevs/slam-mirrorbot
3 | # All rights reserved
4 |
5 | from .status import Status
6 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time
7 | from bot import DOWNLOAD_DIR
8 |
9 |
10 | class DownloadStatus(Status):
11 | def __init__(self, obj, size, listener, gid):
12 | self.dobj = obj
13 | self.__dsize = size
14 | self.uid = listener.uid
15 | self.message = listener.message
16 | self.__dgid = gid
17 |
18 | def path(self):
19 | return f"{DOWNLOAD_DIR}{self.uid}"
20 |
21 | def processed_bytes(self):
22 | return self.dobj.downloaded_bytes
23 |
24 | def size_raw(self):
25 | return self.__dsize
26 |
27 | def size(self):
28 | return get_readable_file_size(self.__dsize)
29 |
30 | def status(self):
31 | return MirrorStatus.STATUS_DOWNLOADING
32 |
33 | def name(self):
34 | return self.dobj.name
35 |
36 | def gid(self) -> str:
37 | return self.__dgid
38 |
39 | def progress_raw(self):
40 | try:
41 | return self.dobj.downloaded_bytes / self.__dsize * 100
42 | except ZeroDivisionError:
43 | return 0
44 |
45 | def progress(self):
46 | return f'{round(self.progress_raw(), 2)}%'
47 |
48 | def speed_raw(self):
49 | """
50 | :return: Download speed in Bytes/Seconds
51 | """
52 | return self.dobj.dspeed()
53 |
54 | def speed(self):
55 | return f'{get_readable_file_size(self.speed_raw())}/s'
56 |
57 | def eta(self):
58 | try:
59 | seconds = (self.__dsize - self.dobj.downloaded_bytes) / self.speed_raw()
60 | return f'{get_readable_time(seconds)}'
61 | except ZeroDivisionError:
62 | return '-'
63 |
64 | def download(self):
65 | return self.dobj
66 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/listeners.py:
--------------------------------------------------------------------------------
1 | class MirrorListeners:
2 | def __init__(self, context, update):
3 | self.bot = context
4 | self.update = update
5 | self.message = update.message
6 | self.uid = self.message.message_id
7 |
8 | def onDownloadStarted(self):
9 | raise NotImplementedError
10 |
11 | def onDownloadProgress(self):
12 | raise NotImplementedError
13 |
14 | def onDownloadComplete(self):
15 | raise NotImplementedError
16 |
17 | def onDownloadError(self, error: str):
18 | raise NotImplementedError
19 |
20 | def onUploadStarted(self):
21 | raise NotImplementedError
22 |
23 | def onUploadProgress(self):
24 | raise NotImplementedError
25 |
26 | def onUploadComplete(self, link: str):
27 | raise NotImplementedError
28 |
29 | def onUploadError(self, error: str):
30 | raise NotImplementedError
31 |
--------------------------------------------------------------------------------
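
A minimal sketch (not part of the repo) of a concrete listener built on this interface; the LoggingMirrorListener class is hypothetical, while the bot's real listeners live elsewhere and also drive status messages and uploads:

from bot.helper.mirror_utils.status_utils.listeners import MirrorListeners


class LoggingMirrorListener(MirrorListeners):
    """Hypothetical listener that only logs the callbacks it receives.
    Callbacks that are not overridden here still raise NotImplementedError."""

    def onDownloadStarted(self):
        print(f"[{self.uid}] download started")

    def onDownloadComplete(self):
        print(f"[{self.uid}] download complete")

    def onDownloadError(self, error: str):
        print(f"[{self.uid}] download failed: {error}")
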
/bot/helper/mirror_utils/status_utils/mega_download_status.py:
--------------------------------------------------------------------------------
1 | from bot.helper.ext_utils.bot_utils import get_readable_file_size,MirrorStatus, get_readable_time
2 | from bot import DOWNLOAD_DIR
3 | from .status import Status
4 |
5 |
6 | class MegaDownloadStatus(Status):
7 |
8 | def __init__(self, obj, listener):
9 | self.uid = obj.uid
10 | self.listener = listener
11 | self.obj = obj
12 | self.message = listener.message
13 |
14 | def name(self) -> str:
15 | return self.obj.name
16 |
17 | def progress_raw(self):
18 | try:
19 | return round(self.processed_bytes() / self.obj.size * 100,2)
20 | except ZeroDivisionError:
21 | return 0.0
22 |
23 | def progress(self):
24 | """Progress of download in percentage"""
25 | return f"{self.progress_raw()}%"
26 |
27 | def status(self) -> str:
28 | return MirrorStatus.STATUS_DOWNLOADING
29 |
30 | def processed_bytes(self):
31 | return self.obj.downloaded_bytes
32 |
33 | def eta(self):
34 | try:
35 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
36 | return f'{get_readable_time(seconds)}'
37 | except ZeroDivisionError:
38 | return '-'
39 |
40 | def size_raw(self):
41 | return self.obj.size
42 |
43 | def size(self) -> str:
44 | return get_readable_file_size(self.size_raw())
45 |
46 | def downloaded(self) -> str:
47 | return get_readable_file_size(self.obj.downloaded_bytes)
48 |
49 | def speed_raw(self):
50 | return self.obj.speed
51 |
52 | def speed(self) -> str:
53 | return f'{get_readable_file_size(self.speed_raw())}/s'
54 |
55 | def gid(self) -> str:
56 | return self.obj.gid
57 |
58 | def path(self) -> str:
59 | return f"{DOWNLOAD_DIR}{self.uid}"
60 |
61 | def download(self):
62 | return self.obj
63 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/qbit_download_status.py:
--------------------------------------------------------------------------------
1 | # Implement By - @anasty17 (https://github.com/SlamDevs/slam-mirrorbot/commit/0bfba523f095ab1dccad431d72561e0e002e7a59)
2 | # (c) https://github.com/SlamDevs/slam-mirrorbot
3 | # All rights reserved
4 |
5 | from bot import DOWNLOAD_DIR, LOGGER, get_client
6 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time
7 | from .status import Status
8 | from time import sleep
9 |
10 |
11 | class QbDownloadStatus(Status):
12 |
13 | def __init__(self, gid, listener, qbhash, client):
14 | super().__init__()
15 | self.__gid = gid
16 | self.__hash = qbhash
17 | self.client = client
18 | self.__uid = listener.uid
19 | self.listener = listener
20 | self.message = listener.message
21 |
22 |
23 | def progress(self):
24 | """
25 | Calculates the progress of the mirror (upload or download)
26 | :return: returns progress in percentage
27 | """
28 | return f'{round(self.torrent_info().progress*100,2)}%'
29 |
30 | def size_raw(self):
31 | """
32 | Gets total size of the mirror file/folder
33 | :return: total size of mirror
34 | """
35 | return self.torrent_info().size
36 |
37 | def processed_bytes(self):
38 | return self.torrent_info().downloaded
39 |
40 | def speed(self):
41 | return f"{get_readable_file_size(self.torrent_info().dlspeed)}/s"
42 |
43 | def name(self):
44 | return self.torrent_info().name
45 |
46 | def path(self):
47 | return f"{DOWNLOAD_DIR}{self.__uid}"
48 |
49 | def size(self):
50 | return get_readable_file_size(self.torrent_info().size)
51 |
52 | def eta(self):
53 | return get_readable_time(self.torrent_info().eta)
54 |
55 | def status(self):
56 | download = self.torrent_info().state
57 | if download == "queuedDL":
58 | return MirrorStatus.STATUS_WAITING
59 | elif download in ["metaDL", "checkingResumeData"]:
60 | return MirrorStatus.STATUS_DOWNLOADING + " (Metadata)"
61 | elif download == "pausedDL":
62 | return MirrorStatus.STATUS_PAUSE
63 | else:
64 | return MirrorStatus.STATUS_DOWNLOADING
65 |
66 | def torrent_info(self):
67 | return self.client.torrents_info(torrent_hashes=self.__hash)[0]
68 |
69 | def download(self):
70 | return self
71 |
72 | def uid(self):
73 | return self.__uid
74 |
75 | def gid(self):
76 | return self.__gid
77 |
78 | def cancel_download(self):
79 | LOGGER.info(f"Cancelling Download: {self.name()}")
80 | self.client.torrents_pause(torrent_hashes=self.__hash)
81 | sleep(0.3)
82 | self.listener.onDownloadError('Download stopped by user!')
83 | self.client.torrents_delete(torrent_hashes=self.__hash)
84 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/split_status.py:
--------------------------------------------------------------------------------
1 | # Implement By - @anasty17 (https://github.com/SlamDevs/slam-mirrorbot/commit/d888a1e7237f4633c066f7c2bbfba030b83ad616)
2 | # (c) https://github.com/SlamDevs/slam-mirrorbot
3 | # All rights reserved
4 |
5 | from .status import Status
6 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus
7 |
8 |
9 | class SplitStatus(Status):
10 | def __init__(self, name, path, size):
11 | self.__name = name
12 | self.__path = path
13 | self.__size = size
14 |
15 | # The progress of the split function cannot be tracked, so we just return dummy values.
16 | # If this becomes possible in the future, we should implement it
17 |
18 | def progress(self):
19 | return '0'
20 |
21 | def speed(self):
22 | return '0'
23 |
24 | def name(self):
25 | return self.__name
26 |
27 | def path(self):
28 | return self.__path
29 |
30 | def size(self):
31 | return get_readable_file_size(self.__size)
32 |
33 | def eta(self):
34 | return '0s'
35 |
36 | def status(self):
37 | return MirrorStatus.STATUS_SPLITTING
38 |
39 | def processed_bytes(self):
40 | return 0
41 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/status.py:
--------------------------------------------------------------------------------
1 | # Generic status class. All other status classes must inherit this class
2 |
3 |
4 | class Status:
5 |
6 | def progress(self):
7 | """
8 | Calculates the progress of the mirror (upload or download)
9 | :return: progress in percentage
10 | """
11 | raise NotImplementedError
12 |
13 | def speed(self):
14 | """:return: speed in bytes per second"""
15 | raise NotImplementedError
16 |
17 | def name(self):
18 |         """:return: name of the file/directory being processed"""
19 | raise NotImplementedError
20 |
21 | def path(self):
22 |         """:return: path of the file/directory"""
23 | raise NotImplementedError
24 |
25 | def size(self):
26 |         """:return: size of the file/folder"""
27 | raise NotImplementedError
28 |
29 | def eta(self):
30 |         """:return: ETA for the process to complete"""
31 | raise NotImplementedError
32 |
33 | def status(self):
34 |         """:return: string describing what this object will be tracking (upload/download/something
35 |         else) """
36 | raise NotImplementedError
37 |
38 | def processed_bytes(self):
39 |         """:return: the size of the file that has been processed (downloaded/uploaded/archived)"""
40 | raise NotImplementedError
41 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/tar_status.py:
--------------------------------------------------------------------------------
1 | from .status import Status
2 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, MirrorStatus
3 |
4 |
5 | class TarStatus(Status):
6 | def __init__(self, name, path, size):
7 | self.__name = name
8 | self.__path = path
9 | self.__size = size
10 |
11 | # The progress of the tar function cannot be tracked, so we just return dummy values.
12 | # If this becomes possible in the future, we should implement it
13 |
14 | def progress(self):
15 | return '0'
16 |
17 | def speed(self):
18 | return '0'
19 |
20 | def name(self):
21 | return self.__name
22 |
23 | def path(self):
24 | return self.__path
25 |
26 | def size(self):
27 | return get_readable_file_size(self.__size)
28 |
29 | def eta(self):
30 | return '0s'
31 |
32 | def status(self):
33 | return MirrorStatus.STATUS_ARCHIVING
34 |
35 | def processed_bytes(self):
36 | return 0
37 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/telegram_download_status.py:
--------------------------------------------------------------------------------
1 | from bot import DOWNLOAD_DIR
2 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time
3 | from .status import Status
4 |
5 |
6 | class TelegramDownloadStatus(Status):
7 | def __init__(self, obj, listener):
8 | self.obj = obj
9 | self.uid = listener.uid
10 | self.message = listener.message
11 |
12 | def gid(self):
13 | return self.obj.gid
14 |
15 | def path(self):
16 | return f"{DOWNLOAD_DIR}{self.uid}"
17 |
18 | def processed_bytes(self):
19 | return self.obj.downloaded_bytes
20 |
21 | def size_raw(self):
22 | return self.obj.size
23 |
24 | def size(self):
25 | return get_readable_file_size(self.size_raw())
26 |
27 | def status(self):
28 | return MirrorStatus.STATUS_DOWNLOADING
29 |
30 | def name(self):
31 | return self.obj.name
32 |
33 | def progress_raw(self):
34 | return self.obj.progress
35 |
36 | def progress(self):
37 | return f'{round(self.progress_raw(), 2)}%'
38 |
39 | def speed_raw(self):
40 | """
41 | :return: Download speed in Bytes/Seconds
42 | """
43 | return self.obj.download_speed
44 |
45 | def speed(self):
46 | return f'{get_readable_file_size(self.speed_raw())}/s'
47 |
48 | def eta(self):
49 | try:
50 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
51 | return f'{get_readable_time(seconds)}'
52 | except ZeroDivisionError:
53 | return '-'
54 |
55 | def download(self):
56 | return self.obj
57 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/tg_upload_status.py:
--------------------------------------------------------------------------------
1 | # Implement By - @anasty17 (https://github.com/SlamDevs/slam-mirrorbot/commit/d888a1e7237f4633c066f7c2bbfba030b83ad616)
2 | # (c) https://github.com/SlamDevs/slam-mirrorbot
3 | # All rights reserved
4 |
5 | from .status import Status
6 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time
7 | from bot import DOWNLOAD_DIR
8 |
9 |
10 | class TgUploadStatus(Status):
11 | def __init__(self, obj, size, gid, listener):
12 | self.obj = obj
13 | self.__size = size
14 | self.uid = listener.uid
15 | self.message = listener.message
16 | self.__gid = gid
17 |
18 | def path(self):
19 | return f"{DOWNLOAD_DIR}{self.uid}"
20 |
21 | def processed_bytes(self):
22 | return self.obj.uploaded_bytes
23 |
24 | def size_raw(self):
25 | return self.__size
26 |
27 | def size(self):
28 | return get_readable_file_size(self.__size)
29 |
30 | def status(self):
31 | return MirrorStatus.STATUS_UPLOADING
32 |
33 | def name(self):
34 | return self.obj.name
35 |
36 | def progress_raw(self):
37 | try:
38 | return self.obj.uploaded_bytes / self.__size * 100
39 | except ZeroDivisionError:
40 | return 0
41 |
42 | def progress(self):
43 | return f'{round(self.progress_raw(), 2)}%'
44 |
45 | def speed_raw(self):
46 | """
47 | :return: Upload speed in Bytes/Seconds
48 | """
49 | return self.obj.speed()
50 |
51 | def speed(self):
52 | return f'{get_readable_file_size(self.speed_raw())}/s'
53 |
54 | def eta(self):
55 | try:
56 | seconds = (self.__size - self.obj.uploaded_bytes) / self.speed_raw()
57 | return f'{get_readable_time(seconds)}'
58 | except ZeroDivisionError:
59 | return '-'
60 |
61 | def gid(self) -> str:
62 | return self.__gid
63 |
64 | def download(self):
65 | return self.obj
66 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/upload_status.py:
--------------------------------------------------------------------------------
1 | from .status import Status
2 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time
3 | from bot import DOWNLOAD_DIR
4 |
5 |
6 | class UploadStatus(Status):
7 | def __init__(self, obj, size, gid, listener):
8 | self.obj = obj
9 | self.__size = size
10 | self.uid = listener.uid
11 | self.message = listener.message
12 | self.__gid = gid
13 |
14 | def path(self):
15 | return f"{DOWNLOAD_DIR}{self.uid}"
16 |
17 | def processed_bytes(self):
18 | return self.obj.uploaded_bytes
19 |
20 | def size_raw(self):
21 | return self.__size
22 |
23 | def size(self):
24 | return get_readable_file_size(self.__size)
25 |
26 | def status(self):
27 | return MirrorStatus.STATUS_UPLOADING
28 |
29 | def name(self):
30 | return self.obj.name
31 |
32 | def progress_raw(self):
33 | try:
34 | return self.obj.uploaded_bytes / self.__size * 100
35 | except ZeroDivisionError:
36 | return 0
37 |
38 | def progress(self):
39 | return f'{round(self.progress_raw(), 2)}%'
40 |
41 | def speed_raw(self):
42 | """
43 | :return: Upload speed in Bytes/Seconds
44 | """
45 | return self.obj.speed()
46 |
47 | def speed(self):
48 | return f'{get_readable_file_size(self.speed_raw())}/s'
49 |
50 | def eta(self):
51 | try:
52 | seconds = (self.__size - self.obj.uploaded_bytes) / self.speed_raw()
53 | return f'{get_readable_time(seconds)}'
54 | except ZeroDivisionError:
55 | return '-'
56 |
57 | def gid(self) -> str:
58 | return self.__gid
59 |
60 | def download(self):
61 | return self.obj
62 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/status_utils/youtube_dl_download_status.py:
--------------------------------------------------------------------------------
1 | from bot import DOWNLOAD_DIR
2 | from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time
3 | from .status import Status
4 | from bot.helper.ext_utils.fs_utils import get_path_size
5 |
6 | class YoutubeDLDownloadStatus(Status):
7 | def __init__(self, obj, listener):
8 | self.obj = obj
9 | self.uid = listener.uid
10 | self.message = listener.message
11 |
12 | def gid(self):
13 | return self.obj.gid
14 |
15 | def path(self):
16 | return f"{DOWNLOAD_DIR}{self.uid}"
17 |
18 | def processed_bytes(self):
19 | if self.obj.downloaded_bytes != 0:
20 | return self.obj.downloaded_bytes
21 | else:
22 | return get_path_size(f"{DOWNLOAD_DIR}{self.uid}")
23 |
24 | def size_raw(self):
25 | return self.obj.size
26 |
27 | def size(self):
28 | return get_readable_file_size(self.size_raw())
29 |
30 | def status(self):
31 | return MirrorStatus.STATUS_DOWNLOADING
32 |
33 | def name(self):
34 | return self.obj.name
35 |
36 | def progress_raw(self):
37 | return self.obj.progress
38 |
39 | def progress(self):
40 | return f'{round(self.progress_raw(), 2)}%'
41 |
42 | def speed_raw(self):
43 | """
44 | :return: Download speed in Bytes/Seconds
45 | """
46 | return self.obj.download_speed
47 |
48 | def speed(self):
49 | return f'{get_readable_file_size(self.speed_raw())}/s'
50 |
51 | def eta(self):
52 | try:
53 | seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
54 | return f'{get_readable_time(seconds)}'
55 | except:
56 | return '-'
57 |
58 | def download(self):
59 | return self.obj
60 |
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/upload_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/breakdowns/slam-mirrorbot/8c95f7b261c4be9db97c94bc69bfaa2039f5d853/bot/helper/mirror_utils/upload_utils/__init__.py
--------------------------------------------------------------------------------
/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py:
--------------------------------------------------------------------------------
1 | # Implement By - @anasty17 (https://github.com/SlamDevs/slam-mirrorbot/commit/d888a1e7237f4633c066f7c2bbfba030b83ad616)
2 | # (c) https://github.com/SlamDevs/slam-mirrorbot
3 | # All rights reserved
4 |
5 | import os
6 | import logging
7 | import time
8 |
9 | from pyrogram.errors import FloodWait
10 | from hachoir.parser import createParser
11 | from hachoir.metadata import extractMetadata
12 |
13 | from bot import app, DOWNLOAD_DIR, AS_DOCUMENT, AS_DOC_USERS, AS_MEDIA_USERS
14 | from bot.helper.ext_utils.fs_utils import take_ss
15 |
16 | LOGGER = logging.getLogger(__name__)
17 | logging.getLogger("pyrogram").setLevel(logging.WARNING)
18 |
19 | VIDEO_SUFFIXES = ("MKV", "MP4", "MOV", "WMV", "3GP", "MPG", "WEBM", "AVI", "FLV", "M4V")
20 | AUDIO_SUFFIXES = ("MP3", "M4A", "M4B", "FLAC", "WAV", "AIF", "OGG", "AAC", "DTS", "MID", "AMR", "MKA")
21 | IMAGE_SUFFIXES = ("JPG", "JPX", "PNG", "GIF", "WEBP", "CR2", "TIF", "BMP", "JXR", "PSD", "ICO", "HEIC")
22 |
23 |
24 | class TgUploader:
25 |
26 | def __init__(self, name=None, listener=None):
27 | self.__listener = listener
28 | self.name = name
29 | self.__app = app
30 | self.total_bytes = 0
31 | self.uploaded_bytes = 0
32 | self.last_uploaded = 0
33 | self.start_time = time.time()
34 | self.is_cancelled = False
35 | self.chat_id = listener.message.chat.id
36 | self.message_id = listener.uid
37 | self.user_id = listener.message.from_user.id
38 | self.as_doc = AS_DOCUMENT
39 | self.thumb = f"Thumbnails/{self.user_id}.jpg"
40 | self.sent_msg = self.__app.get_messages(self.chat_id, self.message_id)
41 |
42 | def upload(self):
43 | msgs_dict = {}
44 | path = f"{DOWNLOAD_DIR}{self.message_id}"
45 | self.user_settings()
46 | for dirpath, subdir, files in sorted(os.walk(path)):
47 | for file in sorted(files):
48 | if self.is_cancelled:
49 | return
50 | up_path = os.path.join(dirpath, file)
51 | self.upload_file(up_path, file, dirpath)
52 | if self.is_cancelled:
53 | return
54 | msgs_dict[file] = self.sent_msg.message_id
55 | self.last_uploaded = 0
56 | LOGGER.info(f"Leech Done: {self.name}")
57 | self.__listener.onUploadComplete(self.name, None, msgs_dict, None, None)
58 |
59 | def upload_file(self, up_path, file, dirpath):
60 |         cap_mono = f"<code>{file}</code>"
61 | notMedia = False
62 | thumb = self.thumb
63 | try:
64 | if not self.as_doc:
65 | duration = 0
66 | if file.upper().endswith(VIDEO_SUFFIXES):
67 | metadata = extractMetadata(createParser(up_path))
68 | if metadata.has("duration"):
69 | duration = metadata.get("duration").seconds
70 | if thumb is None:
71 | thumb = take_ss(up_path)
72 | if self.is_cancelled:
73 | return
74 | if not file.upper().endswith(("MKV", "MP4")):
75 | file = os.path.splitext(file)[0] + '.mp4'
76 | new_path = os.path.join(dirpath, file)
77 | os.rename(up_path, new_path)
78 | up_path = new_path
79 | self.sent_msg = self.sent_msg.reply_video(video=up_path,
80 | quote=True,
81 | caption=cap_mono,
82 | parse_mode="html",
83 | duration=duration,
84 | width=480,
85 | height=320,
86 | thumb=thumb,
87 | supports_streaming=True,
88 | disable_notification=True,
89 | progress=self.upload_progress)
90 | if self.thumb is None and thumb is not None and os.path.lexists(thumb):
91 | os.remove(thumb)
92 | elif file.upper().endswith(AUDIO_SUFFIXES):
93 | metadata = extractMetadata(createParser(up_path))
94 | if metadata.has("duration"):
95 | duration = metadata.get('duration').seconds
96 | title = metadata.get("title") if metadata.has("title") else None
97 | artist = metadata.get("artist") if metadata.has("artist") else None
98 | self.sent_msg = self.sent_msg.reply_audio(audio=up_path,
99 | quote=True,
100 | caption=cap_mono,
101 | parse_mode="html",
102 | duration=duration,
103 | performer=artist,
104 | title=title,
105 | thumb=thumb,
106 | disable_notification=True,
107 | progress=self.upload_progress)
108 | elif file.upper().endswith(IMAGE_SUFFIXES):
109 | self.sent_msg = self.sent_msg.reply_photo(photo=up_path,
110 | quote=True,
111 | caption=cap_mono,
112 | parse_mode="html",
113 | disable_notification=True,
114 | progress=self.upload_progress)
115 | else:
116 | notMedia = True
117 | if self.as_doc or notMedia:
118 | if file.upper().endswith(VIDEO_SUFFIXES) and thumb is None:
119 | thumb = take_ss(up_path)
120 | if self.is_cancelled:
121 | return
122 | self.sent_msg = self.sent_msg.reply_document(document=up_path,
123 | quote=True,
124 | thumb=thumb,
125 | caption=cap_mono,
126 | parse_mode="html",
127 | disable_notification=True,
128 | progress=self.upload_progress)
129 | if self.thumb is None and thumb is not None and os.path.lexists(thumb):
130 | os.remove(thumb)
131 | if not self.is_cancelled:
132 | os.remove(up_path)
133 | except FloodWait as f:
134 | LOGGER.info(f)
135 | time.sleep(f.x)
136 | def upload_progress(self, current, total):
137 | if self.is_cancelled:
138 | self.__app.stop_transmission()
139 | return
140 | chunk_size = current - self.last_uploaded
141 | self.last_uploaded = current
142 | self.uploaded_bytes += chunk_size
143 |
144 | def user_settings(self):
145 | if self.user_id in AS_DOC_USERS:
146 | self.as_doc = True
147 | elif self.user_id in AS_MEDIA_USERS:
148 | self.as_doc = False
149 | if not os.path.lexists(self.thumb):
150 | self.thumb = None
151 |
152 | def speed(self):
153 | try:
154 | return self.uploaded_bytes / (time.time() - self.start_time)
155 | except ZeroDivisionError:
156 | return 0
157 |
158 | def cancel_download(self):
159 | self.is_cancelled = True
160 | LOGGER.info(f"Cancelling Upload: {self.name}")
161 | self.__listener.onUploadError('your upload has been stopped!')
162 |
--------------------------------------------------------------------------------
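Note: the suffix routing in TgUploader.upload_file relies on str.endswith accepting a tuple, so one call tests a whole family of extensions against the upper-cased filename. A minimal standalone sketch (hypothetical helper name, shortened suffix tuples):

    VIDEO_SUFFIXES = ("MKV", "MP4", "MOV")
    AUDIO_SUFFIXES = ("MP3", "FLAC", "WAV")

    def media_kind(filename: str) -> str:
        name = filename.upper()
        if name.endswith(VIDEO_SUFFIXES):
            return "video"     # sent via reply_video
        if name.endswith(AUDIO_SUFFIXES):
            return "audio"     # sent via reply_audio
        return "document"      # everything else falls back to reply_document

    print(media_kind("movie.mkv"))  # video
    print(media_kind("notes.pdf"))  # document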
/bot/helper/telegram_helper/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/breakdowns/slam-mirrorbot/8c95f7b261c4be9db97c94bc69bfaa2039f5d853/bot/helper/telegram_helper/__init__.py
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/bot_commands.py:
--------------------------------------------------------------------------------
1 | class _BotCommands:
2 | def __init__(self):
3 | self.StartCommand = 'start'
4 | self.MirrorCommand = 'mirror'
5 | self.UnzipMirrorCommand = 'unzipmirror'
6 | self.TarMirrorCommand = 'tarmirror'
7 | self.ZipMirrorCommand = 'zipmirror'
8 | self.CancelMirror = 'cancel'
9 | self.CancelAllCommand = 'cancelall'
10 | self.ListCommand = 'list'
11 | self.StatusCommand = 'status'
12 | self.AuthorizedUsersCommand = 'users'
13 | self.AuthorizeCommand = 'authorize'
14 | self.UnAuthorizeCommand = 'unauthorize'
15 | self.AddSudoCommand = 'addsudo'
16 | self.RmSudoCommand = 'rmsudo'
17 | self.PingCommand = 'ping'
18 | self.RestartCommand = 'restart'
19 | self.StatsCommand = 'stats'
20 | self.HelpCommand = 'help'
21 | self.LogCommand = 'log'
22 | self.SpeedCommand = 'speedtest'
23 | self.CloneCommand = 'clone'
24 | self.CountCommand = 'count'
25 | self.WatchCommand = 'watch'
26 | self.TarWatchCommand = 'tarwatch'
27 | self.ZipWatchCommand = 'zipwatch'
28 | self.QbMirrorCommand = 'qbmirror'
29 | self.QbUnzipMirrorCommand = 'qbunzipmirror'
30 | self.QbTarMirrorCommand = 'qbtarmirror'
31 | self.QbZipMirrorCommand = 'qbzipmirror'
32 | self.DeleteCommand = 'del'
33 | self.ShellCommand = 'shell'
34 | self.ExecHelpCommand = 'exechelp'
35 | self.TsHelpCommand = 'tshelp'
36 | self.LeechSetCommand = 'leechset'
37 | self.SetThumbCommand = 'setthumb'
38 | self.LeechCommand = 'leech'
39 | self.TarLeechCommand = 'tarleech'
40 | self.UnzipLeechCommand = 'unzipleech'
41 | self.ZipLeechCommand = 'zipleech'
42 | self.QbLeechCommand = 'qbleech'
43 | self.QbTarLeechCommand = 'qbtarleech'
44 | self.QbUnzipLeechCommand = 'qbunzipleech'
45 | self.QbZipLeechCommand = 'qbzipleech'
46 | self.LeechWatchCommand = 'leechwatch'
47 | self.LeechTarWatchCommand = 'leechtarwatch'
48 | self.LeechZipWatchCommand = 'leechzipwatch'
49 |
50 | BotCommands = _BotCommands()
51 |
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/button_build.py:
--------------------------------------------------------------------------------
1 | from telegram import InlineKeyboardButton
2 |
3 |
4 | class ButtonMaker:
5 | def __init__(self):
6 | self.button = []
7 |
8 | def buildbutton(self, key, link):
9 | self.button.append(InlineKeyboardButton(text = key, url = link))
10 |
11 | def sbutton(self, key, data):
12 | self.button.append(InlineKeyboardButton(text = key, callback_data = data))
13 |
14 | def build_menu(self, n_cols, footer_buttons=None, header_buttons=None):
15 | menu = [self.button[i:i + n_cols] for i in range(0, len(self.button), n_cols)]
16 | if header_buttons:
17 | menu.insert(0, header_buttons)
18 | if footer_buttons:
19 | menu.append(footer_buttons)
20 | return menu
21 |
--------------------------------------------------------------------------------
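A quick usage sketch for ButtonMaker (the labels, URL and callback data here are hypothetical): buttons are appended in order, and build_menu(n_cols) chunks them into rows of n_cols for an InlineKeyboardMarkup, as leech_settings.py does below.

    from telegram import InlineKeyboardMarkup
    from bot.helper.telegram_helper import button_build

    buttons = button_build.ButtonMaker()
    buttons.buildbutton("Drive Link", "https://example.com/file")  # URL button
    buttons.sbutton("Close", "closeset 12345")                     # callback button
    buttons.sbutton("Refresh", "refresh 12345")

    # build_menu(2) -> [[Drive Link, Close], [Refresh]]
    markup = InlineKeyboardMarkup(buttons.build_menu(2))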
/bot/helper/telegram_helper/filters.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import MessageFilter
2 | from telegram import Message
3 | from bot import AUTHORIZED_CHATS, SUDO_USERS, OWNER_ID, download_dict, download_dict_lock
4 |
5 |
6 | class CustomFilters:
7 | class _OwnerFilter(MessageFilter):
8 | def filter(self, message):
9 | return bool(message.from_user.id == OWNER_ID)
10 |
11 | owner_filter = _OwnerFilter()
12 |
13 | class _AuthorizedUserFilter(MessageFilter):
14 | def filter(self, message):
15 | id = message.from_user.id
16 | return bool(id in AUTHORIZED_CHATS or id in SUDO_USERS or id == OWNER_ID)
17 |
18 | authorized_user = _AuthorizedUserFilter()
19 |
20 | class _AuthorizedChat(MessageFilter):
21 | def filter(self, message):
22 | return bool(message.chat.id in AUTHORIZED_CHATS)
23 |
24 | authorized_chat = _AuthorizedChat()
25 |
26 | class _SudoUser(MessageFilter):
27 | def filter(self,message):
28 | return bool(message.from_user.id in SUDO_USERS)
29 |
30 | sudo_user = _SudoUser()
31 |
32 | class _MirrorOwner(MessageFilter):
33 | def filter(self, message: Message):
34 | user_id = message.from_user.id
35 | if user_id == OWNER_ID:
36 | return True
37 | args = str(message.text).split(' ')
38 | if len(args) > 1:
39 | # Cancelling by gid
40 | with download_dict_lock:
41 | for message_id, status in download_dict.items():
42 | if status.gid() == args[1] and status.message.from_user.id == user_id:
43 | return True
44 | else:
45 | return False
46 | if not message.reply_to_message and len(args) == 1:
47 | return True
48 | # Cancelling by replying to original mirror message
49 | reply_user = message.reply_to_message.from_user.id
50 | return bool(reply_user == user_id)
51 | mirror_owner_filter = _MirrorOwner()
52 |
--------------------------------------------------------------------------------
/bot/helper/telegram_helper/message_utils.py:
--------------------------------------------------------------------------------
1 | from telegram import InlineKeyboardMarkup
2 | from telegram.message import Message
3 | from telegram.update import Update
4 | import psutil, shutil
5 | import time
6 | from bot import AUTO_DELETE_MESSAGE_DURATION, LOGGER, bot, \
7 | status_reply_dict, status_reply_dict_lock, download_dict, download_dict_lock, botStartTime, Interval, DOWNLOAD_STATUS_UPDATE_INTERVAL
8 | from bot.helper.ext_utils.bot_utils import get_readable_message, get_readable_file_size, get_readable_time, MirrorStatus, setInterval
9 | from telegram.error import TimedOut, BadRequest
10 |
11 |
12 | def sendMessage(text: str, bot, update: Update):
13 | try:
14 | return bot.send_message(update.message.chat_id,
15 | reply_to_message_id=update.message.message_id,
16 | text=text, allow_sending_without_reply=True, parse_mode='HTML')
17 | except Exception as e:
18 | LOGGER.error(str(e))
19 | def sendMarkup(text: str, bot, update: Update, reply_markup: InlineKeyboardMarkup):
20 | return bot.send_message(update.message.chat_id,
21 | reply_to_message_id=update.message.message_id,
22 | text=text, reply_markup=reply_markup, allow_sending_without_reply=True, parse_mode='HTML')
23 |
24 | def editMessage(text: str, message: Message, reply_markup=None):
25 | try:
26 | bot.edit_message_text(text=text, message_id=message.message_id,
27 | chat_id=message.chat.id, reply_markup=reply_markup,
28 | parse_mode='HTML')
29 | except Exception as e:
30 | LOGGER.error(str(e))
31 |
32 |
33 | def deleteMessage(bot, message: Message):
34 | try:
35 | bot.delete_message(chat_id=message.chat.id,
36 | message_id=message.message_id)
37 | except Exception as e:
38 | LOGGER.error(str(e))
39 |
40 |
41 | def sendLogFile(bot, update: Update):
42 | with open('log.txt', 'rb') as f:
43 | bot.send_document(document=f, filename=f.name,
44 | reply_to_message_id=update.message.message_id,
45 | chat_id=update.message.chat_id)
46 |
47 |
48 | def auto_delete_message(bot, cmd_message: Message, bot_message: Message):
49 | if AUTO_DELETE_MESSAGE_DURATION != -1:
50 | time.sleep(AUTO_DELETE_MESSAGE_DURATION)
51 | try:
52 | # Skip if None is passed meaning we don't want to delete bot xor cmd message
53 | deleteMessage(bot, cmd_message)
54 | deleteMessage(bot, bot_message)
55 | except AttributeError:
56 | pass
57 |
58 |
59 | def delete_all_messages():
60 | with status_reply_dict_lock:
61 | for message in list(status_reply_dict.values()):
62 | try:
63 | deleteMessage(bot, message)
64 | del status_reply_dict[message.chat.id]
65 | except Exception as e:
66 | LOGGER.error(str(e))
67 |
68 |
69 | def update_all_messages():
70 | total, used, free = shutil.disk_usage('.')
71 | free = get_readable_file_size(free)
72 | currentTime = get_readable_time(time.time() - botStartTime)
73 | msg, buttons = get_readable_message()
74 | msg += f"CPU: {psutil.cpu_percent()}%
" \
75 | f" RAM: {psutil.virtual_memory().percent}%
" \
76 | f" DISK: {psutil.disk_usage('/').percent}%
"
77 | with download_dict_lock:
78 | dlspeed_bytes = 0
79 | uldl_bytes = 0
80 | for download in list(download_dict.values()):
81 | speedy = download.speed()
82 | if download.status() == MirrorStatus.STATUS_DOWNLOADING:
83 | if 'K' in speedy:
84 | dlspeed_bytes += float(speedy.split('K')[0]) * 1024
85 | elif 'M' in speedy:
86 | dlspeed_bytes += float(speedy.split('M')[0]) * 1048576
87 | if download.status() == MirrorStatus.STATUS_UPLOADING:
88 | if 'KB/s' in speedy:
89 | uldl_bytes += float(speedy.split('K')[0]) * 1024
90 | elif 'MB/s' in speedy:
91 | uldl_bytes += float(speedy.split('M')[0]) * 1048576
92 | dlspeed = get_readable_file_size(dlspeed_bytes)
93 | ulspeed = get_readable_file_size(uldl_bytes)
94 | msg += f"\nFREE: {free}
| UPTIME: {currentTime}
\nDL: {dlspeed}/s
🔻 | UL: {ulspeed}/s
🔺\n"
95 | with status_reply_dict_lock:
96 | for chat_id in list(status_reply_dict.keys()):
97 | if status_reply_dict[chat_id] and msg != status_reply_dict[chat_id].text:
98 | try:
99 | if buttons == "":
100 | editMessage(msg, status_reply_dict[chat_id])
101 | else:
102 | editMessage(msg, status_reply_dict[chat_id], buttons)
103 | except Exception as e:
104 | LOGGER.error(str(e))
105 | status_reply_dict[chat_id].text = msg
106 |
107 |
108 | def sendStatusMessage(msg, bot):
109 | if len(Interval) == 0:
110 | Interval.append(setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
111 | total, used, free = shutil.disk_usage('.')
112 | free = get_readable_file_size(free)
113 | currentTime = get_readable_time(time.time() - botStartTime)
114 | progress, buttons = get_readable_message()
115 | progress += f"CPU: {psutil.cpu_percent()}%
" \
116 | f" RAM: {psutil.virtual_memory().percent}%
" \
117 | f" DISK: {psutil.disk_usage('/').percent}%
"
118 | with download_dict_lock:
119 | dlspeed_bytes = 0
120 | uldl_bytes = 0
121 | for download in list(download_dict.values()):
122 | speedy = download.speed()
123 | if download.status() == MirrorStatus.STATUS_DOWNLOADING:
124 | if 'K' in speedy:
125 | dlspeed_bytes += float(speedy.split('K')[0]) * 1024
126 | elif 'M' in speedy:
127 | dlspeed_bytes += float(speedy.split('M')[0]) * 1048576
128 | if download.status() == MirrorStatus.STATUS_UPLOADING:
129 | if 'KB/s' in speedy:
130 | uldl_bytes += float(speedy.split('K')[0]) * 1024
131 | elif 'MB/s' in speedy:
132 | uldl_bytes += float(speedy.split('M')[0]) * 1048576
133 | dlspeed = get_readable_file_size(dlspeed_bytes)
134 | ulspeed = get_readable_file_size(uldl_bytes)
135 | progress += f"\nFREE: {free}
| UPTIME: {currentTime}
\nDL: {dlspeed}/s
🔻 | UL: {ulspeed}/s
🔺\n"
136 | with status_reply_dict_lock:
137 | if msg.message.chat.id in list(status_reply_dict.keys()):
138 | try:
139 | message = status_reply_dict[msg.message.chat.id]
140 | deleteMessage(bot, message)
141 | del status_reply_dict[msg.message.chat.id]
142 | except Exception as e:
143 | LOGGER.error(str(e))
144 | del status_reply_dict[msg.message.chat.id]
145 | if buttons == "":
146 | message = sendMessage(progress, bot, msg)
147 | else:
148 | message = sendMarkup(progress, bot, msg, buttons)
149 | status_reply_dict[msg.message.chat.id] = message
150 |
--------------------------------------------------------------------------------
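Both update_all_messages and sendStatusMessage above turn each download's human-readable speed string back into bytes by splitting on 'K' or 'M'. A standalone sketch of that conversion (hypothetical function name), with a worked example:

    # 'K' -> KiB/s (x 1024), 'M' -> MiB/s (x 1048576), anything else counts as 0
    def speed_to_bytes(speedy: str) -> float:
        if 'K' in speedy:
            return float(speedy.split('K')[0]) * 1024
        if 'M' in speedy:
            return float(speedy.split('M')[0]) * 1048576
        return 0.0

    total = speed_to_bytes("1.5MB/s") + speed_to_bytes("800KB/s")
    print(total)  # 1572864.0 + 819200.0 = 2392064.0 bytes/s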
/bot/modules/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/bot/modules/authorize.py:
--------------------------------------------------------------------------------
1 | from bot.helper.telegram_helper.message_utils import sendMessage
2 | from bot import AUTHORIZED_CHATS, SUDO_USERS, dispatcher, DB_URI
3 | from telegram.ext import CommandHandler
4 | from bot.helper.telegram_helper.filters import CustomFilters
5 | from bot.helper.telegram_helper.bot_commands import BotCommands
6 | from bot.helper.ext_utils.db_handler import DbManger
7 |
8 |
9 | def authorize(update, context):
10 | reply_message = None
11 | message_ = None
12 | reply_message = update.message.reply_to_message
13 | message_ = update.message.text.split(' ')
14 | if len(message_) == 2:
15 | user_id = int(message_[1])
16 | if user_id in AUTHORIZED_CHATS:
17 | msg = 'User Already Authorized'
18 | elif DB_URI is not None:
19 | msg = DbManger().db_auth(user_id)
20 | else:
21 | with open('authorized_chats.txt', 'a') as file:
22 | file.write(f'{user_id}\n')
23 | AUTHORIZED_CHATS.add(user_id)
24 | msg = 'User Authorized'
25 | elif reply_message is None:
26 | # Trying to authorize a chat
27 | chat_id = update.effective_chat.id
28 | if chat_id in AUTHORIZED_CHATS:
29 | msg = 'Chat Already Authorized'
30 |
31 | elif DB_URI is not None:
32 | msg = DbManger().db_auth(chat_id)
33 | else:
34 | with open('authorized_chats.txt', 'a') as file:
35 | file.write(f'{chat_id}\n')
36 | AUTHORIZED_CHATS.add(chat_id)
37 | msg = 'Chat Authorized'
38 | else:
39 | # Trying to authorize someone by replying
40 | user_id = reply_message.from_user.id
41 | if user_id in AUTHORIZED_CHATS:
42 | msg = 'User Already Authorized'
43 | elif DB_URI is not None:
44 | msg = DbManger().db_auth(user_id)
45 | else:
46 | with open('authorized_chats.txt', 'a') as file:
47 | file.write(f'{user_id}\n')
48 | AUTHORIZED_CHATS.add(user_id)
49 | msg = 'User Authorized'
50 | sendMessage(msg, context.bot, update)
51 |
52 |
53 | def unauthorize(update, context):
54 | reply_message = None
55 | message_ = None
56 | reply_message = update.message.reply_to_message
57 | message_ = update.message.text.split(' ')
58 | if len(message_) == 2:
59 | user_id = int(message_[1])
60 | if user_id in AUTHORIZED_CHATS:
61 | if DB_URI is not None:
62 | msg = DbManger().db_unauth(user_id)
63 | else:
64 | AUTHORIZED_CHATS.remove(user_id)
65 | msg = 'User Unauthorized'
66 | else:
67 | msg = 'User Already Unauthorized'
68 | elif reply_message is None:
69 | # Trying to unauthorize a chat
70 | chat_id = update.effective_chat.id
71 | if chat_id in AUTHORIZED_CHATS:
72 | if DB_URI is not None:
73 | msg = DbManger().db_unauth(chat_id)
74 | else:
75 | AUTHORIZED_CHATS.remove(chat_id)
76 | msg = 'Chat Unauthorized'
77 | else:
78 | msg = 'Chat Already Unauthorized'
79 | else:
80 | # Trying to unauthorize someone by replying
81 | user_id = reply_message.from_user.id
82 | if user_id in AUTHORIZED_CHATS:
83 | if DB_URI is not None:
84 | msg = DbManger().db_unauth(user_id)
85 | else:
86 | AUTHORIZED_CHATS.remove(user_id)
87 | msg = 'User Unauthorized'
88 | else:
89 | msg = 'User Already Unauthorized'
90 | with open('authorized_chats.txt', 'a') as file:
91 | file.truncate(0)
92 | for i in AUTHORIZED_CHATS:
93 | file.write(f'{i}\n')
94 | sendMessage(msg, context.bot, update)
95 |
96 |
97 | def addSudo(update, context):
98 | reply_message = None
99 | message_ = None
100 | reply_message = update.message.reply_to_message
101 | message_ = update.message.text.split(' ')
102 | if len(message_) == 2:
103 | user_id = int(message_[1])
104 | if user_id in SUDO_USERS:
105 | msg = 'Already Sudo'
106 | elif DB_URI is not None:
107 | msg = DbManger().db_addsudo(user_id)
108 | else:
109 | with open('sudo_users.txt', 'a') as file:
110 | file.write(f'{user_id}\n')
111 | SUDO_USERS.add(user_id)
112 | msg = 'Promoted as Sudo'
113 | elif reply_message is None:
114 | msg = "Give ID or Reply To message of whom you want to Promote"
115 | else:
116 | # Trying to promote someone to Sudo by replying
117 | user_id = reply_message.from_user.id
118 | if user_id in SUDO_USERS:
119 | msg = 'Already Sudo'
120 | elif DB_URI is not None:
121 | msg = DbManger().db_addsudo(user_id)
122 | else:
123 | with open('sudo_users.txt', 'a') as file:
124 | file.write(f'{user_id}\n')
125 | SUDO_USERS.add(user_id)
126 | msg = 'Promoted as Sudo'
127 | sendMessage(msg, context.bot, update)
128 |
129 |
130 | def removeSudo(update, context):
131 | reply_message = None
132 | message_ = None
133 | reply_message = update.message.reply_to_message
134 | message_ = update.message.text.split(' ')
135 | if len(message_) == 2:
136 | user_id = int(message_[1])
137 | if user_id in SUDO_USERS:
138 | if DB_URI is not None:
139 | msg = DbManger().db_rmsudo(user_id)
140 | else:
141 | SUDO_USERS.remove(user_id)
142 | msg = 'Demoted'
143 | else:
144 | msg = 'Not a Sudo'
145 | elif reply_message is None:
146 | msg = "Give ID or Reply To message of whom you want to remove from Sudo"
147 | else:
148 | user_id = reply_message.from_user.id
149 | if user_id in SUDO_USERS:
150 | if DB_URI is not None:
151 | msg = DbManger().db_rmsudo(user_id)
152 | else:
153 | SUDO_USERS.remove(user_id)
154 | msg = 'Demoted'
155 | else:
156 | msg = 'Not a Sudo'
157 | if DB_URI is None:
158 | with open('sudo_users.txt', 'a') as file:
159 | file.truncate(0)
160 | for i in SUDO_USERS:
161 | file.write(f'{i}\n')
162 | sendMessage(msg, context.bot, update)
163 |
164 |
165 | def sendAuthChats(update, context):
166 | user = sudo = ''
167 | user += '\n'.join(str(id) for id in AUTHORIZED_CHATS)
168 | sudo += '\n'.join(str(id) for id in SUDO_USERS)
169 | sendMessage(f'Authorized Chats\n{user}\nSudo Users\n{sudo}', context.bot, update)
170 |
171 |
172 | send_auth_handler = CommandHandler(command=BotCommands.AuthorizedUsersCommand, callback=sendAuthChats,
173 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
174 | authorize_handler = CommandHandler(command=BotCommands.AuthorizeCommand, callback=authorize,
175 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
176 | unauthorize_handler = CommandHandler(command=BotCommands.UnAuthorizeCommand, callback=unauthorize,
177 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
178 | addsudo_handler = CommandHandler(command=BotCommands.AddSudoCommand, callback=addSudo,
179 | filters=CustomFilters.owner_filter, run_async=True)
180 | removesudo_handler = CommandHandler(command=BotCommands.RmSudoCommand, callback=removeSudo,
181 | filters=CustomFilters.owner_filter, run_async=True)
182 |
183 | dispatcher.add_handler(send_auth_handler)
184 | dispatcher.add_handler(authorize_handler)
185 | dispatcher.add_handler(unauthorize_handler)
186 | dispatcher.add_handler(addsudo_handler)
187 | dispatcher.add_handler(removesudo_handler)
188 |
--------------------------------------------------------------------------------
/bot/modules/cancel_mirror.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CommandHandler
2 | from bot import download_dict, dispatcher, download_dict_lock, DOWNLOAD_DIR
3 | from bot.helper.ext_utils.fs_utils import clean_download
4 | from bot.helper.telegram_helper.bot_commands import BotCommands
5 | from bot.helper.telegram_helper.filters import CustomFilters
6 | from bot.helper.telegram_helper.message_utils import *
7 |
8 | from time import sleep
9 | from bot.helper.ext_utils.bot_utils import getDownloadByGid, MirrorStatus, getAllDownload
10 |
11 |
12 | def cancel_mirror(update, context):
13 | args = update.message.text.split(" ", maxsplit=1)
14 | mirror_message = None
15 | if len(args) > 1:
16 | gid = args[1]
17 | dl = getDownloadByGid(gid)
18 | if not dl:
19 | sendMessage(f"GID: {gid}
Not Found.", context.bot, update)
20 | return
21 | mirror_message = dl.message
22 | elif update.message.reply_to_message:
23 | mirror_message = update.message.reply_to_message
24 | with download_dict_lock:
25 | keys = list(download_dict.keys())
26 | try:
27 | dl = download_dict[mirror_message.message_id]
28 | except:
29 | pass
30 | if len(args) == 1:
31 | msg = f"Please reply to the /{BotCommands.MirrorCommand}
message which was used to start the download or send /{BotCommands.CancelMirror} GID
to cancel it!"
32 | if mirror_message and mirror_message.message_id not in keys:
33 | if BotCommands.MirrorCommand in mirror_message.text or \
34 | BotCommands.TarMirrorCommand in mirror_message.text or \
35 | BotCommands.UnzipMirrorCommand in mirror_message.text:
36 | msg1 = "Mirror Already Have Been Cancelled"
37 | sendMessage(msg1, context.bot, update)
38 | else:
39 | sendMessage(msg, context.bot, update)
40 | return
41 | elif not mirror_message:
42 | sendMessage(msg, context.bot, update)
43 | return
44 | if dl.status() == MirrorStatus.STATUS_ARCHIVING:
45 | sendMessage("Archival in Progress, You Can't Cancel It.", context.bot, update)
46 | elif dl.status() == MirrorStatus.STATUS_EXTRACTING:
47 | sendMessage("Extract in Progress, You Can't Cancel It.", context.bot, update)
48 | elif dl.status() == MirrorStatus.STATUS_SPLITTING:
49 | sendMessage("Split in Progress, You Can't Cancel It.", context.bot, update)
50 | else:
51 | dl.download().cancel_download()
52 | sleep(3) # incase of any error with ondownloaderror listener
53 | clean_download(f'{DOWNLOAD_DIR}{mirror_message.message_id}')
54 |
55 |
56 | def cancel_all(update, context):
57 | count = 0
58 | gid = 0
59 | while True:
60 | dl = getAllDownload()
61 | if dl:
62 | if dl.gid() != gid:
63 | gid = dl.gid()
64 | dl.download().cancel_download()
65 | count += 1
66 | sleep(0.3)
67 | else:
68 | break
69 | sendMessage(f'{count} Download(s) have been Cancelled!', context.bot, update)
70 |
71 |
72 |
73 | cancel_mirror_handler = CommandHandler(BotCommands.CancelMirror, cancel_mirror,
74 | filters=(CustomFilters.authorized_chat | CustomFilters.authorized_user) & CustomFilters.mirror_owner_filter | CustomFilters.sudo_user, run_async=True)
75 | cancel_all_handler = CommandHandler(BotCommands.CancelAllCommand, cancel_all,
76 | filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
77 | dispatcher.add_handler(cancel_all_handler)
78 | dispatcher.add_handler(cancel_mirror_handler)
79 |
--------------------------------------------------------------------------------
/bot/modules/clone.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CommandHandler
2 | from bot.helper.mirror_utils.upload_utils import gdriveTools
3 | from bot.helper.telegram_helper.message_utils import *
4 | from bot.helper.telegram_helper.filters import CustomFilters
5 | from bot.helper.telegram_helper.bot_commands import BotCommands
6 | from bot.helper.mirror_utils.status_utils.clone_status import CloneStatus
7 | from bot import dispatcher, LOGGER, CLONE_LIMIT, STOP_DUPLICATE, download_dict, download_dict_lock, Interval
8 | from bot.helper.ext_utils.bot_utils import get_readable_file_size, check_limit
9 | import random
10 | import string
11 |
12 |
13 | def cloneNode(update, context):
14 | args = update.message.text.split(" ", maxsplit=1)
15 | if len(args) > 1:
16 | link = args[1]
17 | gd = gdriveTools.GoogleDriveHelper()
18 | res, size, name, files = gd.clonehelper(link)
19 | if res != "":
20 | sendMessage(res, context.bot, update)
21 | return
22 | if STOP_DUPLICATE:
23 | LOGGER.info('Checking File/Folder if already in Drive...')
24 | smsg, button = gd.drive_list(name, True, True)
25 | if smsg:
26 | msg3 = "File/Folder is already available in Drive.\nHere are the search results:"
27 | sendMarkup(msg3, context.bot, update, button)
28 | return
29 | if CLONE_LIMIT is not None:
30 | result = check_limit(size, CLONE_LIMIT)
31 | if result:
32 | msg2 = f'Failed, Clone limit is {CLONE_LIMIT}.\nYour File/Folder size is {get_readable_file_size(size)}.'
33 | sendMessage(msg2, context.bot, update)
34 | return
35 | if files < 15:
36 | msg = sendMessage(f"Cloning: {link}
", context.bot, update)
37 | result, button = gd.clone(link)
38 | deleteMessage(context.bot, msg)
39 | else:
40 | drive = gdriveTools.GoogleDriveHelper(name)
41 | gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12))
42 | clone_status = CloneStatus(drive, size, update, gid)
43 | with download_dict_lock:
44 | download_dict[update.message.message_id] = clone_status
45 | sendStatusMessage(update, context.bot)
46 | result, button = drive.clone(link)
47 | with download_dict_lock:
48 | del download_dict[update.message.message_id]
49 | count = len(download_dict)
50 | try:
51 | if count == 0:
52 | Interval[0].cancel()
53 | del Interval[0]
54 | delete_all_messages()
55 | else:
56 | update_all_messages()
57 | except IndexError:
58 | pass
59 | if update.message.from_user.username:
60 | uname = f'@{update.message.from_user.username}'
61 | else:
62 | uname = f'{update.message.from_user.first_name}'
63 | if uname is not None:
64 | cc = f'\n\ncc: {uname}'
65 | men = f'{uname} '
66 | if button in ["cancelled", ""]:
67 | sendMessage(men + result, context.bot, update)
68 | else:
69 | sendMarkup(result + cc, context.bot, update, button)
70 | else:
71 | sendMessage('Provide G-Drive Shareable Link to Clone.', context.bot, update)
72 |
73 | clone_handler = CommandHandler(BotCommands.CloneCommand, cloneNode, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
74 | dispatcher.add_handler(clone_handler)
75 |
--------------------------------------------------------------------------------
/bot/modules/count.py:
--------------------------------------------------------------------------------
1 | # Implement By - @anasty17 (https://github.com/SlamDevs/slam-mirrorbot/pull/111)
2 | # (c) https://github.com/SlamDevs/slam-mirrorbot
3 | # All rights reserved
4 |
5 | from telegram.ext import CommandHandler
6 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
7 | from bot.helper.telegram_helper.message_utils import deleteMessage, sendMessage
8 | from bot.helper.telegram_helper.filters import CustomFilters
9 | from bot.helper.telegram_helper.bot_commands import BotCommands
10 | from bot import dispatcher
11 |
12 |
13 | def countNode(update, context):
14 | args = update.message.text.split(" ", maxsplit=1)
15 | if len(args) > 1:
16 | link = args[1]
17 | msg = sendMessage(f"Counting: {link}
", context.bot, update)
18 | gd = GoogleDriveHelper()
19 | result = gd.count(link)
20 | deleteMessage(context.bot, msg)
21 | if update.message.from_user.username:
22 | uname = f'@{update.message.from_user.username}'
23 | else:
24 | uname = f'{update.message.from_user.first_name}'
25 | if uname is not None:
26 | cc = f'\n\ncc: {uname}'
27 | sendMessage(result + cc, context.bot, update)
28 | else:
29 | sendMessage("Provide G-Drive Shareable Link to Count.", context.bot, update)
30 |
31 | count_handler = CommandHandler(BotCommands.CountCommand, countNode, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
32 | dispatcher.add_handler(count_handler)
33 |
--------------------------------------------------------------------------------
/bot/modules/delete.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CommandHandler
2 | import threading
3 | from telegram import Update
4 | from bot import dispatcher, LOGGER
5 | from bot.helper.telegram_helper.message_utils import auto_delete_message, sendMessage
6 | from bot.helper.telegram_helper.filters import CustomFilters
7 | from bot.helper.telegram_helper.bot_commands import BotCommands
8 | from bot.helper.mirror_utils.upload_utils import gdriveTools
9 |
10 |
11 | def deletefile(update, context):
12 | msg_args = update.message.text.split(None, 1)
13 | msg = ''
14 | try:
15 | link = msg_args[1]
16 | LOGGER.info(link)
17 | except IndexError:
18 | msg = 'Send a link along with command'
19 |
20 | if msg == '':
21 | drive = gdriveTools.GoogleDriveHelper()
22 | msg = drive.deletefile(link)
23 | LOGGER.info(f"DeleteFileCmd: {msg}")
24 | reply_message = sendMessage(msg, context.bot, update)
25 |
26 | threading.Thread(target=auto_delete_message, args=(context.bot, update.message, reply_message)).start()
27 |
28 | delete_handler = CommandHandler(command=BotCommands.DeleteCommand, callback=deletefile, filters=CustomFilters.owner_filter | CustomFilters.sudo_user, run_async=True)
29 | dispatcher.add_handler(delete_handler)
30 |
--------------------------------------------------------------------------------
/bot/modules/eval.py:
--------------------------------------------------------------------------------
1 | import io
2 | import os
3 | # Common imports for eval
4 | import textwrap
5 | import traceback
6 | from contextlib import redirect_stdout
7 | from bot.helper.telegram_helper.filters import CustomFilters
8 | from bot.helper.telegram_helper.bot_commands import BotCommands
9 | from bot.helper.telegram_helper.message_utils import sendMessage
10 | from bot import LOGGER, dispatcher
11 | from telegram import ParseMode
12 | from telegram.ext import CommandHandler
13 |
14 | namespaces = {}
15 |
16 |
17 | def namespace_of(chat, update, bot):
18 | if chat not in namespaces:
19 | namespaces[chat] = {
20 | '__builtins__': globals()['__builtins__'],
21 | 'bot': bot,
22 | 'effective_message': update.effective_message,
23 | 'effective_user': update.effective_user,
24 | 'effective_chat': update.effective_chat,
25 | 'update': update
26 | }
27 |
28 | return namespaces[chat]
29 |
30 |
31 | def log_input(update):
32 | user = update.effective_user.id
33 | chat = update.effective_chat.id
34 | LOGGER.info(
35 | f"IN: {update.effective_message.text} (user={user}, chat={chat})")
36 |
37 |
38 | def send(msg, bot, update):
39 | if len(str(msg)) > 2000:
40 | with io.BytesIO(str.encode(msg)) as out_file:
41 | out_file.name = "output.txt"
42 | bot.send_document(
43 | chat_id=update.effective_chat.id, document=out_file)
44 | else:
45 | LOGGER.info(f"OUT: '{msg}'")
46 | bot.send_message(
47 | chat_id=update.effective_chat.id,
48 | text=f"`{msg}`",
49 | parse_mode=ParseMode.MARKDOWN)
50 |
51 |
52 | def evaluate(update, context):
53 | bot = context.bot
54 | send(do(eval, bot, update), bot, update)
55 |
56 |
57 | def execute(update, context):
58 | bot = context.bot
59 | send(do(exec, bot, update), bot, update)
60 |
61 |
62 | def cleanup_code(code):
63 | if code.startswith('```') and code.endswith('```'):
64 | return '\n'.join(code.split('\n')[1:-1])
65 | return code.strip('` \n')
66 |
67 |
68 | def do(func, bot, update):
69 | log_input(update)
70 | content = update.message.text.split(' ', 1)[-1]
71 | body = cleanup_code(content)
72 | env = namespace_of(update.message.chat_id, update, bot)
73 |
74 | os.chdir(os.getcwd())
75 | with open(
76 | os.path.join(os.getcwd(),
77 | 'bot/modules/temp.txt'),
78 | 'w') as temp:
79 | temp.write(body)
80 |
81 | stdout = io.StringIO()
82 |
83 | to_compile = f'def func():\n{textwrap.indent(body, " ")}'
84 |
85 | try:
86 | exec(to_compile, env)
87 | except Exception as e:
88 | return f'{e.__class__.__name__}: {e}'
89 |
90 | func = env['func']
91 |
92 | try:
93 | with redirect_stdout(stdout):
94 | func_return = func()
95 | except Exception as e:
96 | value = stdout.getvalue()
97 | return f'{value}{traceback.format_exc()}'
98 | else:
99 | value = stdout.getvalue()
100 | result = None
101 | if func_return is None:
102 | if value:
103 | result = f'{value}'
104 | else:
105 | try:
106 | result = f'{repr(eval(body, env))}'
107 | except:
108 | pass
109 | else:
110 | result = f'{value}{func_return}'
111 | if result:
112 | return result
113 |
114 |
115 | def clear(update, context):
116 | bot = context.bot
117 | log_input(update)
118 | global namespaces
119 | if update.message.chat_id in namespaces:
120 | del namespaces[update.message.chat_id]
121 | send("Cleared locals.", bot, update)
122 |
123 |
124 | def exechelp(update, context):
125 | help_string = '''
126 | Executor
127 | • /eval Run Python Code Line | Lines
128 | • /exec Run Commands In Exec
129 | • /clearlocals Clear locals
130 | '''
131 | sendMessage(help_string, context.bot, update)
132 |
133 |
134 | EVAL_HANDLER = CommandHandler(('eval'), evaluate, filters=CustomFilters.owner_filter, run_async=True)
135 | EXEC_HANDLER = CommandHandler(('exec'), execute, filters=CustomFilters.owner_filter, run_async=True)
136 | CLEAR_HANDLER = CommandHandler('clearlocals', clear, filters=CustomFilters.owner_filter, run_async=True)
137 | EXECHELP_HANDLER = CommandHandler(BotCommands.ExecHelpCommand, exechelp, filters=CustomFilters.owner_filter, run_async=True)
138 |
139 | dispatcher.add_handler(EVAL_HANDLER)
140 | dispatcher.add_handler(EXEC_HANDLER)
141 | dispatcher.add_handler(CLEAR_HANDLER)
142 | dispatcher.add_handler(EXECHELP_HANDLER)
143 |
--------------------------------------------------------------------------------
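For reference, cleanup_code above strips a surrounding triple-backtick fence (or stray backticks and whitespace) before the body is wrapped into a function and executed. A small illustration with hypothetical inputs:

    fenced = "```\nprint(1 + 1)\n```"
    print(cleanup_code(fenced))         # -> "print(1 + 1)"
    print(cleanup_code("`x = 5`  \n"))  # -> "x = 5"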
/bot/modules/leech_settings.py:
--------------------------------------------------------------------------------
1 | # Implement By - @anasty17 (https://github.com/SlamDevs/slam-mirrorbot/commit/d888a1e7237f4633c066f7c2bbfba030b83ad616)
2 | # (c) https://github.com/SlamDevs/slam-mirrorbot
3 | # All rights reserved
4 |
5 | import os
6 | import threading
7 |
8 | from PIL import Image
9 | from telegram.ext import CommandHandler, CallbackQueryHandler
10 | from telegram import InlineKeyboardMarkup
11 |
12 | from bot import AS_DOC_USERS, AS_MEDIA_USERS, dispatcher, AS_DOCUMENT, app, AUTO_DELETE_MESSAGE_DURATION
13 | from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup, auto_delete_message
14 | from bot.helper.telegram_helper.filters import CustomFilters
15 | from bot.helper.telegram_helper.bot_commands import BotCommands
16 | from bot.helper.telegram_helper import button_build
17 |
18 |
19 | def leechSet(update, context):
20 | user_id = update.message.from_user.id
21 | path = f"Thumbnails/{user_id}.jpg"
22 | msg = f"Leech Type for {user_id} user is "
23 | if (
24 | user_id in AS_DOC_USERS
25 | or user_id not in AS_MEDIA_USERS
26 | and AS_DOCUMENT
27 | ):
28 | msg += "DOCUMENT"
29 | else:
30 | msg += "MEDIA"
31 | msg += "\nCustom Thumbnail "
32 | msg += "exists" if os.path.exists(path) else "not exists"
33 | buttons = button_build.ButtonMaker()
34 | buttons.sbutton("As Document", f"doc {user_id}")
35 | buttons.sbutton("As Media", f"med {user_id}")
36 | buttons.sbutton("Delete Thumbnail", f"thumb {user_id}")
37 | if AUTO_DELETE_MESSAGE_DURATION == -1:
38 | buttons.sbutton("Close", f"closeset {user_id}")
39 | button = InlineKeyboardMarkup(buttons.build_menu(2))
40 | choose_msg = sendMarkup(msg, context.bot, update, button)
41 | threading.Thread(target=auto_delete_message, args=(context.bot, update.message, choose_msg)).start()
42 |
43 | def setLeechType(update, context):
44 | query = update.callback_query
45 | user_id = query.from_user.id
46 | data = query.data
47 | data = data.split(" ")
48 | if user_id != int(data[1]):
49 | query.answer(text="Not Yours!", show_alert=True)
50 | elif data[0] == "doc":
51 | if (
52 | user_id in AS_DOC_USERS
53 | or user_id not in AS_MEDIA_USERS
54 | and AS_DOCUMENT
55 | ):
56 | query.answer(text="Already As Document!", show_alert=True)
57 | elif user_id in AS_MEDIA_USERS:
58 | AS_MEDIA_USERS.remove(user_id)
59 | AS_DOC_USERS.add(user_id)
60 | query.answer(text="Done!", show_alert=True)
61 | else:
62 | AS_DOC_USERS.add(user_id)
63 | query.answer(text="Done!", show_alert=True)
64 | elif data[0] == "med":
65 | if user_id in AS_DOC_USERS:
66 | AS_DOC_USERS.remove(user_id)
67 | AS_MEDIA_USERS.add(user_id)
68 | query.answer(text="Done!", show_alert=True)
69 | elif user_id in AS_MEDIA_USERS or not AS_DOCUMENT:
70 | query.answer(text="Already As Media!", show_alert=True)
71 | else:
72 | AS_MEDIA_USERS.add(user_id)
73 | query.answer(text="Done!", show_alert=True)
74 | elif data[0] == "thumb":
75 | path = f"Thumbnails/{user_id}.jpg"
76 | if os.path.lexists(path):
77 | os.remove(path)
78 | query.answer(text="Done!", show_alert=True)
79 | else:
80 | query.answer(text="No Thumbnail To Delete!", show_alert=True)
81 | elif data[0] == "closeset":
82 | query.message.delete()
83 |
84 | def setThumb(update, context):
85 | user_id = update.message.from_user.id
86 | reply_to = update.message.reply_to_message
87 | if reply_to is not None and reply_to.photo:
88 | path = "Thumbnails"
89 | if not os.path.exists(path):
90 | os.mkdir(path)
91 | photo_msg = app.get_messages(update.message.chat.id, reply_to_message_ids=update.message.message_id)
92 | photo_dir = app.download_media(photo_msg, file_name=path)
93 | des_dir = os.path.join(path, str(user_id) + ".jpg")
94 | # Image.open(photo_dir).convert("RGB").save(photo_dir)
95 | img = Image.open(photo_dir)
96 | img.thumbnail((480, 320))
97 | # img.resize((480, 320))
98 | img.save(des_dir, "JPEG")
99 | os.remove(photo_dir)
100 | sendMessage(f"Custom thumbnail saved for {user_id}
user.", context.bot, update)
101 | else:
102 | sendMessage("Reply to a photo to save custom thumbnail.", context.bot, update)
103 |
104 | leech_set_handler = CommandHandler(BotCommands.LeechSetCommand, leechSet, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
105 | set_thumbnail_handler = CommandHandler(BotCommands.SetThumbCommand, setThumb, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
106 | as_doc_handler = CallbackQueryHandler(setLeechType, pattern="doc", run_async=True)
107 | as_media_handler = CallbackQueryHandler(setLeechType, pattern="med", run_async=True)
108 | del_thumb_handler = CallbackQueryHandler(setLeechType, pattern="thumb", run_async=True)
109 | close_set_handler = CallbackQueryHandler(setLeechType, pattern="closeset", run_async=True)
110 | dispatcher.add_handler(leech_set_handler)
111 | dispatcher.add_handler(as_doc_handler)
112 | dispatcher.add_handler(as_media_handler)
113 | dispatcher.add_handler(close_set_handler)
114 | dispatcher.add_handler(set_thumbnail_handler)
115 | dispatcher.add_handler(del_thumb_handler)
116 |
--------------------------------------------------------------------------------
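Design note on setThumb above: Image.thumbnail((480, 320)) shrinks the photo in place while preserving its aspect ratio, unlike the commented-out Image.resize call, which would return a new image stretched to exactly 480x320. A minimal sketch with hypothetical paths:

    from PIL import Image

    img = Image.open("Thumbnails/source.jpg")
    img.thumbnail((480, 320))               # fit within 480x320, keep aspect ratio
    img.save("Thumbnails/12345678.jpg", "JPEG")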
/bot/modules/list.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CommandHandler
2 | from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
3 | from bot import LOGGER, dispatcher
4 | from bot.helper.telegram_helper.message_utils import sendMessage, editMessage
5 | from bot.helper.telegram_helper.filters import CustomFilters
6 | from bot.helper.telegram_helper.bot_commands import BotCommands
7 |
8 |
9 | def list_drive(update, context):
10 | try:
11 | search = update.message.text.split(' ', maxsplit=1)[1]
12 | LOGGER.info(f"Searching: {search}")
13 | reply = sendMessage('Searching..... Please wait!', context.bot, update)
14 | gdrive = GoogleDriveHelper()
15 | msg, button = gdrive.drive_list(search)
16 |
17 | if button:
18 | editMessage(msg, reply, button)
19 | else:
20 | editMessage(f'No result found for {search}', reply, button)
21 |
22 | except IndexError:
23 | sendMessage('Send a search key along with command', context.bot, update)
24 |
25 |
26 | list_handler = CommandHandler(BotCommands.ListCommand, list_drive, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
27 | dispatcher.add_handler(list_handler)
28 |
--------------------------------------------------------------------------------
/bot/modules/mirror_status.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CommandHandler
2 | from bot import dispatcher, status_reply_dict, status_reply_dict_lock, download_dict, download_dict_lock
3 | from bot.helper.telegram_helper.message_utils import *
4 | from telegram.error import BadRequest
5 | from bot.helper.telegram_helper.filters import CustomFilters
6 | from bot.helper.telegram_helper.bot_commands import BotCommands
7 | import threading
8 |
9 |
10 | def mirror_status(update, context):
11 | with download_dict_lock:
12 | if len(download_dict) == 0:
13 | message = "No active downloads"
14 | reply_message = sendMessage(message, context.bot, update)
15 | threading.Thread(target=auto_delete_message, args=(bot, update.message, reply_message)).start()
16 | return
17 | index = update.effective_chat.id
18 | with status_reply_dict_lock:
19 | if index in status_reply_dict.keys():
20 | deleteMessage(bot, status_reply_dict[index])
21 | del status_reply_dict[index]
22 | sendStatusMessage(update, context.bot)
23 | deleteMessage(context.bot, update.message)
24 |
25 |
26 | mirror_status_handler = CommandHandler(BotCommands.StatusCommand, mirror_status,
27 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
28 | dispatcher.add_handler(mirror_status_handler)
29 |
--------------------------------------------------------------------------------
/bot/modules/shell.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 | from bot import LOGGER, dispatcher
3 | from telegram import ParseMode
4 | from telegram.ext import CommandHandler
5 | from bot.helper.telegram_helper.filters import CustomFilters
6 | from bot.helper.telegram_helper.bot_commands import BotCommands
7 |
8 |
9 | def shell(update, context):
10 | message = update.effective_message
11 | cmd = message.text.split(' ', 1)
12 | if len(cmd) == 1:
13 | message.reply_text('No command to execute was given.')
14 | return
15 | cmd = cmd[1]
16 | process = subprocess.Popen(
17 | cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
18 | stdout, stderr = process.communicate()
19 | reply = ''
20 | stderr = stderr.decode()
21 | stdout = stdout.decode()
22 | if stdout:
23 | reply += f"*Stdout*\n`{stdout}`\n"
24 | LOGGER.info(f"Shell - {cmd} - {stdout}")
25 | if stderr:
26 | reply += f"*Stderr*\n`{stderr}`\n"
27 | LOGGER.error(f"Shell - {cmd} - {stderr}")
28 | if len(reply) > 3000:
29 | with open('shell_output.txt', 'w') as file:
30 | file.write(reply)
31 | with open('shell_output.txt', 'rb') as doc:
32 | context.bot.send_document(
33 | document=doc,
34 | filename=doc.name,
35 | reply_to_message_id=message.message_id,
36 | chat_id=message.chat_id)
37 | else:
38 | message.reply_text(reply, parse_mode=ParseMode.MARKDOWN)
39 |
40 |
41 | SHELL_HANDLER = CommandHandler(BotCommands.ShellCommand, shell,
42 | filters=CustomFilters.owner_filter, run_async=True)
43 | dispatcher.add_handler(SHELL_HANDLER)
44 |
--------------------------------------------------------------------------------
/bot/modules/speedtest.py:
--------------------------------------------------------------------------------
1 | from speedtest import Speedtest
2 | from bot.helper.telegram_helper.filters import CustomFilters
3 | from bot import dispatcher
4 | from bot.helper.telegram_helper.bot_commands import BotCommands
5 | from bot.helper.telegram_helper.message_utils import sendMessage, editMessage
6 | from telegram.ext import CommandHandler
7 |
8 |
9 | def speedtest(update, context):
10 | speed = sendMessage("Running Speed Test . . . ", context.bot, update)
11 | test = Speedtest()
12 | test.get_best_server()
13 | test.download()
14 | test.upload()
15 | test.results.share()
16 | result = test.results.dict()
17 | string_speed = f'''
18 | Server
19 | Name: {result['server']['name']}
20 | Country: {result['server']['country']}, {result['server']['cc']}
21 | Sponsor: {result['server']['sponsor']}
22 | ISP: {result['client']['isp']}
23 |
24 | SpeedTest Results
25 | Upload: {speed_convert(result['upload'] / 8)}
26 | Download: {speed_convert(result['download'] / 8)}
27 | Ping: {result['ping']} ms
28 | ISP Rating: {result['client']['isprating']}
29 | '''
30 | editMessage(string_speed, speed)
31 |
32 |
33 | def speed_convert(size):
34 | """Hi human, you can't read bytes?"""
35 | power = 2 ** 10
36 | zero = 0
37 | units = {0: "B/s", 1: "KB/s", 2: "MB/s", 3: "GB/s", 4: "TB/s"}
38 | while size > power:
39 | size /= power
40 | zero += 1
41 | return f"{round(size, 2)} {units[zero]}"
42 |
43 |
44 | SPEED_HANDLER = CommandHandler(BotCommands.SpeedCommand, speedtest,
45 | filters=CustomFilters.owner_filter | CustomFilters.authorized_user, run_async=True)
46 |
47 | dispatcher.add_handler(SPEED_HANDLER)
48 |
--------------------------------------------------------------------------------
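A worked example for speed_convert above (Speedtest reports bits per second, which speedtest() divides by 8 before formatting):

    bits_per_s = 24_000_000                 # 24 Mbit/s as reported by Speedtest
    bytes_per_s = bits_per_s / 8            # 3_000_000 bytes/s
    print(speed_convert(bytes_per_s))       # "2.86 MB/s"  (3_000_000 / 1024 / 1024)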
/bot/modules/torrent_search.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | import html
4 | import asyncio
5 | import aiohttp
6 | import json
7 | import feedparser
8 | import requests
9 | import itertools
10 |
11 | from telegram.ext import CommandHandler
12 | from telegram import ParseMode
13 |
14 | from urllib.parse import quote as urlencode, urlsplit
15 |
16 | from pyrogram import Client, filters, emoji
17 | from pyrogram.parser import html as pyrogram_html
18 | from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton
19 | from pyrogram.handlers import MessageHandler, CallbackQueryHandler
20 |
21 | from bot import app, dispatcher, bot
22 | from bot.helper.ext_utils import custom_filters
23 | from bot.helper.telegram_helper.bot_commands import BotCommands
24 | from bot.helper.telegram_helper.filters import CustomFilters
25 | from bot.helper.telegram_helper.message_utils import sendMessage
26 |
27 | search_lock = asyncio.Lock()
28 | search_info = {False: dict(), True: dict()}
29 |
30 | async def return_search(query, page=1, sukebei=False):
31 | page -= 1
32 | query = query.lower().strip()
33 | used_search_info = search_info[sukebei]
34 | async with search_lock:
35 | results, get_time = used_search_info.get(query, (None, 0))
36 | if (time.time() - get_time) > 3600:
37 | results = []
38 | async with aiohttp.ClientSession() as session:
39 | async with session.get(f'https://{"sukebei." if sukebei else ""}nyaa.si/?page=rss&q={urlencode(query)}') as resp:
40 | d = feedparser.parse(await resp.text())
41 | text = ''
42 | a = 0
43 | parser = pyrogram_html.HTML(None)
44 | for i in sorted(d['entries'], key=lambda i: int(i['nyaa_seeders']), reverse=True):
45 | if i['nyaa_size'].startswith('0'):
46 | continue
47 | if not int(i['nyaa_seeders']):
48 | break
49 | link = i['link']
50 | splitted = urlsplit(link)
51 | if splitted.scheme == 'magnet' and splitted.query:
52 | link = f'{link}'
53 | newtext = f'''{a + 1}. {html.escape(i["title"])}
54 | Link: {link}
55 | Size: {i["nyaa_size"]}
56 | Seeders: {i["nyaa_seeders"]}
57 | Leechers: {i["nyaa_leechers"]}
58 | Category: {i["nyaa_category"]}\n\n'''
59 | futtext = text + newtext
60 | if (a and not a % 10) or len((await parser.parse(futtext))['message']) > 4096:
61 | results.append(text)
62 | futtext = newtext
63 | text = futtext
64 | a += 1
65 | results.append(text)
66 | ttl = time.time()
67 | used_search_info[query] = results, ttl
68 | try:
69 | return results[page], len(results), ttl
70 | except IndexError:
71 | return '', len(results), ttl
72 |
73 | message_info = {}
74 | ignore = set()
75 |
76 | @app.on_message(filters.command(['nyaasi', f'nyaasi@{bot.username}']))
77 | async def nyaa_search(client, message):
78 | text = message.text.split(' ')
79 | text.pop(0)
80 | query = ' '.join(text)
81 | await init_search(client, message, query, False)
82 |
83 | @app.on_message(filters.command(['sukebei', f'sukebei@{bot.username}']))
84 | async def nyaa_search_sukebei(client, message):
85 | text = message.text.split(' ')
86 | text.pop(0)
87 | query = ' '.join(text)
88 | await init_search(client, message, query, True)
89 |
90 | async def init_search(client, message, query, sukebei):
91 | result, pages, ttl = await return_search(query, sukebei=sukebei)
92 | if not result:
93 | await message.reply_text('No results found')
94 | else:
95 | buttons = [
96 | InlineKeyboardButton(f'1/{pages}', 'nyaa_nop'),
97 | InlineKeyboardButton('Next', 'nyaa_next'),
98 | ]
99 |
100 | if pages == 1:
101 | buttons.pop()
102 | reply = await message.reply_text(result, reply_markup=InlineKeyboardMarkup([
103 | buttons
104 | ]))
105 | message_info[(reply.chat.id, reply.message_id)] = message.from_user.id, ttl, query, 1, pages, sukebei
106 |
107 | @app.on_callback_query(custom_filters.callback_data('nyaa_nop'))
108 | async def nyaa_nop(client, callback_query):
109 | await callback_query.answer(cache_time=3600)
110 |
111 | callback_lock = asyncio.Lock()
112 | @app.on_callback_query(custom_filters.callback_data(['nyaa_back', 'nyaa_next']))
113 | async def nyaa_callback(client, callback_query):
114 | message = callback_query.message
115 | message_identifier = (message.chat.id, message.message_id)
116 | data = callback_query.data
117 | async with callback_lock:
118 | if message_identifier in ignore:
119 | await callback_query.answer()
120 | return
121 | user_id, ttl, query, current_page, pages, sukebei = message_info.get(message_identifier, (None, 0, None, 0, 0, None))
122 | og_current_page = current_page
123 | if data == 'nyaa_back':
124 | current_page -= 1
125 | elif data == 'nyaa_next':
126 | current_page += 1
127 | if current_page < 1:
128 | current_page = 1
129 | elif current_page > pages:
130 | current_page = pages
131 | ttl_ended = (time.time() - ttl) > 3600
132 | if ttl_ended:
133 | text = getattr(message.text, 'html', 'Search expired')
134 | else:
135 | if callback_query.from_user.id != user_id:
136 | await callback_query.answer('...no', cache_time=3600)
137 | return
138 | text, pages, ttl = await return_search(query, current_page, sukebei)
139 | buttons = [
140 | InlineKeyboardButton('Prev', 'nyaa_back'),
141 | InlineKeyboardButton(f'{current_page}/{pages}', 'nyaa_nop'),
142 | InlineKeyboardButton('Next', 'nyaa_next'),
143 | ]
144 |
145 | if ttl_ended:
146 | buttons = [InlineKeyboardButton('Search Expired', 'nyaa_nop')]
147 | else:
148 | if current_page == 1:
149 | buttons.pop(0)
150 | if current_page == pages:
151 | buttons.pop()
152 | if ttl_ended or current_page != og_current_page:
153 | await callback_query.edit_message_text(text, reply_markup=InlineKeyboardMarkup([
154 | buttons
155 | ]))
156 | message_info[message_identifier] = user_id, ttl, query, current_page, pages, sukebei
157 | if ttl_ended:
158 | ignore.add(message_identifier)
159 | await callback_query.answer()
160 |
161 | # Using upstream API based on: https://github.com/Ryuk-me/Torrents-Api
162 | # Implemented by https://github.com/jusidama18
163 |
164 | class TorrentSearch:
165 | index = 0
166 | query = None
167 | message = None
168 | response = None
169 | response_range = None
170 |
171 | RESULT_LIMIT = 4
172 | RESULT_STR = None
173 |
174 | def __init__(self, command: str, source: str, result_str: str):
175 | self.command = command
176 | self.source = source.rstrip('/')
177 | self.RESULT_STR = result_str
178 |
179 | app.add_handler(MessageHandler(self.find, filters.command([command, f'{self.command}@{bot.username}'])))
180 | app.add_handler(CallbackQueryHandler(self.previous, filters.regex(f"{self.command}_previous")))
181 | app.add_handler(CallbackQueryHandler(self.delete, filters.regex(f"{self.command}_delete")))
182 | app.add_handler(CallbackQueryHandler(self.next, filters.regex(f"{self.command}_next")))
183 |
184 | @staticmethod
185 | def format_magnet(string: str):
186 | if not string:
187 | return ""
188 | return string.split('&tr', 1)[0]
189 |
190 | def get_formatted_string(self, values):
191 | string = self.RESULT_STR.format(**values)
192 | extra = ""
193 | if "Files" in values:
194 | tmp_str = "➲[{Quality} - {Type} ({Size})]({Torrent}): `{magnet}`"
195 | extra += "\n".join(
196 | tmp_str.format(**f, magnet=self.format_magnet(f['Magnet']))
197 | for f in values['Files']
198 | )
199 | else:
200 | magnet = values.get('magnet', values.get('Magnet')) # Avoid updating source dict
201 | if magnet:
202 | extra += f"➲Magnet: `{self.format_magnet(magnet)}`"
203 | if (extra):
204 | string += "\n" + extra
205 | return string
206 |
207 | async def update_message(self):
208 | prevBtn = InlineKeyboardButton(
209 | 'Prev', callback_data=f"{self.command}_previous"
210 | )
211 |
212 | delBtn = InlineKeyboardButton(f"{emoji.CROSS_MARK}", callback_data=f"{self.command}_delete")
213 | nextBtn = InlineKeyboardButton('Next', callback_data=f"{self.command}_next")
214 |
215 | inline = []
216 | if (self.index != 0):
217 | inline.append(prevBtn)
218 | inline.append(delBtn)
219 | if (self.index != len(self.response_range) - 1):
220 | inline.append(nextBtn)
221 |
222 | res_lim = min(self.RESULT_LIMIT, len(self.response) - self.RESULT_LIMIT*self.index)
223 | result = f"**Page - {self.index+1}**\n\n"
224 | result += "\n\n=======================\n\n".join(
225 | self.get_formatted_string(self.response[self.response_range[self.index]+i])
226 | for i in range(res_lim)
227 | )
228 |
229 | await self.message.edit(
230 | result,
231 | reply_markup=InlineKeyboardMarkup([inline]),
232 | parse_mode="markdown",
233 | )
234 |
235 | async def find(self, client, message):
236 | if len(message.command) < 2:
237 | await message.reply_text(f"Usage: /{self.command} query")
238 | return
239 |
240 | query = urlencode(message.text.split(None, 1)[1])
241 | self.message = await message.reply_text("Searching")
242 | try:
243 | async with aiohttp.ClientSession() as session:
244 | async with session.get(f"{self.source}/{query}", timeout=15) as resp:
245 | if (resp.status != 200):
246 | raise Exception('unsuccessful request')
247 | result = await resp.json()
248 | if (result and isinstance(result[0], list)):
249 | result = list(itertools.chain(*result))
250 | self.response = result
251 | self.response_range = range(0, len(self.response), self.RESULT_LIMIT)
252 | except:
253 | await self.message.edit("No Results Found.")
254 | return
255 | await self.update_message()
256 |
257 | async def delete(self, client, message):
258 | await self.message.delete()
259 | self.index = 0
260 | self.query = None
261 | self.message = None
262 | self.response = None
263 | self.response_range = None
264 |
265 | async def previous(self, client, message):
266 | self.index -= 1
267 | await self.update_message()
268 |
269 | async def next(self, client, message):
270 | self.index += 1
271 | await self.update_message()
272 |
273 | RESULT_STR_1337 = (
274 | "➲Name: `{Name}`\n"
275 | "➲Size: {Size}\n"
276 | "➲Seeders: {Seeders} || ➲Leechers: {Leechers}"
277 | )
278 | RESULT_STR_PIRATEBAY = (
279 | "➲Name: `{Name}`\n"
280 | "➲Size: {Size}\n"
281 | "➲Seeders: {Seeders} || ➲Leechers: {Leechers}"
282 | )
283 | RESULT_STR_TGX = (
284 | "➲Name: `{Name}`\n"
285 | "➲Size: {Size}\n"
286 | "➲Seeders: {Seeders} || ➲Leechers: {Leechers}"
287 | )
288 | RESULT_STR_YTS = (
289 | "➲Name: `{Name}`\n"
290 | "➲Released on: {ReleasedDate}\n"
291 | "➲Genre: {Genre}\n"
292 | "➲Rating: {Rating}\n"
293 | "➲Likes: {Likes}\n"
294 | "➲Duration: {Runtime}\n"
295 | "➲Language: {Language}"
296 | )
297 | RESULT_STR_EZTV = (
298 | "➲Name: `{Name}`\n"
299 | "➲Size: {Size}\n"
300 | "➲Seeders: {Seeders}"
301 | )
302 | RESULT_STR_TORLOCK = (
303 | "➲Name: `{Name}`\n"
304 | "➲Size: {Size}\n"
305 | "➲Seeders: {Seeders} || ➲Leechers: {Leechers}"
306 | )
307 | RESULT_STR_RARBG = (
308 | "➲Name: `{Name}`\n"
309 | "➲Size: {Size}\n"
310 | "➲Seeders: {Seeders} || ➲Leechers: {Leechers}"
311 | )
312 | RESULT_STR_ALL = (
313 | "➲Name: `{Name}`\n"
314 | "➲Size: {Size}\n"
315 | "➲Seeders: {Seeders} || ➲Leechers: {Leechers}"
316 | )
317 |
318 | TORRENT_API = 'https://api.linkstore.eu.org/api'
319 |
320 | torrents_dict = {
321 | '1337x': {'source': f"{TORRENT_API}/1337x/", 'result_str': RESULT_STR_1337},
322 | 'piratebay': {'source': f"{TORRENT_API}/piratebay/", 'result_str': RESULT_STR_PIRATEBAY},
323 | 'tgx': {'source': f"{TORRENT_API}/tgx/", 'result_str': RESULT_STR_TGX},
324 | 'yts': {'source': f"{TORRENT_API}/yts/", 'result_str': RESULT_STR_YTS},
325 | 'eztv': {'source': f"{TORRENT_API}/eztv/", 'result_str': RESULT_STR_EZTV},
326 | 'torlock': {'source': f"{TORRENT_API}/torlock/", 'result_str': RESULT_STR_TORLOCK},
327 | 'rarbg': {'source': f"{TORRENT_API}/rarbg/", 'result_str': RESULT_STR_RARBG},
328 | 'ts': {'source': f"{TORRENT_API}/all/", 'result_str': RESULT_STR_ALL}
329 | }
330 |
331 | torrent_handlers = [
332 | TorrentSearch(command, value['source'], value['result_str'])
333 | for command, value in torrents_dict.items()
334 | ]
335 |
336 | def searchhelp(update, context):
337 | help_string = '''
338 | Torrent Search
339 | • /nyaasi [search query]
340 | • /sukebei [search query]
341 | • /1337x [search query]
342 | • /piratebay [search query]
343 | • /tgx [search query]
344 | • /yts [search query]
345 | • /eztv [search query]
346 | • /torlock [search query]
347 | • /rarbg [search query]
348 | • /ts [search query]
349 | '''
350 | sendMessage(help_string, context.bot, update)
351 |
352 |
353 | SEARCHHELP_HANDLER = CommandHandler(BotCommands.TsHelpCommand, searchhelp, filters=(CustomFilters.authorized_chat | CustomFilters.authorized_user) & CustomFilters.mirror_owner_filter, run_async=True)
354 | dispatcher.add_handler(SEARCHHELP_HANDLER)
355 |
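
Note: a minimal standalone sketch of the pagination arithmetic used in update_message above — response_range holds the start offset of each page, and res_lim is the number of results left on the current page. RESULT_LIMIT and the fake response list are assumptions for illustration only.

RESULT_LIMIT = 4
response = [f"torrent-{n}" for n in range(10)]           # 10 fake results
response_range = range(0, len(response), RESULT_LIMIT)   # page start offsets: 0, 4, 8

for index, start in enumerate(response_range):
    # the last page may hold fewer than RESULT_LIMIT results
    res_lim = min(RESULT_LIMIT, len(response) - RESULT_LIMIT * index)
    page = [response[start + i] for i in range(res_lim)]
    print(f"Page {index + 1}: {page}")
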
--------------------------------------------------------------------------------
/bot/modules/watch.py:
--------------------------------------------------------------------------------
1 | from telegram.ext import CommandHandler
2 | from telegram import Bot, Update
3 | from bot import DOWNLOAD_DIR, dispatcher, LOGGER
4 | from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage
5 | from .mirror import MirrorListener
6 | from bot.helper.mirror_utils.download_utils.youtube_dl_download_helper import YoutubeDLHelper
7 | from bot.helper.telegram_helper.bot_commands import BotCommands
8 | from bot.helper.telegram_helper.filters import CustomFilters
9 | import threading
10 |
11 |
12 | def _watch(bot: Bot, update, isTar=False, isZip=False, isLeech=False):
13 | mssg = update.message.text
14 | message_args = mssg.split(' ')
15 | name_args = mssg.split('|')
16 |
17 | try:
18 | link = message_args[1]
19 | except IndexError:
20 | msg = f"/{BotCommands.WatchCommand} [youtube-dl supported link] [quality] |[CustomName] to mirror with youtube-dl.\n\n"
21 | msg += "Note: Quality and custom name are optional\n\nExample of quality: audio, 144, 240, 360, 480, 720, 1080, 2160."
22 | msg += "\n\nIf you want to use custom filename, enter it after |"
23 |         msg += f"\n\nExample:\n/{BotCommands.WatchCommand} https://youtu.be/Pk_TthHfLeE 720 |Slam\n\n"
24 |         msg += "This file will be downloaded in 720p quality and its name will be Slam"
25 | sendMessage(msg, bot, update)
26 | return
27 |
28 | try:
29 | if "|" in mssg:
30 | mssg = mssg.split("|")
31 | qual = mssg[0].split(" ")[2]
32 | if qual == "":
33 | raise IndexError
34 | else:
35 | qual = message_args[2]
36 | if qual != "audio":
37 | qual = f'bestvideo[height<={qual}]+bestaudio/best[height<={qual}]'
38 | except IndexError:
39 | qual = "bestvideo+bestaudio/best"
40 |
41 | try:
42 | name = name_args[1]
43 | except IndexError:
44 | name = ""
45 |
46 | pswd = ""
47 | listener = MirrorListener(bot, update, pswd, isTar, isZip=isZip, isLeech=isLeech)
48 | ydl = YoutubeDLHelper(listener)
49 | threading.Thread(target=ydl.add_download,args=(link, f'{DOWNLOAD_DIR}{listener.uid}', qual, name)).start()
50 | sendStatusMessage(update, bot)
51 |
52 | def watch(update, context):
53 | _watch(context.bot, update)
54 |
55 | def watchTar(update, context):
56 | _watch(context.bot, update, True)
57 |
58 | def watchZip(update, context):
59 | _watch(context.bot, update, True, True)
60 |
61 | def leechWatch(update, context):
62 | _watch(context.bot, update, isLeech=True)
63 |
64 | def leechWatchTar(update, context):
65 | _watch(context.bot, update, True, isLeech=True)
66 |
67 | def leechWatchZip(update, context):
68 | _watch(context.bot, update, True, True, True)
69 |
70 | watch_handler = CommandHandler(BotCommands.WatchCommand, watch,
71 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
72 | tar_watch_handler = CommandHandler(BotCommands.TarWatchCommand, watchTar,
73 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
74 | zip_watch_handler = CommandHandler(BotCommands.ZipWatchCommand, watchZip,
75 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
76 | leech_watch_handler = CommandHandler(BotCommands.LeechWatchCommand, leechWatch,
77 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
78 | leech_tar_watch_handler = CommandHandler(BotCommands.LeechTarWatchCommand, leechWatchTar,
79 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
80 | leech_zip_watch_handler = CommandHandler(BotCommands.LeechZipWatchCommand, leechWatchZip,
81 | filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
82 |
83 | dispatcher.add_handler(watch_handler)
84 | dispatcher.add_handler(tar_watch_handler)
85 | dispatcher.add_handler(zip_watch_handler)
86 | dispatcher.add_handler(leech_watch_handler)
87 | dispatcher.add_handler(leech_tar_watch_handler)
88 | dispatcher.add_handler(leech_zip_watch_handler)
89 |
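
Note: a small illustrative helper (not part of the repository) mirroring the quality handling in _watch above; the function name is hypothetical, and the "audio" value is assumed to be resolved further downstream by YoutubeDLHelper.

def build_format_selector(qual: str = "") -> str:
    """Map the optional quality argument to a youtube-dl format selector."""
    if not qual:
        # no quality supplied: best video + best audio
        return "bestvideo+bestaudio/best"
    if qual == "audio":
        # passed through as-is; presumably turned into an audio-only download downstream
        return "audio"
    # numeric quality such as "720": cap the video height
    return f"bestvideo[height<={qual}]+bestaudio/best[height<={qual}]"

# build_format_selector("720") -> 'bestvideo[height<=720]+bestaudio/best[height<=720]'
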
--------------------------------------------------------------------------------
/captain-definition:
--------------------------------------------------------------------------------
1 | {
2 | "schemaVersion": 2,
3 | "dockerfilePath": "./Dockerfile"
4 | }
5 |
--------------------------------------------------------------------------------
/config_sample.env:
--------------------------------------------------------------------------------
1 | # Remove this line before deploying
2 | _____REMOVE_THIS_LINE_____=True
3 |
4 | # Please read this for filling instructions: https://github.com/SlamDevs/slam-mirrorbot#setting-up-config-file
5 |
6 | # REQUIRED CONFIG
7 | BOT_TOKEN = ""
8 | GDRIVE_FOLDER_ID = ""
9 | OWNER_ID =
10 | DOWNLOAD_DIR = "/usr/src/app/downloads"
11 | DOWNLOAD_STATUS_UPDATE_INTERVAL = 8
12 | AUTO_DELETE_MESSAGE_DURATION = 20
13 | IS_TEAM_DRIVE = ""
14 | TELEGRAM_API =
15 | TELEGRAM_HASH = ""
16 | # OPTIONAL CONFIG
17 | DATABASE_URL = ""
18 | AUTHORIZED_CHATS = "" # Split by space
19 | SUDO_USERS = "" # Split by space
20 | IGNORE_PENDING_REQUESTS = ""
21 | USE_SERVICE_ACCOUNTS = ""
22 | INDEX_URL = ""
23 | STATUS_LIMIT = "" # Recommended limit is 4
24 | # Leech
25 | TG_SPLIT_SIZE = "" # leave it empty for max size (2GB)
26 | AS_DOCUMENT = ""
27 | UPTOBOX_TOKEN = ""
28 | MEGA_API_KEY = ""
29 | MEGA_EMAIL_ID = ""
30 | MEGA_PASSWORD = ""
31 | BLOCK_MEGA_FOLDER = ""
32 | BLOCK_MEGA_LINKS = ""
33 | STOP_DUPLICATE = ""
34 | RECURSIVE_SEARCH = "" # Set True or False to enable searching in sub-folders (works only with a Shared Drive ID or root; it can't be used with a folder ID)
35 | SHORTENER = ""
36 | SHORTENER_API = ""
37 | # qBittorrent
38 | IS_VPS = "" # Don't set this to True even if you're using a VPS, unless you face errors with the web server
39 | SERVER_PORT = "80" # Only for VPS, even if IS_VPS is False
40 | BASE_URL_OF_BOT = "" # Web link of the bot. Required on Heroku to prevent the dyno from sleeping; use a worker dyno instead if you don't want the web server
41 | # If you want to use Credentials externally from Index Links, fill these vars with the direct links
42 | # These are optional; if you don't know about them, simply leave them empty
43 | ACCOUNTS_ZIP_URL = ""
44 | TOKEN_PICKLE_URL = ""
45 | MULTI_SEARCH_URL = "" # You can use a Gist raw link (remove the commit ID from the link so it always points to the latest revision, as with the config raw link in the Heroku guide)
46 | # To set a limit, leave a space between the number and the unit. Available units: gb or GB, tb or TB
47 | TORRENT_DIRECT_LIMIT = ""
48 | TAR_UNZIP_LIMIT = ""
49 | CLONE_LIMIT = ""
50 | MEGA_LIMIT = ""
51 | # View Link button to open the file's Index Link in the browser instead of a direct download link
52 | # To check whether it's compatible with your Index code, open any video from your Index and see if its URL ends with ?a=view; if it does, set this to True (compatible with Bhadoo Drive Index)
53 | VIEW_LINK = ""
54 | # Add more buttons (Drive Link, Index Link, and View Link buttons are already included; the extra buttons below are optional)
55 | # If you don't know what the entries below are, simply leave them empty
56 | BUTTON_FOUR_NAME = ""
57 | BUTTON_FOUR_URL = ""
58 | BUTTON_FIVE_NAME = ""
59 | BUTTON_FIVE_URL = ""
60 | BUTTON_SIX_NAME = ""
61 | BUTTON_SIX_URL = ""
62 |
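
Note: the limit variables above (TORRENT_DIRECT_LIMIT, TAR_UNZIP_LIMIT, CLONE_LIMIT, MEGA_LIMIT) take a value like "100 GB" or "2 TB". Below is a minimal sketch of how such a string could be parsed into bytes; the function name is hypothetical and not necessarily how the bot implements it.

def parse_limit(value: str) -> int:
    """Convert 'NUMBER UNIT' (gb/GB or tb/TB) into a size in bytes."""
    number, unit = value.split()                      # e.g. "100", "GB"
    multipliers = {"GB": 1024 ** 3, "TB": 1024 ** 4}
    return int(float(number) * multipliers[unit.upper()])

# parse_limit("100 GB") -> 107374182400
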
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.3"
2 |
3 | services:
4 | slam:
5 | build: .
6 | command: bash start.sh
7 | restart: on-failure
8 | ports:
9 | - "80:80"
10 |
--------------------------------------------------------------------------------
/driveid.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | print("\n\n"\
4 | " Bot can search files recursively, but you have to add the list of drives you want to search.\n"\
5 | " Use the following format: (You can use 'root' as the ID in case you want to use the main drive.)\n"\
6 | " teamdrive NAME --> any name you like\n"\
7 | " teamdrive ID --> ID of the Team Drive you want to search ('root' for the main drive)\n"\
8 | " teamdrive INDEX URL --> enter the index URL for this drive;\n" \
9 | " go to the respective drive and copy the URL from the address bar\n")
10 | msg = ''
11 | if os.path.exists('drive_folder'):
12 | with open('drive_folder', 'r+') as f:
13 | lines = f.read()
14 | if not re.match(r'^\s*$', lines):
15 | print(lines)
16 | print("\n\n"\
17 | " Do you wish to keep the details shown above, which you previously added? Enter y/n.\n"\
18 | " If nothing is shown above, enter n.")
19 | while 1:
20 | choice = input()
21 | if choice in ['y', 'Y']:
22 | msg = f'{lines}'
23 | break
24 | elif choice in ['n', 'N']:
25 | break
26 | else:
27 |                     print("\n\n Please answer y to keep the details above or n to discard them.")
28 | num = int(input(" How many Drives/Folders would you like to add : "))
29 | for count in range(1, num + 1):
30 | print(f"\n > DRIVE - {count}\n")
31 | name = input(" Enter Drive NAME (anything) : ")
32 | id = input(" Enter Drive ID : ")
33 | index = input(" Enter Drive INDEX URL (optional) : ")
34 | if not name or not id:
35 |         print("\n\n ERROR : Don't leave the name/ID empty.")
36 | exit(1)
37 | name=name.replace(" ", "_")
38 | if index:
39 | if index[-1] == "/":
40 | index = index[:-1]
41 | else:
42 | index = ''
43 | msg += f"{name} {id} {index}\n"
44 | with open('drive_folder', 'w') as file:
45 | file.truncate(0)
46 | file.write(msg)
47 | print("\n\n Done!")
48 |
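
Note: driveid.py writes one space-separated line per drive to the drive_folder file (NAME ID INDEX, with spaces in the name replaced by underscores and the index optional). A minimal sketch of reading that file back; the sample values in the comment are made up.

# drive_folder might contain, for example:
#   Movies 0AbCdEfGhIjKlMnOpQ https://index.example.workers.dev/0:
#   root_drive root
with open('drive_folder') as f:
    for line in f:
        parts = line.split()
        name, drive_id = parts[0], parts[1]
        index_url = parts[2] if len(parts) > 2 else ''
        print(name, drive_id, index_url)
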
--------------------------------------------------------------------------------
/extract:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [ $# -lt 1 ]; then
4 | echo "Usage: $(basename $0) FILES"
5 | exit 1
6 | fi
7 |
8 | extract() {
9 | arg="$1"
10 | cd "$(dirname "$arg")" || exit
11 | case "$arg" in
12 | *.tar.bz2)
13 | tar xjf "$arg" --one-top-level
14 | local code=$?
15 | ;;
16 | *.tar.gz)
17 | tar xzf "$arg" --one-top-level
18 | local code=$?
19 | ;;
20 | *.bz2)
21 | bunzip2 "$arg"
22 | local code=$?
23 | ;;
24 | *.gz)
25 | gunzip "$arg"
26 | local code=$?
27 | ;;
28 | *.tar)
29 | tar xf "$arg" --one-top-level
30 | local code=$?
31 | ;;
32 | *.tbz2)
33 | (tar xjf "$arg" --one-top-level)
34 | local code=$?
35 | ;;
36 | *.tgz)
37 | tar xzf "$arg" --one-top-level
38 | local code=$?
39 | ;;
40 | *.tar.xz)
41 | a_dir=$(expr "$arg" : '\(.*\).tar.xz')
42 | 7z x "$arg" -o"$a_dir"
43 | local code=$?
44 | ;;
45 | *.zip)
46 | a_dir=$(expr "$arg" : '\(.*\).zip')
47 | 7z x "$arg" -o"$a_dir"
48 | local code=$?
49 | ;;
50 | *.7z)
51 | a_dir=$(expr "$arg" : '\(.*\).7z')
52 | 7z x "$arg" -o"$a_dir"
53 | local code=$?
54 | ;;
55 | *.Z)
56 | uncompress "$arg"
57 | local code=$?
58 | ;;
59 | *.rar)
60 | a_dir=$(expr "$arg" : '\(.*\).rar')
61 | mkdir "$a_dir"
62 | 7z x "$arg" -o"$a_dir"
63 | local code=$?
64 | ;;
65 | *.iso)
66 | a_dir=$(expr "$arg" : '\(.*\).iso')
67 | 7z x "$arg" -o"$a_dir"
68 | local code=$?
69 | ;;
70 | *.wim)
71 | a_dir=$(expr "$arg" : '\(.*\).wim')
72 | 7z x "$arg" -o"$a_dir"
73 | local code=$?
74 | ;;
75 | *.cab)
76 | a_dir=$(expr "$arg" : '\(.*\).cab')
77 | 7z x "$arg" -o"$a_dir"
78 | local code=$?
79 | ;;
80 | *.apm)
81 | a_dir=$(expr "$arg" : '\(.*\).apm')
82 | 7z x "$arg" -o"$a_dir"
83 | local code=$?
84 | ;;
85 | *.arj)
86 | a_dir=$(expr "$arg" : '\(.*\).arj')
87 | 7z x "$arg" -o"$a_dir"
88 | local code=$?
89 | ;;
90 | *.chm)
91 | a_dir=$(expr "$arg" : '\(.*\).chm')
92 | 7z x "$arg" -o"$a_dir"
93 | local code=$?
94 | ;;
95 | *.cpio)
96 | a_dir=$(expr "$arg" : '\(.*\).cpio')
97 | 7z x "$arg" -o"$a_dir"
98 | local code=$?
99 | ;;
100 | *.cramfs)
101 | a_dir=$(expr "$arg" : '\(.*\).cramfs')
102 | 7z x "$arg" -o"$a_dir"
103 | local code=$?
104 | ;;
105 | *.deb)
106 | a_dir=$(expr "$arg" : '\(.*\).deb')
107 | 7z x "$arg" -o"$a_dir"
108 | local code=$?
109 | ;;
110 | *.dmg)
111 | a_dir=$(expr "$arg" : '\(.*\).dmg')
112 | 7z x "$arg" -o"$a_dir"
113 | local code=$?
114 | ;;
115 | *.fat)
116 | a_dir=$(expr "$arg" : '\(.*\).fat')
117 | 7z x "$arg" -o"$a_dir"
118 | local code=$?
119 | ;;
120 | *.hfs)
121 | a_dir=$(expr "$arg" : '\(.*\).hfs')
122 | 7z x "$arg" -o"$a_dir"
123 | local code=$?
124 | ;;
125 | *.lzh)
126 | a_dir=$(expr "$arg" : '\(.*\).lzh')
127 | 7z x "$arg" -o"$a_dir"
128 | local code=$?
129 | ;;
130 | *.lzma)
131 | a_dir=$(expr "$arg" : '\(.*\).lzma')
132 | 7z x "$arg" -o"$a_dir"
133 | local code=$?
134 | ;;
135 | *.lzma2)
136 | a_dir=$(expr "$arg" : '\(.*\).lzma2')
137 | 7z x "$arg" -o"$a_dir"
138 | local code=$?
139 | ;;
140 | *.mbr)
141 | a_dir=$(expr "$arg" : '\(.*\).mbr')
142 | 7z x "$arg" -o"$a_dir"
143 | local code=$?
144 | ;;
145 | *.msi)
146 | a_dir=$(expr "$arg" : '\(.*\).msi')
147 | 7z x "$arg" -o"$a_dir"
148 | local code=$?
149 | ;;
150 | *.mslz)
151 | a_dir=$(expr "$arg" : '\(.*\).mslz')
152 | 7z x "$arg" -o"$a_dir"
153 | local code=$?
154 | ;;
155 | *.nsis)
156 | a_dir=$(expr "$arg" : '\(.*\).nsis')
157 | 7z x "$arg" -o"$a_dir"
158 | local code=$?
159 | ;;
160 | *.ntfs)
161 | a_dir=$(expr "$arg" : '\(.*\).ntfs')
162 | 7z x "$arg" -o"$a_dir"
163 | local code=$?
164 | ;;
165 | *.rpm)
166 | a_dir=$(expr "$arg" : '\(.*\).rpm')
167 | 7z x "$arg" -o"$a_dir"
168 | local code=$?
169 | ;;
170 | *.squashfs)
171 | a_dir=$(expr "$arg" : '\(.*\).squashfs')
172 | 7z x "$arg" -o"$a_dir"
173 | local code=$?
174 | ;;
175 | *.udf)
176 | a_dir=$(expr "$arg" : '\(.*\).udf')
177 | 7z x "$arg" -o"$a_dir"
178 | local code=$?
179 | ;;
180 | *.vhd)
181 | a_dir=$(expr "$arg" : '\(.*\).vhd')
182 | 7z x "$arg" -o"$a_dir"
183 | local code=$?
184 | ;;
185 | *.xar)
186 | a_dir=$(expr "$arg" : '\(.*\).xar')
187 | 7z x "$arg" -o"$a_dir"
188 | local code=$?
189 | ;;
190 | *)
191 | echo "'$arg' cannot be extracted via extract()" 1>&2
192 | exit 1
193 | ;;
194 | esac
195 | cd - || exit $?
196 | exit $code
197 | }
198 |
199 | extract "$1"
200 |
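
Note: an illustration only of calling this helper script from Python and checking its exit status; the archive path is made up, and the bot's own caller is not shown here.

import subprocess

result = subprocess.run(["bash", "extract", "/usr/src/app/downloads/sample.tar.gz"])
if result.returncode == 0:
    print("extracted successfully")
else:
    print(f"extraction failed with exit code {result.returncode}")
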
--------------------------------------------------------------------------------
/generate_drive_token.py:
--------------------------------------------------------------------------------
1 | import pickle
2 | import os
3 | from google_auth_oauthlib.flow import InstalledAppFlow
4 | from google.auth.transport.requests import Request
5 |
6 | credentials = None
7 | __G_DRIVE_TOKEN_FILE = "token.pickle"
8 | __OAUTH_SCOPE = ["https://www.googleapis.com/auth/drive"]
9 | if os.path.exists(__G_DRIVE_TOKEN_FILE):
10 | with open(__G_DRIVE_TOKEN_FILE, 'rb') as f:
11 | credentials = pickle.load(f)
12 | if credentials is None or not credentials.valid:
13 |     if (
14 |         credentials
15 |         and credentials.expired
16 |         and credentials.refresh_token
17 |     ):
18 |         credentials.refresh(Request())
19 |     else:
20 |         flow = InstalledAppFlow.from_client_secrets_file(
21 |             'credentials.json', __OAUTH_SCOPE)
22 |         credentials = flow.run_console(port=0)
23 |
24 | # Save the credentials for the next run
25 | with open(__G_DRIVE_TOKEN_FILE, 'wb') as token:
26 | pickle.dump(credentials, token)
--------------------------------------------------------------------------------
/heroku.yml:
--------------------------------------------------------------------------------
1 | build:
2 | docker:
3 | web: Dockerfile
4 | run:
5 | web: bash start.sh
6 |
--------------------------------------------------------------------------------
/nodes.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # (c) YashDK [yash-dk@github]
3 |
4 | from anytree import NodeMixin, RenderTree, PreOrderIter
5 | import qbittorrentapi as qba
6 |
7 | SIZE_UNITS = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
8 |
9 | class TorNode(NodeMixin):
10 | def __init__(self, name, is_folder=False, is_file=False, parent=None, progress=None, size=None, priority=None, file_id=None):
11 | super().__init__()
12 | self.name = name
13 | self.is_folder = is_folder
14 | self.is_file = is_file
15 |
16 | if parent is not None:
17 | self.parent = parent
18 | if progress is not None:
19 | self.progress = progress
20 | if size is not None:
21 | self.size = size
22 | if priority is not None:
23 | self.priority = priority
24 | if file_id is not None:
25 | self.file_id = file_id
26 |
27 |
28 | def get_folders(path):
29 |     path_separator = "/"
30 |     return path.split(path_separator)
31 |
32 |
33 | def make_tree(res):
34 |     """This function takes the list of all the torrent files. The files are named hierarchically.
35 |     Documented here to save time.
36 |
37 | Args:
38 | res (list): Torrent files list.
39 |
40 | Returns:
41 | TorNode: Parent node of the tree constructed and can be used further.
42 | """
43 | parent = TorNode("Torrent")
44 | for l, i in enumerate(res):
45 | # Get the hierarchy of the folders by splitting based on '/'
46 | folders = get_folders(i.name)
47 |         # Check if the file is alone or if it's inside a folder
48 | if len(folders) > 1:
49 | # Enter here if in folder
50 |
51 | # Set the parent
52 | previous_node = parent
53 |
54 | # Traverse till second last assuming the last is a file.
55 | for j in range(len(folders)-1):
56 | current_node = None
57 |
58 | # As we are traversing the folder from top to bottom we are searching
59 | # the first folder (folders list) under the parent node in first iteration.
60 | # If the node is found then it becomes the current node else the current node
61 | # is left None.
62 | for k in previous_node.children:
63 | if k.name == folders[j]:
64 | current_node = k
65 | break
66 | # if the node is not found then create the folder node
67 | # if the node is found then use it as base for the next
68 | if current_node is None:
69 | previous_node = TorNode(folders[j],parent=previous_node,is_folder=True)
70 | else:
71 | previous_node = current_node
72 |             # at this point previous_node is the deepest folder, so add the file to it
73 | TorNode(folders[-1],is_file=True,parent=previous_node,progress=i.progress,size=i.size,priority=i.priority,file_id=l)
74 | else:
75 |             # add the file to the parent if there are no folders
76 | TorNode(folders[-1],is_file=True,parent=parent,progress=i.progress,size=i.size,priority=i.priority,file_id=l)
77 | return parent
78 |
79 |
80 | def print_tree(parent):
81 | for pre, _, node in RenderTree(parent):
82 | treestr = u"%s%s" % (pre, node.name)
83 | print(treestr.ljust(8), node.is_folder, node.is_file)
84 |
85 |
86 | def create_list(par, msg):
87 | if par.name != ".unwanted":
88 | msg[0] += ""
109 |
110 | def get_readable_file_size(size_in_bytes) -> str:
111 | if size_in_bytes is None:
112 | return '0B'
113 | index = 0
114 | while size_in_bytes >= 1024:
115 | size_in_bytes /= 1024
116 | index += 1
117 | try:
118 | return f'{round(size_in_bytes, 2)}{SIZE_UNITS[index]}'
119 | except IndexError:
120 | return 'File too large'
121 |
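
Note: a minimal usage sketch for make_tree, print_tree and get_readable_file_size above; FakeFile is a stand-in (an assumption for illustration) for the qbittorrent-api file objects, which expose name, progress, size and priority.

from dataclasses import dataclass

from nodes import make_tree, print_tree, get_readable_file_size

@dataclass
class FakeFile:
    name: str
    progress: float
    size: int
    priority: int

files = [
    FakeFile("Show/Season 1/E01.mkv", 1.0, 700 * 1024**2, 1),
    FakeFile("Show/Season 1/E02.mkv", 0.5, 700 * 1024**2, 1),
    FakeFile("readme.txt", 1.0, 2048, 0),
]

tree = make_tree(files)   # folders become intermediate nodes, files become leaves
print_tree(tree)
print(get_readable_file_size(files[0].size))  # 700.0MB
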
--------------------------------------------------------------------------------
/pextract:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [ $# -lt 1 ]; then
4 | echo "Usage: $(basename $0) FILES"
5 | exit 1
6 | fi
7 |
8 | extract() {
9 | arg="$1"
10 | pswd="$2"
11 | cd "$(dirname "$arg")" || exit
12 | case "$arg" in
13 | *.tar.bz2)
14 | tar xjf "$arg" --one-top-level
15 | local code=$?
16 | ;;
17 | *.tar.gz)
18 | tar xzf "$arg" --one-top-level
19 | local code=$?
20 | ;;
21 | *.bz2)
22 | bunzip2 "$arg"
23 | local code=$?
24 | ;;
25 | *.gz)
26 | gunzip "$arg"
27 | local code=$?
28 | ;;
29 | *.tar)
30 | tar xf "$arg" --one-top-level
31 | local code=$?
32 | ;;
33 | *.tbz2)
34 | (tar xjf "$arg" --one-top-level)
35 | local code=$?
36 | ;;
37 | *.tgz)
38 | tar xzf "$arg" --one-top-level
39 | local code=$?
40 | ;;
41 | *.tar.xz)
42 | a_dir=$(expr "$arg" : '\(.*\).tar.xz')
43 | 7z x "$arg" -o"$a_dir" -p"$pswd"
44 | local code=$?
45 | ;;
46 | *.zip)
47 | a_dir=$(expr "$arg" : '\(.*\).zip')
48 | 7z x "$arg" -o"$a_dir" -p"$pswd"
49 | local code=$?
50 | ;;
51 | *.7z)
52 | a_dir=$(expr "$arg" : '\(.*\).7z')
53 | 7z x "$arg" -o"$a_dir" -p"$pswd"
54 | local code=$?
55 | ;;
56 | *.Z)
57 | uncompress "$arg"
58 | local code=$?
59 | ;;
60 | *.rar)
61 | a_dir=$(expr "$arg" : '\(.*\).rar')
62 | mkdir "$a_dir"
63 | 7z x "$arg" -o"$a_dir" -p"$pswd"
64 | local code=$?
65 | ;;
66 | *.iso)
67 | a_dir=$(expr "$arg" : '\(.*\).iso')
68 | 7z x "$arg" -o"$a_dir" -p"$pswd"
69 | local code=$?
70 | ;;
71 | *.wim)
72 | a_dir=$(expr "$arg" : '\(.*\).wim')
73 | 7z x "$arg" -o"$a_dir" -p"$pswd"
74 | local code=$?
75 | ;;
76 | *.cab)
77 | a_dir=$(expr "$arg" : '\(.*\).cab')
78 | 7z x "$arg" -o"$a_dir" -p"$pswd"
79 | local code=$?
80 | ;;
81 | *.apm)
82 | a_dir=$(expr "$arg" : '\(.*\).apm')
83 | 7z x "$arg" -o"$a_dir" -p"$pswd"
84 | local code=$?
85 | ;;
86 | *.arj)
87 | a_dir=$(expr "$arg" : '\(.*\).arj')
88 | 7z x "$arg" -o"$a_dir" -p"$pswd"
89 | local code=$?
90 | ;;
91 | *.chm)
92 | a_dir=$(expr "$arg" : '\(.*\).chm')
93 | 7z x "$arg" -o"$a_dir" -p"$pswd"
94 | local code=$?
95 | ;;
96 | *.cpio)
97 | a_dir=$(expr "$arg" : '\(.*\).cpio')
98 | 7z x "$arg" -o"$a_dir" -p"$pswd"
99 | local code=$?
100 | ;;
101 | *.cramfs)
102 | a_dir=$(expr "$arg" : '\(.*\).cramfs')
103 | 7z x "$arg" -o"$a_dir" -p"$pswd"
104 | local code=$?
105 | ;;
106 | *.deb)
107 | a_dir=$(expr "$arg" : '\(.*\).deb')
108 | 7z x "$arg" -o"$a_dir" -p"$pswd"
109 | local code=$?
110 | ;;
111 | *.dmg)
112 | a_dir=$(expr "$arg" : '\(.*\).dmg')
113 | 7z x "$arg" -o"$a_dir" -p"$pswd"
114 | local code=$?
115 | ;;
116 | *.fat)
117 | a_dir=$(expr "$arg" : '\(.*\).fat')
118 | 7z x "$arg" -o"$a_dir" -p"$pswd"
119 | local code=$?
120 | ;;
121 | *.hfs)
122 | a_dir=$(expr "$arg" : '\(.*\).hfs')
123 | 7z x "$arg" -o"$a_dir" -p"$pswd"
124 | local code=$?
125 | ;;
126 | *.lzh)
127 | a_dir=$(expr "$arg" : '\(.*\).lzh')
128 | 7z x "$arg" -o"$a_dir" -p"$pswd"
129 | local code=$?
130 | ;;
131 | *.lzma)
132 | a_dir=$(expr "$arg" : '\(.*\).lzma')
133 | 7z x "$arg" -o"$a_dir" -p"$pswd"
134 | local code=$?
135 | ;;
136 | *.lzma2)
137 | a_dir=$(expr "$arg" : '\(.*\).lzma2')
138 | 7z x "$arg" -o"$a_dir" -p"$pswd"
139 | local code=$?
140 | ;;
141 | *.mbr)
142 | a_dir=$(expr "$arg" : '\(.*\).mbr')
143 | 7z x "$arg" -o"$a_dir" -p"$pswd"
144 | local code=$?
145 | ;;
146 | *.msi)
147 | a_dir=$(expr "$arg" : '\(.*\).msi')
148 | 7z x "$arg" -o"$a_dir" -p"$pswd"
149 | local code=$?
150 | ;;
151 | *.mslz)
152 | a_dir=$(expr "$arg" : '\(.*\).mslz')
153 | 7z x "$arg" -o"$a_dir" -p"$pswd"
154 | local code=$?
155 | ;;
156 | *.nsis)
157 | a_dir=$(expr "$arg" : '\(.*\).nsis')
158 | 7z x "$arg" -o"$a_dir" -p"$pswd"
159 | local code=$?
160 | ;;
161 | *.ntfs)
162 | a_dir=$(expr "$arg" : '\(.*\).ntfs')
163 | 7z x "$arg" -o"$a_dir" -p"$pswd"
164 | local code=$?
165 | ;;
166 | *.rpm)
167 | a_dir=$(expr "$arg" : '\(.*\).rpm')
168 | 7z x "$arg" -o"$a_dir" -p"$pswd"
169 | local code=$?
170 | ;;
171 | *.squashfs)
172 | a_dir=$(expr "$arg" : '\(.*\).squashfs')
173 | 7z x "$arg" -o"$a_dir" -p"$pswd"
174 | local code=$?
175 | ;;
176 | *.udf)
177 | a_dir=$(expr "$arg" : '\(.*\).udf')
178 | 7z x "$arg" -o"$a_dir" -p"$pswd"
179 | local code=$?
180 | ;;
181 | *.vhd)
182 | a_dir=$(expr "$arg" : '\(.*\).vhd')
183 | 7z x "$arg" -o"$a_dir" -p"$pswd"
184 | local code=$?
185 | ;;
186 | *.xar)
187 | a_dir=$(expr "$arg" : '\(.*\).xar')
188 | 7z x "$arg" -o"$a_dir" -p"$pswd"
189 | local code=$?
190 | ;;
191 | *)
192 | echo "'$arg' cannot be extracted via extract()" 1>&2
193 | exit 1
194 | ;;
195 | esac
196 | cd - || exit $?
197 | exit $code
198 | }
199 |
200 | extract "$1" "$2"
201 |
--------------------------------------------------------------------------------
/qBittorrent.conf:
--------------------------------------------------------------------------------
1 | [AutoRun]
2 | enabled=true
3 | program=
4 |
5 | [LegalNotice]
6 | Accepted=true
7 |
8 | [BitTorrent]
9 | Session\AsyncIOThreadsCount=8
10 | Session\SlowTorrentsDownloadRate=100
11 | Session\SlowTorrentsInactivityTimer=600
12 |
13 | [Preferences]
14 | Advanced\AnnounceToAllTrackers=true
15 | Advanced\AnonymousMode=false
16 | Advanced\IgnoreLimitsLAN=true
17 | Advanced\RecheckOnCompletion=true
18 | Advanced\LtTrackerExchange=true
19 | Bittorrent\MaxConnecs=3000
20 | Bittorrent\MaxConnecsPerTorrent=500
21 | Bittorrent\DHT=true
22 | Bittorrent\DHTPort=6881
23 | Bittorrent\PeX=true
24 | Bittorrent\LSD=true
25 | Bittorrent\sameDHTPortAsBT=true
26 | Downloads\DiskWriteCacheSize=32
27 | Downloads\PreAllocation=true
28 | Downloads\UseIncompleteExtension=true
29 | General\PreventFromSuspendWhenDownloading=true
30 | Queueing\IgnoreSlowTorrents=true
31 | Queueing\MaxActiveDownloads=15
32 | Queueing\MaxActiveTorrents=50
33 | Queueing\QueueingEnabled=false
34 | WebUI\Enabled=true
35 | WebUI\Port=8090
36 |
--------------------------------------------------------------------------------
/requirements-cli.txt:
--------------------------------------------------------------------------------
1 | oauth2client
2 | google-api-python-client<=2.70.0
3 | progress
4 | progressbar2
5 | httplib2shim
6 | google_auth_oauthlib
7 | pyrogram==1.4.16
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiohttp<=3.8.3
2 | anytree<=2.8.0
3 | aria2p<=0.11.3
4 | appdirs<=1.4.4
5 | attrdict<=2.0.1
6 | beautifulsoup4<=4.10.0
7 | cloudscraper<=1.2.66
8 | feedparser<=6.0.10
9 | git+https://github.com/breakdowns/lk21
10 | google-api-python-client<=2.70.0
11 | google-auth-httplib2
12 | google-auth-oauthlib
13 | gunicorn<=20.1.0
14 | hachoir
15 | js2py
16 | lxml<=4.8.0
17 | pillow<=9.3.0
18 | psutil
19 | psycopg2-binary
20 | pybase64
21 | pyrogram==1.4.16
22 | pyshorteners
23 | python-dotenv
24 | python-magic
25 | python-telegram-bot==13.15
26 | qbittorrent-api
27 | requests
28 | speedtest-cli
29 | telegraph<=2.2.0
30 | tenacity
31 | TgCrypto
32 | torrentool==1.1.0
33 | urllib3<=1.26.15
34 | youtube_dl
35 |
--------------------------------------------------------------------------------
/start.sh:
--------------------------------------------------------------------------------
1 | ./aria.sh; python3 -m bot
2 |
--------------------------------------------------------------------------------