├── README.md
├── __pycache__
│   ├── instances.cpython-310.pyc
│   ├── instances.cpython-312.pyc
│   ├── instances.cpython-313.pyc
│   ├── spammer.cpython-310.pyc
│   ├── spammer.cpython-312.pyc
│   └── spammer.cpython-313.pyc
├── api_key.txt
├── instances.py
├── requirements.txt
├── spaminput.py
├── spammer.py
├── webhook_log.txt
└── webscraper.py

/README.md:
--------------------------------------------------------------------------------
1 | # Triage Webscraper
2 | ![](https://img.shields.io/badge/3.10.0%20and%20higher%20recommended!-ffa6ed?style=for-the-badge&color=ffa6ed&labelColor=ff6ee9&label=:3)
3 | ![](https://img.shields.io/badge/LOVE%20YOU%20LULULEPU-ffa6ed?style=for-the-badge&color=ffa6ed&labelColor=ff6ee9&label=:3)
4 | ![](https://img.shields.io/badge/PUT%20WEBHOOK%20IN%20webhook_log.txt!-ffa6ed?style=for-the-badge&color=ffa6ed&labelColor=ff6ee9&label=:3)
5 | ![](https://img.shields.io/badge/PUT%20API%20KEY%20IN%20api_key.txt!-ffa6ed?style=for-the-badge&color=ffa6ed&labelColor=ff6ee9&label=:3)
6 | 

7 | A webscraper for the website https://tria.ge that watches new malware submissions! If a submission scores above 6 and belongs to a known malware family, it is automatically decompiled.
8 | 
9 | ## Features
10 | 
11 | - **Decompiling**: Automatically extracts the malware config (Discord webhook, Discord bot token) (THANKS TO lululepu!!)
12 | - **Nice Design**: Clean, colorful console design overall
13 | - **Auto-ignoring certain malware families**: If any of these malware families is detected, the submission is skipped: 'asyncrat', 'atomsilo', 'blackmatter', 'cerber', 'urelas', 'xmrig', 'metasploit', 'xworm', 'cryptbot', 'cyrat', 'acobaltstrike', 'umbral', 'blacknet', 'berbew', 'blackmoon', 'emotet', 'mydoom', 'neshta', 'doomrat', 'shadowrat'
14 | 
15 | ## Getting Started
16 | 
17 | ### Prerequisites
18 | 
19 | - **Python** (v3.10.0 or later recommended)
20 | - **Triage API Key** (only needed for decompiling, not for webscraping)
21 | - **INFO** Put your API key on the first line of "api_key.txt". Nothing else, just the API key.
22 | - **INFO** Put your webhook on the first line of "webhook_log.txt". Nothing else, just the webhook.
23 | ### Installation
24 | 
25 | 1. **Clone the Repository:**
26 |    ```bash
27 |    git clone https://github.com/monokaiidev/triage-webscraper.git
28 |    cd triage-webscraper
29 |    ```
30 | 
31 | 2. **Install Dependencies:**
32 |    ```bash
33 |    pip install -r requirements.txt
34 |    ```
35 | 
36 | ### Usage
37 | 1. **Run the Scraper**:
38 |    ```bash
39 |    python webscraper.py
40 |    ```
41 | 
--------------------------------------------------------------------------------
/__pycache__/instances.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nettproxy/triage-webscraper/1337bb58fd212995aa51f7ea54f60a7721ba04ad/__pycache__/instances.cpython-310.pyc
--------------------------------------------------------------------------------
/__pycache__/instances.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nettproxy/triage-webscraper/1337bb58fd212995aa51f7ea54f60a7721ba04ad/__pycache__/instances.cpython-312.pyc
--------------------------------------------------------------------------------
/__pycache__/instances.cpython-313.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nettproxy/triage-webscraper/1337bb58fd212995aa51f7ea54f60a7721ba04ad/__pycache__/instances.cpython-313.pyc
--------------------------------------------------------------------------------
/__pycache__/spammer.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nettproxy/triage-webscraper/1337bb58fd212995aa51f7ea54f60a7721ba04ad/__pycache__/spammer.cpython-310.pyc
--------------------------------------------------------------------------------
/__pycache__/spammer.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nettproxy/triage-webscraper/1337bb58fd212995aa51f7ea54f60a7721ba04ad/__pycache__/spammer.cpython-312.pyc
--------------------------------------------------------------------------------
/__pycache__/spammer.cpython-313.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nettproxy/triage-webscraper/1337bb58fd212995aa51f7ea54f60a7721ba04ad/__pycache__/spammer.cpython-313.pyc
--------------------------------------------------------------------------------
/api_key.txt:
--------------------------------------------------------------------------------
1 | YOUR_API_KEY_HERE
2 | 
--------------------------------------------------------------------------------
/instances.py:
--------------------------------------------------------------------------------
1 | import os, time
2 | import colorama
3 | from colorama import Fore
4 | 
5 | colorama.init(autoreset=True)
6 | 
7 | def info(text):
8 |     print(f"{Fore.CYAN}[INFO]{Fore.WHITE} > {text}")
9 | 
10 | def error(text):
11 |     print(f"{Fore.RED}[ERROR]{Fore.WHITE} > {text}")
12 | 
13 | def success(text):
14 |     print(f"{Fore.LIGHTGREEN_EX}[SUCCESS]{Fore.WHITE} > {text}")
15 | 
16 | def warning(text):
17 |     print(f"{Fore.YELLOW}[WARNING]{Fore.WHITE} > {text}")
18 | 
19 | def newlog(text):
20 |     print(f"{Fore.GREEN}[{Fore.LIGHTGREEN_EX}NEW LOG{Fore.GREEN}]{Fore.WHITE} > {text}")
21 | 
22 | def newlog1(text):
23 |     print(f"{Fore.GREEN}[{Fore.RED}NEW LOG{Fore.GREEN}]{Fore.WHITE} > {text}")
24 | 
25 | def monokai(text):
26 |     print(f"{Fore.LIGHTYELLOW_EX}[MONOKAI]{Fore.WHITE} > {text}")
27 | 
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | bs4
2 | colorama
3 | requests
--------------------------------------------------------------------------------
/spaminput.py:
--------------------------------------------------------------------------------
1 | from spammer import spam_messages
2 | 
3 | spam_messages(input())
4 | 
--------------------------------------------------------------------------------
/spammer.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from colorama import Fore, Style, init
3 | import time
4 | import threading, random
5 | from datetime import datetime
6 | from instances import info, success, error, warning, newlog, monokai
7 | 
8 | init(autoreset=True)
9 | 
10 | data = {
11 |     "content": "# @everyone MONOKAI NUKED UR WEBHOOK https://github.com/monokaiidev/triage-webscraper :rofl:"
12 | }
13 | 
14 | request_count = 0
15 | max_requests = 30
16 | lock = threading.Lock()
17 | stop_spamming = False
18 | threads = 10
19 | 
20 | def spam_messages(webhook_url):
21 |     global request_count, stop_spamming
22 | 
23 |     while not stop_spamming:
24 |         try:
25 |             r = requests.post(webhook_url, data=data)
26 |             with lock:
27 |                 if r.status_code == 200 or r.status_code == 204:
28 |                     request_count += 1
29 |                     monokai(f"Sent Message to your webhook! ({request_count}/{max_requests})")
30 | 
31 |                     if request_count >= max_requests:
32 |                         warning("Maximum number of messages reached! Stopping spamming!")
Stopping spamming!") 33 | monokai("Deleting webhook...") 34 | 35 | rrrq = requests.delete(webhook_url) 36 | if rrrq.status_code == 204: 37 | success("Successfully deleted webhook!") 38 | else: 39 | error("Couldn't delete webhook!") 40 | 41 | time.sleep(1) 42 | request_count = 0 43 | stop_spamming = True 44 | break 45 | 46 | elif r.status_code == 429: 47 | retry_after = r.headers.get("Retry-After") 48 | if retry_after: 49 | wait_time = int(retry_after) / 1000 50 | error(f"Rate limited on {webhook_url}. Waiting for {wait_time} seconds...") 51 | time.sleep(wait_time) 52 | else: 53 | reset_timestamp = int(r.headers.get("X-RateLimit-Reset", 0)) 54 | reset_time = datetime.utcfromtimestamp(reset_timestamp) 55 | current_time = datetime.utcnow() 56 | wait_time = (reset_time - current_time).total_seconds() 57 | print(f"Rate limited on {webhook_url}. Waiting until {reset_time} UTC ({wait_time} seconds)...") 58 | time.sleep(wait_time) 59 | 60 | else: 61 | stop_spamming = True 62 | break 63 | 64 | except Exception as e: 65 | print(f"Error occurred: {str(e)}") 66 | 67 | def start_spamming(webhook_url): 68 | global stop_spamming, request_count 69 | stop_spamming = False 70 | request_count = 0 71 | 72 | # Creating a pool of threads to increase concurrency 73 | threads_list = [] 74 | for _ in range(threads): 75 | spam_thread = threading.Thread(target=spam_messages, args=(webhook_url,)) 76 | threads_list.append(spam_thread) 77 | spam_thread.start() 78 | 79 | # Wait for all threads to finish 80 | for thread in threads_list: 81 | thread.join() 82 | 83 | 84 | -------------------------------------------------------------------------------- /webhook_log.txt: -------------------------------------------------------------------------------- 1 | YOUR_WEBHOOK_HERE 2 | -------------------------------------------------------------------------------- /webscraper.py: -------------------------------------------------------------------------------- 1 | from colorama import init, Fore, Style 2 | import random 3 | import os 4 | import requests 5 | from datetime import datetime, UTC 6 | import time, webbrowser 7 | from bs4 import BeautifulSoup 8 | from concurrent.futures import ThreadPoolExecutor 9 | import asyncio 10 | import re 11 | from spammer import spam_messages 12 | 13 | init(autoreset=True) 14 | 15 | current_time = datetime.now().strftime("%H:%M:%S") 16 | 17 | test = "tes2t" 18 | os.system("cls") 19 | print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.GREEN}Welcome to the Triage Webscraper! {Fore.WHITE}[{Fore.GREEN}200{Fore.WHITE}]{Style.RESET_ALL} 🎄") 20 | time.sleep(2) 21 | webbrowser.open("https://github.com/monokaiidev") 22 | 23 | 24 | main_webhook = open("webhook_log.txt").read().strip() 25 | 26 | def check_webhook(): 27 | current_time = datetime.now().strftime("%H:%M:%S") 28 | if main_webhook == "YOUR_WEBHOOK_HERE": 29 | print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.RED}No webhook specified! 
30 |         exit(1)
31 |     else:
32 |         webhook_req = requests.get(main_webhook)
33 |         if webhook_req.status_code == 200:
34 |             print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.GREEN}Valid webhook {Fore.WHITE}[{Fore.GREEN}200{Fore.WHITE}]{Style.RESET_ALL} 🎄")
35 |         else:
36 |             print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.RED}Invalid webhook {Fore.WHITE}[{Fore.RED}404{Fore.WHITE}]{Style.RESET_ALL} 🎄")
37 |             exit(1)
38 | check_webhook()
39 | 
40 | triage_url = "https://tria.ge/s?q=score:10 AND tag:pyinstaller or family:blankgrabber or family:discordrat or family:pysilon&limit=1"
41 | processed_ids = set()
42 | triage_api_key = open("api_key.txt").read().strip().replace("Bearer", "")
43 | triage_get_request = requests.get(f'https://tria.ge/api/v0/samples/241105-q57r2ashqn/sample', headers={"Authorization": f"Bearer {triage_api_key}"})
44 | if triage_get_request.status_code != 200:
45 |     print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.GREEN}Connecting to API... {Fore.WHITE}[{Fore.GREEN}200{Fore.WHITE}]{Style.RESET_ALL} 🎄")
46 |     time.sleep(3)
47 |     print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.GREEN}Trying to connect to API... {Fore.WHITE}[{Fore.RED}500{Fore.WHITE}]{Style.RESET_ALL} 🎄")
48 |     time.sleep(3)
49 |     print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.RED}Invalid API Key in api_key.txt{Style.RESET_ALL} 🎄")
50 |     time.sleep(0.5)
51 |     print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.RED}Decompiling feature unavailable{Style.RESET_ALL} ❄️")
52 |     time.sleep(3)
53 | else:
54 |     print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.GREEN}Connecting to API... {Fore.WHITE}[{Fore.GREEN}200{Fore.WHITE}]{Style.RESET_ALL} 🎄")
55 |     time.sleep(3)
56 |     print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.GREEN}API Connected!{Style.RESET_ALL} 🎅")
57 |     time.sleep(0.5)
58 |     print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.GREEN}Decompiler Ready{Style.RESET_ALL} ❄️")
59 |     time.sleep(3)
60 | 
61 | current_time = datetime.now().strftime("%H:%M:%S")
62 | 
63 | def decompile_file(file_content):
64 |     current_time = datetime.now().strftime("%H:%M:%S")
65 |     try:
66 |         if triage_get_request.status_code != 200:
67 |             print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.RED}Cannot decompile: Invalid or missing API key{Style.RESET_ALL} 🎄")
68 |             return None
69 | 
70 |         ilikeblack = requests.post('https://lululepu.fr/ungrabber', files={'file': file_content})
71 |         response2 = ilikeblack.json()
72 | 
73 |         if response2 and 'result' in response2:
74 |             return response2['result']
75 |         return None
76 |     except Exception as e:
77 |         print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.RED}Decompiling error: {e}{Style.RESET_ALL} 🎄")
78 |         return None
79 | 
80 | async def process_submission(report_id, file_name, fams, score, time_uploaded, tags):
81 |     current_time = datetime.now().strftime("%H:%M:%S")
82 |     if fams in ['asyncrat', 'atomsilo', 'blackmatter', 'cerber', 'urelas', 'xmrig', 'metasploit', 'xworm', 'cryptbot', 'cyrat', 'acobaltstrike', 'umbral', 'blacknet', 'berbew', 'blackmoon', 'emotet', 'mydoom', 'neshta', 'doomrat', 'shadowrat']:
83 |         print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.RED}Blacklisted: {fams}{Style.RESET_ALL}")
84 |         print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.RED}File: {file_name}{Style.RESET_ALL}")
85 |         os.system("cls")
86 |         return
87 | 
88 |     if file_name.endswith((".zip", ".rar", ".7z", ".tar", ".sh", ".bat")):
89 |         print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.WHITE}This extension is not supported. Skipping file{Style.RESET_ALL}")
print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.WHITE}This extension is not supported. Skipping file{Style.RESET_ALL}") 90 | time.sleep(3) 91 | os.system("cls") 92 | return 93 | 94 | try: 95 | print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.WHITE}Decompiling...{Style.RESET_ALL}") 96 | file_content = requests.get(f'https://tria.ge/api/v0/samples/{report_id}/sample', headers={"Authorization": f"Bearer {triage_api_key}"}).content 97 | 98 | def contains_base64(result): 99 | import re 100 | base64_pattern = r'^[A-Za-z0-9+/]*={0,2}$' 101 | return bool(re.match(base64_pattern, result)) 102 | 103 | result = decompile_file(file_content) 104 | if result: 105 | if "webhook" in result: 106 | webhook_test = requests.get(result) 107 | if webhook_test.status_code == 200: 108 | print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.WHITE}Valid webhook! {Fore.RED}{result}{Style.RESET_ALL} 🎄") 109 | os.system("cls") 110 | spam_messages(result, 2) 111 | requests.post(main_webhook, data={"content": f"🎅 **New Valid Webhook** 🎄\n`{result}` ❄️ @everyone"}) 112 | requests.post(main_webhook, data={"content": f"🎅 **New Valid Webhook** 🎄\n`{result}` ❄️ @everyone"}) 113 | else: 114 | print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.WHITE}Invalid webhook: {Fore.RED}{result}{Style.RESET_ALL} 🎄") 115 | requests.post(main_webhook, data={"content": f"🎅 **Invalid Webhook** 🎄\n`{result}` ❄️ @everyone"}) 116 | elif contains_base64(result): 117 | print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.GREEN}Successfully decompiled Base64 string: {result}{Style.RESET_ALL} 🎄") 118 | requests.post(main_webhook, data={"content": f"🎅 **New Token** 🎄\n`{result}` ❄️ @everyone"}) 119 | 120 | except Exception as e: 121 | print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.RED}Error fetching or processing file: {e}{Style.RESET_ALL} 🎄") 122 | 123 | def check_for_new_submissions(): 124 | global processed_ids 125 | 126 | response = requests.get(triage_url) 127 | if response.status_code == 200: 128 | soup = BeautifulSoup(response.text, 'html.parser') 129 | report_items = soup.find_all('a', class_='row alert') 130 | with ThreadPoolExecutor(max_workers=10) as executor: 131 | for report in report_items: 132 | report_id = report['data-sample-id'] 133 | if report_id in processed_ids: 134 | continue 135 | 136 | file_name = "Unknown file" 137 | score = None 138 | tags = [] 139 | 140 | file_name_div = report.find('div', class_='column-target') 141 | if file_name_div: 142 | file_name = file_name_div.text.strip() 143 | 144 | score_div = report.find('div', class_='column-score') 145 | if score_div: 146 | score_text = score_div.find('div', class_='score').text 147 | score = float(score_text.strip()) if score_text else None 148 | 149 | tags_div = report.find('div', class_='column-tags') 150 | if tags_div: 151 | tags = [tag.text.strip() for tag in tags_div.find_all('span')] 152 | 153 | time_uploaded = report.find('div', class_="column-created").text.replace('UTC', '\n').strip() 154 | 155 | fams_element = report.find('span', class_="rose") 156 | fams = fams_element.text if fams_element else "No family" 157 | 158 | os.system("cls") 159 | 160 | if score is not None and score > 6: 161 | log_entries = [ 162 | ("🎄 ID:", f"{Fore.CYAN}{report_id}{Style.RESET_ALL}"), 163 | ("🎅 Name:", f"{Fore.WHITE}{file_name}{Style.RESET_ALL}"), 164 | ("⭐ Score:", f"{Fore.RED if score > 8 else Fore.WHITE}{score}{Style.RESET_ALL}"), 165 | ("🎁 Family:", f"{Fore.MAGENTA}{fams}{Style.RESET_ALL}"), 166 | ("❄️ Tags:", f"{Fore.BLUE}{', 
167 |                         ("🔔 Time:", f"{Fore.GREEN}{time_uploaded}{Style.RESET_ALL}")
168 |                     ]
169 | 
170 |                     box_width = 54
171 |                     BORDER = Fore.BLUE + Style.BRIGHT
172 |                     TITLE = Fore.CYAN + Style.BRIGHT
173 |                     VALUE = Fore.WHITE
174 |                     SCORE_HIGH = Fore.RED + Style.BRIGHT
175 |                     SCORE_MED = Fore.YELLOW + Style.BRIGHT
176 | 
177 |                     print(f"{BORDER}┌{'─' * (box_width-2)}┐{Style.RESET_ALL}")
178 |                     for title, value in log_entries:
179 |                         if title == "❄️ Tags:":
180 |                             max_tag_length = 25
181 |                             tags_str = value
182 |                             if len(tags_str) > max_tag_length:
183 |                                 tags_str = value[:max_tag_length] + f"{Fore.CYAN}...{Style.RESET_ALL}"
184 | 
185 |                             padding = box_width - len(title) - len(tags_str) - 4
186 |                             if padding < 0: padding = 0
187 |                             print(f"{BORDER} {TITLE}{title}{Style.RESET_ALL} {VALUE}{tags_str}{' ' * padding} {BORDER}{Style.RESET_ALL}")
188 |                         else:
189 |                             padding = box_width - len(title) - len(value) - 4
190 |                             print(f"{BORDER} {TITLE}{title}{Style.RESET_ALL} {VALUE}{value}{Style.RESET_ALL}{' ' * padding} {BORDER}{Style.RESET_ALL}")
191 |                     print(f"{BORDER}└{'─' * (box_width-2)}┘{Style.RESET_ALL}\n")
192 | 
193 |                     random_color = random.choice([
194 |                         0x00FFFF,
195 |                         0x4169E1,
196 |                         0xFF1493,
197 |                         0x32CD32,
198 |                         0x9370DB,
199 |                         0x00CED1,
200 |                     ])
201 | 
202 |                     logEmbed = {
203 |                         "title": f"🎄 New Malware Submission Detected 🎄",
204 |                         "description": f"**Report ID:** {report_id}",
205 |                         "fields": [
206 |                             {"name": "🎅 File Name", "value": f"`{file_name}`", "inline": True},
207 |                             {"name": "⭐ Risk Score", "value": f"`{score}/10`", "inline": True},
208 |                             {"name": "🎁 Family", "value": f"`{fams}`", "inline": True},
209 |                             {"name": "❄️ Tags", "value": f"`{', '.join(tags) if tags else 'No tags'}`"},
210 |                             {"name": "🔔 Upload Time", "value": f"`{time_uploaded}`", "inline": True}
211 |                         ],
212 |                         "color": random_color,
213 |                         "footer": {"text": "🎅 Triage Webscraper • Made with ❤️"},
214 |                         "timestamp": datetime.now(timezone.utc).isoformat()
215 |                     }
216 |                     try:
217 |                         requests.post(main_webhook, json={"embeds": [logEmbed]})
218 |                     except Exception as e:
219 |                         print(e)
220 | 
221 |                 executor.submit(lambda rid=report_id, name=file_name, fam=fams, sc=score, tu=time_uploaded, tg=tags: asyncio.run(process_submission(rid, name, fam, sc, tu, tg)))  # bind loop variables now, not when the worker thread runs
222 | 
223 |                 processed_ids.add(report_id)
224 | 
225 |                 if 'discordrat' in tags:
226 |                     print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.GREEN}Found DiscordRAT in submission{Style.RESET_ALL} 🎅 https://tria.ge/{report_id}")
227 |                     requests.post(main_webhook, data={"content": f"🎄 **DiscordRAT** 🎅\nDiscordRAT Found in Submission {report_id}! ❄️ @everyone"})
228 | 
229 | def main():
230 |     try:
231 |         print(f"{Fore.GREEN}{'='*20} TRIAGE WEBSCRAPER {'='*20}{Style.RESET_ALL}")
232 |         print(f"{Fore.CYAN}Starting webscraper...{Style.RESET_ALL}")
233 |         while True:
234 |             check_for_new_submissions()
235 |             print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.WHITE}Scanning for new submissions...{Style.RESET_ALL}", end='\r')
236 |             time.sleep(0.5)
237 |     except KeyboardInterrupt:
238 |         print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.RED}Exiting...{Style.RESET_ALL}")
239 |         exit(0)
240 |     except Exception as e:
241 |         print(f"[{Fore.CYAN}{current_time}{Style.RESET_ALL}] {Fore.RED}An error occurred: {e}{Style.RESET_ALL}")
242 |         exit(1)
243 | 
244 | if __name__ == "__main__":
245 |     main()
246 | 
--------------------------------------------------------------------------------