├── data.txt
├── bot
├── core
│ ├── __init__.py
│ ├── headers.py
│ ├── agents.py
│ ├── registrator.py
│ ├── query.py
│ └── tapper.py
├── __init__.py
├── config
│ ├── __init__.py
│ ├── proxies.txt
│ └── config.py
├── exceptions
│ └── __init__.py
└── utils
│ ├── __init__.py
│ ├── logger.py
│ ├── ps.py
│ └── launcher.py
├── .gitignore
├── proxy.json
├── docker-compose.yml
├── main.py
├── Dockerfile
├── .env-example
├── setting.md
├── requirements.txt
├── run.bat
├── run.sh
├── LICENSE
└── README.md
/data.txt:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bot/core/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/bot/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = '1.0'
2 |
--------------------------------------------------------------------------------
/bot/config/__init__.py:
--------------------------------------------------------------------------------
1 | from .config import settings
2 |
--------------------------------------------------------------------------------
/bot/exceptions/__init__.py:
--------------------------------------------------------------------------------
class InvalidSession(BaseException):
    """Signals that a Telegram session cannot be used and should be discarded.

    NOTE: derives from BaseException (not Exception), so generic
    `except Exception` handlers will not accidentally swallow it.
    """
    pass
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .env
2 | sessions
3 | data.txt
4 | __pycache__
5 | user_agents.json
6 | bot/config/proxies.txt
7 |
--------------------------------------------------------------------------------
/proxy.json:
--------------------------------------------------------------------------------
1 | {
2 | "session file name(session mode) or telegram username(query mode)": "type:user:pass@ip:port"
3 | }
4 |
--------------------------------------------------------------------------------
/bot/config/proxies.txt:
--------------------------------------------------------------------------------
1 | type://user:pass@ip:port
2 | type://user:pass:ip:port
3 | type://ip:port:user:pass
4 | type://ip:port@user:pass
5 | type://ip:port
--------------------------------------------------------------------------------
/bot/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .logger import logger
2 | from . import launcher
3 |
4 |
import os

# Ensure the sessions directory exists before any session file is created.
# os.makedirs with exist_ok=True is race-free, unlike the previous
# os.path.exists() check followed by os.mkdir() (TOCTOU).
os.makedirs("sessions", exist_ok=True)
9 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 | services:
3 | bot:
4 | container_name: 'SeedMiningBot'
5 | build:
6 | context: .
7 | stop_signal: SIGINT
8 | restart: unless-stopped
9 | command: "python3 main.py -a 1"
10 | volumes:
11 | - .:/app
12 |
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from contextlib import suppress
3 |
4 | from bot.utils.launcher import process
5 |
6 |
async def main():
    """Entry coroutine: delegate everything to the launcher's dispatcher."""
    await process()


if __name__ == '__main__':
    # Suppress Ctrl+C so a manual stop exits cleanly without a traceback.
    with suppress(KeyboardInterrupt):
        asyncio.run(main())
14 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
FROM python:3.10.11-alpine3.18

# Use an absolute path: a relative WORKDIR resolves against the previous
# working directory and is fragile; /app also matches the docker-compose
# volume mount (.:/app).
WORKDIR /app

# Copy requirements first so the dependency layer is cached independently
# of source-code changes.
COPY requirements.txt requirements.txt

RUN pip3 install --upgrade pip setuptools wheel
RUN pip3 install --no-warn-script-location --no-cache-dir -r requirements.txt

COPY . .

CMD ["python3", "main.py", "-a", "1"]
13 |
--------------------------------------------------------------------------------
/bot/utils/logger.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from loguru import logger
3 |
4 |
# Replace loguru's default sink with a single custom-formatted stdout sink.
logger.remove()
logger.add(sink=sys.stdout, format="[Seed] | {time:YYYY-MM-DD HH:mm:ss}"
                                   " | {level: <8}"
                                   " | {line}"
                                   " - {message}")
# Re-bind the module-level name so every subsequent call parses loguru
# color markup (e.g. <green>...</green>) embedded in log messages.
logger = logger.opt(colors=True)
11 |
--------------------------------------------------------------------------------
/.env-example:
--------------------------------------------------------------------------------
1 | API_ID=
2 | API_HASH=
3 |
4 | REF_LINK=
5 |
6 |
7 |
8 | AUTO_UPGRADE_STORAGE=
9 | AUTO_UPGRADE_MINING=
10 | AUTO_UPGRADE_HOLY=
11 | AUTO_CLEAR_TASKS=
12 | AUTO_START_HUNT=
13 |
14 | AUTO_SPIN=
15 | SPIN_PER_ROUND=
16 | AUTO_FUSION=
17 | MAXIMUM_PRICE_TO_FUSION_COMMON=
18 | MAXIMUM_PRICE_TO_FUSION_UNCOMMON=
19 | MAXIMUM_PRICE_TO_FUSION_RARE=
20 | MAXIMUM_PRICE_TO_FUSION_EPIC=
21 | MAXIMUM_PRICE_TO_FUSION_LEGENDARY=
22 |
23 | AUTO_SELL_WORMS=
24 | QUANTITY_TO_KEEP=
25 |
26 | ADVANCED_ANTI_DETECTION=
27 |
28 | USE_PROXY_FROM_FILE=
29 |
--------------------------------------------------------------------------------
/bot/core/headers.py:
--------------------------------------------------------------------------------
# Baseline HTTP headers imitating Telegram's in-app Android WebView.
# NOTE(review): 'telegram-data' holds the placeholder "tokens" and the
# user-agent is a desktop string — presumably both are overwritten per
# session at runtime; confirm against the tapper request setup.
headers = {
    'accept': '*/*',
    'accept-language': 'en-US;q=0.9,en;q=0.8,id;q=0.7',
    'origin': 'https://cf.seeddao.org',
    'priority': 'u=1, i',
    'referer': 'https://cf.seeddao.org/',
    'sec-ch-ua-mobile': '?1',
    'sec-ch-ua-platform': '"Android"',
    'sec-fetch-dest': 'empty',
    'sec-fetch-mode': 'cors',
    'x-requested-with': "org.telegram.messenger",
    'sec-fetch-site': 'same-site',
    'telegram-data': 'tokens',
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36'
}
16 |
--------------------------------------------------------------------------------
/setting.md:
--------------------------------------------------------------------------------
1 | # Open setting [here](https://github.com/vanhbakaa/Seed-App-Mine-Seed-BOT-Telegram/blob/main/bot/config/config.py)
2 |
3 | | Settings | Description |
4 | |-------------------------|:---------------------------------------------------------------------------:|
5 | | **quantity_to_keep** | How many worms to keep (set -1 to keep all) |
6 | | **sale_price** | Price to put it on sale (set 0 it will auto get market price) |
7 |
8 | # This is default setting:
9 | 
10 |
11 |
12 |
--------------------------------------------------------------------------------
/bot/core/agents.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 | import ua_generator
4 | from ua_generator.options import Options
5 | from ua_generator.data.version import VersionRange
6 |
def generate_random_user_agent(device_type='android', browser_type='chrome'):
    """Produce a random user-agent string for the given platform/browser.

    The Chrome major version is constrained to the 117–130 window.
    """
    version_window = VersionRange(min_version=117, max_version=130)
    generation_options = Options(version_ranges={'chrome': version_window})
    generated = ua_generator.generate(platform=device_type, browser=browser_type, options=generation_options)
    return generated.text
12 |
13 |
def fetch_version(ua):
    """Return the major Chrome version found in *ua*, or None if absent."""
    found = re.search(r"Chrome/(\d+)", ua)
    return found.group(1) if found else None
22 |
23 |
24 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiocfscrape==1.0.0
2 | aiohttp==3.9.3
3 | aiohttp-proxy==0.1.2
4 | aiosignal==1.3.1
5 | annotated-types==0.6.0
6 | async-timeout==4.0.3
7 | attrs==23.2.0
8 | beautifulsoup4==4.12.3
9 | better-proxy==1.1.5
10 | colorama==0.4.6
11 | DateTime==5.5
12 | frozenlist==1.4.1
13 | idna==3.6
14 | Js2Py==0.74
15 | loguru==0.7.2
16 | multidict==6.0.5
17 | pyaes==1.6.1
18 | pydantic==2.6.4
19 | pydantic-settings==2.2.1
20 | pydantic_core==2.16.3
21 | pyjsparser==2.7.1
22 | Pyrogram==2.0.106
23 | PySocks==1.7.1
24 | python-dotenv==1.0.1
25 | pytz==2024.1
26 | six==1.16.0
27 | soupsieve==2.5
28 | TgCrypto==1.2.5
29 | typing_extensions==4.11.0
30 | tzdata==2024.1
31 | tzlocal==5.2
32 | websockets==12.0
33 | win32-setctime==1.1.0
34 | yarl==1.9.4
35 | zope.interface==6.4.post2
36 | requests==2.32.3
37 | python-dateutil==2.9.0
38 | aiofile==3.9.0
39 | ua_generator==1.0.6
--------------------------------------------------------------------------------
/bot/core/registrator.py:
--------------------------------------------------------------------------------
1 | from pyrogram import Client
2 |
3 | from bot.config import settings
4 | from bot.utils import logger
5 |
6 |
async def register_sessions() -> None:
    """Interactively create a new pyrogram session file under sessions/.

    Prompts for a session name on stdin; an empty name aborts quietly.
    Starting the client triggers pyrogram's standard login flow, which
    writes the .session file on first use.

    Raises:
        ValueError: if API_ID or API_HASH is missing from settings.
    """
    API_ID = settings.API_ID
    API_HASH = settings.API_HASH

    if not API_ID or not API_HASH:
        raise ValueError("API_ID and API_HASH not found in the .env file.")

    session_name = input('\nEnter the session name (press Enter to exit): ')

    if not session_name:
        return None

    session = Client(
        name=session_name,
        api_id=API_ID,
        api_hash=API_HASH,
        workdir="sessions/"
    )

    # Entering the context performs the login; get_me() confirms it worked.
    async with session:
        user_data = await session.get_me()

    logger.success(f'Session added successfully @{user_data.username} | {user_data.first_name} {user_data.last_name}')
30 |
--------------------------------------------------------------------------------
/run.bat:
--------------------------------------------------------------------------------
@echo off

rem Create the virtual environment on first run only.
if not exist venv (
    echo Creating virtual environment...
    python -m venv venv
)

echo Activating virtual environment...
call venv\Scripts\activate

rem The "installed" marker file skips re-installing dependencies on later runs.
if not exist venv\Lib\site-packages\installed (
    if exist requirements.txt (
        echo installing wheel for faster installing
        pip install wheel
        echo Installing dependencies...
        pip install -r requirements.txt
        echo. > venv\Lib\site-packages\installed
    ) else (
        echo requirements.txt not found, skipping dependency installation.
    )
) else (
    echo Dependencies already installed, skipping installation.
)

rem Seed a .env from the template the first time only; never overwrite it.
if not exist .env (
    echo Copying configuration file
    copy .env-example .env
) else (
    echo Skipping .env copying
)

echo Starting the bot...
python main.py

echo done
echo PLEASE EDIT .ENV FILE
pause
38 |
--------------------------------------------------------------------------------
/run.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Create the virtual environment if the venv folder does not exist yet.
if [ ! -d "venv" ]; then
    echo "Creating virtual environment..."
    python3 -m venv venv
fi

echo "Activating virtual environment..."
source venv/bin/activate

# The venv/installed marker flags that dependencies were already installed.
if [ ! -f "venv/installed" ]; then
    if [ -f "requirements.txt" ]; then
        echo "Installing wheel for faster installing"
        pip3 install wheel
        echo "Installing dependencies..."
        pip3 install -r requirements.txt
        touch venv/installed
    else
        echo "requirements.txt not found, skipping dependency installation."
    fi
else
    echo "Dependencies already installed, skipping installation."
fi

# Seed a .env from the template on first run only; never overwrite it.
if [ ! -f ".env" ]; then
    echo "Copying configuration file"
    cp .env-example .env
else
    echo "Skipping .env copying"
fi

echo "Starting the bot..."
python3 main.py

echo "done"
echo "PLEASE EDIT .ENV FILE"
39 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Vanhbaka
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/bot/config/config.py:
--------------------------------------------------------------------------------
1 | from pydantic_settings import BaseSettings, SettingsConfigDict
2 |
3 |
class Settings(BaseSettings):
    """Bot configuration loaded from the environment / the .env file.

    env_ignore_empty=True makes blank values in .env fall back to the
    defaults declared below.
    """
    model_config = SettingsConfigDict(env_file=".env", env_ignore_empty=True)

    # Telegram API credentials from my.telegram.org — required, no defaults.
    API_ID: int
    API_HASH: str

    REF_LINK: str = ""

    # Automation toggles.
    AUTO_UPGRADE_STORAGE: bool = True
    AUTO_UPGRADE_MINING: bool = True
    AUTO_UPGRADE_HOLY: bool = True
    AUTO_CLEAR_TASKS: bool = True
    AUTO_START_HUNT: bool = True

    AUTO_SPIN: bool = True
    # Presumably a [low, high] range of spins per round — confirm in tapper.
    SPIN_PER_ROUND: list[int] = [5, 10]
    AUTO_FUSION: bool = True
    # Per-rarity price ceilings above which fusion is skipped.
    MAXIMUM_PRICE_TO_FUSION_COMMON: int = 30
    MAXIMUM_PRICE_TO_FUSION_UNCOMMON: int = 200
    MAXIMUM_PRICE_TO_FUSION_RARE: int = 800
    MAXIMUM_PRICE_TO_FUSION_EPIC: int = 3000
    MAXIMUM_PRICE_TO_FUSION_LEGENDARY: int = 20000

    AUTO_SELL_WORMS: bool = False
    # Per-rarity selling policy: quantity_to_keep == -1 keeps all worms;
    # sale_price == 0 means "use the current market price" (see setting.md).
    QUANTITY_TO_KEEP: dict = {
        "common": {
            "quantity_to_keep": 2,
            "sale_price": 1
        },
        "uncommon": {
            "quantity_to_keep": 2,
            "sale_price": 0
        },
        "rare": {
            "quantity_to_keep": -1,
            "sale_price": 0
        },
        "epic": {
            "quantity_to_keep": -1,
            "sale_price": 0
        },
        "legendary": {
            "quantity_to_keep": -1,
            "sale_price": 0
        }
    }

    ADVANCED_ANTI_DETECTION: bool = True

    USE_PROXY_FROM_FILE: bool = False

# Module-level singleton imported everywhere as `from bot.config import settings`.
settings = Settings()
56 |
57 |
58 |
--------------------------------------------------------------------------------
/bot/utils/ps.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import re
3 | from bot.utils import logger
4 | from bot.config import settings
5 |
6 | baseUrl = "https://alb.seeddao.org"
7 |
8 | headers = {
9 | 'accept': '*/*',
10 | 'sec-ch-ua': '"Google Chrome";v="125", "Chromium";v="125", "Not.A/Brand";v="24"',
11 | 'sec-ch-ua-mobile': '?0',
12 | 'sec-ch-ua-platform': '"Windows"',
13 | 'sec-fetch-dest': 'document',
14 | 'sec-fetch-mode': 'navigate',
15 | 'sec-fetch-site': 'same-origin',
16 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36 Edg/129.0.0.0'
17 | }
18 | headerjs = {
19 | 'accept': '*/*',
20 | 'sec-ch-ua': '"Google Chrome";v="125", "Chromium";v="125", "Not.A/Brand";v="24"',
21 | 'sec-ch-ua-mobile': '?0',
22 | 'sec-ch-ua-platform': '"Windows"',
23 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36 Edg/129.0.0.0'
24 | }
25 |
26 |
27 |
def get_main_js_format(base_url):
    """Fetch *base_url* and return the unique script paths it references.

    Paths are sorted longest-first (longer hashed bundle names are assumed
    to be the more specific ones). Returns None when nothing is found or
    the request fails.
    """
    try:
        page = requests.get(base_url, headers=headers)
        page.raise_for_status()  # turn HTTP error statuses into RequestException
        script_paths = re.findall(r'src="(/.*?\.js)"', page.text)
        if not script_paths:
            return None
        return sorted(set(script_paths), key=len, reverse=True)
    except requests.RequestException as e:
        logger.warning(f"Error fetching the base URL: {e}")
        return None
42 |
def get_base_api(url):
    """Download a JS bundle and extract its `baseURL` constant.

    Returns the URL string, or None when it is absent or the request fails.
    """
    try:
        logger.info("Checking for changes in api...")
        bundle = requests.get(url, headers=headerjs)
        bundle.raise_for_status()
        found = re.search(r'baseURL:\s*"(.*?)"', bundle.text)
        if found is None:
            logger.info("Could not find 'baseUrl' in the content.")
            return None
        return found.group(1)
    except requests.RequestException as e:
        logger.warning(f"Error fetching the JS file: {e}")
        return None
60 |
61 |
def check_base_url():
    """Anti-detection guard: verify the web app's JS bundle / API base is unchanged.

    Returns True when the deployed frontend still matches what the bot was
    written against, False otherwise (the caller should stop running).
    """
    base_url = "https://cf.seeddao.org/"
    main_js_formats = get_main_js_format(base_url)

    if main_js_formats:
        if settings.ADVANCED_ANTI_DETECTION:
            # Compare against a known-good bundle name published by the maintainer.
            r = requests.get("https://raw.githubusercontent.com/vanhbakaa/nothing/refs/heads/main/seed")
            js_ver = r.text.strip()
            for js in main_js_formats:
                if js_ver in js:
                    logger.success(f"No change in js file: {js_ver}")
                    return True

            logger.warning(f"Detected js files: {main_js_formats}")
            return False

        # Fallback check: extract baseURL from the bundles and compare.
        for format in main_js_formats:
            logger.info(f"Trying format: {format}")
            full_url = f"https://cf.seeddao.org{format}"
            result = get_base_api(full_url)
            # print(f"{result} | {baseUrl}")
            if str(result) == baseUrl:
                logger.success("No change in api!")
                return True
            # NOTE(review): this return is inside the loop body, so only the
            # FIRST js file is ever checked and the for-else below cannot run
            # for a non-empty list — confirm whether that is intended.
            return False
        else:
            logger.warning("Could not find 'baseURL' in any of the JS files.")
            return False
    else:
        logger.info("Could not find any main.js format. Dumping page content for inspection:")
        try:
            response = requests.get(base_url)
            print(response.text[:1000])  # Print first 1000 characters of the page
            return False
        except requests.RequestException as e:
            logger.warning(f"Error fetching the base URL for content dump: {e}")
            return False
99 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | > [!WARNING]
2 | > ⚠️ I do my best to avoid detection of bots, but using bots is forbidden in all airdrops. i cannot guarantee that you will not be detected as a bot. Use at your own risk. I am not responsible for any consequences of using this software.
3 |
4 |
5 | ## Recommendation before use
6 |
7 | # 🔥🔥 Use PYTHON 3.11.5 🔥🔥
8 |
9 | ## Features
10 | | Feature | Supported |
11 | |-----------------------------------------------------------|:---------:|
12 | | Multithreading | ✅ |
13 | | Proxy binding to session | ✅ |
14 | | Support for tdata / pyrogram .session | ✅ |
15 | | Auto-farming | ✅ |
16 | | Auto-tasks | ✅ |
17 | | Auto-upgrade | ✅ |
18 | | Auto-check-in | ✅ |
19 | | Auto-hunt | ✅ |
20 | | Auto-spin | ✅ |
21 | | Auto-fusion | ✅ |
22 | | Auto-sell worms | ✅ |
23 |
24 |
25 | ## [Settings](https://github.com/vanhbakaa/Seed-App-Mine-Seed-BOT-Telegram/blob/main/.env-example)
26 |
27 | # Use default setting for best performance !
28 | | Settings | Description |
29 | |-------------------------|:---------------------------------------------------------------------------:|
30 | | **API_ID / API_HASH**   | Your Telegram API credentials, obtained from my.telegram.org                |
31 | | **AUTO_UPGRADE_STORAGE**| Auto upgrade storage (by default - True) |
32 | | **AUTO_UPGRADE_MINING** | Auto upgrade mining speed (by default - True) |
33 | | **AUTO_UPGRADE_HOLY** | Auto upgrade holy (by default - True) |
34 | | **AUTO_CLEAR_TASKS**    | Auto tasks (default - True)                                                  |
35 | | **AUTO_SPIN** | Auto spin (default - True) |
36 | | **SPIN_PER_ROUND** | Spin count each round (default - [5, 10]) |
37 | | **AUTO_FUSION** | Auto fusion eggs if possible (default - True) |
38 | | **MAXIMUM_PRICE_TO_FUSION_COMMON** | Max price to fusion common egg (default - 30) |
39 | | **MAXIMUM_PRICE_TO_FUSION_UNCOMMON** | Max price to fusion uncommon egg (default - 200) |
40 | | **MAXIMUM_PRICE_TO_FUSION_RARE** | Max price to fusion rare egg (default - 800) |
41 | | **MAXIMUM_PRICE_TO_FUSION_EPIC** | Max price to fusion epic egg (default - 3000) |
42 | | **MAXIMUM_PRICE_TO_FUSION_LEGENDARY** | Max price to fusion legendary egg (default - 20000) |
43 | | **AUTO_START_HUNT** | Auto start hunt (default - True) |
44 | | **AUTO_SELL_WORMS**     | Auto sell worms (default - False)                                            |
45 | | **QUANTITY_TO_KEEP** | Quantity to keep worms check instruction [here](https://github.com/vanhbakaa/Seed-App-Mine-Seed-BOT-Telegram/blob/main/setting.md) |
46 | | **ADVANCED_ANTI_DETECTION** | Add more protection for your account (default: True)                     |
47 | | **USE_PROXY_FROM_FILE** | Whether to use a proxy from the bot/config/proxies.txt file (True / False) |
48 |
49 | ## Quick Start 📚
50 |
51 | To fast install libraries and run bot - open run.bat on Windows or run.sh on Linux
52 |
53 | ## Prerequisites
54 | Before you begin, make sure you have the following installed:
55 | - [Python](https://www.python.org/downloads/) **version 3.11.5**
56 |
57 | ## Obtaining API Keys
58 | 1. Go to my.telegram.org and log in using your phone number.
59 | 2. Select "API development tools" and fill out the form to register a new application.
60 | 3. Record the API_ID and API_HASH provided after registering your application in the .env file.
61 |
62 |
63 | # Linux manual installation
64 | ```shell
65 | python3 -m venv venv
66 | source venv/bin/activate
67 | pip3 install -r requirements.txt
68 | cp .env-example .env
69 | nano .env # Here you must specify your API_ID and API_HASH, the rest is taken by default
70 | python3 main.py
71 | ```
72 |
73 | You can also use arguments for quick start, for example:
74 | ```shell
75 | ~/Seed-App-Mine-Seed-BOT-Telegram >>> python3 main.py --action (1/2)
76 | # Or
77 | ~/Seed-App-Mine-Seed-BOT-Telegram >>> python3 main.py -a (1/2)
78 |
79 | # 1 - Run clicker
80 | # 2 - Creates a session
81 | ```
82 |
83 | # Windows manual installation
84 | ```shell
85 | python -m venv venv
86 | venv\Scripts\activate
87 | pip install -r requirements.txt
88 | copy .env-example .env
89 | # Here you must specify your API_ID and API_HASH, the rest is taken by default
90 | python main.py
91 | ```
92 |
93 | You can also use arguments for quick start, for example:
94 | ```shell
95 | ~/Seed-App-Mine-Seed-BOT-Telegram >>> python main.py --action (1/2)
96 | # Or
97 | ~/Seed-App-Mine-Seed-BOT-Telegram >>> python main.py -a (1/2)
98 |
99 | # 1 - Run clicker
100 | # 2 - Creates a session
101 | ```
102 | # Support This Project
103 |
104 | If you'd like to support the development of this project, please consider making a donation. Every little bit helps!
105 |
106 | 👉 **[Click here to view donation options](https://github.com/vanhbakaa/Donation/blob/main/README.md)** 👈
107 |
108 | Your support allows us to keep improving the project and bring more features!
109 |
110 | Thank you for your generosity! 🙌
111 |
112 | ### Contacts
113 |
114 | For support or questions, you can contact me [](https://t.me/airdrop_tool_vanh)
115 |
--------------------------------------------------------------------------------
/bot/utils/launcher.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import glob
4 | import asyncio
5 | import argparse
6 | import sys
7 | from itertools import cycle
8 | from urllib.parse import unquote
9 |
10 | from aiofile import AIOFile
11 | from pyrogram import Client
12 | from better_proxy import Proxy
13 |
14 | from bot.config import settings
15 | from bot.core.agents import generate_random_user_agent
16 | from bot.utils import logger
17 | from bot.core.tapper import run_tapper
18 | from bot.core.query import run_tapper_query
19 | from bot.core.registrator import register_sessions
20 |
21 | start_text = """
22 |
23 | ░██████╗███████╗███████╗██████╗░ ███╗░░░███╗██╗███╗░░██╗███████╗██████╗░
24 | ██╔════╝██╔════╝██╔════╝██╔══██╗ ████╗░████║██║████╗░██║██╔════╝██╔══██╗
25 | ╚█████╗░█████╗░░█████╗░░██║░░██║ ██╔████╔██║██║██╔██╗██║█████╗░░██████╔╝
26 | ░╚═══██╗██╔══╝░░██╔══╝░░██║░░██║ ██║╚██╔╝██║██║██║╚████║██╔══╝░░██╔══██╗
27 | ██████╔╝███████╗███████╗██████╔╝ ██║░╚═╝░██║██║██║░╚███║███████╗██║░░██║
28 | ╚═════╝░╚══════╝╚══════╝╚═════╝░ ╚═╝░░░░░╚═╝╚═╝╚═╝░░╚══╝╚══════╝╚═╝░░╚═╝
29 | BY VANHBAKA
30 |
31 | Select an action:
32 |
33 | 1. Run clicker (session)
34 | 2. Create session
35 | 3. Run clicker (query)
36 | """
37 |
38 | global tg_clients
39 |
40 |
def get_session_names() -> list[str]:
    """Return the base names (no path, no extension) of all .session files in sessions/."""
    session_files = sorted(glob.glob("sessions/*.session"))
    return [os.path.splitext(os.path.basename(path))[0] for path in session_files]
48 |
49 |
def get_proxies() -> list[Proxy]:
    """Read proxies from bot/config/proxies.txt when enabled; otherwise return []."""
    if not settings.USE_PROXY_FROM_FILE:
        return []

    with open(file="bot/config/proxies.txt", encoding="utf-8-sig") as proxy_file:
        return [Proxy.from_str(proxy=line.strip()).as_url for line in proxy_file]
58 |
59 |
def fetch_username(query):
    """Extract the Telegram username from a tg WebApp init-data query string.

    The payload is a URL-encoded JSON blob following "user=", terminated by
    either "&chat_instance=" or "&auth_date=" depending on the client; some
    sources double-encode the whole query. The known layouts are tried in
    the original priority order instead of the previous triple-nested
    bare-except chain.

    Returns the username, or "" (after logging a warning) when no layout matches.
    """
    # (double_decode?, terminator) candidates, in priority order.
    attempts = (
        (False, "&chat_instance="),
        (False, "&auth_date="),
        (True, "&auth_date="),
    )
    for double_decode, terminator in attempts:
        # Keep decoding inside the try: malformed input must fall through,
        # not raise, to preserve the original best-effort behavior.
        try:
            decoded = unquote(unquote(query)) if double_decode else unquote(query)
            payload = decoded.split("user=")[1].split(terminator)[0]
            return json.loads(payload)['username']
        except Exception:  # narrowed from bare except: no longer eats KeyboardInterrupt
            continue

    logger.warning(f"Invalid query: {query}")
    return ""
78 |
async def get_user_agent(session_name):
    """Return the cached user agent for *session_name*, creating and persisting one if absent."""
    async with AIOFile('user_agents.json', 'r') as cache_file:
        cached = json.loads(await cache_file.read())

    if session_name in cached:
        logger.info(f"{session_name} | Loading user agent from cache...")
        return cached[session_name]

    logger.info(f"{session_name} | Doesn't have user agent, Creating...")
    fresh_ua = generate_random_user_agent(device_type='android', browser_type='chrome')
    cached[session_name] = fresh_ua
    async with AIOFile('user_agents.json', 'w') as cache_file:
        await cache_file.write(json.dumps(cached, indent=4))
    return fresh_ua
95 |
def get_un_used_proxy(used_proxies: list[Proxy]):
    """Return the first pooled proxy not already in *used_proxies*, or None."""
    return next(
        (candidate for candidate in get_proxies() if candidate not in used_proxies),
        None,
    )
102 |
async def get_proxy(session_name):
    """Resolve the proxy bound to *session_name* via proxy.json.

    Binds a free proxy on first use and persists the binding. Returns None
    when proxy usage is disabled in settings.
    """
    if not settings.USE_PROXY_FROM_FILE:
        return None

    async with AIOFile('proxy.json', 'r') as bind_file:
        bindings = json.loads(await bind_file.read())

    if session_name in bindings:
        logger.info(f"{session_name} | Loading proxy from cache...")
        return bindings[session_name]

    logger.info(f"{session_name} | Doesn't bind with any proxy, binding to a new proxy...")
    already_bound = [proxy for proxy in bindings.values()]
    chosen = get_un_used_proxy(already_bound)
    bindings[session_name] = chosen
    async with AIOFile('proxy.json', 'w') as bind_file:
        await bind_file.write(json.dumps(bindings, indent=4))
    return chosen
123 |
124 |
async def get_tg_clients() -> list[Client]:
    """Build one pyrogram Client per session file found under sessions/.

    Raises:
        FileNotFoundError: when no .session files exist.
        ValueError: when the API credentials are not configured.
    """
    global tg_clients

    session_names = get_session_names()
    if not session_names:
        raise FileNotFoundError("Not found session files")
    if not settings.API_ID or not settings.API_HASH:
        raise ValueError("API_ID and API_HASH not found in the .env file.")

    tg_clients = []
    for session_name in session_names:
        # NOTE(review): plugins root "bot/plugins" has no matching directory in
        # the repo tree — confirm pyrogram tolerates a missing plugins root.
        tg_clients.append(
            Client(
                name=session_name,
                api_id=settings.API_ID,
                api_hash=settings.API_HASH,
                workdir="sessions/",
                plugins=dict(root="bot/plugins"),
            )
        )
    return tg_clients
148 |
149 |
async def process() -> None:
    """Top-level dispatcher: read the -a/--action flag (or prompt) and run it.

    Actions: 1 = run clicker from session files, 2 = register a new session,
    3 = run clicker from raw query strings in data.txt.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--action", type=int, help="Action to perform")

    logger.info(f"Detected {len(get_session_names())} sessions | {len(get_proxies())} proxies")

    action = parser.parse_args().action

    # Make sure the user-agent cache exists before any tapper reads it.
    if not os.path.exists("user_agents.json"):
        with open("user_agents.json", 'w') as file:
            file.write("{}")
        logger.info("User agents file created successfully")

    if not action:
        print(start_text)

        # Keep prompting until a valid menu number is entered.
        while True:
            action = input("> ")

            if not action.isdigit():
                logger.warning("Action must be number")
            elif action not in ["1", "2", "3"]:
                logger.warning("Action must be 1, 2 or 3")
            else:
                action = int(action)
                break

    if action == 2:
        await register_sessions()
    elif action == 1:
        tg_clients = await get_tg_clients()

        await run_tasks(tg_clients=tg_clients)
    elif action == 3:
        # Query mode: one tg WebApp init-data string per line of data.txt.
        with open("data.txt", "r") as f:
            query_ids = [line.strip() for line in f.readlines()]
        # print(query_ids)
        await run_tapper_query(query_ids)
188 |
189 |
async def run_tasks(tg_clients: list[Client]):
    """Launch one tapper task per client and wait for all of them to finish."""
    tappers = []
    # Proxy and user-agent lookups stay sequential (they share cache files).
    for tg_client in tg_clients:
        bound_proxy = await get_proxy(tg_client.name)
        agent = await get_user_agent(tg_client.name)
        tappers.append(
            asyncio.create_task(
                run_tapper(tg_client=tg_client, proxy=bound_proxy, ua=agent)
            )
        )

    await asyncio.gather(*tappers)
203 |
--------------------------------------------------------------------------------
/bot/core/query.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import json
3 | from datetime import datetime, timezone
4 | from itertools import cycle
5 | from urllib.parse import unquote
6 |
7 | import aiohttp
8 | import pytz
9 | import requests
10 | from aiocfscrape import CloudflareScraper
11 | from aiofile import AIOFile
12 | from aiohttp_proxy import ProxyConnector
13 | from better_proxy import Proxy
14 | from bot.core.agents import generate_random_user_agent, fetch_version
15 | from bot.config import settings
16 |
17 | from bot.utils import logger
18 | from bot.exceptions import InvalidSession
19 | from .headers import headers
20 |
21 | from random import randint, uniform
22 | import traceback
23 | import time
24 | from ..utils.ps import check_base_url
25 | from bot.utils import launcher as lc
26 |
# Base URL of the Seed app backend.
api_endpoint = "https://alb.seeddao.org/"

# Concrete REST endpoints derived from the base URL.
api_claim = f'{api_endpoint}api/v1/seed/claim'
api_balance = f'{api_endpoint}api/v1/profile/balance'
api_checkin = f'{api_endpoint}api/v1/login-bonuses'
api_upgrade_storage = f'{api_endpoint}api/v1/seed/storage-size/upgrade'
api_upgrade_mining = f'{api_endpoint}api/v1/seed/mining-speed/upgrade'
api_upgrade_holy = f'{api_endpoint}api/v1/upgrades/holy-water'
api_profile = f'{api_endpoint}api/v1/profile'
api_hunt_completed = f'{api_endpoint}api/v1/bird-hunt/complete'
api_bird_info = f'{api_endpoint}api/v1/bird/is-leader'
api_make_happy = f'{api_endpoint}api/v1/bird-happiness'
api_get_worm_data = f'{api_endpoint}api/v1/worms/me-all'
api_feed = f'{api_endpoint}api/v1/bird-feed'
api_start_hunt = f'{api_endpoint}api/v1/bird-hunt/start'
api_inv = f'{api_endpoint}api/v1/worms/me'
api_sell = f'{api_endpoint}api/v1/market-item/add'
new_user_api = f'{api_endpoint}api/v1/profile2'
claim_gift_api = f"{api_endpoint}api/v1/gift-of-encounter"
48 |
49 |
50 | class Tapper:
51 | def __init__(self, Query: str):
52 | try:
53 | fetch_data = unquote(Query).split("user=")[1].split("&chat_instance=")[0]
54 | json_data = json.loads(fetch_data)
55 | self.session_name = json_data['username']
56 | except:
57 | try:
58 | fetch_data = unquote(Query).split("user=")[1].split("&auth_date=")[0]
59 | json_data = json.loads(fetch_data)
60 | self.session_name = json_data['username']
61 | except:
62 | try:
63 | fetch_data = unquote(unquote(Query)).split("user=")[1].split("&auth_date=")[0]
64 | json_data = json.loads(fetch_data)
65 | self.session_name = json_data['username']
66 | except:
67 | logger.warning(f"Invaild query: {Query}")
68 | self.session_name = ""
69 | self.first_name = ''
70 | self.last_name = ''
71 | self.user_id = ''
72 | self.Total_Point_Earned = 0
73 | self.Total_Game_Played = 0
74 | self.worm_lvl = {"common": 1,
75 | "uncommon": 2,
76 | "rare": 3,
77 | "epic": 4,
78 | "legendary": 5}
79 | self.auth = Query
80 | self.total_earned_from_sale = 0
81 | self.total_on_sale = 0
82 | self.worm_in_inv = {"common": 0, "uncommon": 0, "rare": 0, "epic": 0, "legendary": 0}
83 | self.worm_in_inv_copy = {"common": 0, "uncommon": 0, "rare": 0, "epic": 0, "legendary": 0}
84 | self.can_run = True
85 |
86 | async def get_user_agent(self):
87 | async with AIOFile('user_agents.json', 'r') as file:
88 | content = await file.read()
89 | user_agents = json.loads(content)
90 |
91 | if self.session_name not in list(user_agents.keys()):
92 | logger.info(f"{self.session_name} | Doesn't have user agent, Creating...")
93 | ua = generate_random_user_agent(device_type='android', browser_type='chrome')
94 | user_agents.update({self.session_name: ua})
95 | async with AIOFile('user_agents.json', 'w') as file:
96 | content = json.dumps(user_agents, indent=4)
97 | await file.write(content)
98 | return ua
99 | else:
100 | logger.info(f"{self.session_name} | Loading user agent from cache...")
101 | return user_agents[self.session_name]
102 |
103 | async def check_proxy(self, http_client: aiohttp.ClientSession, proxy: Proxy) -> None:
104 | try:
105 | response = await http_client.get(url='https://httpbin.org/ip', timeout=aiohttp.ClientTimeout(5))
106 | ip = (await response.json()).get('origin')
107 | logger.info(f"{self.session_name} | Proxy IP: {ip}")
108 | except Exception as error:
109 | logger.error(f"{self.session_name} | Proxy: {proxy} | Error: {error}")
110 |
111 | async def setup_profile(self, http_client: aiohttp.ClientSession) -> None:
112 | response = await http_client.post(url=api_profile)
113 | if response.status == 200:
114 | logger.info(f"{self.session_name} | Set up account successfully!")
115 |
116 |
117 | else:
118 | logger.warning(
119 | f"Can't get account data for session: {self.session_name}. response status: {response.status}")
120 |
121 | async def hatch_egg(self, http_client: aiohttp.ClientSession, egg_id):
122 | payload = {
123 | "egg_id": egg_id
124 | }
125 | res = await http_client.post(f'{api_endpoint}api/v1/egg-hatch/complete', json=payload)
126 | if res.status == 200:
127 | json_data = await res.json()
128 | logger.success(f"{self.session_name} | Sucessfully hatched {json_data['data']['type']}!")
129 |
130 | async def get_first_egg_and_hatch(self, http_client: aiohttp.ClientSession):
131 | res = await http_client.post(f'{api_endpoint}api/v1/give-first-egg')
132 | if res.status == 200:
133 | logger.success(f"{self.session_name} Successfully get first egg!")
134 | json_egg = await res.json()
135 | egg_id = str(json_egg['data']['id'])
136 | await self.hatch_egg(http_client, egg_id)
137 |
138 | async def fetch_profile(self, http_client: aiohttp.ClientSession) -> None:
139 | response = await http_client.get(url=api_profile)
140 | if response.status == 200:
141 | response_json = await response.json()
142 | self.user_id = response_json['data']['id']
143 | self.session_name = response_json['data']['name']
144 | logger.info(
145 | f"{self.session_name} | Got into seed app - Username: {response_json['data']['name']}")
146 | if response_json['data']['give_first_egg'] is False:
147 | await self.get_first_egg_and_hatch(http_client)
148 | upgrade_levels = {}
149 | for upgrade in response_json['data']['upgrades']:
150 | upgrade_type = upgrade['upgrade_type']
151 | upgrade_level = upgrade['upgrade_level']
152 | if upgrade_type in upgrade_levels:
153 | if upgrade_level > upgrade_levels[upgrade_type]:
154 | upgrade_levels[upgrade_type] = upgrade_level
155 | else:
156 | upgrade_levels[upgrade_type] = upgrade_level
157 | for upgrade_type, level in upgrade_levels.items():
158 | logger.info(f"{self.session_name} | {upgrade_type.capitalize()} Level: {level + 1}")
159 | else:
160 | logger.warning(
161 | f"Can't get account data for session: {self.session_name}. response status: {response.status}")
162 |
163 | async def upgrade_storage(self, http_client: aiohttp.ClientSession) -> None:
164 | response = await http_client.post(url=api_upgrade_storage)
165 | if response.status == 200:
166 | logger.success(f"{self.session_name} | Upgrade Storage Successfully")
167 |
168 | async def upgrade_mining(self, http_client: aiohttp.ClientSession) -> None:
169 | response = await http_client.post(url=api_upgrade_mining)
170 | if response.status == 200:
171 | logger.success(f"{self.session_name} | Upgrade Mining Successfully")
172 |
173 | async def upgrade_holy(self, http_client: aiohttp.ClientSession) -> None:
174 | response = await http_client.post(url=api_upgrade_holy)
175 | if response.status == 200:
176 | logger.success(f"{self.session_name} | Upgrade Holy Successfully")
177 |
178 | async def verify_balance(self, http_client: aiohttp.ClientSession):
179 | response = await http_client.get(url=api_balance)
180 | if response.status == 200:
181 | balance_info = await response.json()
182 | logger.info(f"{self.session_name} | Balance: {balance_info['data'] / 1000000000}")
183 | return True
184 | else:
185 | logger.error(f"{self.session_name} | Balance: Error | {response.status}")
186 |
187 | async def perform_daily_checkin(self, http_client: aiohttp.ClientSession):
188 | response = await http_client.post(api_checkin)
189 | if response.status == 200:
190 | checkin_data = await response.json()
191 | day = checkin_data.get('data', {}).get('no', '')
192 | logger.success(f"{self.session_name} | Successfully checked in | Day {day}")
193 | else:
194 | checkin_data = await response.json()
195 | if checkin_data.get('message') == 'already claimed for today':
196 | logger.info(f"{self.session_name} | Already checked in today")
197 | else:
198 | logger.info(f"{self.session_name} | Failed | {checkin_data}")
199 |
200 | async def fetch_worm_status(self, http_client: aiohttp.ClientSession):
201 | response = await http_client.get(f'{api_endpoint}api/v1/worms')
202 | if response.status == 200:
203 | worm_info = await response.json()
204 | next_refresh = worm_info['data'].get('next_worm')
205 | worm_caught = worm_info['data'].get('is_caught', False)
206 | if next_refresh:
207 | next_refresh_dt = datetime.fromisoformat(next_refresh[:-1] + '+00:00')
208 | now_utc = datetime.now(pytz.utc)
209 | time_difference_seconds = (next_refresh_dt - now_utc).total_seconds()
210 | hours = int(time_difference_seconds // 3600)
211 | minutes = int((time_difference_seconds % 3600) // 60)
212 | logger.info(
213 | f"{self.session_name} | Next Worm in {hours} hours {minutes} minutes - Status: {'Caught' if worm_caught else 'Available'}")
214 | else:
215 | logger.info(f"{self.session_name} | 'next_worm' data not available.")
216 | return worm_info['data']
217 | else:
218 | logger.error(f"{self.session_name} | Error retrieving worm data.")
219 | return None
220 |
221 | async def capture_worm(self, http_client: aiohttp.ClientSession):
222 | worm_info = await self.fetch_worm_status(http_client)
223 | if worm_info and not worm_info.get('is_caught', True):
224 | response = await http_client.post(f'{api_endpoint}api/v1/worms/catch')
225 | if response.status == 200:
226 | logger.success(f"{self.session_name} | Worm Captured Successfully")
227 | elif response.status == 400:
228 | logger.info(f"{self.session_name} | Already captured")
229 | elif response.status == 404:
230 | logger.info(f"{self.session_name} | Worm not found")
231 | else:
232 | logger.error(f"{self.session_name} | Capture failed, status code: {response.status}")
233 | else:
234 | logger.info(f"{self.session_name} | Worm unavailable or already captured.")
235 |
236 | async def fetch_tasks(self, http_client: aiohttp.ClientSession):
237 | response = await http_client.get(f'{api_endpoint}api/v1/tasks/progresses')
238 | tasks = await response.json()
239 | for task in tasks['data']:
240 | if task['task_user'] is None:
241 | await self.mark_task_complete(task['id'], task['name'], task['type'], http_client)
242 | elif task['task_user']['completed'] is False:
243 | await self.mark_task_complete(task['id'], task['name'], task['type'], http_client)
244 |
245 | async def mark_task_complete(self, task_id, task_name, type, http_client: aiohttp.ClientSession):
246 | if type == "academy":
247 | ans = requests.get("https://raw.githubusercontent.com/vanhbakaa/nothing/refs/heads/main/seed_ans.json")
248 | academy_ans = ans.json()
249 | if task_name not in list(academy_ans.keys()):
250 | logger.info(f"{self.session_name} | Answer for {task_name} not available yet!")
251 | return
252 | payload = {
253 | "answer": academy_ans[task_name]
254 | }
255 | response = await http_client.post(f'{api_endpoint}api/v1/tasks/{task_id}', json=payload)
256 | if response.status == 200:
257 | logger.success(f"{self.session_name} | Task {task_name} marked complete.")
258 | else:
259 | logger.error(
260 | f"{self.session_name} | Failed to complete task {task_name}, status code: {response.status}")
261 | else:
262 | response = await http_client.post(f'{api_endpoint}api/v1/tasks/{task_id}')
263 | if response.status == 200:
264 | logger.success(f"{self.session_name} | Task {task_name} marked complete.")
265 | else:
266 | logger.error(f"{self.session_name} | Failed to complete task {task_name}, status code: {response.status}")
267 |
268 | async def claim_hunt_reward(self, bird_id, http_client: aiohttp.ClientSession):
269 | payload = {
270 | "bird_id": bird_id
271 | }
272 | response = await http_client.post(api_hunt_completed, json=payload)
273 | if response.status == 200:
274 | response_data = await response.json()
275 | logger.success(
276 | f"{self.session_name} | Successfully claimed {response_data['data']['seed_amount'] / (10 ** 9)} seed from hunt reward.")
277 | else:
278 | response_data = await response.json()
279 | print(response_data)
280 | logger.error(f"{self.session_name} | Failed to claim hunt reward, status code: {response.status}")
281 |
282 | async def get_bird_info(self, http_client: aiohttp.ClientSession):
283 | response = await http_client.get(api_bird_info)
284 | if response.status == 200:
285 | response_data = await response.json()
286 | return response_data['data']
287 | else:
288 | response_data = await response.json()
289 | logger.info(f"{self.session_name} | Get bird data failed: {response_data}")
290 | return None
291 |
292 | async def make_bird_happy(self, bird_id, http_client: aiohttp.ClientSession):
293 | payload = {
294 | "bird_id": bird_id,
295 | "happiness_rate": 10000
296 | }
297 | response = await http_client.post(api_make_happy, json=payload)
298 | if response.status == 200:
299 | return True
300 | else:
301 | return False
302 |
303 | async def get_worm_data(self, http_client: aiohttp.ClientSession):
304 | response = await http_client.get(api_get_worm_data)
305 | if response.status == 200:
306 | response_data = await response.json()
307 | # print(response_data)
308 | return response_data['data']
309 | else:
310 | return None
311 |
312 | async def feed_bird(self, bird_id, worm_id, http_client: aiohttp.ClientSession):
313 | payload = {
314 | "bird_id": bird_id,
315 | "worm_ids": worm_id
316 | }
317 | response = await http_client.post(api_feed, json=payload)
318 | if response.status == 200:
319 | logger.success(f"{self.session_name} | Feed bird successfully")
320 | else:
321 | response_data = await response.json()
322 | print(response_data)
323 | logger.info(f"{self.session_name} | Failed to feed bird, response code:{response.status}")
324 | return None
325 |
326 | async def start_hunt(self, bird_id, http_client: aiohttp.ClientSession):
327 | payload = {
328 | "bird_id": bird_id,
329 | "task_level": 0
330 | }
331 | response = await http_client.post(api_start_hunt, json=payload)
332 | if response.status == 200:
333 | logger.success(f"{self.session_name} | Successfully start hunting")
334 | else:
335 | print(await response.json())
336 | logger.error(f"{self.session_name} | Start hunting failed..., response code: {response.status}")
337 |
338 | async def get_worms(self, http_client: aiohttp.ClientSession):
339 | worms = []
340 | first_page = await http_client.get(api_inv + "?page=1")
341 | json_page = await first_page.json()
342 |
343 | for worm in json_page['data']['items']:
344 | worms.append(worm)
345 | if worm['on_market'] is False:
346 | self.worm_in_inv[worm['type']] += 1
347 | count = 0
348 | if json_page['data']['total'] % json_page['data']['page_size'] != 0:
349 | count = 1
350 | total_page = int(float(json_page['data']['total'] / json_page['data']['page_size'])) + count
351 | for page in range(2, total_page + 1):
352 | api_url = api_inv + f"?page={page}"
353 | page_data = await http_client.get(api_url)
354 | json_page = await page_data.json()
355 | for worm in json_page['data']['items']:
356 | worms.append(worm)
357 | if worm['on_market'] is False:
358 | self.worm_in_inv[worm['type']] += 1
359 | time.sleep(uniform(1, 2))
360 | return worms
361 |
362 | async def sell_worm(self, worm_id, price, worm_type, http_client: aiohttp.ClientSession):
363 | payload = {
364 | "price": int(price),
365 | "worm_id": worm_id
366 | }
367 | response = await http_client.post(api_sell, json=payload)
368 | if response.status == 200:
369 | self.total_on_sale += 1
370 | logger.success(
371 | f"{self.session_name} | Sell {worm_type} worm successfully, price: {price / 1000000000}")
372 | else:
373 | response_data = await response.json()
374 | print(response_data)
375 | logger.info(f"{self.session_name} | Failed to sell {worm_type} worm, response code:{response.status}")
376 | return None
377 |
378 | async def get_price(self, worm_type, http_client: aiohttp.ClientSession):
379 | api = f'{api_endpoint}v1/market/v2?market_type=worm&worm_type={worm_type}&sort_by_price=ASC&sort_by_updated_at=&page=1'
380 | response = await http_client.get(api)
381 | if response.status == 200:
382 | json_r = await response.json()
383 | return json_r['data']['items'][0]['price_gross']
384 | else:
385 | return 0
386 |
387 | async def get_sale_data(self, http_client: aiohttp.ClientSession):
388 | api = f'{api_endpoint}api/v1/history-log-market/me?market_type=worm&page=1&history_type=sell'
389 | response = await http_client.get(api)
390 | json_data = await response.json()
391 | worm_on_sale = {"common": 0, "uncommon": 0, "rare": 0, "epic": 0, "legendary": 0}
392 | for worm in json_data['data']['items']:
393 | if worm['status'] == "on-sale":
394 | worm_on_sale[worm['worm_type']] += 1
395 | elif worm['status'] == "bought":
396 | self.total_earned_from_sale += worm['price_net'] / 1000000000
397 | count = 0
398 | if json_data['data']['total'] % json_data['data']['page_size'] != 0:
399 | count = 1
400 | total_page = int(float(json_data['data']['total'] / json_data['data']['page_size'])) + count
401 | for page in range(2, total_page + 1):
402 | response = await http_client.get(
403 | f"{api_endpoint}api/v1/history-log-market/me?market_type=worm&page={page}&history_type=sell",
404 | headers=headers)
405 | json_data = await response.json()
406 | for worm in json_data['data']['items']:
407 | if worm['status'] == "on-sale":
408 | worm_on_sale[worm['worm_type']] += 1
409 | elif worm['status'] == "bought":
410 | self.total_earned_from_sale += worm['price_net'] / 1000000000
411 |
412 | return worm_on_sale
413 |
414 | async def check_new_user(self, http_client: aiohttp.ClientSession):
415 | response = await http_client.get(new_user_api)
416 | if response.status == 200:
417 | data_ = await response.json()
418 | # print(data_)
419 | return data_['data']['bonus_claimed']
420 |
421 | def refresh_data(self):
422 | self.total_earned_from_sale = 0
423 | self.worm_in_inv = self.worm_in_inv_copy
424 |
425 | async def get_streak_rewards(self, http_client: aiohttp.ClientSession):
426 | res = await http_client.get(f"{api_endpoint}api/v1/streak-reward")
427 | if res.status == 200:
428 | data_ = await res.json()
429 | return data_['data']
430 | else:
431 | logger.warning(f"{self.session_name} | Failed to get streak rewards")
432 | return None
433 |
434 | async def claim_streak_rewards(self, http_client: aiohttp.ClientSession):
435 | rewards = await self.get_streak_rewards(http_client)
436 | pl_rewards = []
437 | if rewards is None:
438 | return
439 | if len(rewards) == 0:
440 | logger.info(f"{self.session_name} | No ticket to claim.")
441 | return
442 | for reward in rewards:
443 | pl_rewards.append(reward['id'])
444 |
445 | payload = {
446 | "streak_reward_ids": pl_rewards
447 | }
448 | claim = await http_client.post(f"{api_endpoint}api/v1/streak-reward", json=payload)
449 | if claim.status == 200:
450 | logger.success(f"{self.session_name} | Successfully claim tickets!")
451 | else:
452 | logger.warning(f"{self.session_name} | Failed to claim ticket!")
453 |
454 | async def get_tickets(self, http_client: aiohttp.ClientSession):
455 | res = await http_client.get(f"{api_endpoint}api/v1/spin-ticket")
456 | if res.status == 200:
457 | data = await res.json()
458 | return data['data']
459 | return None
460 |
461 | async def get_egg_pieces(self, http_client: aiohttp.ClientSession):
462 | res = await http_client.get(f"{api_endpoint}api/v1/egg-piece")
463 | if res.status == 200:
464 | data = await res.json()
465 | return data['data']
466 | return None
467 |
468 | async def get_fusion_fee(self, type, http_client: aiohttp.ClientSession):
469 | res = await http_client.get(f"{api_endpoint}api/v1/fusion-seed-fee?type={type}")
470 | if res.status == 200:
471 | data = await res.json()
472 | return data['data']
473 | return None
474 |
475 | async def spin(self, ticketId, http_client: aiohttp.ClientSession):
476 | payload = {
477 | "ticket_id": ticketId
478 | }
479 |
480 | res = await http_client.post(f"{api_endpoint}api/v1/spin-reward", json=payload)
481 | if res.status == 200:
482 | data = await res.json()
483 | logger.success(f"{self.session_name} | Spinned successfully - Got {data['data']['type']} egg pieces!")
484 | else:
485 | return
486 |
487 | async def fusion(self, egg_ids, type, http_client: aiohttp.ClientSession):
488 | payload = {
489 | "egg_piece_ids": egg_ids
490 | }
491 |
492 | res = await http_client.post(f"{api_endpoint}api/v1/egg-piece-merge", json=payload)
493 | if res.status == 200:
494 | logger.success(f"{self.session_name} | Successfully fusion a {type} egg!")
495 | else:
496 | return
497 | async def play_game(self, http_client: aiohttp.ClientSession):
498 | egg_type = {
499 | "common": 0,
500 | "uncommon": 0,
501 | "rare": 0,
502 | "epic": 0,
503 | "legendary": 0
504 | }
505 | egg_pieces = await self.get_egg_pieces(http_client)
506 | if egg_pieces is None:
507 | return
508 | for piece in egg_pieces:
509 | egg_type[piece['type']] += 1
510 |
511 | info_ = f"""
512 | Common pieces: {egg_type['common']}
513 | Uncommon pieces: {egg_type['uncommon']}
514 | rare pieces: {egg_type['rare']}
515 | epic pieces: {egg_type['epic']}
516 | legendary pieces: {egg_type['legendary']}
517 | """
518 |
519 | logger.info(f"{self.session_name} Egg pieces: \n{info_}")
520 |
521 | tickets = await self.get_tickets(http_client)
522 | if tickets is None:
523 | return
524 |
525 | logger.info(f"{self.session_name} | Total ticket: {len(tickets)}")
526 |
527 | play = randint(settings.SPIN_PER_ROUND[0], settings.SPIN_PER_ROUND[1])
528 |
529 | for ticket in tickets:
530 | if play == 0:
531 | break
532 | play -= 1
533 | await self.spin(ticket['id'], http_client)
534 | await self.get_tickets(http_client)
535 | await self.get_egg_pieces(http_client)
536 | await asyncio.sleep(randint(2,5))
537 |
538 | if settings.AUTO_FUSION:
539 | # print("stary")
540 | egg_type = {
541 | "common": 0,
542 | "uncommon": 0,
543 | "rare": 0,
544 | "epic": 0,
545 | "legendary": 0
546 | }
547 | egg_pieces = await self.get_egg_pieces(http_client)
548 | if egg_pieces is None:
549 | return
550 | for piece in egg_pieces:
551 | egg_type[piece['type']] += 1
552 |
553 | if egg_type['common'] >= 5:
554 | fusion_fee = await self.get_fusion_fee('common', http_client)
555 | # print(fusion_fee)
556 | if fusion_fee is None:
557 | return
558 | if fusion_fee/1000000000 <= settings.MAXIMUM_PRICE_TO_FUSION_COMMON:
559 | pl_data = []
560 | for piece in egg_pieces:
561 | if len(pl_data) >= 5:
562 | break
563 | if piece['type'] == 'common':
564 | pl_data.append(piece['id'])
565 |
566 | await self.fusion(pl_data, 'common', http_client)
567 |
568 | if egg_type['uncommon'] >= 5:
569 | fusion_fee = await self.get_fusion_fee('uncommon', http_client)
570 | if fusion_fee is None:
571 | return
572 | if fusion_fee/1000000000 <= settings.MAXIMUM_PRICE_TO_FUSION_UNCOMMON:
573 | pl_data = []
574 | for piece in egg_pieces:
575 | if len(pl_data) >= 5:
576 | break
577 | if piece['type'] == 'uncommon':
578 | pl_data.append(piece['id'])
579 |
580 | await self.fusion(pl_data, 'uncommon', http_client)
581 |
582 | if egg_type['rare'] >= 5:
583 | fusion_fee = await self.get_fusion_fee('rare', http_client)
584 | if fusion_fee is None:
585 | return
586 | if fusion_fee/1000000000 <= settings.MAXIMUM_PRICE_TO_FUSION_RARE:
587 | pl_data = []
588 | for piece in egg_pieces:
589 | if len(pl_data) >= 5:
590 | break
591 | if piece['type'] == 'rare':
592 | pl_data.append(piece['id'])
593 |
594 | await self.fusion(pl_data, 'rare', http_client)
595 |
596 | if egg_type['epic'] >= 5:
597 | fusion_fee = await self.get_fusion_fee('epic', http_client)
598 | if fusion_fee is None:
599 | return
600 | if fusion_fee/1000000000 <= settings.MAXIMUM_PRICE_TO_FUSION_EPIC:
601 | pl_data = []
602 | for piece in egg_pieces:
603 | if len(pl_data) >= 5:
604 | break
605 | if piece['type'] == 'epic':
606 | pl_data.append(piece['id'])
607 |
608 | await self.fusion(pl_data, 'epic', http_client)
609 |
610 | if egg_type['legendary'] >= 5:
611 | fusion_fee = await self.get_fusion_fee('legendary', http_client)
612 | if fusion_fee is None:
613 | return
614 | if fusion_fee/1000000000 <= settings.MAXIMUM_PRICE_TO_FUSION_LEGENDARY:
615 | pl_data = []
616 | for piece in egg_pieces:
617 | if len(pl_data) >= 5:
618 | break
619 | if piece['type'] == 'legendary':
620 | pl_data.append(piece['id'])
621 |
622 | await self.fusion(pl_data, 'legendary', http_client)
623 |
624 | async def claim_gift(self, http_client: aiohttp.ClientSession):
625 | gift = await http_client.get(claim_gift_api)
626 | gift_ = await gift.json()
627 | start_time = gift_['data']['next_claim_from']
628 | end_time = gift_['data']['next_claim_to']
629 |
630 | next_claim_from_dt = datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc)
631 | next_claim_to_dt = datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc)
632 | now = datetime.now(timezone.utc)
633 | is_within_range = next_claim_from_dt <= now <= next_claim_to_dt
634 |
635 | if is_within_range:
636 | res = await http_client.post(claim_gift_api)
637 | if res.status == 200:
638 | logger.success(f"{self.session_name} | Christmas gift claimed successfully!")
639 | else:
640 | logger.info(f"{self.session_name} | Failed to claim gift: {res.text}")
641 |
642 | else:
643 | logger.info(f"{self.session_name} | Christmas gift already claimed!")
644 | return
645 |
    async def run(self, proxy: str | None) -> None:
        """Main per-account loop: authenticate via the stored query string and
        repeatedly perform claim/hunt/check-in/sell/spin rounds.

        NOTE(review): ``http_client.close()`` sits inside the ``while True``
        body, so after the first successful round subsequent iterations reuse
        a closed session — confirm whether the loop is expected to run once
        per process or the close should move out of the loop.
        """
        access_token_created_time = 0
        proxy_conn = ProxyConnector().from_url(proxy) if proxy else None

        # Stable per-session user agent, with matching client-hint header.
        headers["user-agent"] = await self.get_user_agent()
        chrome_ver = fetch_version(headers['user-agent'])
        headers['sec-ch-ua'] = f'"Chromium";v="{chrome_ver}", "Android WebView";v="{chrome_ver}", "Not.A/Brand";v="99"'
        http_client = CloudflareScraper(headers=headers, connector=proxy_conn)

        if proxy:
            await self.check_proxy(http_client=http_client, proxy=proxy)

        # Re-attach the auth header roughly every hour (randomized).
        token_live_time = randint(3500, 3600)
        while True:
            try:
                # Anti-detection gate: skip the round if the remote app changed.
                if check_base_url() is False:
                    self.can_run = False
                    if settings.ADVANCED_ANTI_DETECTION:
                        logger.warning(
                            "Detected index js file change. Contact me to check if it's safe to continue: https://t.me/vanhbakaaa")
                    else:
                        logger.warning(
                            "Detected api change! Stopped the bot for safety. Contact me here to update the bot: https://t.me/vanhbakaaa")
                else:
                    self.can_run = True

                if self.can_run:
                    # Periodically (re)apply the raw query string as auth header.
                    if time.time() - access_token_created_time >= token_live_time:
                        # logger.info(f"{self.session_name} | Update auth token...")
                        tg_web_data = self.auth
                        headers['telegram-data'] = tg_web_data
                        # print(tg_web_data)
                        http_client.headers["telegram-data"] = tg_web_data
                        access_token_created_time = time.time()
                        token_live_time = randint(3500, 3600)
                        await asyncio.sleep(delay=randint(10, 15))

                    # bonus_claimed is False for brand-new accounts.
                    not_new_user = await self.check_new_user(http_client)

                    if not_new_user is False:
                        logger.info(f"{self.session_name} | Setting up new account...")
                        await self.setup_profile(http_client)

                    await self.fetch_profile(http_client)

                    await self.claim_gift(http_client)

                    if settings.AUTO_START_HUNT:
                        bird_data = await self.get_bird_info(http_client)
                        # print(bird_data)
                        if bird_data is None:
                            logger.info(f"{self.session_name} | Can't get bird data...")
                        elif bird_data['owner_id'] != self.user_id:
                            logger.warning(f"{self.session_name} | Bird is not your: {bird_data}")
                        elif bird_data['status'] == "hunting":

                            # Parse hunt_end_at; fall back to dateutil for
                            # formats fromisoformat rejects.
                            try:
                                given_time = datetime.fromisoformat(bird_data['hunt_end_at'])
                                timestamp_naive = given_time.replace(tzinfo=None)
                            except:
                                import dateutil.parser
                                timestamp_naive = dateutil.parser.isoparse(bird_data['hunt_end_at'])
                            now = datetime.now(timezone.utc)

                            # If the parsed timestamp is naive, make it aware in UTC
                            if timestamp_naive.tzinfo is None:
                                timestamp_naive = timestamp_naive.replace(tzinfo=timezone.utc)

                            if now < timestamp_naive:
                                logger.info(f"{self.session_name} | Bird currently hunting...")
                            else:
                                logger.info(f"{self.session_name} | Hunt completed, claiming reward...")
                                await self.claim_hunt_reward(bird_data['id'], http_client)
                        else:
                            # Bird idle: only start a hunt once it is happy and fed.
                            condition = True
                            if bird_data['happiness_level'] == 0:
                                logger.info(f"{self.session_name} | Bird is not happy, attemping to make bird happy...")
                                check = await self.make_bird_happy(bird_data['id'], http_client)
                                if check:
                                    logger.success(f"{self.session_name} | Successfully make bird happy!")
                                else:
                                    logger.info(f"{self.session_name} |Failed to make bird happy!")
                                    condition = False
                            if bird_data['energy_level'] == 0:
                                logger.info(f"{self.session_name} | Bird is hungry, attemping to feed bird...")
                                worms = await self.get_worm_data(http_client)
                                if worms is None:
                                    condition = False
                                    logger.info(f"{self.session_name} | Failed to fetch worm data")
                                elif len(worms) == 0:
                                    logger.warning(f"{self.session_name} | You dont have any worm to feed bird!")
                                    condition = False
                                else:
                                    # Energy deficit in whole units (payload is 1e-9 scale).
                                    try:
                                        energy = (bird_data['energy_max'] - bird_data['energy_level']) / 1000000000
                                    except:
                                        print(bird_data)
                                        energy = 2
                                    # Feed commons first (worth ~2 energy each),
                                    # then uncommons (~4) until the deficit is covered.
                                    wormss = []
                                    for worm in worms:
                                        if worm['type'] == "common" and worm['on_market'] is False:
                                            wormss.append(worm['id'])
                                            energy -= 2
                                            if energy <= 1:
                                                break
                                    if energy > 1:
                                        for worm in worms:
                                            if worm['type'] == "uncommon" and worm['on_market'] is False:
                                                wormss.append(worm['id'])
                                                energy -= 4
                                                if energy <= 1:
                                                    break
                                    await self.feed_bird(bird_data['id'], wormss, http_client)
                                    if energy > 1:
                                        condition = False

                            if condition:
                                await self.start_hunt(bird_data['id'], http_client)

                    if settings.AUTO_UPGRADE_STORAGE:
                        await self.upgrade_storage(http_client)
                        await asyncio.sleep(1)
                    if settings.AUTO_UPGRADE_MINING:
                        await self.upgrade_mining(http_client)
                        await asyncio.sleep(1)
                    if settings.AUTO_UPGRADE_HOLY:
                        await self.upgrade_holy(http_client)
                        await asyncio.sleep(1)

                    # Claim mined seed only when the balance endpoint responded.
                    check_balance = await self.verify_balance(http_client)
                    if check_balance:
                        response = await http_client.post(api_claim)
                        if response.status == 200:
                            logger.success(f"{self.session_name} | Claim successful ")
                        elif response.status == 400:
                            logger.info(f"{self.session_name} | Not yet time to claim")
                        else:
                            logger.error(
                                f"{self.session_name} | An error occurred, status code: {response.status}")

                    await self.perform_daily_checkin(http_client)
                    await self.capture_worm(http_client)
                    if settings.AUTO_SELL_WORMS:
                        logger.info(f"{self.session_name} | Fetching worms data to put it on sale...")
                        worms = await self.get_worms(http_client)
                        # print(self.worm_in_inv)
                        worms_on_sell = await self.get_sale_data(http_client)
                        logger.info(f"{self.session_name} | Worms on sale now: ")
                        for worm in worms_on_sell:
                            logger.info(
                                f"{self.session_name} | Total {worm} on sale: {worms_on_sell[worm]}")
                        logger.info(
                            f"{self.session_name} | Total earned from sale: {self.total_earned_from_sale}")
                        # Sell surplus worms above the configured keep-quantity;
                        # -1 means "never sell this type".
                        for worm in worms:
                            if worm['on_market']:
                                continue
                            elif settings.QUANTITY_TO_KEEP[worm['type']]['quantity_to_keep'] == -1:
                                continue
                            elif settings.QUANTITY_TO_KEEP[worm['type']]['quantity_to_keep'] < self.worm_in_inv[
                                worm['type']]:
                                # sale_price 0 means "match the cheapest listing".
                                if settings.QUANTITY_TO_KEEP[worm['type']]['sale_price'] == 0:
                                    price_to_sell = await self.get_price(worm['type'], http_client)

                                else:
                                    price_to_sell = settings.QUANTITY_TO_KEEP[worm['type']]['sale_price'] * (10 ** 9)
                                # print(f"Sell {worm['type']} , price: {price_to_sell/1000000000}")
                                await self.sell_worm(worm['id'], price_to_sell, worm['type'], http_client)
                                self.worm_in_inv[worm['type']] -= 1

                        self.refresh_data()
                    if settings.AUTO_CLEAR_TASKS:
                        await self.fetch_tasks(http_client)

                    if settings.AUTO_SPIN:
                        await self.claim_streak_rewards(http_client)
                        await asyncio.sleep(randint(1,4))
                        await self.play_game(http_client)

                await http_client.close()

            except InvalidSession as error:
                raise error

            except Exception as error:
                traceback.print_exc()
                logger.error(f"{self.session_name} | Unknown error: {error}")
                await asyncio.sleep(delay=randint(60, 120))
833 |
834 |
async def run_tapper_query(query_list: list[str]):
    """Run every query-based account once per round, forever.

    Each account runs with its own proxy (resolved by username), with a
    short random pause between accounts and a long sleep between rounds.
    """
    while True:
        # Was a bare debug print(len(query_list)); use the logger instead.
        logger.info(f"Accounts to process: {len(query_list)}")
        for query in query_list:
            await Tapper(Query=query).run(proxy=await lc.get_proxy(lc.fetch_username(query)))
            await asyncio.sleep(randint(5, 15))
        sleep_ = randint(2500, 3600)
        logger.info(f"Sleep {sleep_}s...")
        await asyncio.sleep(sleep_)
845 |
--------------------------------------------------------------------------------
/bot/core/tapper.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import base64
3 | import json
4 | import random
5 | from datetime import datetime, timezone
6 | from urllib.parse import unquote
7 |
8 | import aiohttp
9 | import pytz
10 | import requests
11 | from aiocfscrape import CloudflareScraper
12 | from aiofile import AIOFile
13 | from aiohttp_proxy import ProxyConnector
14 | from better_proxy import Proxy
15 | from pyrogram import Client
16 | from pyrogram.errors import Unauthorized, UserDeactivated, AuthKeyUnregistered, FloodWait
17 | from pyrogram.raw.types import InputBotAppShortName
18 | from pyrogram.raw.functions.messages import RequestAppWebView
19 | from bot.core.agents import generate_random_user_agent, fetch_version
20 | from bot.config import settings
21 |
22 | from bot.utils import logger
23 | from bot.exceptions import InvalidSession
24 | from .headers import headers
25 |
26 | from random import randint, uniform
27 | import traceback
28 | import time
29 | from bot.utils.ps import check_base_url
30 |
# Base URL of the SeedDAO backend; every endpoint below is built from it.
api_endpoint = "https://alb.seeddao.org/"

# api endpoint
api_claim = f'{api_endpoint}api/v1/seed/claim'  # claim accumulated (mined) seed
api_balance = f'{api_endpoint}api/v1/profile/balance'  # current seed balance
api_checkin = f'{api_endpoint}api/v1/login-bonuses'  # daily check-in bonus
api_upgrade_storage = f'{api_endpoint}api/v1/seed/storage-size/upgrade'
api_upgrade_mining = f'{api_endpoint}api/v1/seed/mining-speed/upgrade'
api_upgrade_holy = f'{api_endpoint}api/v1/upgrades/holy-water'
api_profile = f'{api_endpoint}api/v1/profile'  # GET reads profile, POST creates it
api_hunt_completed = f'{api_endpoint}api/v1/bird-hunt/complete'
api_bird_info = f'{api_endpoint}api/v1/bird/is-leader'
api_make_happy = f'{api_endpoint}api/v1/bird-happiness'
api_get_worm_data = f'{api_endpoint}api/v1/worms/me-all'  # full worm list (unpaged)
api_feed = f'{api_endpoint}api/v1/bird-feed'
api_start_hunt = f'{api_endpoint}api/v1/bird-hunt/start'
api_inv = f'{api_endpoint}api/v1/worms/me'  # paginated worm inventory
api_sell = f'{api_endpoint}api/v1/market-item/add'  # list a worm on the market
new_user_api = f'{api_endpoint}api/v1/profile2'  # exposes the bonus_claimed flag
claim_gift_api = f"{api_endpoint}api/v1/gift-of-encounter"  # timed gift endpoint
51 |
52 |
class Tapper:
    """One bot worker: drives a single Telegram account through the seed app."""

    def __init__(self, tg_client: Client):
        self.tg_client = tg_client
        self.session_name = tg_client.name  # replaced by the in-app username once the profile loads
        self.first_name = ''
        self.last_name = ''
        self.user_id = ''
        self.Total_Point_Earned = 0
        self.Total_Game_Played = 0
        # worm rarity ranking (1 = most common, 5 = rarest)
        self.worm_lvl = {"common": 1,
                         "uncommon": 2,
                         "rare": 3,
                         "epic": 4,
                         "legendary": 5}
        self.total_earned_from_sale = 0
        self.total_on_sale = 0
        self.my_ref = get_()  # author's referral code (module-level helper)
        # per-rarity counts of worms currently held off-market; rebuilt by get_worms()
        self.worm_in_inv = {"common": 0, "uncommon": 0, "rare": 0, "epic": 0, "legendary": 0}
        # pristine zeroed template used by refresh_data() to reset worm_in_inv
        self.worm_in_inv_copy = {"common": 0, "uncommon": 0, "rare": 0, "epic": 0, "legendary": 0}
        self.can_run = True  # cleared when the anti-detection check fails
73 |
    async def get_tg_web_data(self, proxy: str | None) -> str:
        """Open the seed mini-app via Telegram and return its tgWebAppData auth blob.

        Connects the pyrogram client (through *proxy* when given), requests the
        web view of @seed_coin_bot and extracts the URL-decoded auth payload.

        Raises:
            InvalidSession: when the Telegram session itself is dead.
        """
        # logger.info(f"Getting data for {self.session_name}")
        if settings.REF_LINK == '':
            ref_ = f"a={get_()}"
        else:
            ref_ = settings.REF_LINK
        try:
            ref__ = ref_.split('=')[1]
        # NOTE(review): bare except - any malformed REF_LINK silently falls back to get_()
        except:
            ref__ = get_()
        actual = random.choices([self.my_ref, ref__], weights=[30, 70]) # edit this line if you don't want to support me!
        if proxy:
            proxy = Proxy.from_str(proxy)
            proxy_dict = dict(
                scheme=proxy.protocol,
                hostname=proxy.host,
                port=proxy.port,
                username=proxy.login,
                password=proxy.password
            )
        else:
            proxy_dict = None

        self.tg_client.proxy = proxy_dict

        try:
            if not self.tg_client.is_connected:
                try:
                    await self.tg_client.connect()

                except (Unauthorized, UserDeactivated, AuthKeyUnregistered):
                    raise InvalidSession(self.session_name)

            # retry peer resolution until Telegram stops rate-limiting us
            while True:
                try:
                    peer = await self.tg_client.resolve_peer('seed_coin_bot')
                    break
                except FloodWait as fl:
                    fls = fl.value

                    logger.warning(f"{self.session_name} | FloodWait {fl}")
                    logger.info(f"{self.session_name} | Sleep {fls}s")

                    await asyncio.sleep(fls + 3)

            web_view = await self.tg_client.invoke(RequestAppWebView(
                peer=peer,
                app=InputBotAppShortName(bot_id=peer, short_name="app"),
                platform='android',
                write_allowed=True,
                start_param=actual[0]
            ))

            auth_url = web_view.url
            # print(auth_url)
            # the auth payload sits between 'tgWebAppData=' and '&tgWebAppVersion'
            tg_web_data = unquote(string=auth_url.split('tgWebAppData=')[1].split('&tgWebAppVersion')[0])

            if self.tg_client.is_connected:
                await self.tg_client.disconnect()

            return tg_web_data

        except InvalidSession as error:
            raise error

        except Exception as error:
            logger.error(f"{self.session_name} | Unknown error during Authorization: "
                         f"{error}")
            await asyncio.sleep(delay=3)
143 |
144 | async def check_proxy(self, http_client: aiohttp.ClientSession, proxy: Proxy) -> None:
145 | try:
146 | response = await http_client.get(url='https://httpbin.org/ip', timeout=aiohttp.ClientTimeout(5))
147 | ip = (await response.json()).get('origin')
148 | logger.info(f"{self.session_name} | Proxy IP: {ip}")
149 | except Exception as error:
150 | logger.error(f"{self.session_name} | Proxy: {proxy} | Error: {error}")
151 |
152 | async def setup_profile(self, http_client: aiohttp.ClientSession) -> None:
153 | response = await http_client.post(url=api_profile)
154 | if response.status == 200:
155 | logger.info(f"{self.session_name} | Set up account successfully!")
156 |
157 |
158 | else:
159 | logger.warning(
160 | f"Can't get account data for session: {self.session_name}. response status: {response.status}")
161 |
162 | async def hatch_egg(self, http_client: aiohttp.ClientSession, egg_id):
163 | payload = {
164 | "egg_id": egg_id
165 | }
166 | res = await http_client.post(f'{api_endpoint}api/v1/egg-hatch/complete', json=payload)
167 | if res.status == 200:
168 | json_data = await res.json()
169 | logger.success(f"{self.session_name} | Sucessfully hatched {json_data['data']['type']}!")
170 |
171 | async def get_first_egg_and_hatch(self, http_client: aiohttp.ClientSession):
172 | res = await http_client.post(f'{api_endpoint}api/v1/give-first-egg')
173 | if res.status == 200:
174 | logger.success(f"{self.session_name} Successfully get first egg!")
175 | json_egg = await res.json()
176 | egg_id = str(json_egg['data']['id'])
177 | await self.hatch_egg(http_client, egg_id)
178 |
179 | async def fetch_profile(self, http_client: aiohttp.ClientSession) -> None:
180 | response = await http_client.get(url=api_profile)
181 | if response.status == 200:
182 | response_json = await response.json()
183 | self.user_id = response_json['data']['id']
184 | self.session_name = response_json['data']['name']
185 | logger.info(
186 | f"{self.session_name} | Got into seed app - Username: {response_json['data']['name']}")
187 | if response_json['data']['give_first_egg'] is False:
188 | await self.get_first_egg_and_hatch(http_client)
189 | upgrade_levels = {}
190 | for upgrade in response_json['data']['upgrades']:
191 | upgrade_type = upgrade['upgrade_type']
192 | upgrade_level = upgrade['upgrade_level']
193 | if upgrade_type in upgrade_levels:
194 | if upgrade_level > upgrade_levels[upgrade_type]:
195 | upgrade_levels[upgrade_type] = upgrade_level
196 | else:
197 | upgrade_levels[upgrade_type] = upgrade_level
198 | for upgrade_type, level in upgrade_levels.items():
199 | logger.info(f"{self.session_name} | {upgrade_type.capitalize()} Level: {level + 1}")
200 | else:
201 | logger.warning(
202 | f"Can't get account data for session: {self.session_name}. response status: {response.status}")
203 |
204 | async def upgrade_storage(self, http_client: aiohttp.ClientSession) -> None:
205 | response = await http_client.post(url=api_upgrade_storage)
206 | if response.status == 200:
207 | logger.success(f"{self.session_name} | Upgrade Storage Successfully")
208 |
209 | async def upgrade_mining(self, http_client: aiohttp.ClientSession) -> None:
210 | response = await http_client.post(url=api_upgrade_mining)
211 | if response.status == 200:
212 | logger.success(f"{self.session_name} | Upgrade Mining Successfully")
213 |
214 | async def upgrade_holy(self, http_client: aiohttp.ClientSession) -> None:
215 | response = await http_client.post(url=api_upgrade_holy)
216 | if response.status == 200:
217 | logger.success(f"{self.session_name} | Upgrade Holy Successfully")
218 |
219 | async def verify_balance(self, http_client: aiohttp.ClientSession):
220 | response = await http_client.get(url=api_balance)
221 | if response.status == 200:
222 | balance_info = await response.json()
223 | logger.info(f"{self.session_name} | Balance: {balance_info['data'] / 1000000000}")
224 | return True
225 | else:
226 | logger.error(f"{self.session_name} | Balance: Error | {response.status}")
227 |
228 | async def perform_daily_checkin(self, http_client: aiohttp.ClientSession):
229 | response = await http_client.post(api_checkin)
230 | if response.status == 200:
231 | checkin_data = await response.json()
232 | day = checkin_data.get('data', {}).get('no', '')
233 | logger.success(f"{self.session_name} | Successfully checked in | Day {day}")
234 | else:
235 | checkin_data = await response.json()
236 | if checkin_data.get('message') == 'already claimed for today':
237 | logger.info(f"{self.session_name} | Already checked in today")
238 | else:
239 | logger.info(f"{self.session_name} | Failed | {checkin_data}")
240 |
241 | async def fetch_worm_status(self, http_client: aiohttp.ClientSession):
242 | response = await http_client.get(f'{api_endpoint}api/v1/worms')
243 | if response.status == 200:
244 | worm_info = await response.json()
245 | next_refresh = worm_info['data'].get('next_worm')
246 | worm_caught = worm_info['data'].get('is_caught', False)
247 | if next_refresh:
248 | next_refresh_dt = datetime.fromisoformat(next_refresh[:-1] + '+00:00')
249 | now_utc = datetime.now(pytz.utc)
250 | time_difference_seconds = (next_refresh_dt - now_utc).total_seconds()
251 | hours = int(time_difference_seconds // 3600)
252 | minutes = int((time_difference_seconds % 3600) // 60)
253 | logger.info(
254 | f"{self.session_name} | Next Worm in {hours} hours {minutes} minutes - Status: {'Caught' if worm_caught else 'Available'}")
255 | else:
256 | logger.info(f"{self.session_name} | 'next_worm' data not available.")
257 | return worm_info['data']
258 | else:
259 | logger.error(f"{self.session_name} | Error retrieving worm data.")
260 | return None
261 |
262 | async def capture_worm(self, http_client: aiohttp.ClientSession):
263 | worm_info = await self.fetch_worm_status(http_client)
264 | if worm_info and not worm_info.get('is_caught', True):
265 | response = await http_client.post(f'{api_endpoint}api/v1/worms/catch')
266 | if response.status == 200:
267 | logger.success(f"{self.session_name} | Worm Captured Successfully")
268 | elif response.status == 400:
269 | logger.info(f"{self.session_name} | Already captured")
270 | elif response.status == 404:
271 | logger.info(f"{self.session_name} | Worm not found")
272 | else:
273 | logger.error(f"{self.session_name} | Capture failed, status code: {response.status}")
274 | else:
275 | logger.info(f"{self.session_name} | Worm unavailable or already captured.")
276 |
277 | async def fetch_tasks(self, http_client: aiohttp.ClientSession):
278 | response = await http_client.get(f'{api_endpoint}api/v1/tasks/progresses')
279 | tasks = await response.json()
280 | for task in tasks['data']:
281 | if task['task_user'] is None:
282 | await self.mark_task_complete(task['id'], task['name'], task['type'], http_client)
283 | elif task['task_user']['completed'] is False:
284 | await self.mark_task_complete(task['id'], task['name'], task['type'], http_client)
285 |
286 | async def mark_task_complete(self, task_id, task_name, type, http_client: aiohttp.ClientSession):
287 | if type == "academy":
288 | ans = requests.get("https://raw.githubusercontent.com/vanhbakaa/nothing/refs/heads/main/seed_ans.json")
289 | academy_ans = ans.json()
290 | if str(task_name) not in list(academy_ans.keys()):
291 | logger.info(f"{self.session_name} | Answer for {task_name} not available yet!")
292 | return
293 | payload = {
294 | "answer": academy_ans[task_name]
295 | }
296 | response = await http_client.post(f'{api_endpoint}api/v1/tasks/{task_id}', json=payload)
297 | if response.status == 200:
298 | logger.success(f"{self.session_name} | Task {task_name} marked complete.")
299 | else:
300 | logger.error(
301 | f"{self.session_name} | Failed to complete task {task_name}, status code: {response.status}")
302 | else:
303 | response = await http_client.post(f'{api_endpoint}api/v1/tasks/{task_id}')
304 | if response.status == 200:
305 | logger.success(f"{self.session_name} | Task {task_name} marked complete.")
306 | else:
307 | logger.error(f"{self.session_name} | Failed to complete task {task_name}, status code: {response.status}")
308 |
309 | async def claim_hunt_reward(self, bird_id, http_client: aiohttp.ClientSession):
310 | payload = {
311 | "bird_id": bird_id
312 | }
313 | response = await http_client.post(api_hunt_completed, json=payload)
314 | if response.status == 200:
315 | response_data = await response.json()
316 | logger.success(
317 | f"{self.session_name} | Successfully claimed {response_data['data']['seed_amount'] / (10 ** 9)} seed from hunt reward.")
318 | else:
319 | response_data = await response.json()
320 | print(response_data)
321 | logger.error(f"{self.session_name} | Failed to claim hunt reward, status code: {response.status}")
322 |
323 | async def get_bird_info(self, http_client: aiohttp.ClientSession):
324 | response = await http_client.get(api_bird_info)
325 | if response.status == 200:
326 | response_data = await response.json()
327 | return response_data['data']
328 | else:
329 | response_data = await response.json()
330 | logger.info(f"{self.session_name} | Get bird data failed: {response_data}")
331 | return None
332 |
333 | async def make_bird_happy(self, bird_id, http_client: aiohttp.ClientSession):
334 | payload = {
335 | "bird_id": bird_id,
336 | "happiness_rate": 10000
337 | }
338 | response = await http_client.post(api_make_happy, json=payload)
339 | if response.status == 200:
340 | return True
341 | else:
342 | return False
343 |
344 | async def get_worm_data(self, http_client: aiohttp.ClientSession):
345 | response = await http_client.get(api_get_worm_data)
346 | if response.status == 200:
347 | response_data = await response.json()
348 | # print(response_data)
349 | return response_data['data']
350 | else:
351 | return None
352 |
353 | async def feed_bird(self, bird_id, worm_id, http_client: aiohttp.ClientSession):
354 | payload = {
355 | "bird_id": bird_id,
356 | "worm_ids": worm_id
357 | }
358 | response = await http_client.post(api_feed, json=payload)
359 | if response.status == 200:
360 | logger.success(f"{self.session_name} | Feed bird successfully")
361 | else:
362 | response_data = await response.json()
363 | print(response_data)
364 | logger.info(f"{self.session_name} | Failed to feed bird, response code:{response.status}")
365 | return None
366 |
367 | async def start_hunt(self, bird_id, http_client: aiohttp.ClientSession):
368 | payload = {
369 | "bird_id": bird_id,
370 | "task_level": 0
371 | }
372 | response = await http_client.post(api_start_hunt, json=payload)
373 | if response.status == 200:
374 | logger.success(f"{self.session_name} | Successfully start hunting")
375 | else:
376 | print(await response.json())
377 | logger.error(f"{self.session_name} | Start hunting failed..., response code: {response.status}")
378 |
379 | async def get_worms(self, http_client: aiohttp.ClientSession):
380 | worms = []
381 | first_page = await http_client.get(api_inv + "?page=1")
382 | json_page = await first_page.json()
383 |
384 | for worm in json_page['data']['items']:
385 | worms.append(worm)
386 | if worm['on_market'] is False:
387 | self.worm_in_inv[worm['type']] += 1
388 | count = 0
389 | if json_page['data']['total'] % json_page['data']['page_size'] != 0:
390 | count = 1
391 | total_page = int(float(json_page['data']['total'] / json_page['data']['page_size'])) + count
392 | for page in range(2, total_page + 1):
393 | api_url = api_inv + f"?page={page}"
394 | page_data = await http_client.get(api_url)
395 | json_page = await page_data.json()
396 | for worm in json_page['data']['items']:
397 | worms.append(worm)
398 | if worm['on_market'] is False:
399 | self.worm_in_inv[worm['type']] += 1
400 | time.sleep(uniform(1, 2))
401 | return worms
402 |
403 | async def sell_worm(self, worm_id, price, worm_type, http_client: aiohttp.ClientSession):
404 | payload = {
405 | "price": int(price),
406 | "worm_id": worm_id
407 | }
408 | response = await http_client.post(api_sell, json=payload)
409 | if response.status == 200:
410 | self.total_on_sale += 1
411 | logger.success(
412 | f"{self.session_name} | Sell {worm_type} worm successfully, price: {price / 1000000000}")
413 | else:
414 | response_data = await response.json()
415 | print(response_data)
416 | logger.info(f"{self.session_name} | Failed to sell {worm_type} worm, response code:{response.status}")
417 | return None
418 |
419 | async def get_price(self, worm_type, http_client: aiohttp.ClientSession):
420 | api = f'{api_endpoint}v1/market/v2?market_type=worm&worm_type={worm_type}&sort_by_price=ASC&sort_by_updated_at=&page=1'
421 | response = await http_client.get(api)
422 | if response.status == 200:
423 | json_r = await response.json()
424 | return json_r['data']['items'][0]['price_gross']
425 | else:
426 | return 0
427 |
428 | async def get_sale_data(self, http_client: aiohttp.ClientSession):
429 | api = f'{api_endpoint}api/v1/history-log-market/me?market_type=worm&page=1&history_type=sell'
430 | response = await http_client.get(api)
431 | json_data = await response.json()
432 | worm_on_sale = {"common": 0, "uncommon": 0, "rare": 0, "epic": 0, "legendary": 0}
433 | for worm in json_data['data']['items']:
434 | if worm['status'] == "on-sale":
435 | worm_on_sale[worm['worm_type']] += 1
436 | elif worm['status'] == "bought":
437 | self.total_earned_from_sale += worm['price_net'] / 1000000000
438 | count = 0
439 | if json_data['data']['total'] % json_data['data']['page_size'] != 0:
440 | count = 1
441 | total_page = int(float(json_data['data']['total'] / json_data['data']['page_size'])) + count
442 | for page in range(2, total_page + 1):
443 | response = await http_client.get(
444 | f"{api_endpoint}api/v1/history-log-market/me?market_type=worm&page={page}&history_type=sell",
445 | headers=headers)
446 | json_data = await response.json()
447 | for worm in json_data['data']['items']:
448 | if worm['status'] == "on-sale":
449 | worm_on_sale[worm['worm_type']] += 1
450 | elif worm['status'] == "bought":
451 | self.total_earned_from_sale += worm['price_net'] / 1000000000
452 |
453 | return worm_on_sale
454 |
455 | async def check_new_user(self, http_client: aiohttp.ClientSession):
456 | response = await http_client.get(new_user_api)
457 | if response.status == 200:
458 | data_ = await response.json()
459 | # print(data_)
460 | return data_['data']['bonus_claimed']
461 |
462 | def refresh_data(self):
463 | self.total_earned_from_sale = 0
464 | self.worm_in_inv = self.worm_in_inv_copy
465 |
466 | async def get_streak_rewards(self, http_client: aiohttp.ClientSession):
467 | res = await http_client.get(f"{api_endpoint}api/v1/streak-reward")
468 | if res.status == 200:
469 | data_ = await res.json()
470 | return data_['data']
471 | else:
472 | logger.warning(f"{self.session_name} | Failed to get streak rewards")
473 | return None
474 |
475 | async def claim_streak_rewards(self, http_client: aiohttp.ClientSession):
476 | rewards = await self.get_streak_rewards(http_client)
477 | pl_rewards = []
478 | if rewards is None:
479 | return
480 | if len(rewards) == 0:
481 | logger.info(f"{self.session_name} | No ticket to claim.")
482 | return
483 | for reward in rewards:
484 | pl_rewards.append(reward['id'])
485 |
486 | payload = {
487 | "streak_reward_ids": pl_rewards
488 | }
489 | claim = await http_client.post(f"{api_endpoint}api/v1/streak-reward", json=payload)
490 | if claim.status == 200:
491 | logger.success(f"{self.session_name} | Successfully claim tickets!")
492 | else:
493 | logger.warning(f"{self.session_name} | Failed to claim ticket!")
494 |
495 | async def get_tickets(self, http_client: aiohttp.ClientSession):
496 | res = await http_client.get(f"{api_endpoint}api/v1/spin-ticket")
497 | if res.status == 200:
498 | data = await res.json()
499 | return data['data']
500 | return None
501 |
502 | async def get_egg_pieces(self, http_client: aiohttp.ClientSession):
503 | res = await http_client.get(f"{api_endpoint}api/v1/egg-piece")
504 | if res.status == 200:
505 | data = await res.json()
506 | return data['data']
507 | return None
508 |
509 | async def get_fusion_fee(self, type, http_client: aiohttp.ClientSession):
510 | res = await http_client.get(f"{api_endpoint}api/v1/fusion-seed-fee?type={type}")
511 | if res.status == 200:
512 | data = await res.json()
513 | return data['data']
514 | return None
515 |
516 | async def spin(self, ticketId, http_client: aiohttp.ClientSession):
517 | payload = {
518 | "ticket_id": ticketId
519 | }
520 |
521 | res = await http_client.post(f"{api_endpoint}api/v1/spin-reward", json=payload)
522 | if res.status == 200:
523 | data = await res.json()
524 | logger.success(f"{self.session_name} | Spinned successfully - Got {data['data']['type']} egg pieces!")
525 | else:
526 | return
527 |
528 | async def fusion(self, egg_ids, type, http_client: aiohttp.ClientSession):
529 | payload = {
530 | "egg_piece_ids": egg_ids
531 | }
532 |
533 | res = await http_client.post(f"{api_endpoint}api/v1/egg-piece-merge", json=payload)
534 | if res.status == 200:
535 | logger.success(f"{self.session_name} | Successfully fusion a {type} egg!")
536 | else:
537 | return
538 | async def play_game(self, http_client: aiohttp.ClientSession):
539 | egg_type = {
540 | "common": 0,
541 | "uncommon": 0,
542 | "rare": 0,
543 | "epic": 0,
544 | "legendary": 0
545 | }
546 | egg_pieces = await self.get_egg_pieces(http_client)
547 | if egg_pieces is None:
548 | return
549 | for piece in egg_pieces:
550 | egg_type[piece['type']] += 1
551 |
552 | info_ = f"""
553 | Common pieces: {egg_type['common']}
554 | Uncommon pieces: {egg_type['uncommon']}
555 | rare pieces: {egg_type['rare']}
556 | epic pieces: {egg_type['epic']}
557 | legendary pieces: {egg_type['legendary']}
558 | """
559 |
560 | logger.info(f"{self.session_name} Egg pieces: \n{info_}")
561 |
562 | tickets = await self.get_tickets(http_client)
563 | if tickets is None:
564 | return
565 |
566 | logger.info(f"{self.session_name} | Total ticket: {len(tickets)}")
567 |
568 | play = randint(settings.SPIN_PER_ROUND[0], settings.SPIN_PER_ROUND[1])
569 |
570 | for ticket in tickets:
571 | if play == 0:
572 | break
573 | play -= 1
574 | await self.spin(ticket['id'], http_client)
575 | await self.get_tickets(http_client)
576 | await self.get_egg_pieces(http_client)
577 | await asyncio.sleep(randint(2,5))
578 |
579 | if settings.AUTO_FUSION:
580 | # print("stary")
581 | egg_type = {
582 | "common": 0,
583 | "uncommon": 0,
584 | "rare": 0,
585 | "epic": 0,
586 | "legendary": 0
587 | }
588 | egg_pieces = await self.get_egg_pieces(http_client)
589 | if egg_pieces is None:
590 | return
591 | for piece in egg_pieces:
592 | egg_type[piece['type']] += 1
593 |
594 | if egg_type['common'] >= 5:
595 | fusion_fee = await self.get_fusion_fee('common', http_client)
596 | # print(fusion_fee)
597 | if fusion_fee is None:
598 | return
599 | if fusion_fee/1000000000 <= settings.MAXIMUM_PRICE_TO_FUSION_COMMON:
600 | pl_data = []
601 | for piece in egg_pieces:
602 | if len(pl_data) >= 5:
603 | break
604 | if piece['type'] == 'common':
605 | pl_data.append(piece['id'])
606 |
607 | await self.fusion(pl_data, 'common', http_client)
608 |
609 | if egg_type['uncommon'] >= 5:
610 | fusion_fee = await self.get_fusion_fee('uncommon', http_client)
611 | if fusion_fee is None:
612 | return
613 | if fusion_fee/1000000000 <= settings.MAXIMUM_PRICE_TO_FUSION_UNCOMMON:
614 | pl_data = []
615 | for piece in egg_pieces:
616 | if len(pl_data) >= 5:
617 | break
618 | if piece['type'] == 'uncommon':
619 | pl_data.append(piece['id'])
620 |
621 | await self.fusion(pl_data, 'uncommon', http_client)
622 |
623 | if egg_type['rare'] >= 5:
624 | fusion_fee = await self.get_fusion_fee('rare', http_client)
625 | if fusion_fee is None:
626 | return
627 | if fusion_fee/1000000000 <= settings.MAXIMUM_PRICE_TO_FUSION_RARE:
628 | pl_data = []
629 | for piece in egg_pieces:
630 | if len(pl_data) >= 5:
631 | break
632 | if piece['type'] == 'rare':
633 | pl_data.append(piece['id'])
634 |
635 | await self.fusion(pl_data, 'rare', http_client)
636 |
637 | if egg_type['epic'] >= 5:
638 | fusion_fee = await self.get_fusion_fee('epic', http_client)
639 | if fusion_fee is None:
640 | return
641 | if fusion_fee/1000000000 <= settings.MAXIMUM_PRICE_TO_FUSION_EPIC:
642 | pl_data = []
643 | for piece in egg_pieces:
644 | if len(pl_data) >= 5:
645 | break
646 | if piece['type'] == 'epic':
647 | pl_data.append(piece['id'])
648 |
649 | await self.fusion(pl_data, 'epic', http_client)
650 |
651 | if egg_type['legendary'] >= 5:
652 | fusion_fee = await self.get_fusion_fee('legendary', http_client)
653 | if fusion_fee is None:
654 | return
655 | if fusion_fee/1000000000 <= settings.MAXIMUM_PRICE_TO_FUSION_LEGENDARY:
656 | pl_data = []
657 | for piece in egg_pieces:
658 | if len(pl_data) >= 5:
659 | break
660 | if piece['type'] == 'legendary':
661 | pl_data.append(piece['id'])
662 |
663 | await self.fusion(pl_data, 'legendary', http_client)
664 |
665 |
666 | async def claim_gift(self, http_client: aiohttp.ClientSession):
667 | gift = await http_client.get(claim_gift_api)
668 | gift_ = await gift.json()
669 | start_time = gift_['data']['next_claim_from']
670 | end_time = gift_['data']['next_claim_to']
671 |
672 | next_claim_from_dt = datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc)
673 | next_claim_to_dt = datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc)
674 | now = datetime.now(timezone.utc)
675 | is_within_range = next_claim_from_dt <= now <= next_claim_to_dt
676 |
677 | if is_within_range:
678 | res = await http_client.post(claim_gift_api)
679 | if res.status == 200:
680 | logger.success(f"{self.session_name} | Christmas gift claimed successfully!")
681 | else:
682 | logger.info(f"{self.session_name} | Failed to claim gift: {res.text}")
683 |
684 | else:
685 | logger.info(f"{self.session_name} | Christmas gift already claimed!")
686 | return
687 |
688 |
689 |
    async def run(self, proxy: str | None, ua: str) -> None:
        """Main per-account loop: authenticate, farm every enabled feature, sleep, repeat.

        Args:
            proxy: proxy string used for both Telegram and HTTP traffic, or None.
            ua: user-agent string assigned to this session.
        """
        access_token_created_time = 0
        proxy_conn = ProxyConnector().from_url(proxy) if proxy else None

        # align the client hint headers with the assigned user agent
        headers["user-agent"] = ua
        chrome_ver = fetch_version(headers['user-agent'])
        headers['sec-ch-ua'] = f'"Chromium";v="{chrome_ver}", "Android WebView";v="{chrome_ver}", "Not.A/Brand";v="99"'
        http_client = CloudflareScraper(headers=headers, connector=proxy_conn)

        if proxy:
            await self.check_proxy(http_client=http_client, proxy=proxy)

        # refresh the auth token slightly before the typical one-hour expiry
        token_live_time = randint(3500, 3600)
        while True:
            try:
                # anti-detection gate: skip the round if the web app's files changed
                if check_base_url() is False:
                    self.can_run = False
                    if settings.ADVANCED_ANTI_DETECTION:
                        logger.warning(
                            "Detected index js file change. Contact me to check if it's safe to continue: https://t.me/vanhbakaaa")
                    else:
                        logger.warning(
                            "Detected api change! Stopped the bot for safety. Contact me here to update the bot: https://t.me/vanhbakaaa")
                else:
                    self.can_run = True

                if self.can_run:


                    # (re-)authenticate once the cached token is past its lifetime
                    if time.time() - access_token_created_time >= token_live_time:
                        # logger.info(f"{self.session_name} | Update auth token...")
                        tg_web_data = await self.get_tg_web_data(proxy=proxy)
                        headers['telegram-data'] = tg_web_data
                        # print(tg_web_data)
                        http_client.headers["telegram-data"] = tg_web_data
                        access_token_created_time = time.time()
                        token_live_time = randint(3500, 3600)
                        await asyncio.sleep(delay=randint(10, 15))

                    not_new_user = await self.check_new_user(http_client)

                    if not_new_user is False:
                        logger.info(f"{self.session_name} | Setting up new account...")
                        await self.setup_profile(http_client)

                    await self.fetch_profile(http_client)

                    await self.claim_gift(http_client)

                    # bird handling: claim a finished hunt, otherwise cheer/feed and relaunch
                    if settings.AUTO_START_HUNT:
                        bird_data = await self.get_bird_info(http_client)
                        # print(bird_data)
                        if bird_data is None:
                            logger.info(f"{self.session_name} | Can't get bird data...")
                        elif bird_data['owner_id'] != self.user_id:
                            logger.warning(f"{self.session_name} | Bird is not your: {bird_data}")
                        elif bird_data['status'] == "hunting":

                            # fall back to dateutil for timestamps fromisoformat rejects
                            try:
                                given_time = datetime.fromisoformat(bird_data['hunt_end_at'])
                                timestamp_naive = given_time.replace(tzinfo=None)
                            except:
                                import dateutil.parser
                                timestamp_naive = dateutil.parser.isoparse(bird_data['hunt_end_at'])
                            now = datetime.now(timezone.utc)

                            # If the parsed timestamp is naive, make it aware in UTC
                            if timestamp_naive.tzinfo is None:
                                timestamp_naive = timestamp_naive.replace(tzinfo=timezone.utc)

                            if now < timestamp_naive:
                                logger.info(f"{self.session_name} | Bird currently hunting...")
                            else:
                                logger.info(f"{self.session_name} | Hunt completed, claiming reward...")
                                await self.claim_hunt_reward(bird_data['id'], http_client)
                        else:
                            # bird is idle: it must be happy and fed before a new hunt
                            condition = True
                            if bird_data['happiness_level'] == 0:
                                logger.info(f"{self.session_name} | Bird is not happy, attemping to make bird happy...")
                                check = await self.make_bird_happy(bird_data['id'], http_client)
                                if check:
                                    logger.success(f"{self.session_name} | Successfully make bird happy!")
                                else:
                                    logger.info(f"{self.session_name} |Failed to make bird happy!")
                                    condition = False
                            if bird_data['energy_level'] == 0:
                                logger.info(f"{self.session_name} | Bird is hungry, attemping to feed bird...")
                                worms = await self.get_worm_data(http_client)
                                if worms is None:
                                    condition = False
                                    logger.info(f"{self.session_name} | Failed to fetch worm data")
                                elif len(worms) == 0:
                                    logger.warning(f"{self.session_name} | You dont have any worm to feed bird!")
                                    condition = False
                                else:
                                    try:
                                        energy = (bird_data['energy_max'] - bird_data['energy_level']) / 1000000000
                                    except:
                                        print(bird_data)
                                        energy = 2
                                    # feed cheapest worms first: commons (~2 energy), then uncommons (~4)
                                    wormss = []
                                    for worm in worms:
                                        if worm['type'] == "common" and worm['on_market'] is False:
                                            wormss.append(worm['id'])
                                            energy -= 2
                                        if energy <= 1:
                                            break
                                    if energy > 1:
                                        for worm in worms:
                                            if worm['type'] == "uncommon" and worm['on_market'] is False:
                                                wormss.append(worm['id'])
                                                energy -= 4
                                            if energy <= 1:
                                                break
                                    await self.feed_bird(bird_data['id'], wormss, http_client)
                                    # still hungry after feeding everything we had: don't hunt
                                    if energy > 1:
                                        condition = False

                            if condition:
                                await self.start_hunt(bird_data['id'], http_client)

                    if settings.AUTO_UPGRADE_STORAGE:
                        await self.upgrade_storage(http_client)
                        await asyncio.sleep(1)
                    if settings.AUTO_UPGRADE_MINING:
                        await self.upgrade_mining(http_client)
                        await asyncio.sleep(1)
                    if settings.AUTO_UPGRADE_HOLY:
                        await self.upgrade_holy(http_client)
                        await asyncio.sleep(1)

                    # claim mined seed whenever the balance endpoint is reachable
                    check_balance = await self.verify_balance(http_client)
                    if check_balance:
                        response = await http_client.post(api_claim)
                        if response.status == 200:
                            logger.success(f"{self.session_name} | Claim successful ")
                        elif response.status == 400:
                            logger.info(f"{self.session_name} | Not yet time to claim")
                        else:
                            logger.error(
                                f"{self.session_name} | An error occurred, status code: {response.status}")

                    await self.perform_daily_checkin(http_client)
                    await self.capture_worm(http_client)
                    # list surplus worms per the configured keep-quantities and prices
                    if settings.AUTO_SELL_WORMS:
                        logger.info(f"{self.session_name} | Fetching worms data to put it on sale...")
                        worms = await self.get_worms(http_client)
                        # print(self.worm_in_inv)
                        worms_on_sell = await self.get_sale_data(http_client)
                        logger.info(f"{self.session_name} | Worms on sale now: ")
                        for worm in worms_on_sell:
                            logger.info(
                                f"{self.session_name} | Total {worm} on sale: {worms_on_sell[worm]}")
                        logger.info(
                            f"{self.session_name} | Total earned from sale: {self.total_earned_from_sale}")
                        for worm in worms:
                            if worm['on_market']:
                                continue
                            # quantity_to_keep == -1 means: never sell this rarity
                            elif settings.QUANTITY_TO_KEEP[worm['type']]['quantity_to_keep'] == -1:
                                continue
                            elif settings.QUANTITY_TO_KEEP[worm['type']]['quantity_to_keep'] < self.worm_in_inv[
                                worm['type']]:
                                # sale_price == 0 means: match the cheapest market ask
                                if settings.QUANTITY_TO_KEEP[worm['type']]['sale_price'] == 0:
                                    price_to_sell = await self.get_price(worm['type'], http_client)

                                else:
                                    price_to_sell = settings.QUANTITY_TO_KEEP[worm['type']]['sale_price'] * (10 ** 9)
                                # print(f"Sell {worm['type']} , price: {price_to_sell/1000000000}")
                                await self.sell_worm(worm['id'], price_to_sell, worm['type'], http_client)
                                self.worm_in_inv[worm['type']] -= 1

                        self.refresh_data()
                    if settings.AUTO_CLEAR_TASKS:
                        await self.fetch_tasks(http_client)

                    if settings.AUTO_SPIN:
                        await self.claim_streak_rewards(http_client)
                        await asyncio.sleep(randint(1,4))
                        await self.play_game(http_client)

                # long pause before the next full round
                delay_time = randint(2800, 3600)
                logger.info(f"{self.session_name} | Completed {self.session_name}, waiting {delay_time} seconds...")
                await asyncio.sleep(delay=delay_time)
            except InvalidSession as error:
                raise error

            except Exception as error:
                traceback.print_exc()
                logger.error(f"{self.session_name} | Unknown error: {error}")
                await asyncio.sleep(delay=randint(60, 120))
880 |
881 |
def get_():
    """Return the hard-coded Telegram user id used as the default referral.

    The id is stored obfuscated as a base64 blob; "NjQ5MzIxMTU1" decodes
    to the digit string "6493211155".

    Returns:
        str: the decoded id as a plain string.
    """
    # Decode the embedded base64 payload once per call; the value is constant.
    decoded_bytes = base64.b64decode("NjQ5MzIxMTE1NQ==")
    return decoded_bytes.decode("utf-8")
887 |
888 |
889 |
async def run_tapper(tg_client: Client, proxy: str | None, ua: str):
    """Entry point for a single account: wait a random stagger delay, then
    run the Tapper loop for this Telegram client.

    Args:
        tg_client: the Pyrogram client for this session.
        proxy: optional proxy string forwarded to the tapper.
        ua: user-agent string forwarded to the tapper.

    InvalidSession raised by the tapper is caught here and logged so a single
    bad session does not propagate further.
    """
    try:
        # Randomized start-up delay so concurrent sessions don't fire at once.
        startup_delay = randint(1, 25)
        logger.info(f"Wait {startup_delay}s")
        await asyncio.sleep(startup_delay)
        tapper = Tapper(tg_client=tg_client)
        await tapper.run(proxy=proxy, ua=ua)
    except InvalidSession:
        logger.error(f"{tg_client.name} | Invalid Session")
899 |
--------------------------------------------------------------------------------