├── .gitignore ├── README.md ├── checker.py ├── config.toml ├── files ├── discords.txt ├── emails.txt ├── evm_wallets.txt ├── proxies.txt ├── referral_links.txt ├── surveys.csv └── twitters.txt ├── internal ├── __init__.py ├── captcha │ ├── __init__.py │ ├── captcha.py │ └── constants.py ├── config │ ├── __init__.py │ └── config.py ├── email │ ├── __init__.py │ ├── base.py │ ├── constants.py │ ├── factory.py │ ├── imap.py │ └── mail3.py ├── galxe │ ├── __init__.py │ ├── account.py │ ├── client.py │ ├── constants.py │ ├── fingerprint.py │ ├── models.py │ └── utils.py ├── models │ ├── __init__.py │ └── models.py ├── onchain │ ├── __init__.py │ ├── abi │ │ ├── loyalty_points.json │ │ └── space_station.json │ ├── account.py │ └── constants.py ├── storage │ ├── __init__.py │ └── storage.py ├── tls │ ├── __init__.py │ └── client.py ├── twitter │ ├── __init__.py │ ├── client_transaction │ │ ├── __init__.py │ │ ├── cubic_curve.py │ │ ├── interpolate.py │ │ ├── rotation.py │ │ ├── transaction.py │ │ └── utils.py │ └── twitter.py ├── utils │ ├── __init__.py │ ├── async_web3.py │ └── utils.py └── vars │ ├── __init__.py │ └── vars.py ├── logs └── errors.txt ├── main.py ├── requirements.txt ├── results └── stats.csv └── storage └── quizzes.json /.gitignore: -------------------------------------------------------------------------------- 1 | /.idea 2 | 3 | .DS_Store 4 | 5 | /venv 6 | 7 | /__pycache__ 8 | /*/__pycache__ 9 | /*/*/__pycache__ 10 | /*/*/*/__pycache__ 11 | 12 | /storage/data.json 13 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Galxe AIO 2 | 3 | - Link Twitter account 4 | - Link email: IMAP or mail3.me 5 | - Link Discord 6 | - Complete requirements 7 | - Complete tasks 8 | - Claim rewards 9 | - Quiz solver 10 | - Submit survey 11 | - Referral links 12 | - Accounts statistics 13 | 14 | ### Supported tasks: 15 | - Twitter: Follow, Retweet, Like, Quote 16 | - Visit link 17 | - Watch YouTube 18 | - Solve quiz 19 | - Survey 20 | - Follow Galxe space 21 | - Tries to verify all the others 22 | 23 | ### Supported rewards: 24 | - Points 25 | - Mystery Boxes 26 | - Gas-less OATs 27 | - Gas OATs and NFTs 28 | - Participate in raffles 29 | - Discord Roles 30 | 31 | ### Follow: https://t.me/thelaziestcoder 32 | 33 | ### Settings 34 | - `files/evm_wallets.txt` - Wallets with EVM private keys 35 | - `files/proxies.txt` - Corresponding proxies for wallets 36 | - `files/twitters.txt` - Corresponding twitters for wallets 37 | - `files/emails.txt` - Corresponding emails for wallets 38 | - `files/discords.txt` - Corresponding discords for wallets. 
Can be empty if not needed 39 | - `config.toml` - Custom settings 40 | 41 | ### Config 42 | - `FAKE_TWITTER` - Verify Twitter tasks without real Twitter actions 43 | - `GALXE_CAMPAIGN_IDS` - Campaigns to complete, parent campaigns are supported 44 | - `HIDE_UNSUPPORTED` - Don't log failed completion attempts for unsupported tasks 45 | 46 | ### Run 47 | 48 | Python version: 3.11 49 | 50 | Creating a virtual environment: \ 51 | `python3 -m venv venv` 52 | 53 | Activating: 54 | - Mac/Linux - `source venv/bin/activate` 55 | - Windows - `.\venv\Scripts\activate` 56 | 57 | Installing all dependencies: \ 58 | `pip install -r requirements.txt` \ 59 | `playwright install` 60 | 61 | Run main script: \ 62 | `python main.py` 63 | 64 | Run Twitter checker: \ 65 | `python checker.py` 66 | 67 | ### Results 68 | 69 | `results/` - Folder with run results \ 70 | `logs/` - Folder with run logs \ 71 | `storage/` - Local database 72 | 73 | ### Donate :) 74 | 75 | TRC-20 - `TX7yeJVHwhNsNy4ksF1pFRFnunF1aFRmet` \ 76 | ERC-20 - `0x5aa3c82045f944f5afa477d3a1d0be3c96196319` 77 | -------------------------------------------------------------------------------- /checker.py: -------------------------------------------------------------------------------- 1 | import aiohttp 2 | import asyncio 3 | 4 | from termcolor import cprint 5 | from loguru import logger 6 | from typing import Tuple 7 | from eth_account import Account as EthAccount 8 | 9 | from internal.storage import AccountStorage 10 | from internal.models import AccountInfo 11 | from internal.twitter import Twitter 12 | from internal.config import THREADS_NUM, CHECKER_UPDATE_STORAGE 13 | from internal.utils import async_retry, log_long_exc 14 | 15 | 16 | @async_retry 17 | async def change_ip(link: str): 18 | async with aiohttp.ClientSession() as sess: 19 | async with sess.get(link) as resp: 20 | if resp.status != 200: 21 | raise Exception(f'Failed to change ip: Status = {resp.status}. 
Response = {await resp.text()}') 22 | 23 | 24 | async def check_account(account_data: Tuple[int, Tuple[str, str, str]]): 25 | idx, (evm_wallet, proxy, twitter_token, _) = account_data 26 | address = EthAccount().from_key(evm_wallet).address 27 | logger.info(f'{idx}) Processing {address}') 28 | 29 | account_info = AccountInfo(evm_address=address, proxy=proxy, twitter_auth_token=twitter_token) 30 | 31 | if '|' in account_info.proxy: 32 | change_link = account_info.proxy.split('|')[1] 33 | await change_ip(change_link) 34 | logger.info(f'{idx}) Successfully changed ip') 35 | 36 | twitter = Twitter(account_info) 37 | await twitter.start() 38 | 39 | await twitter.follow('elonmusk') 40 | 41 | return True 42 | 43 | 44 | async def process_batch(bid: int, batch, async_func): 45 | failed = [] 46 | for idx, d in enumerate(batch): 47 | try: 48 | await async_func(d) 49 | except Exception as e: 50 | e_msg = str(e) 51 | if 'Could not authenticate you' in e_msg or 'account is suspended' in e_msg \ 52 | or 'account has been locked' in e_msg or 'account is temporarily locked' in e_msg: 53 | failed.append(d) 54 | await log_long_exc(d[0], 'Process account error', e) 55 | return failed 56 | 57 | 58 | async def process(batches, async_func): 59 | tasks = [] 60 | for idx, b in enumerate(batches): 61 | tasks.append(asyncio.create_task(process_batch(idx, b, async_func))) 62 | return await asyncio.gather(*tasks) 63 | 64 | 65 | def main(): 66 | with open('files/evm_wallets.txt', 'r', encoding='utf-8') as file: 67 | evm_wallets = file.read().splitlines() 68 | evm_wallets = [w.strip() for w in evm_wallets] 69 | with open('files/proxies.txt', 'r', encoding='utf-8') as file: 70 | proxies = file.read().splitlines() 71 | proxies = [p.strip() for p in proxies] 72 | proxies = [p if '://' in p.split('|')[0] else 'http://' + p for p in proxies] 73 | with open('files/twitters.txt', 'r', encoding='utf-8') as file: 74 | twitters = file.read().splitlines() 75 | twitters = [t.strip() for t in twitters] 76 | with open('files/emails.txt', 'r', encoding='utf-8') as file: 77 | emails = file.read().splitlines() 78 | emails = [e.strip() for e in emails] 79 | 80 | if len(evm_wallets) != len(proxies): 81 | logger.error('Proxies count does not match wallets count') 82 | return 83 | if len(evm_wallets) != len(twitters): 84 | logger.error('Twitter count does not match wallets count') 85 | return 86 | if len(evm_wallets) != len(emails): 87 | logger.error('Emails count does not match wallets count') 88 | return 89 | 90 | def get_batches(threads: int = THREADS_NUM): 91 | _data = list(enumerate(list(zip(evm_wallets, proxies, twitters, emails)), start=1)) 92 | _batches = [[] for _ in range(threads)] 93 | for _idx, d in enumerate(_data): 94 | _batches[_idx % threads].append(d) 95 | return _batches 96 | 97 | loop = asyncio.new_event_loop() 98 | asyncio.set_event_loop(loop) 99 | results = loop.run_until_complete(process(get_batches(), check_account)) 100 | 101 | failed_twitter = set() 102 | for result in results: 103 | for r in result: 104 | failed_twitter.add(r[1][2]) 105 | 106 | storage = AccountStorage('storage/data.json') 107 | storage.init() 108 | 109 | failed_cnt = 0 110 | 111 | print() 112 | 113 | open('results/working_evm_wallets.txt', 'w', encoding='utf-8').close() 114 | open('results/working_proxies.txt', 'w', encoding='utf-8').close() 115 | open('results/working_twitters.txt', 'w', encoding='utf-8').close() 116 | open('results/working_emails.txt', 'w', encoding='utf-8').close() 117 | for evm_wallet, proxy, twitter, email in zip(evm_wallets, 
proxies, twitters, emails): 118 | if twitter in failed_twitter: 119 | failed_cnt += 1 120 | address = EthAccount().from_key(evm_wallet).address 121 | logger.info(f'Removed twitter token {twitter} and proxy {proxy} for EVM address {address}') 122 | if CHECKER_UPDATE_STORAGE: 123 | storage.remove(address) 124 | continue 125 | with open('results/working_evm_wallets.txt', 'a', encoding='utf-8') as file: 126 | file.write(f'{evm_wallet}\n') 127 | with open('results/working_proxies.txt', 'a', encoding='utf-8') as file: 128 | file.write(f'{proxy}\n') 129 | with open('results/working_twitters.txt', 'a', encoding='utf-8') as file: 130 | file.write(f'{twitter}\n') 131 | with open('results/working_emails.txt', 'a', encoding='utf-8') as file: 132 | file.write(f'{email}\n') 133 | 134 | logger.info(f'Total failed count: {failed_cnt}') 135 | 136 | if CHECKER_UPDATE_STORAGE: 137 | storage.save() 138 | 139 | print() 140 | 141 | 142 | if __name__ == '__main__': 143 | cprint('###############################################################', 'cyan') 144 | cprint('#################', 'cyan', end='') 145 | cprint(' https://t.me/thelaziestcoder ', 'magenta', end='') 146 | cprint('################', 'cyan') 147 | cprint('#################', 'cyan', end='') 148 | cprint(' https://t.me/thelaziestcoder ', 'magenta', end='') 149 | cprint('################', 'cyan') 150 | cprint('#################', 'cyan', end='') 151 | cprint(' https://t.me/thelaziestcoder ', 'magenta', end='') 152 | cprint('################', 'cyan') 153 | cprint('###############################################################\n', 'cyan') 154 | main() 155 | -------------------------------------------------------------------------------- /config.toml: -------------------------------------------------------------------------------- 1 | WAIT_BETWEEN_ACCOUNTS = [0, 0] 2 | MAX_TRIES = 2 3 | 4 | # Only one of these is needed 5 | TWO_CAPTCHA_API_KEY = "" 6 | CAP_SOLVER_API_KEY = "" 7 | 8 | THREADS_NUM = 1 9 | DISABLE_SSL = false 10 | 11 | CHECKER_UPDATE_STORAGE = false 12 | 13 | UPDATE_STORAGE_ACCOUNT_INFO = true 14 | SKIP_FIRST_ACCOUNTS = 0 15 | RANDOM_ORDER = false 16 | 17 | FAKE_TWITTER = false 18 | FORCE_LINK_EMAIL = false 19 | 20 | GALXE_CAMPAIGN_IDS = ["GCTN3ttM4T", "GC433ttn6N"] 21 | HIDE_UNSUPPORTED = false 22 | 23 | # Space aliases to track all points 24 | SPACES_STATS = [] 25 | 26 | [RPCs] 27 | Ethereum = 'https://rpc.ankr.com/eth' 28 | Optimism = 'https://mainnet.optimism.io' 29 | BSC = 'https://rpc.ankr.com/bsc' 30 | Polygon = 'https://polygon-bor.publicnode.com' 31 | Arbitrum = 'https://arb1.arbitrum.io/rpc' 32 | Avalanche = 'https://avalanche-c-chain.publicnode.com' 33 | Base = 'https://mainnet.base.org' 34 | Scroll = 'https://1rpc.io/scroll' 35 | Gravity = 'https://rpc.gravity.xyz' 36 | -------------------------------------------------------------------------------- /files/discords.txt: -------------------------------------------------------------------------------- 1 | discord_token1 2 | discord_token2 3 | discord_token3 -------------------------------------------------------------------------------- /files/emails.txt: -------------------------------------------------------------------------------- 1 | email_username1:email_password1 2 | email_username2:email_password2 3 | mail3.me -------------------------------------------------------------------------------- /files/evm_wallets.txt: -------------------------------------------------------------------------------- 1 | private_key1 2 | private_key2 3 | private_key3 
-------------------------------------------------------------------------------- /files/proxies.txt: -------------------------------------------------------------------------------- 1 | user1:pass1@ip1:port1 2 | user2:pass2@ip2:port2 3 | user3:pass3@ip3:port3| -------------------------------------------------------------------------------- /files/referral_links.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akasakaid/lazy-galxe/599ac9c40181471c17cf6280ffcd6c7da92d0f36/files/referral_links.txt -------------------------------------------------------------------------------- /files/surveys.csv: -------------------------------------------------------------------------------- 1 | Address,, 2 | ,,|| -------------------------------------------------------------------------------- /files/twitters.txt: -------------------------------------------------------------------------------- 1 | auth_token1 2 | auth_token2 3 | auth_token3 -------------------------------------------------------------------------------- /internal/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akasakaid/lazy-galxe/599ac9c40181471c17cf6280ffcd6c7da92d0f36/internal/__init__.py -------------------------------------------------------------------------------- /internal/captcha/__init__.py: -------------------------------------------------------------------------------- 1 | from .captcha import solve_recaptcha_v2, solve_recaptcha_v3, solve_cloudflare_challenge, solve_geetest 2 | -------------------------------------------------------------------------------- /internal/captcha/captcha.py: -------------------------------------------------------------------------------- 1 | import json 2 | import asyncio 3 | import aiohttp 4 | from enum import Enum 5 | from loguru import logger 6 | from urllib.parse import urlparse 7 | 8 | from ..utils import async_retry, get_proxy_url 9 | from ..config import TWO_CAPTCHA_API_KEY, CAP_MONSTER_API_KEY, CAP_SOLVER_API_KEY, DISABLE_SSL 10 | from ..vars import USER_AGENT 11 | 12 | from .constants import TWO_CAPTCHA_API_URL, CAP_MONSTER_API_URL, CAP_SOLVER_API_URL 13 | 14 | 15 | def solve_captcha_retry(async_func): 16 | async def wrapper(idx, *args, **kwargs): 17 | last_exc = None 18 | for _ in range(5): 19 | try: 20 | return await async_func(idx, *args, **kwargs) 21 | except Exception as e: 22 | last_exc = e 23 | if last_exc is not None: 24 | raise last_exc 25 | 26 | return wrapper 27 | 28 | 29 | class TaskType(Enum): 30 | RECAPTCHA_V2 = 'RecaptchaV2Task' 31 | RECAPTCHA_V3 = 'RecaptchaV3Task' 32 | RECAPTCHA_V3_PROXY_LESS = 'RecaptchaV3TaskProxyless' 33 | TURNSTILE_TASK = 'TurnstileTask' 34 | ANTI_CLOUDFLARE_TASK = 'AntiCloudflareTask' 35 | GEETEST = 'GeeTestTask' 36 | 37 | 38 | @solve_captcha_retry 39 | async def solve_recaptcha_v2(idx, url, site_key, proxy=None, **kwargs): 40 | if CAP_SOLVER_API_KEY: 41 | return await _solve_captcha( 42 | CAP_SOLVER_API_URL, CAP_SOLVER_API_KEY, TaskType.RECAPTCHA_V2, 43 | idx, url, site_key, proxy, proxy_one_line=True, 44 | userAgent=USER_AGENT, **kwargs, 45 | ) 46 | elif TWO_CAPTCHA_API_KEY: 47 | return await _solve_captcha( 48 | TWO_CAPTCHA_API_URL, TWO_CAPTCHA_API_KEY, TaskType.RECAPTCHA_V2, 49 | idx, url, site_key, proxy, userAgent=USER_AGENT, **kwargs, 50 | ) 51 | elif CAP_MONSTER_API_KEY: 52 | return await _solve_captcha( 53 | CAP_MONSTER_API_URL, CAP_MONSTER_API_KEY, TaskType.RECAPTCHA_V2, 54 | idx, url, site_key, 
proxy, userAgent=USER_AGENT, **kwargs, 55 | ) 56 | else: 57 | raise Exception('No captcha service API keys specified for recaptcha v2') 58 | 59 | 60 | @solve_captcha_retry 61 | async def solve_recaptcha_v3(idx, url, site_key, page_action, proxy=None, **kwargs): 62 | if CAP_SOLVER_API_KEY: 63 | return await _solve_captcha( 64 | CAP_SOLVER_API_URL, CAP_SOLVER_API_KEY, TaskType.RECAPTCHA_V3, 65 | idx, url, site_key, proxy, proxy_one_line=True, 66 | pageAction=page_action, minScore=0.9, userAgent=USER_AGENT, **kwargs, 67 | ) 68 | elif TWO_CAPTCHA_API_KEY: 69 | return await _solve_captcha( 70 | TWO_CAPTCHA_API_URL, TWO_CAPTCHA_API_KEY, TaskType.RECAPTCHA_V3_PROXY_LESS, 71 | idx, url, site_key, proxy, pageAction=page_action, minScore=0.9, userAgent=USER_AGENT, **kwargs, 72 | ) 73 | elif CAP_MONSTER_API_KEY: 74 | return await _solve_captcha( 75 | CAP_MONSTER_API_URL, CAP_MONSTER_API_KEY, TaskType.RECAPTCHA_V3_PROXY_LESS, 76 | idx, url, site_key, proxy, pageAction=page_action, minScore=0.9, userAgent=USER_AGENT, **kwargs, 77 | ) 78 | else: 79 | raise Exception('No captcha service API keys specified for recaptcha v3') 80 | 81 | 82 | @solve_captcha_retry 83 | async def solve_cloudflare_challenge(idx, url, site_key, proxy): 84 | if TWO_CAPTCHA_API_KEY: 85 | return await _solve_captcha( 86 | TWO_CAPTCHA_API_URL, TWO_CAPTCHA_API_KEY, TaskType.TURNSTILE_TASK, 87 | idx, url, site_key, proxy=proxy, userAgent=USER_AGENT, 88 | ) 89 | else: 90 | raise Exception('No captcha service API keys specified for cloudflare') 91 | 92 | 93 | async def solve_geetest(idx, url, proxy, gt, challenge, version, init_parameters): 94 | if CAP_SOLVER_API_KEY: 95 | return await _solve_captcha( 96 | CAP_SOLVER_API_URL, CAP_SOLVER_API_KEY, TaskType.GEETEST, 97 | idx, url, proxy=proxy, proxy_one_line=True, gt=gt, challenge=challenge, captchaId=gt, 98 | ) 99 | elif TWO_CAPTCHA_API_KEY: 100 | return await _solve_captcha( 101 | TWO_CAPTCHA_API_URL, TWO_CAPTCHA_API_KEY, TaskType.GEETEST, 102 | idx, url, proxy=proxy, userAgent=USER_AGENT, gt=gt, challenge=challenge, 103 | version=version, initParameters=init_parameters, 104 | ) 105 | elif CAP_MONSTER_API_KEY: 106 | return await _solve_captcha( 107 | CAP_MONSTER_API_URL, CAP_MONSTER_API_KEY, TaskType.GEETEST, 108 | idx, url, proxy=proxy, userAgent=USER_AGENT, gt=gt, challenge=challenge, 109 | version=version, initParameters=init_parameters, 110 | ) 111 | else: 112 | raise Exception('No captcha service API keys specified for geetest') 113 | 114 | 115 | async def _solve_captcha(api_url, client_key, 116 | task_type, idx, url, site_key='', 117 | proxy=None, proxy_one_line=False, 118 | **additional_task_properties): 119 | create_task_req = { 120 | 'clientKey': client_key, 121 | 'task': { 122 | 'type': task_type.value, 123 | 'websiteURL': url, 124 | **additional_task_properties, 125 | }, 126 | } 127 | 128 | ref_name, ref_value = None, None 129 | if api_url == CAP_SOLVER_API_URL: 130 | ref_name, ref_value = 'appId', '373E3CAC-2E7E-4748-B107-908AC039873D' 131 | elif api_url == TWO_CAPTCHA_API_URL: 132 | ref_name, ref_value = 'softId', 4669 133 | if ref_name is not None: 134 | create_task_req[ref_name] = ref_value 135 | 136 | if site_key: 137 | create_task_req['task']['websiteKey'] = site_key 138 | proxy = get_proxy_url(proxy) 139 | if proxy and 'Proxyless' not in task_type.value: 140 | if proxy_one_line: 141 | create_task_req['task'].update({ 142 | 'proxy': proxy, 143 | }) 144 | else: 145 | parsed_proxy = urlparse(proxy) 146 | create_task_req['task'].update({ 147 | 'proxyType': 
parsed_proxy.scheme, 148 | 'proxyAddress': parsed_proxy.hostname, 149 | 'proxyPort': parsed_proxy.port, 150 | 'proxyLogin': parsed_proxy.username, 151 | 'proxyPassword': parsed_proxy.password, 152 | }) 153 | 154 | req_kwargs = {} 155 | if DISABLE_SSL: 156 | req_kwargs['ssl'] = False 157 | 158 | @async_retry 159 | async def create_task(): 160 | async with aiohttp.ClientSession() as sess: 161 | async with sess.post(f'{api_url}/createTask', json=create_task_req, **req_kwargs) as resp: 162 | result = json.loads(await resp.text()) 163 | if result['errorId'] != 0: 164 | raise Exception(f'Create task error {result.get("errorCode")}: ' 165 | f'{result.get("errorDescription")}') 166 | return result['taskId'] 167 | 168 | async def get_task_result(tid): 169 | async with aiohttp.ClientSession() as sess: 170 | async with sess.post(f'{api_url}/getTaskResult', json={ 171 | 'clientKey': client_key, 172 | 'taskId': tid, 173 | }, **req_kwargs) as resp: 174 | result = json.loads(await resp.text()) 175 | if result['errorId'] != 0: 176 | raise Exception(f'Get task result error {result.get("errorCode")}: ' 177 | f'{result.get("errorDescription")}') 178 | return result.get('status'), result.get('solution') 179 | 180 | logger.info(f'{idx}) Creating captcha task') 181 | task_id = await create_task() 182 | logger.info(f'{idx}) Waiting for captcha solution: {task_id}') 183 | waited, response = 0, None 184 | while waited <= 180: 185 | await asyncio.sleep(10) 186 | waited += 10 187 | status, solution = await get_task_result(task_id) 188 | logger.info(f'{idx}) Captcha task status: {status}') 189 | if solution is None: 190 | continue 191 | if 'GeeTestTask' in task_type.value: 192 | response = solution 193 | elif 'TurnstileTask' in task_type.value: 194 | response = solution['token'] 195 | else: 196 | response = solution['gRecaptchaResponse'] 197 | break 198 | if response is None: 199 | raise Exception(f'Captcha solving takes too long') 200 | logger.success(f'{idx}) Captcha solution received') 201 | return response 202 | -------------------------------------------------------------------------------- /internal/captcha/constants.py: -------------------------------------------------------------------------------- 1 | TWO_CAPTCHA_API_URL = 'https://api.2captcha.com' 2 | CAP_MONSTER_API_URL = 'https://api.capmonster.cloud' 3 | CAP_SOLVER_API_URL = 'https://api.capsolver.com' 4 | -------------------------------------------------------------------------------- /internal/config/__init__.py: -------------------------------------------------------------------------------- 1 | from .config import * 2 | -------------------------------------------------------------------------------- /internal/config/config.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import sys 3 | import toml 4 | import platform 5 | import subprocess 6 | from importlib.metadata import version 7 | 8 | 9 | cfg = toml.load(open('config.toml', 'r', encoding='utf-8')) 10 | 11 | 12 | WAIT_BETWEEN_ACCOUNTS = cfg.get('WAIT_BETWEEN_ACCOUNTS') 13 | MAX_TRIES = cfg.get('MAX_TRIES') 14 | CAP_MONSTER_API_KEY = cfg.get('CAP_MONSTER_API_KEY') 15 | TWO_CAPTCHA_API_KEY = cfg.get('TWO_CAPTCHA_API_KEY') 16 | CAP_SOLVER_API_KEY = cfg.get('CAP_SOLVER_API_KEY') 17 | THREADS_NUM = cfg.get('THREADS_NUM') 18 | DISABLE_SSL = cfg.get('DISABLE_SSL') 19 | CHECKER_UPDATE_STORAGE = cfg.get('CHECKER_UPDATE_STORAGE') 20 | UPDATE_STORAGE_ACCOUNT_INFO = cfg.get('UPDATE_STORAGE_ACCOUNT_INFO') 21 | SKIP_FIRST_ACCOUNTS = 
cfg.get('SKIP_FIRST_ACCOUNTS') 22 | RANDOM_ORDER = cfg.get('RANDOM_ORDER') 23 | FAKE_TWITTER = cfg.get('FAKE_TWITTER') 24 | FORCE_LINK_EMAIL = cfg.get('FORCE_LINK_EMAIL') 25 | GALXE_CAMPAIGN_IDS = cfg.get('GALXE_CAMPAIGN_IDS') 26 | REFERRAL_LINKS = [line.strip() for line in open('files/referral_links.txt', 'r', encoding='utf-8').read().splitlines() 27 | if line.strip() != ''] 28 | with open('files/surveys.csv', 'r', encoding='utf-8') as file: 29 | reader = csv.reader(file) 30 | SURVEYS = [row for row in reader] 31 | SURVEYS = {row[0].lower(): row[1:] for row in SURVEYS} 32 | HIDE_UNSUPPORTED = cfg.get('HIDE_UNSUPPORTED') 33 | SPACES_STATS = cfg.get('SPACES_STATS') 34 | RPCs = cfg.get('RPCs') 35 | 36 | 37 | curl_cffi_installed_version = version('curl_cffi') 38 | if platform.system() == 'Windows': 39 | if curl_cffi_installed_version == '0.8.1b8': 40 | print('\nNeed to downgrade curl-cffi version for Windows') 41 | subprocess.check_call([sys.executable, "-m", "pip", "install", 'curl_cffi==0.7.4']) 42 | print('Successfully installed curl_cffi==0.7.4\n') 43 | else: 44 | if curl_cffi_installed_version != '0.8.1b8': 45 | print('\nNeed to upgrade curl-cffi version for Mac/Linux') 46 | subprocess.check_call([sys.executable, "-m", "pip", "install", 'curl_cffi==0.8.1b8']) 47 | print('Successfully installed curl_cffi==0.8.1b8\n') 48 | -------------------------------------------------------------------------------- /internal/email/__init__.py: -------------------------------------------------------------------------------- 1 | from .factory import Email 2 | -------------------------------------------------------------------------------- /internal/email/base.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from loguru import logger 3 | from typing import Optional, Tuple 4 | 5 | from ..models import AccountInfo 6 | 7 | from .constants import FOLDERS 8 | 9 | 10 | class BaseClient: 11 | 12 | def __init__(self, account: AccountInfo, email_type: str): 13 | self.account = account 14 | self.email_type = email_type 15 | 16 | async def close(self): 17 | raise NotImplementedError() 18 | 19 | async def __aenter__(self) -> "BaseClient": 20 | return self 21 | 22 | async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: 23 | await self.close() 24 | 25 | def username(self) -> str: 26 | raise NotImplementedError() 27 | 28 | async def _login(self): 29 | raise NotImplementedError() 30 | 31 | async def login(self): 32 | try: 33 | await self._login() 34 | logger.info(f'{self.account.idx}) Successfully logged in {self.email_type} email') 35 | except Exception as e: 36 | raise Exception(f'Email login failed: {str(e)}') 37 | 38 | async def _find_email(self, folder: str, subject_condition_func) -> Optional[str]: 39 | raise NotImplementedError() 40 | 41 | async def find_email(self, subject_condition_func) -> Tuple[Optional[str], Optional[str]]: 42 | try: 43 | for folder in FOLDERS: 44 | subj, text = await self._find_email(folder, subject_condition_func) 45 | if subj is not None: 46 | return subj, text 47 | return None, None 48 | except Exception as e: 49 | raise Exception(f'Find email failed: {str(e)}') 50 | 51 | async def wait_for_email(self, subject_condition_func, timeout=90, polling=10) -> Tuple[Optional[str], Optional[str]]: 52 | exc_cnt = 0 53 | for t in range(0, timeout + 1, polling): 54 | await asyncio.sleep(polling) 55 | try: 56 | subj, text = await self.find_email(subject_condition_func) 57 | exc_cnt = 0 58 | except Exception as e: 59 | exc_cnt += 1 60 | 
if exc_cnt > 2: 61 | raise Exception(f'Wait for email failed: {str(e)}') 62 | logger.warning(f'{self.account.idx}) Wait for email failed: {str(e)}') 63 | subj, text = None, None 64 | 65 | if subj is not None: 66 | return subj, text 67 | 68 | logger.info(f'{self.account.idx}) Email not found. Waiting for {polling}s') 69 | 70 | raise Exception(f'Email was not found') 71 | -------------------------------------------------------------------------------- /internal/email/constants.py: -------------------------------------------------------------------------------- 1 | IMAP_SERVERS = { 2 | 'outlook.com': 'imap-mail.outlook.com', 3 | 'hotmail.com': 'imap-mail.outlook.com', 4 | 'rambler.ru': 'imap.rambler.ru', 5 | 'mail.ru': 'imap.mail.ru', 6 | } 7 | 8 | FOLDERS = ['INBOX', 'JUNK'] 9 | 10 | MAIL3_SIGN_MESSAGE_FORMAT = "I authorize sending and checking my emails on mail3 from this device. " \ 11 | "This doesn't cost anything.\n\nNonce: {{nonce}}" 12 | -------------------------------------------------------------------------------- /internal/email/factory.py: -------------------------------------------------------------------------------- 1 | from ..models import AccountInfo 2 | 3 | from .base import BaseClient 4 | from .imap import IMAPClient 5 | from .mail3 import Mail3Client 6 | 7 | 8 | class Email: 9 | 10 | @classmethod 11 | def from_account(cls, account: AccountInfo) -> BaseClient: 12 | if 'mail3.me' in account.email_username: 13 | return Mail3Client(account) 14 | return IMAPClient(account) 15 | -------------------------------------------------------------------------------- /internal/email/imap.py: -------------------------------------------------------------------------------- 1 | import email 2 | from email.header import decode_header 3 | from email.message import Message 4 | from typing import Optional, Tuple 5 | from loguru import logger 6 | from aioimaplib import aioimaplib 7 | 8 | from ..models import AccountInfo 9 | 10 | from .base import BaseClient 11 | from .constants import IMAP_SERVERS 12 | 13 | 14 | class IMAPClient(BaseClient): 15 | 16 | def __init__(self, account: AccountInfo): 17 | super().__init__(account, 'IMAP') 18 | self.imap: aioimaplib.IMAP4_SSL | None = None 19 | 20 | async def close(self): 21 | if self.imap is not None: 22 | try: 23 | await self.imap.close() 24 | except Exception as e: 25 | logger.warning(f'{self.account.idx}) Failed to close IMAP client: {str(e)}') 26 | 27 | def username(self) -> str: 28 | return self.account.email_username 29 | 30 | async def _login(self): 31 | email_domain = self.account.email_username.split('@')[1] 32 | if email_domain not in IMAP_SERVERS: 33 | raise Exception(f'Imap server for {email_domain} not found. 
Add it in internal/email/constants.py') 34 | self.imap = aioimaplib.IMAP4_SSL(IMAP_SERVERS[email_domain]) 35 | await self.imap.wait_hello_from_server() 36 | await self.imap.login(self.account.email_username, self.account.email_password) 37 | await self.imap.select() 38 | 39 | async def _find_email(self, folder: str, subject_condition_func) -> Tuple[Optional[str], Optional[str]]: 40 | _, messages = await self.imap.select(folder) 41 | msg_cnt = 0 42 | for message in messages: 43 | if message.endswith(b'EXISTS'): 44 | msg_cnt = int(message.split()[0]) 45 | break 46 | for i in range(msg_cnt, 0, -1): 47 | res, msg = await self.imap.fetch(str(i), '(RFC822)') 48 | if res != 'OK': 49 | continue 50 | raw_email = msg[1] 51 | msg = email.message_from_bytes(raw_email) 52 | subject, encoding = decode_header(msg['Subject'])[0] 53 | if isinstance(subject, bytes): 54 | subject = subject.decode(encoding if encoding else 'utf-8') 55 | if subject_condition_func(subject): 56 | return subject, self.get_email_body(msg) 57 | return None, None 58 | 59 | def get_email_body(self, msg: Message): 60 | if msg.is_multipart(): 61 | return self.get_email_body(msg.get_payload(0)) 62 | return msg.get_payload(decode=True).decode() 63 | -------------------------------------------------------------------------------- /internal/email/mail3.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Tuple 2 | from loguru import logger 3 | 4 | from ..tls import TLSClient 5 | from ..models import AccountInfo 6 | from ..utils import get_proxy_url 7 | 8 | from .base import BaseClient 9 | from .constants import MAIL3_SIGN_MESSAGE_FORMAT 10 | 11 | 12 | class Mail3Client(BaseClient): 13 | 14 | API_URL = 'https://api.mail3.me/api/v1/' 15 | 16 | def __init__(self, account: AccountInfo): 17 | super().__init__(account, 'Mail3.me') 18 | self.proxy = get_proxy_url(self.account.proxy) 19 | self.tls = TLSClient(account, { 20 | 'origin': 'https://app.mail3.me', 21 | 'referrer': 'https://app.mail3.me/', 22 | }) 23 | 24 | async def close(self): 25 | await self.tls.close() 26 | 27 | def username(self) -> str: 28 | return f'{self.account.evm_address}@mail3.me' 29 | 30 | async def _login(self): 31 | nonce, resp = await self.tls.get( 32 | f'{self.API_URL}/address_nonces/{self.account.evm_address}', 33 | [200, 404], lambda r: (r.get('nonce') or r['metadata'].get('nonce'), r) 34 | ) 35 | is_new = 'nonce' not in resp 36 | message = MAIL3_SIGN_MESSAGE_FORMAT.replace('{{nonce}}', str(nonce)) 37 | signature = self.account.sign_message(message) 38 | payload = { 39 | 'address': self.account.evm_address, 40 | 'message': message, 41 | 'signature': signature, 42 | } 43 | if is_new: 44 | logger.info(f'{self.account.idx}) Registering Mail3.me account') 45 | await self.tls.post(f'{self.API_URL}/registrations', [200, 204, 400], json=payload) 46 | logger.info(f'{self.account.idx}) Registered') 47 | 48 | payload.update({'pub_key': ''}) 49 | jwt = await self.tls.post(f'{self.API_URL}/sessions', [200], lambda r: r['jwt'], json=payload) 50 | self.tls.update_headers({'Authorization': 'Bearer ' + jwt}) 51 | 52 | async def _get_message_body(self, message_id: str) -> str: 53 | message = await self.tls.get(f'{self.API_URL}/mailbox/account/message/{message_id}', [200]) 54 | return message['text']['html'] 55 | 56 | async def _find_email(self, folder: str, subject_condition_func) -> Tuple[Optional[str], Optional[str]]: 57 | folder = folder if folder == 'INBOX' else folder.capitalize() 58 | messages = await 
self.tls.post(f'{self.API_URL}/mailbox/account/search', [200], lambda r: r['messages'], json={ 59 | 'path': folder, 60 | 'pageSize': 20, 61 | 'page': 0, 62 | 'search': {'unseen': True}, 63 | }) 64 | 65 | for message in messages: 66 | subject = message.get('subject') 67 | if subject_condition_func(subject): 68 | return subject, await self._get_message_body(message.get('id')) 69 | 70 | return None, None 71 | 72 | -------------------------------------------------------------------------------- /internal/galxe/__init__.py: -------------------------------------------------------------------------------- 1 | from .account import GalxeAccount 2 | -------------------------------------------------------------------------------- /internal/galxe/account.py: -------------------------------------------------------------------------------- 1 | import re 2 | import time 3 | import json 4 | import random 5 | import base64 6 | import asyncio 7 | import colorama 8 | from uuid import uuid4 9 | from faker import Faker 10 | from loguru import logger 11 | from asyncio import Lock 12 | from datetime import datetime, timedelta 13 | from termcolor import colored 14 | 15 | from ..vars import GALXE_CAPTCHA_ID 16 | from ..email import Email 17 | from ..models import AccountInfo 18 | from ..storage import Storage 19 | from ..twitter import Twitter, UserNotFound 20 | from ..onchain import OnchainAccount 21 | from ..captcha import solve_geetest 22 | from ..config import FAKE_TWITTER, HIDE_UNSUPPORTED, MAX_TRIES, FORCE_LINK_EMAIL, REFERRAL_LINKS, SURVEYS 23 | from ..utils import wait_a_bit, get_query_param, get_proxy_url, async_retry, log_long_exc, plural_str 24 | 25 | from .client import Client 26 | from .fingerprint import fingerprints, captcha_retry 27 | from .utils import random_string_for_entropy 28 | from .models import Recurring, Credential, CredSource, ConditionRelation, QuizType, Gamification, GasType 29 | from .constants import DISCORD_AUTH_URL, GALXE_DISCORD_CLIENT_ID, CHAIN_NAME_MAPPING, VERIFY_TRIES 30 | 31 | colorama.init() 32 | Faker.seed(int(time.time() * 1000)) 33 | faker = Faker() 34 | faker_lock = asyncio.Lock() 35 | 36 | quiz_storage = Storage('storage/quizzes.json') 37 | quiz_storage.init() 38 | 39 | 40 | class GalxeAccount: 41 | 42 | def __init__(self, idx, account: AccountInfo, private_key: str): 43 | self.idx = idx 44 | self.account = account 45 | self.proxy = get_proxy_url(self.account.proxy) 46 | self.private_key = private_key 47 | self.client = Client(account) 48 | self.twitter = None 49 | self.profile = None 50 | self.captcha = None 51 | self.captcha_lock = Lock() 52 | self.twitter_credentials_done = set() 53 | 54 | async def close(self): 55 | await self.client.close() 56 | 57 | async def __aenter__(self) -> "GalxeAccount": 58 | return self 59 | 60 | async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: 61 | await self.close() 62 | 63 | @async_retry 64 | async def get_captcha(self): 65 | try: 66 | solution = await solve_geetest( 67 | self.idx, 'https://app.galxe.com/quest', self.account.proxy, 68 | GALXE_CAPTCHA_ID, str(uuid4()), 4, 69 | { 70 | 'captcha_id': GALXE_CAPTCHA_ID, 71 | 'client_type': 'web', 72 | 'lang': 'en-us', 73 | } 74 | ) 75 | return { 76 | 'lotNumber': solution['lot_number'], 77 | 'captchaOutput': solution['captcha_output'], 78 | 'passToken': solution['pass_token'], 79 | 'genTime': solution['gen_time'], 80 | } 81 | except Exception as e: 82 | raise Exception(f'Failed to solve captcha: {str(e)}') 83 | 84 | async def _get_evm_login_signature(self): 85 | exp_time = 
(datetime.utcnow() + timedelta(days=7)).isoformat()[:-3] + 'Z' 86 | iss_time = datetime.utcnow().isoformat()[:-3] + 'Z' 87 | msg = f'galxe.com wants you to sign in with your Ethereum account:\n{self.account.evm_address}\n\n' \ 88 | f'Sign in with Ethereum to the app.\n\n' \ 89 | f'URI: https://galxe.com\n' \ 90 | f'Version: 1\n' \ 91 | f'Chain ID: 1\n' \ 92 | f'Nonce: {random_string_for_entropy(96)}\n' \ 93 | f'Issued At: {iss_time}\n' \ 94 | f'Expiration Time: {exp_time}' 95 | 96 | return msg, self.account.sign_message(msg) 97 | 98 | async def sign_in(self): 99 | msg, signature = await self._get_evm_login_signature() 100 | await self.client.sign_in(msg, signature) 101 | 102 | @classmethod 103 | async def fake_username(cls): 104 | async with faker_lock: 105 | return faker.user_name() 106 | 107 | async def create_account(self): 108 | username = await self.fake_username() 109 | while await self.client.is_username_exist(username): 110 | username += str(random.randint(0, 9)) 111 | logger.info(f'{self.idx}) Creating Galxe account with {username} username') 112 | await self.client.create_account(username) 113 | 114 | async def login(self): 115 | exists = await self.client.galxe_id_exist() 116 | await self.sign_in() 117 | if not exists: 118 | await self.create_account() 119 | await self.refresh_profile() 120 | 121 | async def refresh_profile(self): 122 | self.profile = await self.client.basic_user_info() 123 | 124 | async def link_twitter(self, fake_twitter=FAKE_TWITTER): 125 | existed_twitter_username = self.profile.get('twitterUserName', '') 126 | if existed_twitter_username != '' and fake_twitter: 127 | return 128 | 129 | if self.twitter is None: 130 | try: 131 | self.twitter = Twitter(self.account) 132 | await self.twitter.start() 133 | except Exception as e: 134 | self.twitter = None 135 | raise e 136 | 137 | if existed_twitter_username != '': 138 | if existed_twitter_username.lower() == self.twitter.my_username.lower(): 139 | return 140 | else: 141 | logger.info(f'{self.idx}) Another Twitter account already linked with this EVM address: ' 142 | f'{existed_twitter_username}. Current: {self.twitter.my_username}') 143 | 144 | logger.info(f'{self.idx}) Starting link new Twitter account') 145 | 146 | galxe_id = self.profile.get('id') 147 | tweet_text = f'Verifying my Twitter account for my #GalxeID gid:{galxe_id} @Galxe \n\n galxe.com/id ' 148 | 149 | try: 150 | try: 151 | tweet_url = await self.twitter.post_tweet(tweet_text) 152 | except Exception as e: 153 | if 'Authorization: Status is a duplicate. (187)' in str(e): 154 | logger.info(f'{self.idx}) Duplicate tweet. Trying to find original one') 155 | tweet_url = await self.twitter.find_posted_tweet(lambda t: tweet_text.split('\n')[0] in t) 156 | if tweet_url is None: 157 | raise Exception("Tried to post duplicate tweet. 
Can't find original one") 158 | logger.info(f'{self.idx}) Duplicate tweet found: {tweet_url}') 159 | else: 160 | raise e 161 | logger.info(f'{self.idx}) Posted link tweet: {tweet_url}') 162 | await wait_a_bit() 163 | await self.client.check_twitter_account(tweet_url) 164 | await self.client.verify_twitter_account(tweet_url) 165 | except Exception as e: 166 | raise Exception(f'Failed ot link Twitter: {str(e)}') 167 | 168 | logger.info(f'{self.idx}) Twitter account linked') 169 | await wait_a_bit(4) 170 | await self.refresh_profile() 171 | 172 | async def link_email(self, strict=False): 173 | existed_email = self.profile.get('email', '') 174 | if existed_email != '': 175 | current_email = Email.from_account(self.account).username() 176 | if existed_email.lower() == current_email.lower(): 177 | return 178 | else: 179 | if not strict: 180 | return 181 | logger.info(f'{self.idx}) Another email already linked with this EVM address: ' 182 | f'{existed_email}. Current: {current_email}') 183 | 184 | logger.info(f'{self.idx}) Starting link new email') 185 | 186 | try: 187 | async with Email.from_account(self.account) as email_client: 188 | await email_client.login() 189 | email_username = email_client.username() 190 | captcha = await self.get_captcha() 191 | await self.client.send_verify_code(email_username, captcha) 192 | logger.info(f'{self.idx}) Verify code was sent to {email_username}') 193 | email_subj, _ = await email_client.wait_for_email( 194 | lambda s: s.startswith('Your Galxe Verification Code is '), 195 | ) 196 | code = self._extract_code_from_email_subj(email_subj) 197 | await self.client.update_email(email_username, code) 198 | except Exception as e: 199 | raise Exception(f'Failed to link email: {str(e)}') 200 | 201 | logger.info(f'{self.idx}) Email linked') 202 | await wait_a_bit(4) 203 | await self.refresh_profile() 204 | 205 | async def link_discord(self): 206 | existed_discord = self.profile.get('discordUserID', '') 207 | existed_discord_username = self.profile.get('discordUserName', '') 208 | discord_user_id = self._get_discord_user_id() 209 | 210 | if existed_discord != '': 211 | if existed_discord == discord_user_id: 212 | return 213 | else: 214 | logger.info(f'{self.idx}) Another Discord account already linked with this EVM address: ' 215 | f'{existed_discord_username}') 216 | 217 | discord_auth_link = await self.client.get_social_auth_url() 218 | state = get_query_param(discord_auth_link, 'state') 219 | 220 | params = { 221 | 'client_id': GALXE_DISCORD_CLIENT_ID, 222 | 'response_type': 'code', 223 | 'redirect_uri': 'https://galxe.com', 224 | 'scope': 'identify guilds guilds.members.read', 225 | 'state': f'Discord_Auth,{self.account.evm_address},false,{state}', 226 | } 227 | body = { 228 | 'permissions': '0', 229 | 'authorize': True, 230 | 'integration_type': 0, 231 | } 232 | token = None 233 | try: 234 | location = await self.client.post(DISCORD_AUTH_URL, [200], lambda r: r['location'], 235 | params=params, json=body, 236 | headers={'Authorization': self.account.discord_token}) 237 | token = get_query_param(location, 'code') 238 | await self.client.check_discord_account(state, token) 239 | await self.client.verify_discord_account(state, token) 240 | except Exception as e: 241 | if token is None: 242 | self.account.discord_error = True 243 | raise Exception(f'Failed to link Discord: {str(e)}') 244 | 245 | logger.info(f'{self.idx}) Discord account linked') 246 | await wait_a_bit(4) 247 | await self.refresh_profile() 248 | 249 | def _get_discord_user_id(self): 250 | if 
self.account.discord_token == '': 251 | raise Exception('Empty Discord token') 252 | token = self.account.discord_token.split('.')[0] 253 | token += '=' * (4 - len(token) % 4) 254 | return str(base64.b64decode(token.encode("utf-8")), 'utf-8') 255 | 256 | @classmethod 257 | def _extract_code_from_email_subj(cls, subj): 258 | return subj.split()[5] 259 | 260 | @classmethod 261 | def _is_parent_campaign(cls, campaign): 262 | return campaign.get('type') == 'Parent' 263 | 264 | @classmethod 265 | def _is_daily_campaign(cls, campaign): 266 | return campaign.get('recurringType') == Recurring.DAILY 267 | 268 | @classmethod 269 | def _is_sequential_campaign(cls, campaign): 270 | return campaign['parentCampaign'].get('isSequencial') 271 | 272 | @classmethod 273 | def _get_gamification_type(cls, campaign): 274 | if 'gamification' not in campaign: 275 | return None 276 | return campaign['gamification']['type'] 277 | 278 | def _update_campaign_points(self, campaign, process_result=None): 279 | if self._is_parent_campaign(campaign): 280 | return 281 | daily_claimed = None 282 | if self._is_daily_campaign(campaign): 283 | daily_claimed = self._daily_points_claimed(campaign) 284 | if process_result and type(process_result) is tuple \ 285 | and process_result[0] == 'Points' and process_result[1] > 0: 286 | daily_claimed = True 287 | self.account.points[campaign['id']] = (campaign['name'], campaign['claimedLoyaltyPoints'], daily_claimed) 288 | if self._get_gamification_type(campaign) in [Gamification.OAT, Gamification.DROP]: 289 | self.account.nfts[campaign['id']] = campaign['whitelistInfo']['usedCount'] 290 | 291 | async def _process_campaign(self, campaign_id, process_async_func, aggr_func=None): 292 | info = await self.client.get_campaign_info(campaign_id) 293 | self._update_campaign_points(info) 294 | 295 | if self._is_parent_campaign(info): 296 | results = [await self._process_campaign(child['id'], process_async_func, aggr_func) 297 | for child in info['childrenCampaigns']] 298 | if aggr_func is None: 299 | return 300 | return aggr_func(results) 301 | 302 | if campaign_id not in self.account.actual_campaigns: 303 | self.account.actual_campaigns.append(campaign_id) 304 | if await self.verify_all_credentials(info): 305 | info = await self.client.get_campaign_info(campaign_id) 306 | 307 | result = await process_async_func(info) 308 | await wait_a_bit(5) 309 | 310 | info = await self.client.get_campaign_info(campaign_id) 311 | self._update_campaign_points(info, result) 312 | return result 313 | 314 | # Complete part 315 | 316 | async def complete_campaign(self, campaign_id: str): 317 | return await self._process_campaign(campaign_id, self._complete_campaign_process) 318 | 319 | async def _complete_campaign_process(self, campaign): 320 | logger.info(f'{self.idx}) Starting complete {campaign["name"]}') 321 | 322 | if campaign['requireEmail']: 323 | try: 324 | await self.link_email() 325 | except Exception as e: 326 | logger.warning(f'{self.idx}) Campaign require email: {str(e)}') 327 | 328 | if campaign['taskConfig'] and campaign['taskConfig'].get('participateCondition') is not None: 329 | logger.info(f'{self.idx}) Completing requirements') 330 | for i in range(max(VERIFY_TRIES, MAX_TRIES)): 331 | if i > 0: 332 | logger.info(f'{self.idx}) Waiting for 30s to retry') 333 | await asyncio.sleep(31) 334 | campaign = await self.client.get_campaign_info(campaign['id']) 335 | conditions = campaign['taskConfig']['participateCondition']['conditions'] 336 | credentials = [c['cred'] for c in conditions] 337 | conditions 
= [{'eligible': c['eligible']} for c in conditions] 338 | cred_group = {'conditions': conditions, 'credentials': credentials} 339 | need_retry = await self._complete_cred_group(campaign['id'], cred_group) 340 | await wait_a_bit(2) 341 | if not need_retry: 342 | break 343 | await wait_a_bit(5) 344 | if await self.verify_all_credentials(campaign): 345 | campaign = await self.client.get_campaign_info(campaign['id']) 346 | await asyncio.sleep(5) 347 | 348 | logger.info(f'{self.idx}) Completing main tasks') 349 | 350 | for i in range(max(VERIFY_TRIES, MAX_TRIES)): 351 | 352 | if i > 0: 353 | logger.info(f'{self.idx}) Waiting for 30s to retry') 354 | await asyncio.sleep(31) 355 | campaign = await self.client.get_campaign_info(campaign['id']) 356 | 357 | try_again = False 358 | for group_id in range(len(campaign['credentialGroups'])): 359 | 360 | cred_group = campaign['credentialGroups'][group_id] 361 | need_retry = await self._complete_cred_group(campaign['id'], cred_group) 362 | try_again = need_retry or try_again 363 | await wait_a_bit(2) 364 | 365 | if not try_again: 366 | break 367 | 368 | async def _complete_cred_group(self, campaign_id: str, cred_group) -> bool: 369 | try_again = False 370 | for condition, credential in zip(cred_group['conditions'], cred_group['credentials']): 371 | try: 372 | try: 373 | await self._complete_credential(campaign_id, condition, credential, FAKE_TWITTER) 374 | except Exception as exc: 375 | if FAKE_TWITTER and ('Error: pass_token used' in str(exc)): 376 | logger.info(f"{self.idx}) Probably can't complete with fake twitter. Trying without it") 377 | await self._complete_credential(campaign_id, condition, credential, False) 378 | else: 379 | raise exc 380 | await wait_a_bit() 381 | except Exception as e: 382 | s_e = str(e) 383 | if ('try again in 30 seconds' in s_e or 'please verify after 1 minutes' in s_e or 384 | ('Message: "None": Status = 200' in s_e and 385 | 'Galxe Web3 Score - Humanity Score' not in credential["name"])): 386 | try_again = True 387 | if 'Message: "None": Status = 200' in s_e: 388 | logger.info(f'{self.idx}) Completion was not registered, need to wait') 389 | continue 390 | await log_long_exc(self.idx, f'Failed to complete "{credential["name"]}"', e, warning=True) 391 | return try_again 392 | 393 | async def _complete_credential(self, campaign_id: str, condition, credential, fake_twitter): 394 | if condition['eligible']: 395 | return 396 | 397 | match credential['type']: 398 | case Credential.TWITTER: 399 | need_sync = await self._complete_twitter(campaign_id, credential, fake_twitter) 400 | case Credential.EMAIL: 401 | need_sync = await self._complete_email(campaign_id, credential) 402 | case Credential.EVM_ADDRESS: 403 | need_sync = await self._complete_eth(campaign_id, credential) 404 | case Credential.GALXE_ID: 405 | need_sync = await self._complete_galxe_id(campaign_id, credential) 406 | case Credential.DISCORD: 407 | need_sync = await self._complete_discord(credential) 408 | case unexpected: 409 | if HIDE_UNSUPPORTED: 410 | return 411 | raise Exception(f'{unexpected} credential type is not supported yet') 412 | 413 | if need_sync: 414 | await self._sync_credential(campaign_id, credential['id'], credential['type']) 415 | logger.success(f'{self.idx}) Verified "{credential["name"]}"') 416 | 417 | if credential['type'] == Credential.DISCORD: 418 | logger.info(f'{self.idx}) Extra wait 15s after Discord task verification') 419 | await asyncio.sleep(15) 420 | 421 | quote_mention_re = re.compile(r'mention \d+ friends') 422 | 423 | async def 
_complete_twitter(self, campaign_id: str, credential, fake_twitter) -> bool: 424 | await self.link_twitter(fake_twitter) 425 | await self.add_typed_credential(campaign_id, credential) 426 | if credential['id'] in self.twitter_credentials_done: 427 | logger.info(f'{self.idx}) Twitter action was already done. Just verifying it') 428 | return True 429 | if fake_twitter: 430 | return True 431 | try: 432 | match credential['credSource']: 433 | case CredSource.TWITTER_FOLLOW: 434 | user_to_follow = get_query_param(credential['referenceLink'], 'screen_name') 435 | await self.twitter.follow(user_to_follow) 436 | logger.info(f'{self.idx}) @{user_to_follow} followed') 437 | case CredSource.TWITTER_RT: 438 | tweet_id = get_query_param(credential['referenceLink'], 'tweet_id') 439 | await self.twitter.retweet(tweet_id) 440 | logger.info(f'{self.idx}) Retweet done') 441 | case CredSource.TWITTER_LIKE: 442 | logger.info(f'{self.idx}) Currently can skip likes, because it\'s not visible') 443 | # tweet_id = get_query_param(credential['referenceLink'], 'tweet_id') 444 | # await self.twitter.like(tweet_id) 445 | case CredSource.TWITTER_QUOTE: 446 | text = get_query_param(credential['referenceLink'], 'text') 447 | tweet_link = text[text.rfind(' ') + 1:] 448 | text = text[:text.rfind(' ')] 449 | mentions = self.quote_mention_re.findall(credential['name'].lower()) 450 | if mentions: 451 | mentions_number = int(mentions[0].split()[1]) 452 | usernames = [] 453 | for _ in range(mentions_number): 454 | username = await self.fake_username() 455 | for _ in range(5): 456 | try: 457 | await self.twitter.get_user_id(username) 458 | except UserNotFound: 459 | username = await self.fake_username() 460 | continue 461 | break 462 | usernames.append(username) 463 | text += ''.join([f' @{un}' for un in usernames]) 464 | logger.info(f'{self.idx}) Tweet quote with text: {text}') 465 | text += '\n' + tweet_link 466 | quote_url = await self.twitter.post_tweet(text) 467 | logger.info(f'{self.idx}) Quote done: {quote_url}') 468 | case unexpected: 469 | if HIDE_UNSUPPORTED: 470 | return False 471 | raise Exception(f'{unexpected} credential source for Twitter task is not supported yet') 472 | self.twitter_credentials_done.add(credential['id']) 473 | except Exception as e: 474 | await log_long_exc(self.idx, 'Twitter action failed. 
Trying to verify anyway', e, warning=True) 475 | return True 476 | 477 | async def _complete_email(self, campaign_id: str, credential) -> bool: 478 | await self.link_email() 479 | match credential['credSource']: 480 | case CredSource.VISIT_LINK: 481 | await self.add_typed_credential(campaign_id, credential) 482 | return True 483 | case CredSource.QUIZ: 484 | await self.solve_quiz(credential) 485 | return False 486 | case CredSource.WATCH_YOUTUBE: 487 | await self.add_typed_credential(campaign_id, credential) 488 | return True 489 | case CredSource.SURVEY: 490 | await self._complete_survey(campaign_id, credential) 491 | return False 492 | case unexpected: 493 | if HIDE_UNSUPPORTED: 494 | return False 495 | raise Exception(f'{unexpected} credential source for Email task is not supported yet') 496 | 497 | async def _complete_eth(self, campaign_id: str, credential) -> bool: 498 | match credential['credSource']: 499 | case CredSource.VISIT_LINK: 500 | await self.add_typed_credential(campaign_id, credential) 501 | return True 502 | case CredSource.QUIZ: 503 | await self.solve_quiz(credential) 504 | return False 505 | case CredSource.SURVEY: 506 | await self._complete_survey(campaign_id, credential) 507 | return False 508 | case CredSource.WATCH_YOUTUBE: 509 | await self.add_typed_credential(campaign_id, credential) 510 | return True 511 | case CredSource.CSV: 512 | raise Exception(f'{self.idx}) It seems like you are not eligible for custom project requirements') 513 | logger.warning(f'{self.idx}) {credential["name"]} is not done or not updated yet. Trying to verify it anyway') 514 | return True 515 | 516 | async def _complete_galxe_id(self, campaign_id: str, credential) -> bool: 517 | match credential['credSource']: 518 | case CredSource.SPACE_USERS: 519 | await self._follow_space(campaign_id, credential['id']) 520 | case CredSource.QUIZ: 521 | await self.solve_quiz(credential) 522 | case CredSource.SURVEY: 523 | await self._complete_survey(campaign_id, credential) 524 | case CredSource.VISIT_LINK: 525 | await self.add_typed_credential(campaign_id, credential) 526 | return True 527 | case CredSource.WATCH_YOUTUBE: 528 | await self.add_typed_credential(campaign_id, credential) 529 | return True 530 | case unexpected: 531 | if not HIDE_UNSUPPORTED: 532 | raise Exception(f'{unexpected} credential source for Galxe ID task is not supported yet') 533 | return False 534 | 535 | async def _complete_discord(self, credential) -> bool: 536 | await self.link_discord() 537 | return True 538 | 539 | async def _follow_space(self, campaign_id: str, credential_id): 540 | info = await self.client.get_campaign_info(campaign_id) 541 | space = info['space'] 542 | space_id = int(space['id']) 543 | if not space['isFollowing']: 544 | await self.client.follow_space(space_id) 545 | logger.info(f'{self.idx}) Space {space["name"]} followed') 546 | sync_options = self._default_sync_options(credential_id) 547 | eval_expr = sync_options.copy() 548 | eval_expr.update({ 549 | 'entityExpr': { 550 | 'attrFormula': 'ALL', 551 | 'attrs': [{ 552 | 'attrName': 'follow', 553 | 'operatorSymbol': '==', 554 | 'targetValue': '1', 555 | '__typename': 'ExprEntityAttr', 556 | }], 557 | 'credId': credential_id, 558 | }, 559 | }) 560 | await self.client.sync_evaluate_credential_value(eval_expr, sync_options) 561 | 562 | def _default_sync_options(self, credential_id: str): 563 | return { 564 | 'address': self.client.full_address, 565 | 'credId': credential_id, 566 | } 567 | 568 | async def solve_quiz(self, quiz): 569 | quiz_id = quiz['id'] 570 
| answers = await quiz_storage.get_value(quiz_id) 571 | if answers is None: 572 | quizzes = await self.client.read_quiz(quiz_id) 573 | 574 | if any(q['type'] != QuizType.MULTI_CHOICE for q in quizzes): 575 | raise Exception(f"Can't solve quiz with not multi-choice items: {quiz_id}") 576 | 577 | answers = [-1 for _ in quizzes] 578 | correct = [False for _ in quizzes] 579 | 580 | while not all(correct): 581 | answers = [answers[i] if correct[i] else answers[i] + 1 for i in range(len(answers))] 582 | if any(a >= len(quizzes[i]['items']) for i, a in enumerate(answers)): 583 | raise Exception(f"Can't find answers for {quiz['name']}") 584 | 585 | logger.info(f'{self.idx}) {quiz["name"]} attempt to answer with {answers}') 586 | sync_options = self._default_sync_options(quiz_id) 587 | sync_options.update({'quiz': {'answers': [str(a) for a in answers]}}) 588 | 589 | result = await self.client.sync_credential_value(sync_options, only_allow=False, quiz=True) 590 | correct = result['quiz']['correct'] 591 | 592 | logger.success(f'{self.idx}) {quiz["name"]} solved') 593 | await quiz_storage.set_value(quiz_id, answers) 594 | await quiz_storage.async_save() 595 | else: 596 | sync_options = self._default_sync_options(quiz_id) 597 | sync_options.update({'quiz': {'answers': [str(a) for a in answers]}}) 598 | await self.client.sync_credential_value(sync_options, quiz=True) 599 | logger.success(f'{self.idx}) {quiz["name"]} answers restored and verified') 600 | 601 | async def _complete_survey(self, campaign_id, survey): 602 | survey_id = survey['id'] 603 | survey_name = survey['name'] 604 | logger.info(f'{self.idx}) Processing survey "{survey_name}"') 605 | questions = await self.client.read_survey(survey_id) 606 | answers = SURVEYS.get(self.account.evm_address.lower(), {}).get(campaign_id) 607 | if not answers: 608 | logger.warning(f'{self.idx}) No answers provided for {self.account.evm_address}') 609 | return False 610 | answers = [a.strip() for a in answers.split('|')] 611 | if len(answers) != len(questions): 612 | logger.warning(f'{self.idx}) Expected {len(questions)} answers, but only {len(answers)} provided') 613 | return False 614 | sync_options = self._default_sync_options(survey_id) 615 | logger.info(f'{self.idx}) Sending answers: {answers}') 616 | sync_options.update({'survey': {'answers': answers}}) 617 | await self.client.sync_credential_value(sync_options, only_allow=False) 618 | logger.success(f'{self.idx}) "{survey_name}" submitted') 619 | 620 | @captcha_retry 621 | async def add_typed_credential(self, campaign_id: str, credential): 622 | captcha = await self.get_captcha() 623 | await self.client.add_typed_credential_items(campaign_id, credential['id'], captcha) 624 | await wait_a_bit(3) 625 | 626 | @captcha_retry 627 | async def _sync_credential(self, campaign_id: str, credential_id: str, cred_type: str): 628 | sync_options = self._default_sync_options(credential_id) 629 | match cred_type: 630 | case Credential.TWITTER: 631 | await self.client.twitter_oauth2_status() 632 | captcha = await self.get_captcha() 633 | sync_options.update({ 634 | 'twitter': { 635 | 'campaignID': campaign_id, 636 | 'captcha': captcha, 637 | } 638 | }) 639 | await self.client.sync_credential_value(sync_options) 640 | 641 | async def verify_all_credentials(self, campaign): 642 | cred_ids = [] 643 | for cred_group in campaign['credentialGroups']: 644 | cred_ids.extend([cred['id'] for cred in cred_group['credentials'] if cred['eligible'] == 0]) 645 | if campaign['taskConfig'] and 
campaign['taskConfig'].get('participateCondition') is not None: 646 | conditions = campaign['taskConfig']['participateCondition']['conditions'] 647 | cred_ids.extend([c['cred']['id'] for c in conditions]) 648 | if len(cred_ids) == 0: 649 | return False 650 | await self.client.verify_credentials(cred_ids) 651 | await wait_a_bit(3) 652 | return True 653 | 654 | # Claim part 655 | 656 | def _daily_points_claimed(self, campaign): 657 | if not self._is_daily_campaign(campaign) or self._is_parent_campaign(campaign): 658 | return True 659 | if campaign['whitelistInfo']['currentPeriodClaimedLoyaltyPoints'] < \ 660 | campaign['whitelistInfo']['currentPeriodMaxLoyaltyPoints']: 661 | return False 662 | if campaign['whitelistInfo']['currentPeriodMaxLoyaltyPoints'] > 0: 663 | return True 664 | return all(cg['claimedLoyaltyPoints'] > 0 for cg in campaign['credentialGroups']) 665 | 666 | def _campaign_points_claimed(self, campaign) -> bool: 667 | return campaign['whitelistInfo']['currentPeriodClaimedLoyaltyPoints'] >= \ 668 | campaign['whitelistInfo']['currentPeriodMaxLoyaltyPoints'] and \ 669 | campaign['claimedLoyaltyPoints'] >= campaign['loyaltyPoints'] and self._daily_points_claimed(campaign) 670 | 671 | @classmethod 672 | def _campaign_nft_claimed(cls, campaign) -> bool: 673 | return 0 < campaign['whitelistInfo']['maxCount'] <= campaign['whitelistInfo']['usedCount'] 674 | 675 | def already_claimed(self, campaign) -> bool: 676 | gamification_type = self._get_gamification_type(campaign) 677 | if gamification_type is None: 678 | return True 679 | match gamification_type: 680 | case Gamification.POINTS: 681 | return self._campaign_points_claimed(campaign) 682 | case Gamification.OAT | Gamification.DROP: 683 | return self._campaign_points_claimed(campaign) and self._campaign_nft_claimed(campaign) 684 | case Gamification.POINTS_MYSTERY_BOX | Gamification.BOUNTY | Gamification.DISCORD_ROLE | Gamification.TOKEN: 685 | return self._campaign_nft_claimed(campaign) 686 | case unexpected: 687 | if HIDE_UNSUPPORTED: 688 | return False 689 | logger.warning(f'{self.idx}) {unexpected} gamification type is not supported yet') 690 | return False 691 | 692 | async def claim_campaign(self, campaign_id: str): 693 | return await self._process_campaign(campaign_id, self._claim_campaign_process) 694 | 695 | async def _claim_campaign_process(self, campaign): 696 | if self.already_claimed(campaign): 697 | nft_cnt = self.account.nfts.get(campaign["id"]) 698 | nft_info = f' and {plural_str(nft_cnt, "NFT")}' if nft_cnt is not None else '' 699 | bounty_info, discord_role_info, raffle_info = '', '', '' 700 | match self._get_gamification_type(campaign): 701 | case Gamification.BOUNTY: 702 | bounty_info = ' and participated in bounty' 703 | case Gamification.DISCORD_ROLE: 704 | discord_role_info = ' and discord role claimed' 705 | case Gamification.TOKEN: 706 | raffle_info = ' and participated in raffle' 707 | logger.info(f'{self.idx}) {campaign["name"]} already claimed ' 708 | f'{self.account.points[campaign["id"]][1]} points' 709 | f'{nft_info}{bounty_info}{discord_role_info}{raffle_info}') 710 | return 711 | logger.info(f'{self.idx}) Starting claim {campaign["name"]}') 712 | claimable = False 713 | for cred_idx, cred_group in enumerate(campaign['credentialGroups'], start=1): 714 | if claimable: 715 | break 716 | try: 717 | claimable = await self._is_cred_group_claimable(cred_group, cred_idx) 718 | except Exception as e: 719 | await log_long_exc(self.idx, f'Failed to check cred group#{cred_idx} for claim', e, warning=True) 720 | 
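# Editor note: `claimable` flips to True once any credential group passes its ALL/ANY
# condition check. When both loyalty points and an NFT/OAT remain mintable
# (see _is_two_step_claim), the claim is split into two prepare/participate rounds
# with a short pause and a campaign-info refresh in between.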
if not claimable: 721 | return 722 | try: 723 | two_step_claim = self._is_two_step_claim(campaign) 724 | claim_res = await self._claim_campaign_rewards(campaign) 725 | if two_step_claim: 726 | logger.info(f'{self.idx}) Two step claim') 727 | await asyncio.sleep(random.uniform(4.5, 5.5)) 728 | try: 729 | campaign = await self.client.get_campaign_info(campaign['id']) 730 | await self._claim_campaign_rewards(campaign) 731 | except Exception as e: 732 | await log_long_exc(self.idx, 'Second claim step failed', e, warning=True) 733 | return claim_res 734 | except Exception as e: 735 | await log_long_exc(self.idx, 'Failed to claim campaign', e, warning=True) 736 | 737 | @classmethod 738 | def _is_two_step_claim(cls, campaign) -> bool: 739 | wl_info = campaign['whitelistInfo'] 740 | point_mint_amount = wl_info['currentPeriodMaxLoyaltyPoints'] - wl_info['currentPeriodClaimedLoyaltyPoints'] 741 | mint_amount = 0 if wl_info['maxCount'] == -1 else wl_info['maxCount'] - wl_info['usedCount'] 742 | return point_mint_amount > 0 and mint_amount > 0 743 | 744 | async def _is_cred_group_claimable(self, cred_group, cred_idx): 745 | points_rewards = [r for r in cred_group['rewards'] if r['rewardType'] == 'LOYALTYPOINTS'] 746 | only_points = len(points_rewards) == len(cred_group['rewards']) 747 | available_points = sum(0 if '{{' in r['expression'] else int(r['expression']) for r in points_rewards) 748 | claimed_points = cred_group['claimedLoyaltyPoints'] 749 | if claimed_points >= available_points and only_points: 750 | return False 751 | eligible = [c['eligible'] for c in cred_group['conditions']] 752 | left_points = available_points - claimed_points 753 | 754 | claimable = False 755 | match cred_group['conditionRelation']: 756 | case ConditionRelation.ALL: 757 | claimable = all(eligible) 758 | case ConditionRelation.ANY: 759 | claimable = any(eligible) 760 | case unexpected: 761 | if not HIDE_UNSUPPORTED: 762 | logger.warning(f'{self.idx}) {unexpected} condition relation is not supported yet') 763 | if not claimable: 764 | not_claimable_msg = ([('[+] ' if c['eligible'] == 1 else '[-] ') + c["name"] 765 | for c in cred_group["credentials"]] + 766 | [f"{left_points} points left"]) 767 | if len(not_claimable_msg) > 1: 768 | not_claimable_msg[0] = ' ' + not_claimable_msg[0] 769 | not_claimable_msg = f'group#{cred_idx} [{" | ".join(not_claimable_msg)}]' 770 | not_claimable_msg = colored(f'Not enough conditions eligible to claim {not_claimable_msg}', 'cyan') 771 | logger.info(f'{self.idx}) ' + not_claimable_msg) 772 | return claimable 773 | 774 | async def _claim_campaign_rewards(self, campaign): 775 | campaign = await self.client.get_campaign_info(campaign['id']) 776 | 777 | reward_type = self._get_gamification_type(campaign) 778 | if reward_type is None: 779 | return 780 | 781 | params = self._get_claim_params(campaign) 782 | point_mint_amount, mint_amount = params['pointMintAmount'], params['mintCount'] 783 | 784 | chains = [] 785 | if point_mint_amount > 0: chains.append('GRAVITY_ALPHA') 786 | if mint_amount > 0: chains.append(campaign['chain']) 787 | if campaign['gasType'] == GasType.GAS_LESS: 788 | sufficient = await self.client.sufficient_for_gasless_chain_query(int(campaign['space']['id']), chains) 789 | insuff_for_points = any(suff['chain'] == 'GRAVITY_ALPHA' and not suff['sufficient'] for suff in sufficient) 790 | if insuff_for_points: 791 | logger.info(f'{self.idx}) Need $G token to claim points') 792 | else: 793 | sufficient = [] 794 | insuff_for_points = False 795 | if campaign['chain'] == 
'GRAVITY_ALPHA' and point_mint_amount > 0: 796 | sufficient = await self.client.sufficient_for_gasless_chain_query(int(campaign['space']['id']), chains) 797 | insuff_for_points = any( 798 | suff['chain'] == 'GRAVITY_ALPHA' and not suff['sufficient'] for suff in sufficient) 799 | if insuff_for_points: 800 | logger.info(f'{self.idx}) Need $G token to claim points') 801 | 802 | claim_data = await self._get_claim_data(campaign) 803 | 804 | if reward_type != Gamification.POINTS and point_mint_amount > 0 and mint_amount == 0: 805 | reward_type = Gamification.POINTS 806 | 807 | claimed_points = 0 808 | claimed_nfts = 0 809 | nft_type = '' 810 | match reward_type: 811 | case Gamification.POINTS | Gamification.POINTS_MYSTERY_BOX: 812 | lp_tx_resp = claim_data.get('loyaltyPointsTxResp', {}) 813 | claimed_points = sum(lp_tx_resp.get('Points', [])) 814 | if lp_tx_resp.get('loyaltyPointContract'): 815 | await self._claim_gravity_points( 816 | campaign, 817 | lp_tx_resp, 818 | claimed_points, 819 | ) 820 | allowed = lp_tx_resp.get('allow') 821 | claimed_points = claimed_points if allowed else 0 822 | claimed_log = f'{claimed_points} points' 823 | if reward_type == Gamification.POINTS_MYSTERY_BOX: 824 | claimed_log += ' from Mystery Box' 825 | case Gamification.OAT | Gamification.DROP: 826 | nft_type = 'NFT' if reward_type == Gamification.DROP else 'OAT' 827 | gas_less = campaign['gasType'] == GasType.GAS_LESS 828 | was_gasless = False 829 | if gas_less: 830 | sufficient = await self.client.sufficient_for_gasless_chain_query( 831 | int(campaign['space']['id']), 832 | campaign['chain'], 833 | ) 834 | if not sufficient: 835 | logger.info(f'{self.idx}) Insufficient space balance for gasless claim') 836 | gas_less, was_gasless = False, True 837 | if not gas_less and claim_data['mintFuncInfo'].get('nftCoreAddress'): 838 | await self._claim_gas_reward(campaign, claim_data, was_gasless) 839 | claimed_nfts = len(claim_data['mintFuncInfo']['verifyIDs']) 840 | claimed_log = plural_str(claimed_nfts, nft_type) 841 | case Gamification.BOUNTY: 842 | claimed_log = '[Participated in Bounty]' 843 | case Gamification.DISCORD_ROLE: 844 | claimed_log = '[Discord Role]' 845 | case Gamification.TOKEN: 846 | if campaign.get('distributionType') == 'RAFFLE': 847 | claimed_log = '[Participated in Raffle]' 848 | else: 849 | raise Exception('Unexpected distribution type for token reward') 850 | case unexpected: 851 | raise Exception(f'{unexpected} reward type is not supported for claim yet') 852 | 853 | logger.success(f'{self.idx}) Campaign {campaign["name"]} claimed {claimed_log}') 854 | 855 | result = ('Points', claimed_points) if claimed_points > 0 else None 856 | result = (nft_type, claimed_nfts) if claimed_nfts > 0 else result 857 | 858 | return result 859 | 860 | @classmethod 861 | def get_referral_code(cls, campaign): 862 | for campaign_id, ref_code in REFERRAL_LINKS: 863 | if campaign['id'] == campaign_id: 864 | return ref_code 865 | if campaign['parentCampaign'] is not None and campaign['parentCampaign']['id'] == campaign_id: 866 | return ref_code 867 | return None 868 | 869 | def _get_claim_params(self, campaign, silent=False): 870 | wl_info = campaign['whitelistInfo'] 871 | point_mint_amount = wl_info['currentPeriodMaxLoyaltyPoints'] - wl_info['currentPeriodClaimedLoyaltyPoints'] 872 | mint_amount = 0 if wl_info['maxCount'] == -1 else wl_info['maxCount'] - wl_info['usedCount'] 873 | chain = 'GRAVITY_ALPHA' if point_mint_amount > 0 else campaign['chain'] 874 | if point_mint_amount <= 0 and mint_amount <= 0: 875 | raise 
Exception('Nothing to claim') 876 | if chain.lower() == 'aptos': 877 | raise Exception(f'Aptos claim rewards is not supported') 878 | mint_log = [] 879 | if point_mint_amount > 0: mint_log.append(f'{point_mint_amount} points') 880 | if mint_amount > 0: mint_log.append(plural_str(mint_amount, self._get_gamification_type(campaign).upper())) 881 | mint_log = ' and '.join(mint_log) 882 | if not silent: 883 | logger.info(f'{self.idx}) Will claim {mint_log}') 884 | return { 885 | 'pointMintAmount': point_mint_amount, 886 | 'mintCount': mint_amount, 887 | 'chain': chain, 888 | } 889 | 890 | @captcha_retry 891 | async def _get_claim_data(self, campaign): 892 | chain = campaign['chain'] 893 | if chain == 'APTOS': 894 | raise Exception(f'Aptos claim rewards is not supported') 895 | params = self._get_claim_params(campaign, silent=True) 896 | if params['pointMintAmount'] > 0 and params['mintCount'] > 0: 897 | params['pointMintAmount'] = 0 898 | captcha = await self.get_captcha() 899 | return await self.client.prepare_participate( 900 | campaign['id'], captcha, chain, 901 | referral_code=self.get_referral_code(campaign), 902 | input_kwargs=params, 903 | ) 904 | 905 | async def _claim_gravity_points(self, campaign, lp_tx_resp, amount): 906 | async with OnchainAccount(self.account, 'Gravity') as onchain: 907 | tx_hash = await onchain.claim_loyalty_points( 908 | lp_tx_resp['loyaltyPointDistributionStation'], 909 | lp_tx_resp['loyaltyPointContract'], 910 | lp_tx_resp['VerifyIDs'][0], 911 | amount, 912 | lp_tx_resp['signature'], 913 | ) 914 | 915 | try: 916 | await self.client.participate_point(campaign['id'], lp_tx_resp['nonce'], tx_hash, lp_tx_resp['VerifyIDs']) 917 | except Exception as e: 918 | await log_long_exc(self.idx, 'Claim points confirmation in API failed', e, warning=True) 919 | 920 | async def _claim_gas_reward(self, campaign, claim_data, was_gasless=False): 921 | space_station = campaign['spaceStation'] 922 | space_station_address, space_chain = space_station['address'], space_station['chain'] 923 | if was_gasless: 924 | space_chain = campaign['chain'] 925 | chain = CHAIN_NAME_MAPPING.get(space_chain, space_chain.capitalize()) 926 | number_id = campaign['numberID'] 927 | 928 | signature = claim_data['signature'] 929 | nonce = claim_data['nonce'] 930 | nft_core_address = claim_data['mintFuncInfo']['nftCoreAddress'] 931 | verify_id = claim_data['mintFuncInfo']['verifyIDs'][0] 932 | powah = claim_data['mintFuncInfo']['powahs'][0] 933 | cap = claim_data['mintFuncInfo'].get('cap') 934 | 935 | async with OnchainAccount(self.account, chain) as onchain: 936 | if cap: 937 | tx_hash = await onchain.claim_capped( 938 | space_station_address, 939 | number_id, signature, nft_core_address, verify_id, powah, cap 940 | ) 941 | else: 942 | tx_hash = await onchain.claim( 943 | space_station_address, 944 | number_id, signature, nft_core_address, verify_id, powah 945 | ) 946 | 947 | try: 948 | await self.client.participate(campaign['id'], space_chain, nonce, tx_hash, verify_id) 949 | except Exception as e: 950 | await log_long_exc(self.idx, 'Claim confirmation in API failed', e, warning=True) 951 | 952 | # Stats part 953 | 954 | async def spaces_stats(self): 955 | cursor, has_next_page = '', True 956 | while has_next_page: 957 | result = await self.client.profile_leaderboard(cursor) 958 | page_info = result['pageInfo'] 959 | cursor, has_next_page = page_info['endCursor'], page_info['hasNextPage'] 960 | for edge in result['edges']: 961 | node = edge['node'] 962 | space = node['space'] 963 | 
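# Editor note: profile_leaderboard is cursor-paginated; each node yields
# (space name, points, rank), recorded under the space alias for the stats report.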
self.account.spaces_points[space['alias']] = (space['name'], node['points'], node['rank']) 964 | -------------------------------------------------------------------------------- /internal/galxe/client.py: -------------------------------------------------------------------------------- 1 | from uuid import uuid4 2 | 3 | from ..models import AccountInfo 4 | from ..tls import TLSClient 5 | from ..utils import async_retry 6 | 7 | 8 | class Client(TLSClient): 9 | GRAPH_URL = 'https://graphigo.prd.galaxy.eco/query' 10 | 11 | def __init__(self, account: AccountInfo): 12 | self.with_evm = True 13 | super().__init__(account, { 14 | 'origin': 'https://galxe.com', 15 | 'sec-fetch-site': 'cross-site', 16 | }) 17 | 18 | async def api_request(self, body, response_extract_func=None, exc_condition=None): 19 | 20 | def resp_handler(resp): 21 | if resp.get('errors') is not None and len(resp['errors']) > 0: 22 | raise Exception(' | '.join([err['message'] for err in resp['errors']])) 23 | if exc_condition is not None: 24 | if exc_condition(resp): 25 | operation_name = body['operationName'] 26 | operation_name = operation_name[0].lower() + operation_name[1:] 27 | if 'data' in resp and operation_name in resp['data'] and 'message' in resp['data'][operation_name]: 28 | raise Exception(f"Message: \"{resp['data'][operation_name]['message']}\"") 29 | raise Exception() 30 | if response_extract_func is not None: 31 | return response_extract_func(resp) 32 | return resp 33 | 34 | try: 35 | return await self.post(self.GRAPH_URL, json=body, timeout=60, 36 | acceptable_statuses=[200], resp_handler=resp_handler, 37 | headers={'Request-Id': str(uuid4())}) 38 | except Exception as e: 39 | raise Exception(f"{body['operationName']} request failed: {str(e)}") 40 | 41 | @property 42 | def full_address(self): 43 | if self.with_evm: 44 | return f'EVM:{self.account.evm_address.lower()}' 45 | else: 46 | return f'APTOS:{self.account.aptos_address.lower()}' 47 | 48 | @property 49 | def raw_address(self): 50 | return self.account.evm_address.lower() if self.with_evm else self.account.aptos_address.lower() 51 | 52 | @property 53 | def address(self): 54 | if self.with_evm: 55 | return self.raw_address 56 | else: 57 | return self.full_address 58 | 59 | @property 60 | def address_type(self): 61 | return 'EVM' if self.with_evm else 'APTOS' 62 | 63 | async def galxe_id_exist(self) -> bool: 64 | body = { 65 | "operationName": "GalxeIDExist", 66 | "query": "query GalxeIDExist($schema: String!) {\n galxeIdExist(schema: $schema)\n}\n", 67 | "variables": { 68 | "schema": self.full_address, 69 | }, 70 | } 71 | return await self.api_request(body, lambda resp: resp['data']['galxeIdExist']) 72 | 73 | async def sign_in(self, msg, signature, with_aptos=False): 74 | self.with_evm = not with_aptos 75 | body = { 76 | 'operationName': 'SignIn', 77 | 'query': 'mutation SignIn($input: Auth) {\n signin(input: $input)\n}\n', 78 | 'variables': { 79 | 'input': { 80 | 'address': self.raw_address, 81 | 'addressType': self.address_type, 82 | 'message': msg, 83 | 'signature': signature, 84 | }, 85 | }, 86 | } 87 | auth_token = await self.api_request(body, lambda resp: resp['data']['signin']) 88 | self.update_headers({'Authorization': auth_token}) 89 | 90 | async def is_username_exist(self, username: str): 91 | body = { 92 | "operationName": "IsUsernameExisting", 93 | "variables": {"username": username}, 94 | "query": "query IsUsernameExisting($username: String!) 
{\n usernameExist(username: $username)\n}\n" 95 | } 96 | return await self.api_request(body, lambda resp: resp['data']['usernameExist']) 97 | 98 | async def create_account(self, username): 99 | body = { 100 | "operationName": "CreateNewAccount", 101 | "variables": { 102 | "input": { 103 | "schema": self.full_address, 104 | "socialUsername": "", 105 | "username": username, 106 | } 107 | }, 108 | "query": "mutation CreateNewAccount($input: CreateNewAccount!) {\n createNewAccount(input: $input)\n}\n" 109 | } 110 | await self.api_request(body) 111 | 112 | async def basic_user_info(self): 113 | body = { 114 | "operationName": "BasicUserInfo", 115 | "variables": { 116 | "address": self.full_address, 117 | }, 118 | "query": "query BasicUserInfo($address: String!) {\n addressInfo(address: $address) {\n id\n username\n avatar\n address\n evmAddressSecondary {\n address\n __typename\n }\n hasEmail\n solanaAddress\n aptosAddress\n seiAddress\n injectiveAddress\n flowAddress\n starknetAddress\n bitcoinAddress\n hasEvmAddress\n hasSolanaAddress\n hasAptosAddress\n hasInjectiveAddress\n hasFlowAddress\n hasStarknetAddress\n hasBitcoinAddress\n hasTwitter\n hasGithub\n hasDiscord\n hasTelegram\n displayEmail\n displayTwitter\n displayGithub\n displayDiscord\n displayTelegram\n displayNamePref\n email\n twitterUserID\n twitterUserName\n githubUserID\n githubUserName\n discordUserID\n discordUserName\n telegramUserID\n telegramUserName\n enableEmailSubs\n subscriptions\n isWhitelisted\n isInvited\n isAdmin\n accessToken\n __typename\n }\n}\n" 119 | } 120 | return await self.api_request(body, lambda resp: resp['data']['addressInfo']) 121 | 122 | async def update_user_address(self, input_vars): 123 | body = { 124 | 'operationName': 'UpdateUserAddress', 125 | 'query': 'mutation UpdateUserAddress($input: UpdateUserAddressInput!) {\n updateUserAddress(input: $input) {\n code\n message\n __typename\n }\n}\n', 126 | 'variables': { 127 | 'input': input_vars, 128 | }, 129 | } 130 | await self.api_request(body) 131 | 132 | async def remove_user_address(self, input_vars): 133 | body = { 134 | 'operationName': 'RemoveUserAddress', 135 | 'query': 'mutation RemoveUserAddress($input: UpdateUserAddressInput!) {\n removeUserAddress(input: $input) {\n code\n __typename\n }\n}\n', 136 | 'variables': { 137 | 'input': input_vars, 138 | }, 139 | } 140 | await self.api_request(body) 141 | 142 | async def check_twitter_account(self, tweet_url): 143 | body = { 144 | 'operationName': 'checkTwitterAccount', 145 | 'query': 'mutation checkTwitterAccount($input: VerifyTwitterAccountInput!) {\n checkTwitterAccount(input: $input) {\n address\n twitterUserID\n twitterUserName\n __typename\n }\n}\n', 146 | 'variables': { 147 | 'input': { 148 | 'address': self.full_address, 149 | 'tweetURL': tweet_url, 150 | }, 151 | }, 152 | } 153 | await self.api_request(body, exc_condition=lambda resp: resp['data']['checkTwitterAccount'] is None) 154 | 155 | async def verify_twitter_account(self, tweet_url): 156 | body = { 157 | 'operationName': 'VerifyTwitterAccount', 158 | 'query': 'mutation VerifyTwitterAccount($input: VerifyTwitterAccountInput!) 
{\n verifyTwitterAccount(input: $input) {\n address\n twitterUserID\n twitterUserName\n __typename\n }\n}\n', 159 | 'variables': { 160 | 'input': { 161 | 'address': self.full_address, 162 | 'tweetURL': tweet_url, 163 | }, 164 | }, 165 | } 166 | await self.api_request(body, exc_condition=lambda resp: resp['data']['verifyTwitterAccount'] is None) 167 | 168 | async def get_social_auth_url(self): 169 | body = { 170 | 'operationName': 'getSocialAuthUrl', 171 | 'query': 'query getSocialAuthUrl($schema: String!, $type: SocialAccountType!) {\n getSocialAuthUrl(schema: $schema, type: $type)\n}\n', 172 | 'variables': { 173 | 'schema': self.full_address, 174 | 'type': 'DISCORD', 175 | }, 176 | } 177 | return await self.api_request(body, lambda resp: resp['data']['getSocialAuthUrl']) 178 | 179 | async def check_discord_account(self, state, token): 180 | body = { 181 | 'operationName': 'checkDiscordAccount', 182 | 'query': 'mutation checkDiscordAccount($input: VerifyDiscordAccountInput!) {\n checkDiscordAccount(input: $input) {\n address\n discordUserID\n __typename\n }\n}\n', 183 | 'variables': { 184 | 'input': { 185 | 'address': self.full_address, 186 | 'state': state, 187 | 'token': token, 188 | }, 189 | }, 190 | } 191 | await self.api_request(body, exc_condition=lambda resp: resp['data']['checkDiscordAccount'] is None) 192 | 193 | async def verify_discord_account(self, state, token): 194 | body = { 195 | 'operationName': 'VerifyDiscord', 196 | 'query': 'mutation VerifyDiscord($input: VerifyDiscordAccountInput!) {\n verifyDiscordAccount(input: $input) {\n address\n discordUserID\n discordUserName\n __typename\n }\n}\n', 197 | 'variables': { 198 | 'input': { 199 | 'address': self.full_address, 200 | 'state': state, 201 | 'token': token, 202 | }, 203 | }, 204 | } 205 | await self.api_request(body, exc_condition=lambda resp: resp['data']['verifyDiscordAccount'] is None) 206 | 207 | @async_retry 208 | async def get_campaign_info(self, campaign_id): 209 | body = { 210 | 'operationName': 'CampaignDetailAll', 211 | 'query': 'query CampaignDetailAll($id: ID!, $address: String!, $withAddress: Boolean!) 
{\n campaign(id: $id) {\n ...CampaignForSiblingSlide\n coHostSpaces {\n ...SpaceDetail\n isAdmin(address: $address) @include(if: $withAddress)\n isFollowing @include(if: $withAddress)\n followersCount\n categories\n __typename\n }\n bannerUrl\n ...CampaignDetailFrag\n userParticipants(address: $address, first: 1) @include(if: $withAddress) {\n list {\n status\n premintTo\n __typename\n }\n __typename\n }\n space {\n ...SpaceDetail\n isAdmin(address: $address) @include(if: $withAddress)\n isFollowing @include(if: $withAddress)\n followersCount\n categories\n __typename\n }\n isBookmarked(address: $address) @include(if: $withAddress)\n inWatchList\n claimedLoyaltyPoints(address: $address) @include(if: $withAddress)\n parentCampaign {\n id\n isSequencial\n thumbnail\n __typename\n }\n isSequencial\n numNFTMinted\n childrenCampaigns {\n ...ChildrenCampaignsForCampaignDetailAll\n __typename\n }\n __typename\n }\n}\n\nfragment CampaignDetailFrag on Campaign {\n id\n ...CampaignMedia\n ...CampaignForgePage\n ...CampaignForCampaignParticipantsBox\n name\n numberID\n type\n inWatchList\n cap\n info\n useCred\n smartbalancePreCheck(mintCount: 1)\n smartbalanceDeposited\n formula\n status\n seoImage\n creator\n tags\n thumbnail\n gasType\n isPrivate\n createdAt\n requirementInfo\n description\n enableWhitelist\n chain\n startTime\n endTime\n requireEmail\n requireUsername\n blacklistCountryCodes\n whitelistRegions\n rewardType\n distributionType\n rewardName\n claimEndTime\n loyaltyPoints\n tokenRewardContract {\n id\n address\n chain\n __typename\n }\n tokenReward {\n userTokenAmount\n tokenAddress\n depositedTokenAmount\n tokenRewardId\n tokenDecimal\n tokenLogo\n tokenSymbol\n __typename\n }\n nftHolderSnapshot {\n holderSnapshotBlock\n __typename\n }\n spaceStation {\n id\n address\n chain\n __typename\n }\n ...WhitelistInfoFrag\n ...WhitelistSubgraphFrag\n gamification {\n ...GamificationDetailFrag\n __typename\n }\n creds {\n id\n name\n type\n credType\n credSource\n referenceLink\n description\n lastUpdate\n lastSync\n syncStatus\n credContractNFTHolder {\n timestamp\n __typename\n }\n chain\n eligible(address: $address, campaignId: $id)\n subgraph {\n endpoint\n query\n expression\n __typename\n }\n dimensionConfig\n value {\n gitcoinPassport {\n score\n lastScoreTimestamp\n __typename\n }\n __typename\n }\n commonInfo {\n participateEndTime\n modificationInfo\n __typename\n }\n __typename\n }\n credentialGroups(address: $address) {\n ...CredentialGroupForAddress\n __typename\n }\n rewardInfo {\n discordRole {\n guildId\n guildName\n roleId\n roleName\n inviteLink\n __typename\n }\n premint {\n startTime\n endTime\n chain\n price\n totalSupply\n contractAddress\n banner\n __typename\n }\n loyaltyPoints {\n points\n __typename\n }\n loyaltyPointsMysteryBox {\n points\n weight\n __typename\n }\n __typename\n }\n participants {\n participantsCount\n bountyWinnersCount\n __typename\n }\n taskConfig(address: $address) {\n participateCondition {\n conditions {\n ...ExpressionEntity\n __typename\n }\n conditionalFormula\n eligible\n __typename\n }\n rewardConfigs {\n id\n conditions {\n ...ExpressionEntity\n __typename\n }\n conditionalFormula\n description\n rewards {\n ...ExpressionReward\n __typename\n }\n eligible\n rewardAttrVals {\n attrName\n attrTitle\n attrVal\n __typename\n }\n __typename\n }\n referralConfig {\n id\n conditions {\n ...ExpressionEntity\n __typename\n }\n conditionalFormula\n description\n rewards {\n ...ExpressionReward\n __typename\n }\n eligible\n rewardAttrVals {\n 
attrName\n attrTitle\n attrVal\n __typename\n }\n __typename\n }\n __typename\n }\n referralCode(address: $address)\n recurringType\n latestRecurringTime\n nftTemplates {\n id\n image\n treasureBack\n __typename\n }\n __typename\n}\n\nfragment CampaignMedia on Campaign {\n thumbnail\n rewardName\n type\n gamification {\n id\n type\n __typename\n }\n __typename\n}\n\nfragment CredentialGroupForAddress on CredentialGroup {\n id\n description\n credentials {\n ...CredForAddressWithoutMetadata\n __typename\n }\n conditionRelation\n conditions {\n expression\n eligible\n ...CredentialGroupConditionForVerifyButton\n __typename\n }\n rewards {\n expression\n eligible\n rewardCount\n rewardType\n __typename\n }\n rewardAttrVals {\n attrName\n attrTitle\n attrVal\n __typename\n }\n claimedLoyaltyPoints\n __typename\n}\n\nfragment CredForAddressWithoutMetadata on Cred {\n id\n name\n type\n credType\n credSource\n referenceLink\n description\n lastUpdate\n lastSync\n syncStatus\n credContractNFTHolder {\n timestamp\n __typename\n }\n chain\n eligible(address: $address)\n subgraph {\n endpoint\n query\n expression\n __typename\n }\n dimensionConfig\n value {\n gitcoinPassport {\n score\n lastScoreTimestamp\n __typename\n }\n __typename\n }\n __typename\n}\n\nfragment CredentialGroupConditionForVerifyButton on CredentialGroupCondition {\n expression\n eligibleAddress\n __typename\n}\n\nfragment WhitelistInfoFrag on Campaign {\n id\n whitelistInfo(address: $address) {\n address\n maxCount\n usedCount\n claimedLoyaltyPoints\n currentPeriodClaimedLoyaltyPoints\n currentPeriodMaxLoyaltyPoints\n __typename\n }\n __typename\n}\n\nfragment WhitelistSubgraphFrag on Campaign {\n id\n whitelistSubgraph {\n query\n endpoint\n expression\n variable\n __typename\n }\n __typename\n}\n\nfragment GamificationDetailFrag on Gamification {\n id\n type\n nfts {\n nft {\n id\n animationURL\n category\n powah\n image\n name\n treasureBack\n nftCore {\n ...NftCoreInfoFrag\n __typename\n }\n traits {\n name\n value\n __typename\n }\n __typename\n }\n __typename\n }\n forgeConfig {\n minNFTCount\n maxNFTCount\n requiredNFTs {\n nft {\n category\n powah\n image\n name\n nftCore {\n capable\n contractAddress\n __typename\n }\n __typename\n }\n count\n __typename\n }\n __typename\n }\n __typename\n}\n\nfragment NftCoreInfoFrag on NFTCore {\n id\n capable\n chain\n contractAddress\n name\n symbol\n dao {\n id\n name\n logo\n alias\n __typename\n }\n __typename\n}\n\nfragment ExpressionEntity on ExprEntity {\n cred {\n id\n name\n type\n credType\n credSource\n dimensionConfig\n referenceLink\n description\n lastUpdate\n lastSync\n chain\n eligible(address: $address)\n metadata {\n visitLink {\n link\n __typename\n }\n twitter {\n isAuthentic\n __typename\n }\n __typename\n }\n commonInfo {\n participateEndTime\n modificationInfo\n __typename\n }\n __typename\n }\n attrs {\n attrName\n operatorSymbol\n targetValue\n __typename\n }\n attrFormula\n eligible\n eligibleAddress\n __typename\n}\n\nfragment ExpressionReward on ExprReward {\n arithmetics {\n ...ExpressionEntity\n __typename\n }\n arithmeticFormula\n rewardType\n rewardCount\n rewardVal\n __typename\n}\n\nfragment CampaignForgePage on Campaign {\n id\n numberID\n chain\n spaceStation {\n address\n __typename\n }\n gamification {\n forgeConfig {\n maxNFTCount\n minNFTCount\n requiredNFTs {\n nft {\n category\n __typename\n }\n __typename\n }\n __typename\n }\n __typename\n }\n __typename\n}\n\nfragment CampaignForCampaignParticipantsBox on Campaign {\n 
...CampaignForParticipantsDialog\n id\n chain\n space {\n id\n isAdmin(address: $address)\n __typename\n }\n participants {\n participants(first: 10, after: \"-1\", download: false) {\n list {\n address {\n id\n avatar\n __typename\n }\n __typename\n }\n __typename\n }\n participantsCount\n bountyWinners(first: 10, after: \"-1\", download: false) {\n list {\n createdTime\n address {\n id\n avatar\n __typename\n }\n __typename\n }\n __typename\n }\n bountyWinnersCount\n __typename\n }\n __typename\n}\n\nfragment CampaignForParticipantsDialog on Campaign {\n id\n name\n type\n rewardType\n chain\n nftHolderSnapshot {\n holderSnapshotBlock\n __typename\n }\n space {\n isAdmin(address: $address)\n __typename\n }\n rewardInfo {\n discordRole {\n guildName\n roleName\n __typename\n }\n __typename\n }\n __typename\n}\n\nfragment SpaceDetail on Space {\n id\n name\n info\n thumbnail\n alias\n status\n links\n isVerified\n discordGuildID\n followersCount\n nftCores(input: {first: 1}) {\n list {\n id\n marketLink\n __typename\n }\n __typename\n }\n __typename\n}\n\nfragment ChildrenCampaignsForCampaignDetailAll on Campaign {\n space {\n ...SpaceDetail\n isAdmin(address: $address) @include(if: $withAddress)\n isFollowing @include(if: $withAddress)\n followersCount\n categories\n __typename\n }\n ...CampaignDetailFrag\n claimedLoyaltyPoints(address: $address) @include(if: $withAddress)\n userParticipants(address: $address, first: 1) @include(if: $withAddress) {\n list {\n status\n __typename\n }\n __typename\n }\n parentCampaign {\n id\n isSequencial\n __typename\n }\n __typename\n}\n\nfragment CampaignForSiblingSlide on Campaign {\n id\n space {\n id\n alias\n __typename\n }\n parentCampaign {\n id\n thumbnail\n isSequencial\n childrenCampaigns {\n id\n ...CampaignForGetImage\n ...CampaignForCheckFinish\n __typename\n }\n __typename\n }\n __typename\n}\n\nfragment CampaignForCheckFinish on Campaign {\n claimedLoyaltyPoints(address: $address)\n whitelistInfo(address: $address) {\n usedCount\n __typename\n }\n __typename\n}\n\nfragment CampaignForGetImage on Campaign {\n ...GetImageCommon\n nftTemplates {\n image\n __typename\n }\n __typename\n}\n\nfragment GetImageCommon on Campaign {\n ...CampaignForTokenObject\n id\n type\n thumbnail\n __typename\n}\n\nfragment CampaignForTokenObject on Campaign {\n tokenReward {\n tokenAddress\n tokenSymbol\n tokenDecimal\n tokenLogo\n __typename\n }\n tokenRewardContract {\n id\n chain\n __typename\n }\n __typename\n}\n', 212 | 'variables': { 213 | 'address': self.full_address, 214 | 'id': campaign_id, 215 | 'withAddress': True, 216 | }, 217 | } 218 | return await self.api_request(body, lambda resp: resp['data']['campaign']) 219 | 220 | async def read_quiz(self, quiz_id): 221 | body = { 222 | 'operationName': 'readQuiz', 223 | 'query': 'query readQuiz($id: ID!) {\n credential(id: $id) {\n ...CredQuizFrag\n __typename\n }\n}\n\nfragment CredQuizFrag on Cred {\n credQuiz {\n quizzes {\n title\n type\n items {\n value\n __typename\n }\n __typename\n }\n __typename\n }\n __typename\n}\n', 224 | 'variables': { 225 | 'id': quiz_id, 226 | }, 227 | } 228 | return await self.api_request(body, lambda resp: resp['data']['credential']['credQuiz']['quizzes']) 229 | 230 | async def read_survey(self, survey_id): 231 | body = { 232 | 'operationName': 'readSurvey', 233 | 'query': 'query readSurvey($id: ID!) 
{\n credential(id: $id) {\n metadata {\n survey {\n ...SurveyCredMetadataFrag\n __typename\n }\n __typename\n }\n __typename\n }\n}\n\nfragment SurveyCredMetadataFrag on SurveyCredMetadata {\n surveies {\n title\n type\n items {\n value\n __typename\n }\n __typename\n }\n __typename\n}\n', 234 | 'variables': { 235 | 'id': survey_id, 236 | } 237 | } 238 | return await self.api_request(body, lambda resp: resp['data']['credential']['metadata']['survey']['surveies']) 239 | 240 | async def add_typed_credential_items(self, campaign_id, credential_id, captcha): 241 | body = { 242 | 'operationName': 'AddTypedCredentialItems', 243 | 'query': 'mutation AddTypedCredentialItems($input: MutateTypedCredItemInput!) {\n typedCredentialItems(input: $input) {\n id\n __typename\n }\n}\n', 244 | 'variables': { 245 | 'input': { 246 | 'campaignId': campaign_id, 247 | 'captcha': captcha, 248 | 'credId': credential_id, 249 | 'items': [self.full_address], 250 | 'operation': 'APPEND', 251 | } 252 | } 253 | } 254 | await self.api_request(body) 255 | 256 | async def twitter_oauth2_status(self): 257 | body = { 258 | 'operationName': 'TwitterOauth2Status', 259 | 'query': 'query TwitterOauth2Status {\n twitterOauth2Status {\n oauthRateLimited\n __typename\n }\n}\n', 260 | 'variables': {}, 261 | } 262 | await self.api_request(body) 263 | 264 | async def sync_credential_value(self, sync_options, only_allow=True, quiz=False): 265 | body = { 266 | 'operationName': 'SyncCredentialValue', 267 | 'query': 'mutation SyncCredentialValue($input: SyncCredentialValueInput!) {\n syncCredentialValue(input: $input) {\n value {\n address\n spaceUsers {\n follow\n points\n participations\n __typename\n }\n campaignReferral {\n count\n __typename\n }\n gitcoinPassport {\n score\n lastScoreTimestamp\n __typename\n }\n walletBalance {\n balance\n __typename\n }\n multiDimension {\n value\n __typename\n }\n allow\n survey {\n answers\n __typename\n }\n quiz {\n allow\n correct\n __typename\n }\n __typename\n }\n message\n __typename\n }\n}\n', 268 | 'variables': { 269 | 'input': { 270 | 'syncOptions': sync_options 271 | }, 272 | }, 273 | } 274 | 275 | def exc_cond(resp): 276 | value = resp['data']['syncCredentialValue']['value'] 277 | if quiz: 278 | value = value['quiz'] 279 | return not value['allow'] 280 | 281 | return await self.api_request( 282 | body, 283 | lambda resp: resp['data']['syncCredentialValue']['value'], 284 | exc_condition=exc_cond if only_allow else None, 285 | ) 286 | 287 | async def prepare_participate(self, campaign_id, captcha, chain, 288 | referral_code=None, input_kwargs=None): 289 | body = { 290 | 'operationName': 'PrepareParticipate', 291 | 'query': 'mutation PrepareParticipate($input: PrepareParticipateInput!) 
{\n prepareParticipate(input: $input) {\n allow\n disallowReason\n signature\n nonce\n mintFuncInfo {\n funcName\n nftCoreAddress\n verifyIDs\n powahs\n cap\n __typename\n }\n extLinkResp {\n success\n data\n error\n __typename\n }\n metaTxResp {\n metaSig2\n autoTaskUrl\n metaSpaceAddr\n forwarderAddr\n metaTxHash\n reqQueueing\n __typename\n }\n solanaTxResp {\n mint\n updateAuthority\n explorerUrl\n signedTx\n verifyID\n __typename\n }\n aptosTxResp {\n signatureExpiredAt\n tokenName\n __typename\n }\n spaceStation\n airdropRewardCampaignTxResp {\n airdropID\n verifyID\n index\n account\n amount\n proof\n customReward\n __typename\n }\n tokenRewardCampaignTxResp {\n signatureExpiredAt\n verifyID\n encodeAddress\n weight\n __typename\n }\n loyaltyPointsTxResp {\n TotalClaimedPoints\n VerifyIDs\n loyaltyPointDistributionStation\n signature\n disallowReason\n nonce\n allow\n loyaltyPointContract\n Points\n reqQueueing\n __typename\n }\n flowTxResp {\n Name\n Description\n Thumbnail\n __typename\n }\n xrplLinks\n suiTxResp {\n packageId\n tableId\n nftName\n campaignId\n verifyID\n imgUrl\n signatureExpiredAt\n __typename\n }\n algorandTxResp {\n algorandArgs {\n args\n __typename\n }\n algorandBoxes {\n boxes\n __typename\n }\n __typename\n }\n __typename\n }\n}', 292 | 'variables': { 293 | 'input': { 294 | 'address': self.full_address, 295 | 'campaignID': campaign_id, 296 | 'captcha': captcha, 297 | 'chain': chain, 298 | 'mintCount': 1, 299 | 'signature': '', 300 | }, 301 | }, 302 | } 303 | if referral_code: 304 | body['variables']['input']['referralCode'] = referral_code 305 | input_kwargs = input_kwargs or {} 306 | for name, value in input_kwargs.items(): 307 | body['variables']['input'][name] = value 308 | 309 | def handle_resp(resp): 310 | result = resp['data']['prepareParticipate'] 311 | if result.get('disallowReason'): 312 | raise Exception(f'Not allowed, reason: {result["disallowReason"]}') 313 | return result 314 | 315 | return await self.api_request(body, handle_resp) 316 | 317 | async def participate(self, campaign_id, chain, nonce, tx_hash, verify_id): 318 | body = { 319 | 'operationName': 'Participate', 320 | 'query': 'mutation Participate($input: ParticipateInput!) {\n participate(input: $input) {\n participated\n __typename\n }\n}\n', 321 | 'variables': { 322 | 'input': { 323 | 'address': self.full_address, 324 | 'campaignID': campaign_id, 325 | 'chain': chain, 326 | 'nonce': nonce, 327 | 'signature': '', 328 | 'tx': tx_hash, 329 | 'verifyIDs': [verify_id], 330 | }, 331 | }, 332 | } 333 | await self.api_request(body, exc_condition=lambda resp: not resp['data']['participate']['participated']) 334 | 335 | async def participate_point(self, campaign_id, nonce, tx_hash, verify_ids): 336 | body = { 337 | 'operationName': 'ParticipatePoint', 338 | 'query': 'mutation ParticipatePoint($input: ParticipatePointInput!) {\n participatePoint(input: $input) {\n participated\n failReason\n __typename\n }\n}', 339 | 'variables': { 340 | 'input': { 341 | 'address': self.full_address, 342 | 'campaignID': campaign_id, 343 | 'chain': 'GRAVITY_ALPHA', 344 | 'nonce': nonce, 345 | 'signature': '', 346 | 'tx': tx_hash, 347 | 'verifyIDs': verify_ids, 348 | }, 349 | }, 350 | } 351 | await self.api_request(body, exc_condition=lambda resp: not resp['data']['participatePoint']['participated']) 352 | 353 | async def send_verify_code(self, email_username, captcha): 354 | body = { 355 | 'operationName': 'SendVerifyCode', 356 | 'query': 'mutation SendVerifyCode($input: SendVerificationEmailInput!) 
{\n sendVerificationCode(input: $input) {\n code\n message\n __typename\n }\n}\n', 357 | 'variables': { 358 | 'input': { 359 | 'address': self.full_address, 360 | 'captcha': captcha, 361 | 'email': email_username, 362 | }, 363 | }, 364 | } 365 | await self.api_request(body) 366 | 367 | async def update_email(self, email_username, code): 368 | body = { 369 | 'operationName': 'UpdateEmail', 370 | 'query': 'mutation UpdateEmail($input: UpdateEmailInput!) {\n updateEmail(input: $input) {\n code\n message\n __typename\n }\n}\n', 371 | 'variables': { 372 | 'input': { 373 | 'address': self.full_address, 374 | 'email': email_username, 375 | 'verificationCode': code, 376 | }, 377 | }, 378 | } 379 | await self.api_request(body) 380 | 381 | async def follow_space(self, space_id): 382 | body = { 383 | 'operationName': 'followSpace', 384 | 'query': 'mutation followSpace($spaceIds: [Int!]) {\n followSpace(spaceIds: $spaceIds)\n}\n', 385 | 'variables': { 386 | 'spaceIds': [space_id], 387 | } 388 | } 389 | await self.api_request(body, exc_condition=lambda r: r['data']['followSpace'] != 1) 390 | 391 | async def sync_evaluate_credential_value(self, eval_expr, sync_options): 392 | body = { 393 | 'operationName': 'syncEvaluateCredentialValue', 394 | 'query': 'mutation syncEvaluateCredentialValue($input: SyncEvaluateCredentialValueInput!) {\n syncEvaluateCredentialValue(input: $input) {\n result\n value {\n allow\n survey {\n answers\n __typename\n }\n quiz {\n allow\n correct\n __typename\n }\n __typename\n }\n message\n __typename\n }\n}\n', 395 | 'variables': { 396 | 'input': { 397 | 'evalExpr': eval_expr, 398 | 'syncOptions': sync_options 399 | }, 400 | }, 401 | } 402 | return await self.api_request( 403 | body, 404 | exc_condition=lambda r: not r['data']['syncEvaluateCredentialValue'], 405 | ) 406 | 407 | async def verify_credentials(self, cred_ids): 408 | body = { 409 | 'operationName': 'VerifyCredentials', 410 | 'query': 'mutation VerifyCredentials($input: VerifyCredentialsInput!) {\n verifyCredentials(input: $input)\n}\n', 411 | 'variables': { 412 | 'input': { 413 | 'address': self.full_address, 414 | 'credIds': cred_ids, 415 | }, 416 | }, 417 | } 418 | return await self.api_request(body) 419 | 420 | async def profile_leaderboard(self, cursor: str): 421 | body = { 422 | 'operationName': 'ProfileLeaderboard', 423 | 'query': 'query ProfileLeaderboard($address: String!, $pageSize: Int, $cursorAfter: String) {\n addressInfo(address: $address) {\n id\n loyaltyPointsRanks(first: $pageSize, cursorAfter: $cursorAfter) {\n pageInfo {\n endCursor\n hasNextPage\n __typename\n }\n edges {\n node {\n id\n rank\n points\n space {\n name\n alias\n thumbnail\n __typename\n }\n __typename\n }\n __typename\n }\n __typename\n }\n __typename\n }\n}\n', 424 | 'variables': { 425 | 'address': self.full_address, 426 | 'pageSize': 100, 427 | }, 428 | } 429 | if cursor: 430 | body['variables']['cursorAfter'] = cursor 431 | return await self.api_request(body, lambda r: r['data']['addressInfo']['loyaltyPointsRanks']) 432 | 433 | async def sufficient_for_gasless_chain_query(self, space_id: int, chains: str | list): 434 | if type(chains) is not list: 435 | chains = [chains] 436 | body = { 437 | 'operationName': 'SufficientForGaslessChainQuery', 438 | 'query': 'query SufficientForGaslessChainQuery($id: Int, $chains: [Chain!]!) 
{\n space(id: $id) {\n id\n spaceBalance {\n sufficientForGaslessClaimOnChain(chains: $chains) {\n sufficient\n chain\n __typename\n }\n __typename\n }\n __typename\n }\n}', 439 | 'variables': { 440 | 'chains': chains, 441 | 'id': space_id, 442 | }, 443 | } 444 | return await self.api_request( 445 | body, 446 | lambda r: r['data']['space']['spaceBalance']['sufficientForGaslessClaimOnChain'] 447 | ) 448 | -------------------------------------------------------------------------------- /internal/galxe/constants.py: -------------------------------------------------------------------------------- 1 | DISCORD_AUTH_URL = 'https://discord.com/api/v9/oauth2/authorize' 2 | GALXE_DISCORD_CLIENT_ID = '947863296789323776' 3 | 4 | CHAIN_NAME_MAPPING = { 5 | 'MATIC': 'Polygon', 6 | 'GRAVITY_ALPHA': 'Gravity', 7 | } 8 | 9 | VERIFY_TRIES = 4 10 | -------------------------------------------------------------------------------- /internal/galxe/fingerprint.py: -------------------------------------------------------------------------------- 1 | from asyncio import Lock 2 | from loguru import logger 3 | from playwright.async_api import async_playwright 4 | 5 | from ..vars import GALXE_CAPTCHA_ID 6 | from ..utils import get_query_param 7 | 8 | 9 | class Fingerprints: 10 | 11 | def __init__(self): 12 | self.current_fingerprint = None 13 | self.lock = Lock() 14 | 15 | async def _generate_new_no_lock(self): 16 | async with async_playwright() as pw: 17 | browser = await pw.chromium.launch(headless=True, args=[ 18 | '--lang=en-US,en', 19 | '--disable-blink-features=AutomationControlled', 20 | ]) 21 | context = await browser.new_context() 22 | await context.add_init_script('() => {}') 23 | fingerprint = '' 24 | try: 25 | page = await context.new_page() 26 | await page.goto('https://app.galxe.com/quest', wait_until='domcontentloaded', timeout=15000) 27 | await page.evaluate(f''' 28 | window.initGeetest4({{captchaId: "{GALXE_CAPTCHA_ID}", product: "bind"}}) 29 | ''') 30 | async with page.expect_response(lambda resp: resp.status == 200 and 'verify' in resp.url, 31 | timeout=15000) as r: 32 | fingerprint = get_query_param((await r.value).url, 'w') 33 | except Exception as e: 34 | logger.error(f'Failed to get fingerprint: {str(e)}') 35 | await context.close() 36 | await browser.close() 37 | self.current_fingerprint = fingerprint 38 | if self.current_fingerprint != '': 39 | logger.success(f'Successfully fetched fingerprint for captcha') 40 | 41 | async def generate_new(self): 42 | async with self.lock: 43 | await self._generate_new_no_lock() 44 | 45 | async def get(self) -> str: 46 | async with self.lock: 47 | if self.current_fingerprint is None or self.current_fingerprint == '': 48 | await self._generate_new_no_lock() 49 | return self.current_fingerprint 50 | 51 | 52 | fingerprints = Fingerprints() 53 | 54 | 55 | def captcha_retry(async_func): 56 | async def wrapper(*args, **kwargs): 57 | try: 58 | return await async_func(*args, **kwargs) 59 | except Exception as e: 60 | if 'recaptcha' in str(e): 61 | logger.info('Recaptcha error. 
Trying to update fingerprint') 62 | await fingerprints.generate_new() 63 | return await async_func(*args, **kwargs) 64 | raise 65 | 66 | return wrapper 67 | -------------------------------------------------------------------------------- /internal/galxe/models.py: -------------------------------------------------------------------------------- 1 | from enum import StrEnum 2 | 3 | 4 | class Recurring(StrEnum): 5 | DAILY = 'DAILY' 6 | 7 | 8 | class Credential(StrEnum): 9 | TWITTER = 'TWITTER' 10 | EMAIL = 'EMAIL' 11 | EVM_ADDRESS = 'EVM_ADDRESS' 12 | GALXE_ID = 'GALXE_ID' 13 | DISCORD = 'DISCORD' 14 | 15 | 16 | class CredSource(StrEnum): 17 | TWITTER_FOLLOW = 'TWITTER_FOLLOW' 18 | TWITTER_RT = 'TWITTER_RT' 19 | TWITTER_LIKE = 'TWITTER_LIKE' 20 | TWITTER_QUOTE = 'TWITTER_QUOTE' 21 | VISIT_LINK = 'VISIT_LINK' 22 | QUIZ = 'QUIZ' 23 | SURVEY = 'SURVEY' 24 | SPACE_USERS = 'SPACE_USERS' 25 | WATCH_YOUTUBE = 'WATCH_YOUTUBE' 26 | CSV = 'CSV' 27 | 28 | 29 | class ConditionRelation(StrEnum): 30 | ALL = 'ALL' 31 | ANY = 'ANY' 32 | 33 | 34 | class QuizType(StrEnum): 35 | MULTI_CHOICE = 'MULTI_CHOICE' 36 | 37 | 38 | class Gamification(StrEnum): 39 | POINTS = 'Points' 40 | OAT = 'Oat' 41 | POINTS_MYSTERY_BOX = 'PointsMysteryBox' 42 | DROP = 'Drop' 43 | BOUNTY = 'Bounty' 44 | DISCORD_ROLE = 'DiscordRole' 45 | TOKEN = 'Token' 46 | 47 | 48 | class GasType(StrEnum): 49 | GAS_LESS = 'Gasless' 50 | GAS = 'Gas' 51 | -------------------------------------------------------------------------------- /internal/galxe/utils.py: -------------------------------------------------------------------------------- 1 | import secrets 2 | import math 3 | import random 4 | 5 | 6 | alp = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' 7 | 8 | 9 | def random_string(e, n=alp): 10 | o = '' 11 | f = len(n) 12 | h = 256 - 256 % f 13 | while e > 0: 14 | t = secrets.token_bytes(math.ceil(256 * e / h)) 15 | i = 0 16 | while i < len(t) and e > 0: 17 | r = t[i] 18 | if r < h: 19 | o += n[r % f] 20 | e -= 1 21 | i += 1 22 | return o 23 | 24 | 25 | def random_string_for_entropy(e, n=alp): 26 | return random_string(math.ceil(e / (math.log(len(n)) / math.log(2)))) 27 | 28 | 29 | def random_user_prefix(n=3): 30 | prefix = '' 31 | for _ in range(n): 32 | prefix += random.choice(alp[-26:]) 33 | return prefix 34 | -------------------------------------------------------------------------------- /internal/models/__init__.py: -------------------------------------------------------------------------------- 1 | from .models import AccountInfo 2 | -------------------------------------------------------------------------------- /internal/models/models.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple, Optional 2 | from dataclasses import dataclass, field 3 | from dataclasses_json import dataclass_json 4 | from eth_account import Account as EvmAccount 5 | from eth_account.messages import encode_defunct 6 | 7 | from ..utils import plural_str 8 | 9 | 10 | STATUS_BY_BOOL = { 11 | False: '❌', 12 | True: '✅', 13 | } 14 | 15 | 16 | @dataclass_json 17 | @dataclass 18 | class AccountInfo: 19 | idx: int = 0 20 | evm_address: str = '' 21 | evm_private_key: str = '' 22 | proxy: str = '' 23 | twitter_auth_token: str = '' 24 | twitter_ct0: str = '' 25 | email_username: str = '' 26 | email_password: str = '' 27 | discord_token: str = '' 28 | twitter_error: bool = False 29 | discord_error: bool = False 30 | points: dict[str, Tuple[str, int, Optional[bool]]] = field(default_factory=dict) 31 
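# Editor note (inferred from str_stats/campaign_points_str below): points maps
# campaign id -> (campaign name, claimed points, optional claim-status flag), while
# nfts maps campaign id -> number of NFTs/OATs minted for that campaign.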
| nfts: dict[str, Optional[int]] = field(default_factory=dict) 32 | actual_campaigns: list[str] = field(default_factory=list) 33 | spaces_points: dict[str, Tuple[str, int, int]] = field(default_factory=dict) 34 | 35 | def sign_message(self, msg): 36 | return EvmAccount().sign_message(encode_defunct(text=msg), self.evm_private_key).signature.hex() 37 | 38 | @property 39 | def actual_points(self): 40 | return {k: v for k, v in self.points.items() if k in self.actual_campaigns} 41 | 42 | def str_stats(self) -> str: 43 | stats = [(n, self.campaign_points_str(c_id, with_nft=False), self.nfts.get(c_id)) 44 | for c_id, (n, _, _) in self.actual_points.items()] 45 | total = sum(v for _, v, _ in self.actual_points.values()) 46 | total_nfts = sum(v for _, _, v in stats if v is not None) 47 | if not any(v is not None for _, _, v in stats): 48 | total_nfts = None 49 | stats.append(('Total', total, total_nfts)) 50 | return ''.join([f'\t{name}: {pv} Points{", " + plural_str(nv, "NFT") if nv is not None else ""}\n' 51 | for name, pv, nv in stats])[:-1] 52 | 53 | def campaign_points(self, campaign_id) -> int: 54 | return self.points.get(campaign_id, ('', 0, None))[1] 55 | 56 | def campaign_points_str(self, campaign_id, with_nft=True) -> str: 57 | points = self.points.get(campaign_id) 58 | if not points: 59 | return '0' 60 | s = str(points[1]) 61 | if points[2] is not None: 62 | s += ' / ' + STATUS_BY_BOOL[points[2]] 63 | nft_cnt = self.nfts.get(campaign_id) 64 | if with_nft and nft_cnt is not None: 65 | s += f' / {nft_cnt}' 66 | return s 67 | 68 | @property 69 | def twitter_error_s(self): 70 | return '🔴' if self.twitter_error else '' 71 | 72 | @property 73 | def discord_error_s(self): 74 | return '🔴' if self.discord_error else '' 75 | -------------------------------------------------------------------------------- /internal/onchain/__init__.py: -------------------------------------------------------------------------------- 1 | from .account import OnchainAccount 2 | -------------------------------------------------------------------------------- /internal/onchain/abi/loyalty_points.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "inputs": [ 4 | { 5 | "internalType": "address", 6 | "name": "target", 7 | "type": "address" 8 | } 9 | ], 10 | "name": "AddressEmptyCode", 11 | "type": "error" 12 | }, 13 | { 14 | "inputs": [], 15 | "name": "ECDSAInvalidSignature", 16 | "type": "error" 17 | }, 18 | { 19 | "inputs": [ 20 | { 21 | "internalType": "uint256", 22 | "name": "length", 23 | "type": "uint256" 24 | } 25 | ], 26 | "name": "ECDSAInvalidSignatureLength", 27 | "type": "error" 28 | }, 29 | { 30 | "inputs": [ 31 | { 32 | "internalType": "bytes32", 33 | "name": "s", 34 | "type": "bytes32" 35 | } 36 | ], 37 | "name": "ECDSAInvalidSignatureS", 38 | "type": "error" 39 | }, 40 | { 41 | "inputs": [ 42 | { 43 | "internalType": "address", 44 | "name": "implementation", 45 | "type": "address" 46 | } 47 | ], 48 | "name": "ERC1967InvalidImplementation", 49 | "type": "error" 50 | }, 51 | { 52 | "inputs": [], 53 | "name": "ERC1967NonPayable", 54 | "type": "error" 55 | }, 56 | { 57 | "inputs": [], 58 | "name": "EnforcedPause", 59 | "type": "error" 60 | }, 61 | { 62 | "inputs": [], 63 | "name": "ExpectedPause", 64 | "type": "error" 65 | }, 66 | { 67 | "inputs": [], 68 | "name": "FailedInnerCall", 69 | "type": "error" 70 | }, 71 | { 72 | "inputs": [], 73 | "name": "InvalidAddress", 74 | "type": "error" 75 | }, 76 | { 77 | "inputs": [], 78 | "name": "InvalidInitialization", 
79 | "type": "error" 80 | }, 81 | { 82 | "inputs": [], 83 | "name": "InvalidSignature", 84 | "type": "error" 85 | }, 86 | { 87 | "inputs": [], 88 | "name": "NotInitializing", 89 | "type": "error" 90 | }, 91 | { 92 | "inputs": [ 93 | { 94 | "internalType": "address", 95 | "name": "owner", 96 | "type": "address" 97 | } 98 | ], 99 | "name": "OwnableInvalidOwner", 100 | "type": "error" 101 | }, 102 | { 103 | "inputs": [ 104 | { 105 | "internalType": "address", 106 | "name": "account", 107 | "type": "address" 108 | } 109 | ], 110 | "name": "OwnableUnauthorizedAccount", 111 | "type": "error" 112 | }, 113 | { 114 | "inputs": [], 115 | "name": "ParamsLengthMissMatch", 116 | "type": "error" 117 | }, 118 | { 119 | "inputs": [], 120 | "name": "UUPSUnauthorizedCallContext", 121 | "type": "error" 122 | }, 123 | { 124 | "inputs": [ 125 | { 126 | "internalType": "bytes32", 127 | "name": "slot", 128 | "type": "bytes32" 129 | } 130 | ], 131 | "name": "UUPSUnsupportedProxiableUUID", 132 | "type": "error" 133 | }, 134 | { 135 | "inputs": [ 136 | { 137 | "internalType": "uint256", 138 | "name": "verifyId", 139 | "type": "uint256" 140 | } 141 | ], 142 | "name": "VerifyIdAlreadyUsed", 143 | "type": "error" 144 | }, 145 | { 146 | "anonymous": false, 147 | "inputs": [ 148 | { 149 | "indexed": false, 150 | "internalType": "uint256", 151 | "name": "verifyId", 152 | "type": "uint256" 153 | }, 154 | { 155 | "indexed": false, 156 | "internalType": "address", 157 | "name": "_user", 158 | "type": "address" 159 | }, 160 | { 161 | "indexed": false, 162 | "internalType": "uint256", 163 | "name": "_amount", 164 | "type": "uint256" 165 | } 166 | ], 167 | "name": "DecreasePoint", 168 | "type": "event" 169 | }, 170 | { 171 | "anonymous": false, 172 | "inputs": [ 173 | { 174 | "indexed": false, 175 | "internalType": "uint256[]", 176 | "name": "verifyIds", 177 | "type": "uint256[]" 178 | }, 179 | { 180 | "indexed": false, 181 | "internalType": "address[]", 182 | "name": "users", 183 | "type": "address[]" 184 | }, 185 | { 186 | "indexed": false, 187 | "internalType": "uint256[]", 188 | "name": "amounts", 189 | "type": "uint256[]" 190 | } 191 | ], 192 | "name": "DecreasePoints", 193 | "type": "event" 194 | }, 195 | { 196 | "anonymous": false, 197 | "inputs": [], 198 | "name": "EIP712DomainChanged", 199 | "type": "event" 200 | }, 201 | { 202 | "anonymous": false, 203 | "inputs": [ 204 | { 205 | "indexed": false, 206 | "internalType": "uint256", 207 | "name": "verifyId", 208 | "type": "uint256" 209 | }, 210 | { 211 | "indexed": false, 212 | "internalType": "address", 213 | "name": "user", 214 | "type": "address" 215 | }, 216 | { 217 | "indexed": false, 218 | "internalType": "uint256", 219 | "name": "_amount", 220 | "type": "uint256" 221 | } 222 | ], 223 | "name": "IncreasePoint", 224 | "type": "event" 225 | }, 226 | { 227 | "anonymous": false, 228 | "inputs": [ 229 | { 230 | "indexed": false, 231 | "internalType": "uint256[]", 232 | "name": "verifyIds", 233 | "type": "uint256[]" 234 | }, 235 | { 236 | "indexed": false, 237 | "internalType": "address[]", 238 | "name": "users", 239 | "type": "address[]" 240 | }, 241 | { 242 | "indexed": false, 243 | "internalType": "uint256[]", 244 | "name": "amounts", 245 | "type": "uint256[]" 246 | } 247 | ], 248 | "name": "IncreasePoints", 249 | "type": "event" 250 | }, 251 | { 252 | "anonymous": false, 253 | "inputs": [ 254 | { 255 | "indexed": false, 256 | "internalType": "uint64", 257 | "name": "version", 258 | "type": "uint64" 259 | } 260 | ], 261 | "name": "Initialized", 262 | "type": "event" 
263 | }, 264 | { 265 | "anonymous": false, 266 | "inputs": [ 267 | { 268 | "indexed": true, 269 | "internalType": "address", 270 | "name": "previousOwner", 271 | "type": "address" 272 | }, 273 | { 274 | "indexed": true, 275 | "internalType": "address", 276 | "name": "newOwner", 277 | "type": "address" 278 | } 279 | ], 280 | "name": "OwnershipTransferStarted", 281 | "type": "event" 282 | }, 283 | { 284 | "anonymous": false, 285 | "inputs": [ 286 | { 287 | "indexed": true, 288 | "internalType": "address", 289 | "name": "previousOwner", 290 | "type": "address" 291 | }, 292 | { 293 | "indexed": true, 294 | "internalType": "address", 295 | "name": "newOwner", 296 | "type": "address" 297 | } 298 | ], 299 | "name": "OwnershipTransferred", 300 | "type": "event" 301 | }, 302 | { 303 | "anonymous": false, 304 | "inputs": [ 305 | { 306 | "indexed": false, 307 | "internalType": "address", 308 | "name": "account", 309 | "type": "address" 310 | } 311 | ], 312 | "name": "Paused", 313 | "type": "event" 314 | }, 315 | { 316 | "anonymous": false, 317 | "inputs": [ 318 | { 319 | "indexed": false, 320 | "internalType": "address", 321 | "name": "account", 322 | "type": "address" 323 | } 324 | ], 325 | "name": "Unpaused", 326 | "type": "event" 327 | }, 328 | { 329 | "anonymous": false, 330 | "inputs": [ 331 | { 332 | "indexed": true, 333 | "internalType": "address", 334 | "name": "implementation", 335 | "type": "address" 336 | } 337 | ], 338 | "name": "Upgraded", 339 | "type": "event" 340 | }, 341 | { 342 | "inputs": [], 343 | "name": "UPGRADE_INTERFACE_VERSION", 344 | "outputs": [ 345 | { 346 | "internalType": "string", 347 | "name": "", 348 | "type": "string" 349 | } 350 | ], 351 | "stateMutability": "view", 352 | "type": "function" 353 | }, 354 | { 355 | "inputs": [], 356 | "name": "acceptOwnership", 357 | "outputs": [], 358 | "stateMutability": "nonpayable", 359 | "type": "function" 360 | }, 361 | { 362 | "inputs": [ 363 | { 364 | "internalType": "contract ILoyaltyPoint", 365 | "name": "_loyaltyPoint", 366 | "type": "address" 367 | }, 368 | { 369 | "internalType": "uint256", 370 | "name": "_verifyId", 371 | "type": "uint256" 372 | }, 373 | { 374 | "internalType": "address", 375 | "name": "_user", 376 | "type": "address" 377 | }, 378 | { 379 | "internalType": "uint256", 380 | "name": "_amount", 381 | "type": "uint256" 382 | }, 383 | { 384 | "internalType": "bytes", 385 | "name": "_signature", 386 | "type": "bytes" 387 | } 388 | ], 389 | "name": "decreasePoint", 390 | "outputs": [], 391 | "stateMutability": "nonpayable", 392 | "type": "function" 393 | }, 394 | { 395 | "inputs": [ 396 | { 397 | "internalType": "contract ILoyaltyPoint", 398 | "name": "_loyaltyPoint", 399 | "type": "address" 400 | }, 401 | { 402 | "internalType": "uint256[]", 403 | "name": "_verifyIds", 404 | "type": "uint256[]" 405 | }, 406 | { 407 | "internalType": "address[]", 408 | "name": "_users", 409 | "type": "address[]" 410 | }, 411 | { 412 | "internalType": "uint256[]", 413 | "name": "_amounts", 414 | "type": "uint256[]" 415 | }, 416 | { 417 | "internalType": "bytes", 418 | "name": "_signature", 419 | "type": "bytes" 420 | } 421 | ], 422 | "name": "decreasePoints", 423 | "outputs": [], 424 | "stateMutability": "nonpayable", 425 | "type": "function" 426 | }, 427 | { 428 | "inputs": [], 429 | "name": "eip712Domain", 430 | "outputs": [ 431 | { 432 | "internalType": "bytes1", 433 | "name": "fields", 434 | "type": "bytes1" 435 | }, 436 | { 437 | "internalType": "string", 438 | "name": "name", 439 | "type": "string" 440 | }, 441 | { 442 | 
"internalType": "string", 443 | "name": "version", 444 | "type": "string" 445 | }, 446 | { 447 | "internalType": "uint256", 448 | "name": "chainId", 449 | "type": "uint256" 450 | }, 451 | { 452 | "internalType": "address", 453 | "name": "verifyingContract", 454 | "type": "address" 455 | }, 456 | { 457 | "internalType": "bytes32", 458 | "name": "salt", 459 | "type": "bytes32" 460 | }, 461 | { 462 | "internalType": "uint256[]", 463 | "name": "extensions", 464 | "type": "uint256[]" 465 | } 466 | ], 467 | "stateMutability": "view", 468 | "type": "function" 469 | }, 470 | { 471 | "inputs": [ 472 | { 473 | "internalType": "uint256", 474 | "name": "_verifyId", 475 | "type": "uint256" 476 | } 477 | ], 478 | "name": "hasDistributed", 479 | "outputs": [ 480 | { 481 | "internalType": "bool", 482 | "name": "", 483 | "type": "bool" 484 | } 485 | ], 486 | "stateMutability": "view", 487 | "type": "function" 488 | }, 489 | { 490 | "inputs": [ 491 | { 492 | "internalType": "contract ILoyaltyPoint", 493 | "name": "_loyaltyPoint", 494 | "type": "address" 495 | }, 496 | { 497 | "internalType": "uint256", 498 | "name": "_verifyId", 499 | "type": "uint256" 500 | }, 501 | { 502 | "internalType": "address", 503 | "name": "_user", 504 | "type": "address" 505 | }, 506 | { 507 | "internalType": "uint256", 508 | "name": "_amount", 509 | "type": "uint256" 510 | }, 511 | { 512 | "internalType": "bytes", 513 | "name": "_signature", 514 | "type": "bytes" 515 | } 516 | ], 517 | "name": "increasePoint", 518 | "outputs": [], 519 | "stateMutability": "nonpayable", 520 | "type": "function" 521 | }, 522 | { 523 | "inputs": [ 524 | { 525 | "internalType": "contract ILoyaltyPoint", 526 | "name": "_loyaltyPoint", 527 | "type": "address" 528 | }, 529 | { 530 | "internalType": "uint256[]", 531 | "name": "_verifyIds", 532 | "type": "uint256[]" 533 | }, 534 | { 535 | "internalType": "address[]", 536 | "name": "_users", 537 | "type": "address[]" 538 | }, 539 | { 540 | "internalType": "uint256[]", 541 | "name": "_amounts", 542 | "type": "uint256[]" 543 | }, 544 | { 545 | "internalType": "bytes", 546 | "name": "_signature", 547 | "type": "bytes" 548 | } 549 | ], 550 | "name": "increasePoints", 551 | "outputs": [], 552 | "stateMutability": "nonpayable", 553 | "type": "function" 554 | }, 555 | { 556 | "inputs": [ 557 | { 558 | "internalType": "address", 559 | "name": "_initialOwner", 560 | "type": "address" 561 | }, 562 | { 563 | "internalType": "address", 564 | "name": "_signer", 565 | "type": "address" 566 | } 567 | ], 568 | "name": "initialize", 569 | "outputs": [], 570 | "stateMutability": "nonpayable", 571 | "type": "function" 572 | }, 573 | { 574 | "inputs": [], 575 | "name": "owner", 576 | "outputs": [ 577 | { 578 | "internalType": "address", 579 | "name": "", 580 | "type": "address" 581 | } 582 | ], 583 | "stateMutability": "view", 584 | "type": "function" 585 | }, 586 | { 587 | "inputs": [], 588 | "name": "pause", 589 | "outputs": [], 590 | "stateMutability": "nonpayable", 591 | "type": "function" 592 | }, 593 | { 594 | "inputs": [], 595 | "name": "paused", 596 | "outputs": [ 597 | { 598 | "internalType": "bool", 599 | "name": "", 600 | "type": "bool" 601 | } 602 | ], 603 | "stateMutability": "view", 604 | "type": "function" 605 | }, 606 | { 607 | "inputs": [], 608 | "name": "pendingOwner", 609 | "outputs": [ 610 | { 611 | "internalType": "address", 612 | "name": "", 613 | "type": "address" 614 | } 615 | ], 616 | "stateMutability": "view", 617 | "type": "function" 618 | }, 619 | { 620 | "inputs": [], 621 | "name": 
"proxiableUUID", 622 | "outputs": [ 623 | { 624 | "internalType": "bytes32", 625 | "name": "", 626 | "type": "bytes32" 627 | } 628 | ], 629 | "stateMutability": "view", 630 | "type": "function" 631 | }, 632 | { 633 | "inputs": [], 634 | "name": "renounceOwnership", 635 | "outputs": [], 636 | "stateMutability": "nonpayable", 637 | "type": "function" 638 | }, 639 | { 640 | "inputs": [ 641 | { 642 | "internalType": "address", 643 | "name": "_signer", 644 | "type": "address" 645 | } 646 | ], 647 | "name": "setSigner", 648 | "outputs": [], 649 | "stateMutability": "nonpayable", 650 | "type": "function" 651 | }, 652 | { 653 | "inputs": [], 654 | "name": "signer", 655 | "outputs": [ 656 | { 657 | "internalType": "address", 658 | "name": "", 659 | "type": "address" 660 | } 661 | ], 662 | "stateMutability": "view", 663 | "type": "function" 664 | }, 665 | { 666 | "inputs": [ 667 | { 668 | "internalType": "address", 669 | "name": "newOwner", 670 | "type": "address" 671 | } 672 | ], 673 | "name": "transferOwnership", 674 | "outputs": [], 675 | "stateMutability": "nonpayable", 676 | "type": "function" 677 | }, 678 | { 679 | "inputs": [], 680 | "name": "unpause", 681 | "outputs": [], 682 | "stateMutability": "nonpayable", 683 | "type": "function" 684 | }, 685 | { 686 | "inputs": [ 687 | { 688 | "internalType": "address", 689 | "name": "newImplementation", 690 | "type": "address" 691 | }, 692 | { 693 | "internalType": "bytes", 694 | "name": "data", 695 | "type": "bytes" 696 | } 697 | ], 698 | "name": "upgradeToAndCall", 699 | "outputs": [], 700 | "stateMutability": "payable", 701 | "type": "function" 702 | } 703 | ] 704 | -------------------------------------------------------------------------------- /internal/onchain/abi/space_station.json: -------------------------------------------------------------------------------- 1 | [{"inputs":[{"internalType":"address","name":"_galaxy_signer","type":"address"},{"internalType":"address","name":"_campaign_setter","type":"address"},{"internalType":"address","name":"_contract_manager","type":"address"},{"internalType":"address","name":"_treasury_manager","type":"address"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"_cid","type":"uint256"}],"name":"EventActivateCampaign","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"_cid","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"_dummyId","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"_nftID","type":"uint256"},{"indexed":false,"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"indexed":false,"internalType":"address","name":"_sender","type":"address"}],"name":"EventClaim","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"_cid","type":"uint256"},{"indexed":false,"internalType":"uint256[]","name":"_dummyIdArr","type":"uint256[]"},{"indexed":false,"internalType":"uint256[]","name":"_nftIDArr","type":"uint256[]"},{"indexed":false,"internalType":"contract 
IStarNFT","name":"_starNFT","type":"address"},{"indexed":false,"internalType":"address","name":"_sender","type":"address"}],"name":"EventClaimBatch","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"_cid","type":"uint256"},{"indexed":false,"internalType":"uint256[]","name":"_dummyIdArr","type":"uint256[]"},{"indexed":false,"internalType":"uint256[]","name":"_nftIDArr","type":"uint256[]"},{"indexed":false,"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"indexed":false,"internalType":"address","name":"_sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"_minted","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"_cap","type":"uint256"}],"name":"EventClaimBatchCapped","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"_cid","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"_dummyId","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"_nftID","type":"uint256"},{"indexed":false,"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"indexed":false,"internalType":"address","name":"_sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"_minted","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"_cap","type":"uint256"}],"name":"EventClaimCapped","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"_cid","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"_dummyId","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"_nftID","type":"uint256"},{"indexed":false,"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"indexed":false,"internalType":"address","name":"_sender","type":"address"}],"name":"EventForge","type":"event"},{"stateMutability":"payable","type":"fallback"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256","name":"_dummyId","type":"uint256"},{"internalType":"uint256","name":"_powah","type":"uint256"},{"internalType":"address","name":"_account","type":"address"}],"name":"_hash","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256[]","name":"_dummyIdArr","type":"uint256[]"},{"internalType":"uint256[]","name":"_powahArr","type":"uint256[]"},{"internalType":"address","name":"_account","type":"address"}],"name":"_hashBatch","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256[]","name":"_dummyIdArr","type":"uint256[]"},{"internalType":"uint256[]","name":"_powahArr","type":"uint256[]"},{"internalType":"uint256","name":"_cap","type":"uint256"},{"internalType":"address","name":"_account","type":"address"}],"name":"_hashBatchCapped","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract 
IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256","name":"_dummyId","type":"uint256"},{"internalType":"uint256","name":"_powah","type":"uint256"},{"internalType":"uint256","name":"_cap","type":"uint256"},{"internalType":"address","name":"_account","type":"address"}],"name":"_hashCapped","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256[]","name":"_nftIDs","type":"uint256[]"},{"internalType":"uint256","name":"_dummyId","type":"uint256"},{"internalType":"uint256","name":"_powah","type":"uint256"},{"internalType":"address","name":"_account","type":"address"}],"name":"_hashForge","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"hash","type":"bytes32"},{"internalType":"bytes","name":"signature","type":"bytes"}],"name":"_verify","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"uint256","name":"_platformFee","type":"uint256"},{"internalType":"uint256","name":"_erc20Fee","type":"uint256"},{"internalType":"address","name":"_erc20","type":"address"}],"name":"activateCampaign","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"campaignFeeConfigs","outputs":[{"internalType":"address","name":"erc20","type":"address"},{"internalType":"uint256","name":"erc20Fee","type":"uint256"},{"internalType":"uint256","name":"platformFee","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"campaign_setter","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256","name":"_dummyId","type":"uint256"},{"internalType":"uint256","name":"_powah","type":"uint256"},{"internalType":"bytes","name":"_signature","type":"bytes"}],"name":"claim","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256","name":"_dummyId","type":"uint256"},{"internalType":"uint256","name":"_powah","type":"uint256"},{"internalType":"address","name":"_mintTo","type":"address"},{"internalType":"bytes","name":"_signature","type":"bytes"}],"name":"claim","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256[]","name":"_dummyIdArr","type":"uint256[]"},{"internalType":"uint256[]","name":"_powahArr","type":"uint256[]"},{"internalType":"address","name":"_mintTo","type":"address"},{"internalType":"bytes","name":"_signature","type":"bytes"}],"name":"claimBatch","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract 
IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256[]","name":"_dummyIdArr","type":"uint256[]"},{"internalType":"uint256[]","name":"_powahArr","type":"uint256[]"},{"internalType":"bytes","name":"_signature","type":"bytes"}],"name":"claimBatch","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256[]","name":"_dummyIdArr","type":"uint256[]"},{"internalType":"uint256[]","name":"_powahArr","type":"uint256[]"},{"internalType":"uint256","name":"_cap","type":"uint256"},{"internalType":"bytes","name":"_signature","type":"bytes"}],"name":"claimBatchCapped","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256[]","name":"_dummyIdArr","type":"uint256[]"},{"internalType":"uint256[]","name":"_powahArr","type":"uint256[]"},{"internalType":"uint256","name":"_cap","type":"uint256"},{"internalType":"address","name":"_mintTo","type":"address"},{"internalType":"bytes","name":"_signature","type":"bytes"}],"name":"claimBatchCapped","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256","name":"_dummyId","type":"uint256"},{"internalType":"uint256","name":"_powah","type":"uint256"},{"internalType":"uint256","name":"_cap","type":"uint256"},{"internalType":"bytes","name":"_signature","type":"bytes"}],"name":"claimCapped","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256","name":"_dummyId","type":"uint256"},{"internalType":"uint256","name":"_powah","type":"uint256"},{"internalType":"uint256","name":"_cap","type":"uint256"},{"internalType":"address","name":"_mintTo","type":"address"},{"internalType":"bytes","name":"_signature","type":"bytes"}],"name":"claimCapped","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256[]","name":"_nftIDs","type":"uint256[]"},{"internalType":"uint256","name":"_dummyId","type":"uint256"},{"internalType":"uint256","name":"_powah","type":"uint256"},{"internalType":"bytes","name":"_signature","type":"bytes"}],"name":"forge","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_cid","type":"uint256"},{"internalType":"contract 
IStarNFT","name":"_starNFT","type":"address"},{"internalType":"uint256[]","name":"_nftIDs","type":"uint256[]"},{"internalType":"uint256","name":"_dummyId","type":"uint256"},{"internalType":"uint256","name":"_powah","type":"uint256"},{"internalType":"address","name":"_mintTo","type":"address"},{"internalType":"bytes","name":"_signature","type":"bytes"}],"name":"forge","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[],"name":"galaxy_signer","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"hasMinted","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"manager","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"numMinted","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"paused","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bool","name":"_paused","type":"bool"}],"name":"setPause","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"treasury_manager","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"newAddress","type":"address"}],"name":"updateCampaignSetter","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newAddress","type":"address"}],"name":"updateGalaxySigner","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newAddress","type":"address"}],"name":"updateManager","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address payable","name":"newAddress","type":"address"}],"name":"updateTreasureManager","outputs":[],"stateMutability":"nonpayable","type":"function"},{"stateMutability":"payable","type":"receive"}] -------------------------------------------------------------------------------- /internal/onchain/account.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from loguru import logger 3 | from web3 import Web3 4 | from web3.exceptions import TransactionNotFound 5 | from web3.middleware import async_geth_poa_middleware 6 | from web3.contract.async_contract import AsyncContractConstructor 7 | 8 | from ..models import AccountInfo 9 | from ..utils import async_retry, get_proxy_url, get_w3, to_bytes 10 | from ..config import RPCs 11 | 12 | from .constants import SCANS, EIP1559_CHAINS, SPACE_STATION_ABI, LOYALTY_POINTS_ABI 13 | 14 | 15 | class OnchainAccount: 16 | 17 | def __init__(self, account: AccountInfo, chain: str): 18 | self.idx = account.idx 19 | self.account = account 20 | self.private_key = account.evm_private_key 21 | self.proxy = get_proxy_url(self.account.proxy) 22 | self.chain = chain 23 | self.w3 = get_w3(RPCs[chain], self.proxy) 24 | 25 | async def close(self): 26 | pass 27 | 28 | async def __aenter__(self) -> "OnchainAccount": 29 | return self 30 | 31 | async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: 32 | await self.close() 33 | 34 | @async_retry 35 | async def _build_and_send_tx(self, func: 
AsyncContractConstructor, **tx_vars): 36 | if self.chain in EIP1559_CHAINS: 37 | max_priority_fee = await self.w3.eth.max_priority_fee 38 | max_priority_fee = int(max_priority_fee * 2) 39 | base_fee_per_gas = int((await self.w3.eth.get_block("latest"))["baseFeePerGas"]) 40 | max_fee_per_gas = max_priority_fee + int(base_fee_per_gas * 2) 41 | gas_vars = {'maxPriorityFeePerGas': max_priority_fee, 'maxFeePerGas': max_fee_per_gas} 42 | else: 43 | gas_vars = {'gasPrice': await self.w3.eth.gas_price} 44 | tx = await func.build_transaction({ 45 | 'from': self.account.evm_address, 46 | 'nonce': await self.w3.eth.get_transaction_count(self.account.evm_address), 47 | 'gas': 0, 48 | **gas_vars, 49 | **tx_vars, 50 | }) 51 | try: 52 | estimate = await self.w3.eth.estimate_gas(tx) 53 | tx['gas'] = int(estimate * 1.2) 54 | except Exception as e: 55 | raise Exception(f'Tx simulation failed: {str(e)}') 56 | 57 | signed_tx = self.w3.eth.account.sign_transaction(tx, self.private_key) 58 | tx_hash = await self.w3.eth.send_raw_transaction(signed_tx.rawTransaction) 59 | 60 | return tx_hash 61 | 62 | async def _tx_verification(self, tx_hash, action, poll_latency=1): 63 | logger.info(f'{self.idx}) {action} - Tx sent. Waiting for 120s') 64 | time_passed = 0 65 | tx_link = f'{SCANS.get(self.chain, "")}/tx/{tx_hash.hex()}' 66 | while time_passed < 120: 67 | try: 68 | tx_data = await self.w3.eth.get_transaction_receipt(tx_hash) 69 | if tx_data is not None: 70 | if tx_data.get('status') == 1: 71 | logger.success(f'{self.idx}) {action} - Successful tx: {tx_link}') 72 | return 73 | msg = f'Failed tx: {tx_link}' 74 | logger.error(f'{self.idx}) {msg}') 75 | raise Exception(msg) 76 | except TransactionNotFound: 77 | pass 78 | 79 | time_passed += poll_latency 80 | await asyncio.sleep(poll_latency) 81 | 82 | msg = f'{action} - Pending tx: {tx_link}' 83 | logger.warning(f'{self.idx}) {msg}') 84 | raise Exception(msg) 85 | 86 | async def build_and_send_tx(self, func: AsyncContractConstructor, action='', **tx_vars) -> str: 87 | try: 88 | tx_hash = await self._build_and_send_tx(func, **tx_vars) 89 | except Exception as e: 90 | if 'you are connected to a POA chain' in str(e): 91 | self.w3.middleware_onion.inject(async_geth_poa_middleware, layer=0) 92 | tx_hash = await self._build_and_send_tx(func, **tx_vars) 93 | else: 94 | raise e 95 | await self._tx_verification(tx_hash, action) 96 | return tx_hash.hex() 97 | 98 | async def claim_capped(self, space_station_address, number_id, signature, 99 | nft_core_address, verify_id, powah, cap) -> str: 100 | try: 101 | space_station_address = Web3.to_checksum_address(space_station_address) 102 | nft_core_address = Web3.to_checksum_address(nft_core_address) 103 | contract = self.w3.eth.contract(space_station_address, abi=SPACE_STATION_ABI) 104 | 105 | tx_hash = await self.build_and_send_tx( 106 | contract.functions.claimCapped(number_id, nft_core_address, verify_id, powah, cap, to_bytes(signature)), 107 | 'Claim' 108 | ) 109 | return tx_hash 110 | 111 | except Exception as e: 112 | raise Exception(f'Failed to claim capped: {str(e)}') 113 | 114 | @async_retry 115 | async def claim(self, space_station_address, number_id, signature, nft_core_address, verify_id, powah) -> str: 116 | try: 117 | space_station_address = Web3.to_checksum_address(space_station_address) 118 | nft_core_address = Web3.to_checksum_address(nft_core_address) 119 | contract = self.w3.eth.contract(space_station_address, abi=SPACE_STATION_ABI) 120 | 121 | tx_hash = await self.build_and_send_tx( 122 | 
contract.functions.claim(number_id, nft_core_address, verify_id, powah, to_bytes(signature)), 123 | 'Claim' 124 | ) 125 | return tx_hash 126 | 127 | except Exception as e: 128 | raise Exception(f'Failed to claim: {str(e)}') 129 | 130 | @async_retry 131 | async def claim_loyalty_points(self, lp_dist_station_address, lp_contract, verify_id, amount, signature) -> str: 132 | try: 133 | lp_dist_station_address = Web3.to_checksum_address(lp_dist_station_address) 134 | lp_contract = Web3.to_checksum_address(lp_contract) 135 | contract = self.w3.eth.contract(lp_dist_station_address, abi=LOYALTY_POINTS_ABI) 136 | amount = int(amount * 10 ** 18) 137 | tx_hash = await self.build_and_send_tx( 138 | contract.functions.increasePoint( 139 | lp_contract, verify_id, self.account.evm_address, amount, to_bytes(signature) 140 | ), 141 | 'Claim Loyalty Points' 142 | ) 143 | return tx_hash 144 | 145 | except Exception as e: 146 | raise Exception(f'Failed to claim loyalty points: {str(e)}') 147 | -------------------------------------------------------------------------------- /internal/onchain/constants.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | 4 | 5 | def read_file(path): 6 | return open(os.path.join(os.path.dirname(__file__), path), 'r', encoding='utf-8') 7 | 8 | 9 | SCANS = { 10 | 'Ethereum': 'https://etherscan.io', 11 | 'Optimism': 'https://optimistic.etherscan.io', 12 | 'BSC': 'https://bscscan.com', 13 | 'Gnosis': 'https://gnosisscan.io', 14 | 'Polygon': 'https://polygonscan.com', 15 | 'Fantom': 'https://ftmscan.com', 16 | 'Arbitrum': 'https://arbiscan.io', 17 | 'Avalanche': 'https://snowtrace.io', 18 | 'zkSync': 'https://explorer.zksync.io', 19 | 'Linea': 'https://lineascan.build', 20 | 'Base': 'https://basescan.org', 21 | 'zkEVM': 'https://zkevm.polygonscan.com', 22 | 'Scroll': 'https://scrollscan.com', 23 | 'Gravity': 'https://explorer.gravity.xyz', 24 | } 25 | 26 | ZERO_ADDRESS = '0x0000000000000000000000000000000000000000' 27 | 28 | SPACE_STATION_ABI = json.load(read_file('abi/space_station.json')) 29 | LOYALTY_POINTS_ABI = json.load(read_file('abi/loyalty_points.json')) 30 | 31 | EIP1559_CHAINS = ['Ethereum', 'Optimism', 'Polygon', 'Arbitrum', 'Linea', 'Base', 'Scroll'] 32 | -------------------------------------------------------------------------------- /internal/storage/__init__.py: -------------------------------------------------------------------------------- 1 | from .storage import Storage, AccountStorage 2 | -------------------------------------------------------------------------------- /internal/storage/storage.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import asyncio 4 | from copy import deepcopy 5 | from typing import Optional 6 | 7 | from ..models import AccountInfo 8 | 9 | 10 | class Storage: 11 | 12 | def __init__(self, filename: str): 13 | self.filename = filename 14 | self.data = {} 15 | self.lock = asyncio.Lock() 16 | 17 | def init(self): 18 | if not os.path.exists(self.filename): 19 | self.data = {} 20 | return 21 | with open(self.filename, 'r', encoding='utf-8') as file: 22 | if len(file.read().strip()) == 0: 23 | self.data = {} 24 | return 25 | with open(self.filename, 'r', encoding='utf-8') as file: 26 | converted_data = json.load(file) 27 | self.data = converted_data 28 | 29 | def get_final_value(self, key: str): 30 | value = self.data.get(key) 31 | if value is None: 32 | return None 33 | return deepcopy(value) 34 | 35 | def 
set_final_value(self, key: str, value): 36 | self.data[key] = deepcopy(value) 37 | 38 | def remove(self, key: str): 39 | if key in self.data: 40 | self.data.pop(key) 41 | 42 | async def get_value(self, key: str): 43 | async with self.lock: 44 | return self.get_final_value(key) 45 | 46 | async def set_value(self, key: str, value): 47 | async with self.lock: 48 | self.set_final_value(key, value) 49 | 50 | async def async_save(self): 51 | async with self.lock: 52 | self.save() 53 | 54 | def save(self): 55 | self._save(self.data) 56 | 57 | def _save(self, converted_data): 58 | with open(self.filename, 'w', encoding='utf-8') as file: 59 | json.dump(converted_data, file) 60 | 61 | 62 | class AccountStorage(Storage): 63 | 64 | def __init__(self, filename: str): 65 | super().__init__(filename) 66 | 67 | def init(self): 68 | super().init() 69 | self.data = {a: AccountInfo.from_dict(i) for a, i in self.data.items()} 70 | 71 | def get_final_account_info(self, address: str) -> Optional[AccountInfo]: 72 | return self.get_final_value(address) 73 | 74 | def set_final_account_info(self, address: str, info: AccountInfo): 75 | return self.set_final_value(address, info) 76 | 77 | async def get_account_info(self, address: str) -> Optional[AccountInfo]: 78 | return await self.get_value(address) 79 | 80 | async def set_account_info(self, address: str, info: AccountInfo): 81 | await self.set_value(address, info) 82 | 83 | async def async_save(self): 84 | await super().async_save() 85 | 86 | def save(self): 87 | converted_data = {a: i.to_dict() for a, i in self.data.items()} 88 | super()._save(converted_data) 89 | -------------------------------------------------------------------------------- /internal/tls/__init__.py: -------------------------------------------------------------------------------- 1 | from .client import TLSClient 2 | -------------------------------------------------------------------------------- /internal/tls/client.py: -------------------------------------------------------------------------------- 1 | import platform 2 | import warnings 3 | from curl_cffi.requests import AsyncSession, BrowserType 4 | 5 | from ..models import AccountInfo 6 | from ..config import DISABLE_SSL 7 | from ..utils import async_retry, get_proxy_url 8 | from ..vars import USER_AGENT, SEC_CH_UA, SEC_CH_UA_PLATFORM 9 | 10 | 11 | warnings.filterwarnings('ignore', module='curl_cffi') 12 | 13 | 14 | if platform.system() == 'Windows': 15 | IMPERSONATE = BrowserType.chrome124 16 | else: 17 | IMPERSONATE = BrowserType.chrome131 18 | 19 | 20 | def get_default_headers(): 21 | return { 22 | 'accept': '*/*', 23 | 'accept-encoding': 'gzip, deflate, br', 24 | 'accept-language': 'en-US,en;q=0.9', 25 | 'sec-ch-ua': SEC_CH_UA, 26 | 'sec-ch-ua-mobile': '?0', 27 | 'sec-ch-ua-platform': SEC_CH_UA_PLATFORM, 28 | 'sec-fetch-dest': 'empty', 29 | 'sec-fetch-mode': 'cors', 30 | 'sec-fetch-site': 'same-site', 31 | 'user-agent': USER_AGENT, 32 | } 33 | 34 | 35 | class TLSClient: 36 | 37 | def __init__(self, account: AccountInfo, custom_headers: dict = None, custom_cookies: dict = None, debug=False): 38 | self.account = account 39 | self._headers = {} 40 | self.proxy = get_proxy_url(self.account.proxy) 41 | self.proxies = {'http': self.proxy, 'https': self.proxy} if self.proxy else {} 42 | headers = get_default_headers() 43 | if custom_headers is not None: 44 | headers.update(custom_headers) 45 | self.sess = AsyncSession( 46 | proxies=self.proxies, 47 | headers=headers, 48 | cookies=custom_cookies, 49 | impersonate=IMPERSONATE, 50 | ) 51 | 
self.debug = debug 52 | 53 | async def close(self): 54 | await self.sess.close() 55 | 56 | @classmethod 57 | def _handle_response(cls, resp_raw, acceptable_statuses=None, resp_handler=None, with_text=False): 58 | if acceptable_statuses and len(acceptable_statuses) > 0: 59 | if resp_raw.status_code not in acceptable_statuses: 60 | raise Exception(f'Bad status code [{resp_raw.status_code}]: Response = {resp_raw.text}') 61 | try: 62 | if with_text: 63 | return resp_raw.text if resp_handler is None else resp_handler(resp_raw.text) 64 | else: 65 | return resp_raw.json() if resp_handler is None else resp_handler(resp_raw.json()) 66 | except Exception as e: 67 | raise Exception(f'{str(e)}: Status = {resp_raw.status_code}. ' 68 | f'Response saved in logs/errors.txt\n{resp_raw.text}') 69 | 70 | def update_headers(self, new_headers: dict): 71 | self.sess.headers.update(new_headers) 72 | 73 | @async_retry 74 | async def _raw_request(self, method, url, **kwargs): 75 | match method.lower(): 76 | case 'get': 77 | resp = await self.sess.get(url, **kwargs) 78 | case 'post': 79 | resp = await self.sess.post(url, **kwargs) 80 | case unexpected: 81 | raise Exception(f'Wrong request method: {unexpected}') 82 | return resp 83 | 84 | async def request(self, method, url, acceptable_statuses=None, resp_handler=None, with_text=False, 85 | raw=False, **kwargs): 86 | if 'timeout' not in kwargs: 87 | kwargs.update({'timeout': 60}) 88 | if DISABLE_SSL: 89 | kwargs.update({'verify': False}) 90 | resp = await self._raw_request(method, url, **kwargs) 91 | if raw: 92 | return resp 93 | return self._handle_response(resp, acceptable_statuses, resp_handler, with_text) 94 | 95 | async def get(self, url, acceptable_statuses=None, resp_handler=None, with_text=False, **kwargs): 96 | return await self.request('GET', url, acceptable_statuses, resp_handler, with_text, **kwargs) 97 | 98 | async def post(self, url, acceptable_statuses=None, resp_handler=None, with_text=False, **kwargs): 99 | return await self.request('POST', url, acceptable_statuses, resp_handler, with_text, **kwargs) 100 | -------------------------------------------------------------------------------- /internal/twitter/__init__.py: -------------------------------------------------------------------------------- 1 | from .twitter import Twitter, UserNotFound 2 | -------------------------------------------------------------------------------- /internal/twitter/client_transaction/__init__.py: -------------------------------------------------------------------------------- 1 | from .transaction import ClientTransaction 2 | -------------------------------------------------------------------------------- /internal/twitter/client_transaction/cubic_curve.py: -------------------------------------------------------------------------------- 1 | from typing import Union, List 2 | 3 | 4 | class Cubic: 5 | def __init__(self, curves: List[Union[float, int]]): 6 | self.curves = curves 7 | 8 | def get_value(self, time: Union[float, int]): 9 | start_gradient = end_gradient = start = mid = 0.0 10 | end = 1.0 11 | 12 | if time <= 0.0: 13 | if self.curves[0] > 0.0: 14 | start_gradient = self.curves[1] / self.curves[0] 15 | elif self.curves[1] == 0.0 and self.curves[2] > 0.0: 16 | start_gradient = self.curves[3] / self.curves[2] 17 | return start_gradient * time 18 | 19 | if time >= 1.0: 20 | if self.curves[2] < 1.0: 21 | end_gradient = (self.curves[3] - 1.0) / (self.curves[2] - 1.0) 22 | elif self.curves[2] == 1.0 and self.curves[0] < 1.0: 23 | end_gradient = (self.curves[1] - 1.0) / 
(self.curves[0] - 1.0) 24 | return 1.0 + end_gradient * (time - 1.0) 25 | 26 | while start < end: 27 | mid = (start + end) / 2 28 | x_est = self.calculate(self.curves[0], self.curves[2], mid) 29 | if abs(time - x_est) < 0.00001: 30 | return self.calculate(self.curves[1], self.curves[3], mid) 31 | if x_est < time: 32 | start = mid 33 | else: 34 | end = mid 35 | return self.calculate(self.curves[1], self.curves[3], mid) 36 | 37 | @staticmethod 38 | def calculate(a, b, m): 39 | return 3.0 * a * (1 - m) * (1 - m) * m + 3.0 * b * (1 - m) * m * m + m * m * m 40 | -------------------------------------------------------------------------------- /internal/twitter/client_transaction/interpolate.py: -------------------------------------------------------------------------------- 1 | from typing import Union, List 2 | 3 | 4 | def interpolate(from_list: List[Union[float, int]], to_list: List[Union[float, int]], f: Union[float, int]): 5 | if len(from_list) != len(to_list): 6 | raise Exception( 7 | f"Mismatched interpolation arguments {from_list}: {to_list}") 8 | out = [] 9 | for i in range(len(from_list)): 10 | out.append(interpolate_num(from_list[i], to_list[i], f)) 11 | return out 12 | 13 | 14 | def interpolate_num(from_val: List[Union[float, int]], to_val: List[Union[float, int]], f: Union[float, int]): 15 | if all([isinstance(number, (int, float)) for number in [from_val, to_val]]): 16 | return from_val * (1 - f) + to_val * f 17 | 18 | if all([isinstance(number, bool) for number in [from_val, to_val]]): 19 | return from_val if f < 0.5 else to_val 20 | 21 | -------------------------------------------------------------------------------- /internal/twitter/client_transaction/rotation.py: -------------------------------------------------------------------------------- 1 | import math 2 | from typing import Union 3 | 4 | 5 | def convert_rotation_to_matrix(rotation: Union[float, int]): 6 | rad = math.radians(rotation) 7 | return [math.cos(rad), -math.sin(rad), math.sin(rad), math.cos(rad)] 8 | 9 | 10 | def convertRotationToMatrix(degrees: Union[float, int]): 11 | # first convert degrees to radians 12 | radians = degrees * math.pi / 180 13 | # now we do this: 14 | """ 15 | [cos(r), -sin(r), 0] 16 | [sin(r), cos(r), 0] 17 | 18 | in this order: 19 | [cos(r), sin(r), -sin(r), cos(r), 0, 0] 20 | """ 21 | cos = math.cos(radians) 22 | sin = math.sin(radians) 23 | return [cos, sin, -sin, cos, 0, 0] 24 | -------------------------------------------------------------------------------- /internal/twitter/client_transaction/transaction.py: -------------------------------------------------------------------------------- 1 | import re 2 | import bs4 3 | import math 4 | import time 5 | import random 6 | import base64 7 | import hashlib 8 | from curl_cffi.requests.models import Response as CurlResponse 9 | from typing import Union, List 10 | from functools import reduce 11 | from .cubic_curve import Cubic 12 | from .interpolate import interpolate 13 | from .rotation import convert_rotation_to_matrix 14 | from .utils import float_to_hex, is_odd, base64_encode, handle_x_migration 15 | from ...tls import TLSClient 16 | 17 | 18 | ON_DEMAND_FILE_REGEX = re.compile( 19 | r"""['|\"]{1}ondemand\.s['|\"]{1}:\s*['|\"]{1}([\w]*)['|\"]{1}""", flags=(re.VERBOSE | re.MULTILINE)) 20 | INDICES_REGEX = re.compile( 21 | r"""(\(\w{1}\[(\d{1,2})\],\s*16\))+""", flags=(re.VERBOSE | re.MULTILINE)) 22 | 23 | 24 | class ClientTransaction: 25 | ADDITIONAL_RANDOM_NUMBER = 3 26 | DEFAULT_KEYWORD = "obfiowerehiring" 27 | DEFAULT_ROW_INDEX = None 28 
| DEFAULT_KEY_BYTES_INDICES = None 29 | 30 | def __init__(self): 31 | self.home_page_response = None 32 | self.headers = {'Referer': 'https://x.com'} 33 | 34 | async def init(self, tls: TLSClient): 35 | try: 36 | home_page_response = await handle_x_migration(tls, self.headers) 37 | 38 | self.home_page_response = self.validate_response(home_page_response) 39 | self.DEFAULT_ROW_INDEX, self.DEFAULT_KEY_BYTES_INDICES = await self.get_indices( 40 | self.home_page_response, tls) 41 | self.key = self.get_key(response=self.home_page_response) 42 | self.key_bytes = self.get_key_bytes(key=self.key) 43 | self.animation_key = self.get_animation_key( 44 | key_bytes=self.key_bytes, response=self.home_page_response) 45 | except Exception as e: 46 | raise Exception(f'Init client transactions failed: {e}') 47 | 48 | async def get_indices(self, home_page_response, tls: TLSClient): 49 | key_byte_indices = [] 50 | response = self.validate_response( 51 | home_page_response) or self.home_page_response 52 | on_demand_file = ON_DEMAND_FILE_REGEX.search(str(response)) 53 | if on_demand_file: 54 | on_demand_file_url = f"https://abs.twimg.com/responsive-web/client-web/ondemand.s.{on_demand_file.group(1)}a.js" 55 | on_demand_file_response = await tls.get(on_demand_file_url, headers=self.headers, raw=True) 56 | key_byte_indices_match = INDICES_REGEX.finditer( 57 | str(on_demand_file_response.text)) 58 | for item in key_byte_indices_match: 59 | key_byte_indices.append(item.group(2)) 60 | if not key_byte_indices: 61 | raise Exception("Couldn't get KEY_BYTE indices") 62 | key_byte_indices = list(map(int, key_byte_indices)) 63 | return key_byte_indices[0], key_byte_indices[1:] 64 | 65 | def validate_response(self, response: Union[bs4.BeautifulSoup, CurlResponse]): 66 | if not isinstance(response, (bs4.BeautifulSoup, CurlResponse)): 67 | raise Exception("invalid response") 68 | return response if isinstance(response, bs4.BeautifulSoup) else bs4.BeautifulSoup(response.content, 'lxml') 69 | 70 | def get_key(self, response=None): 71 | response = self.validate_response(response) or self.home_page_response 72 | # 73 | element = response.select_one("[name='twitter-site-verification']") 74 | if not element: 75 | raise Exception("Couldn't get key from the page source") 76 | return element.get("content") 77 | 78 | def get_key_bytes(self, key: str): 79 | return list(base64.b64decode(bytes(key, 'utf-8'))) 80 | 81 | def get_frames(self, response=None): 82 | # loading-x-anim-0...loading-x-anim-3 83 | response = self.validate_response(response) or self.home_page_response 84 | return response.select("[id^='loading-x-anim']") 85 | 86 | def get_2d_array(self, key_bytes: List[Union[float, int]], response, frames: bs4.ResultSet = None): 87 | if not frames: 88 | frames = self.get_frames(response) 89 | # return list(list(frames[key[5] % 4].children)[0].children)[1].get("d")[9:].split("C") 90 | return [[int(x) for x in re.sub(r"[^\d]+", " ", item).strip().split()] for item in list(list(frames[key_bytes[5] % 4].children)[0].children)[1].get("d")[9:].split("C")] 91 | 92 | def solve(self, value, min_val, max_val, rounding: bool): 93 | result = value * (max_val-min_val) / 255 + min_val 94 | return math.floor(result) if rounding else round(result, 2) 95 | 96 | def animate(self, frames, target_time): 97 | # from_color = f"#{''.join(['{:x}'.format(digit) for digit in frames[:3]])}" 98 | # to_color = f"#{''.join(['{:x}'.format(digit) for digit in frames[3:6]])}" 99 | # from_rotation = "rotate(0deg)" 100 | # to_rotation = f"rotate({solve(frames[6], 60, 
360, True)}deg)" 101 | # easing_values = [solve(value, -1 if count % 2 else 0, 1, False) 102 | # for count, value in enumerate(frames[7:])] 103 | # easing = f"cubic-bezier({','.join([str(value) for value in easing_values])})" 104 | # current_time = round(target_time / 10) * 10 105 | 106 | from_color = [float(item) for item in [*frames[:3], 1]] 107 | to_color = [float(item) for item in [*frames[3:6], 1]] 108 | from_rotation = [0.0] 109 | to_rotation = [self.solve(float(frames[6]), 60.0, 360.0, True)] 110 | frames = frames[7:] 111 | curves = [self.solve(float(item), is_odd(counter), 1.0, False) 112 | for counter, item in enumerate(frames)] 113 | cubic = Cubic(curves) 114 | val = cubic.get_value(target_time) 115 | color = interpolate(from_color, to_color, val) 116 | color = [value if value > 0 else 0 for value in color] 117 | rotation = interpolate(from_rotation, to_rotation, val) 118 | matrix = convert_rotation_to_matrix(rotation[0]) 119 | # str_arr = [format(int(round(color[i])), '02x') for i in range(len(color) - 1)] 120 | # str_arr = [format(int(round(color[i])), 'x') for i in range(len(color) - 1)] 121 | str_arr = [format(round(value), 'x') for value in color[:-1]] 122 | for value in matrix: 123 | rounded = round(value, 2) 124 | if rounded < 0: 125 | rounded = -rounded 126 | hex_value = float_to_hex(rounded) 127 | str_arr.append(f"0{hex_value}".lower() if hex_value.startswith( 128 | ".") else hex_value if hex_value else '0') 129 | str_arr.extend(["0", "0"]) 130 | animation_key = re.sub(r"[.-]", "", "".join(str_arr)) 131 | return animation_key 132 | 133 | def get_animation_key(self, key_bytes, response): 134 | total_time = 4096 135 | # row_index, frame_time = [key_bytes[2] % 16, key_bytes[12] % 16 * (key_bytes[14] % 16) * (key_bytes[7] % 16)] 136 | # row_index, frame_time = [key_bytes[2] % 16, key_bytes[2] % 16 * (key_bytes[42] % 16) * (key_bytes[45] % 16)] 137 | 138 | row_index = key_bytes[self.DEFAULT_ROW_INDEX] % 16 139 | frame_time = reduce(lambda num1, num2: num1*num2, 140 | [key_bytes[index] % 16 for index in self.DEFAULT_KEY_BYTES_INDICES]) 141 | arr = self.get_2d_array(key_bytes, response) 142 | frame_row = arr[row_index] 143 | 144 | target_time = float(frame_time) / total_time 145 | animation_key = self.animate(frame_row, target_time) 146 | return animation_key 147 | 148 | def generate_transaction_id(self, method: str, path: str, response=None, key=None, animation_key=None, time_now=None): 149 | try: 150 | time_now = time_now or math.floor( 151 | (time.time() * 1000 - 1682924400 * 1000) / 1000) 152 | time_now_bytes = [(time_now >> (i * 8)) & 0xFF for i in range(4)] 153 | key = key or self.key or self.get_key(response) 154 | key_bytes = self.get_key_bytes(key) 155 | animation_key = animation_key or self.animation_key or self.get_animation_key( 156 | key_bytes, response) 157 | # hash_val = hashlib.sha256(f"{method}!{path}!{time_now}bird{animation_key}".encode()).digest() 158 | hash_val = hashlib.sha256( 159 | f"{method}!{path}!{time_now}{self.DEFAULT_KEYWORD}{animation_key}".encode()).digest() 160 | # hash_bytes = [int(hash_val[i]) for i in range(len(hash_val))] 161 | hash_bytes = list(hash_val) 162 | random_num = random.randint(0, 255) 163 | bytes_arr = [*key_bytes, *time_now_bytes, * 164 | hash_bytes[:16], self.ADDITIONAL_RANDOM_NUMBER] 165 | out = bytearray( 166 | [random_num, *[item ^ random_num for item in bytes_arr]]) 167 | return base64_encode(out).strip("=") 168 | except Exception as e: 169 | raise Exception(f'Generate Client-Transaction-Id failed: {e}') 170 | 
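# --- Illustrative usage sketch (not part of the original file) ---
# The Twitter client in internal/twitter/twitter.py wires ClientTransaction in roughly
# this way (names taken from that module; shown here only as a summary, assuming the
# project's TLSClient instance is available as `tls`):
#
#   ct = ClientTransaction()
#   await ct.init(tls)  # fetches the x.com home page and ondemand.s JS to derive key bytes / animation key
#   tx_id = ct.generate_transaction_id('GET', urlparse(url).path)
#   headers = {'X-Client-Transaction-Id': tx_id}  # attached to every Twitter API request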
-------------------------------------------------------------------------------- /internal/twitter/client_transaction/utils.py: -------------------------------------------------------------------------------- 1 | import re 2 | import bs4 3 | import base64 4 | from typing import Union 5 | 6 | from ...tls import TLSClient 7 | 8 | 9 | async def handle_x_migration(tls: TLSClient, headers): 10 | home_page = None 11 | migration_redirection_regex = re.compile( 12 | r"""(http(?:s)?://(?:www\.)?(twitter|x){1}\.com(/x)?/migrate([/?])?tok=[a-zA-Z0-9%\-_]+)+""", re.VERBOSE) 13 | response = await tls.get("https://x.com", headers=headers, raw=True) 14 | home_page = bs4.BeautifulSoup(response.content, 'lxml') 15 | migration_url = home_page.select_one("meta[http-equiv='refresh']") 16 | migration_redirection_url = re.search(migration_redirection_regex, str( 17 | migration_url)) or re.search(migration_redirection_regex, str(response.content)) 18 | if migration_redirection_url: 19 | response = await tls.get(migration_redirection_url.group(0), headers=headers, raw=True) 20 | home_page = bs4.BeautifulSoup(response.content, 'lxml') 21 | migration_form = home_page.select_one("form[name='f']") or home_page.select_one(f"form[action='https://x.com/x/migrate']") 22 | if migration_form: 23 | url = migration_form.attrs.get("action", "https://x.com/x/migrate") + "/?mx=2" 24 | method = migration_form.attrs.get("method", "POST") 25 | request_payload = {input_field.get("name"): input_field.get("value") for input_field in migration_form.select("input")} 26 | response = await tls.request(method=method, url=url, data=request_payload, headers=headers, raw=True) 27 | home_page = bs4.BeautifulSoup(response.content, 'lxml') 28 | return home_page 29 | 30 | 31 | def float_to_hex(x): 32 | result = [] 33 | quotient = int(x) 34 | fraction = x - quotient 35 | 36 | while quotient > 0: 37 | quotient = int(x / 16) 38 | remainder = int(x - (float(quotient) * 16)) 39 | 40 | if remainder > 9: 41 | result.insert(0, chr(remainder + 55)) 42 | else: 43 | result.insert(0, str(remainder)) 44 | 45 | x = float(quotient) 46 | 47 | if fraction == 0: 48 | return ''.join(result) 49 | 50 | result.append('.') 51 | 52 | while fraction > 0: 53 | fraction *= 16 54 | integer = int(fraction) 55 | fraction -= float(integer) 56 | 57 | if integer > 9: 58 | result.append(chr(integer + 55)) 59 | else: 60 | result.append(str(integer)) 61 | 62 | return ''.join(result) 63 | 64 | 65 | def is_odd(num: Union[int, float]): 66 | if num % 2: 67 | return -1.0 68 | return 0.0 69 | 70 | 71 | def base64_encode(string): 72 | string = string.encode() if isinstance(string, str) else string 73 | return base64.b64encode(string).decode() 74 | 75 | 76 | def base64_decode(input): 77 | try: 78 | data = base64.b64decode(input) 79 | return data.decode() 80 | except Exception: 81 | # return bytes(input, "utf-8") 82 | return list(bytes(input, "utf-8")) 83 | -------------------------------------------------------------------------------- /internal/twitter/twitter.py: -------------------------------------------------------------------------------- 1 | import random 2 | import json 3 | import binascii 4 | from urllib.parse import urlparse 5 | 6 | from .client_transaction import ClientTransaction 7 | from ..models import AccountInfo 8 | from ..utils import get_proxy_url, handle_aio_response, async_retry, get_conn 9 | from ..config import DISABLE_SSL 10 | from ..tls import TLSClient 11 | from ..vars import USER_AGENT, SEC_CH_UA, SEC_CH_UA_PLATFORM 12 | 13 | 14 | class UserNotFound(Exception): 
15 | def __init__(self): 16 | super().__init__('User not found') 17 | 18 | 19 | def generate_csrf_token(size=16): 20 | data = random.getrandbits(size * 8).to_bytes(size, "big") 21 | return binascii.hexlify(data).decode() 22 | 23 | 24 | def _get_headers() -> dict: 25 | # if is_empty(info.user_agent): 26 | # info.user_agent = USER_AGENT 27 | # info.sec_ch_ua = SEC_CH_UA 28 | # info.sec_ch_ua_platform = SEC_CH_UA_PLATFORM 29 | return { 30 | 'accept': '*/*', 31 | 'accept-language': 'en;q=0.9', 32 | 'authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', 33 | 'content-type': 'application/json', 34 | 'origin': 'https://x.com', 35 | 'referer': 'https://x.com/', 36 | 'sec-ch-ua': SEC_CH_UA, 37 | 'sec-ch-ua-mobile': '?0', 38 | 'sec-ch-ua-platform': SEC_CH_UA_PLATFORM, 39 | 'sec-fetch-dest': 'empty', 40 | 'sec-fetch-mode': 'cors', 41 | 'sec-fetch-site': 'same-origin', 42 | 'x-twitter-active-user': 'yes', 43 | 'x-twitter-auth-type': 'OAuth2Session', 44 | 'x-twitter-client-language': 'en', 45 | 'x-csrf-token': '', 46 | 'user-agent': USER_AGENT, 47 | } 48 | 49 | 50 | class Twitter: 51 | 52 | COOKIES_DOMAIN = '.x.com' 53 | 54 | def __init__(self, account_info: AccountInfo): 55 | self.account = account_info 56 | self.proxy = get_proxy_url(account_info.proxy) 57 | self.my_user_id = None 58 | self.my_username = None 59 | self.tls = TLSClient(self.account, _get_headers(), debug=True) 60 | self.client_transaction = ClientTransaction() 61 | 62 | async def start(self): 63 | await self.client_transaction.init(self.tls) 64 | self.set_cookies({'auth_token': self.account.twitter_auth_token}) 65 | ct0 = self.account.twitter_ct0 66 | if ct0 == '': 67 | ct0 = await self._get_ct0() 68 | self.account.twitter_ct0 = ct0 69 | self.set_cookies({'ct0': ct0}) 70 | self.tls.update_headers({'x-csrf-token': ct0}) 71 | self.my_username = await self.get_my_profile_info() 72 | self.my_user_id = await self.get_user_id(self.my_username) 73 | 74 | def set_cookies(self, cookies): 75 | for name, value in cookies.items(): 76 | self.tls.sess.cookies.set(name, value, self.COOKIES_DOMAIN) 77 | 78 | async def request(self, method, url, resp_handler=None, **kwargs): 79 | try: 80 | tx_id = self.client_transaction.generate_transaction_id(method, urlparse(url).path) 81 | headers = {'X-Client-Transaction-Id': tx_id} 82 | if 'headers' in kwargs: 83 | headers.update(kwargs.pop('headers')) 84 | resp_handler = self.get_check_errors_resp_handler(resp_handler) 85 | return await self.tls.request(method, url, headers=headers, resp_handler=resp_handler, **kwargs) 86 | except Exception as e: 87 | self.account.twitter_error = True 88 | raise e 89 | 90 | async def _get_ct0(self): 91 | try: 92 | await self.tls.get('https://api.x.com/1.1/account/settings.json', raw=True) 93 | return self.tls.sess.cookies.get('ct0', self.account.twitter_ct0, self.COOKIES_DOMAIN) 94 | except Exception as e: 95 | reason = 'Your account has been locked\n' if 'Your account has been locked' in str(e) else '' 96 | self.account.twitter_error = True 97 | raise Exception(f'Failed to get ct0 for twitter: {reason}{str(e)}') 98 | 99 | def get_check_errors_resp_handler(self, resp_handler): 100 | check = self.check_response_errors 101 | return lambda resp: check(resp) if resp_handler is None else resp_handler(check(resp)) 102 | 103 | @classmethod 104 | def check_response_errors(cls, resp): 105 | if type(resp) is not dict: 106 | return resp 107 | errors = resp.get('errors', []) 108 | if type(errors) is not list: 109 
| return resp 110 | if len(errors) == 0: 111 | return resp 112 | msgs = [msg for msg in [f"{err.get('message')} (code={err.get('code')})" 113 | for err in errors if type(err) is dict] if msg] 114 | msgs = list(set(msgs)) 115 | error_msg = ' | '.join(msgs) 116 | if len(error_msg) == 0: 117 | return resp 118 | raise Exception(error_msg) 119 | 120 | async def get_my_profile_info(self): 121 | url = 'https://api.x.com/graphql/UhddhjWCl-JMqeiG4vPtvw/Viewer' 122 | features = { 123 | "rweb_tipjar_consumption_enabled": True, 124 | "responsive_web_graphql_exclude_directive_enabled": True, 125 | "verified_phone_label_enabled": False, 126 | "creator_subscriptions_tweet_preview_api_enabled": True, 127 | "responsive_web_graphql_skip_user_profile_image_extensions_enabled": False, 128 | "responsive_web_graphql_timeline_navigation_enabled": True, 129 | } 130 | field_toggles = { 131 | "isDelegate": False, 132 | "withAuxiliaryUserLabels": False, 133 | } 134 | variables = {"withCommunitiesMemberships": True} 135 | params = { 136 | "features": features, 137 | "fieldToggles": field_toggles, 138 | "variables": variables, 139 | } 140 | try: 141 | return await self.request( 142 | "GET", url, params=params, 143 | resp_handler=lambda r: r['data']['viewer']['user_results']['result']['legacy']['screen_name'], 144 | ) 145 | except Exception as e: 146 | raise Exception(f'Get my username error: {str(e)}') 147 | 148 | async def get_followers_count(self, username): 149 | url = 'https://x.com/i/api/graphql/G3KGOASz96M-Qu0nwmGXNg/UserByScreenName' 150 | params = { 151 | 'variables': to_json({"screen_name": username, "withSafetyModeUserFields": True}), 152 | 'features': to_json({ 153 | "hidden_profile_likes_enabled": True, 154 | "hidden_profile_subscriptions_enabled": True, 155 | "responsive_web_graphql_exclude_directive_enabled": True, 156 | "verified_phone_label_enabled": False, 157 | "subscriptions_verification_info_is_identity_verified_enabled": True, 158 | "subscriptions_verification_info_verified_since_enabled": True, 159 | "highlights_tweets_tab_ui_enabled": True, 160 | "creator_subscriptions_tweet_preview_api_enabled": True, 161 | "responsive_web_graphql_skip_user_profile_image_extensions_enabled": False, 162 | "responsive_web_graphql_timeline_navigation_enabled": True 163 | }), 164 | 'fieldToggles': to_json({"withAuxiliaryUserLabels": False}) 165 | } 166 | try: 167 | return await self.request( 168 | "GET", url, params=params, 169 | resp_handler=lambda r: r['data']['user']['result']['legacy']['followers_count'] 170 | ) 171 | except Exception as e: 172 | raise Exception(f'Get followers count error: {str(e)}') 173 | 174 | async def get_user_id(self, username): 175 | url = 'https://x.com/i/api/graphql/-0XdHI-mrHWBQd8-oLo1aA/ProfileSpotlightsQuery' 176 | if username[0] == '@': 177 | username = username[1:] 178 | username = username.lower() 179 | params = { 180 | 'variables': to_json({'screen_name': username}) 181 | } 182 | 183 | def _handler(resp): 184 | if type(resp) is dict and len(resp.get('data', {})) == 0: 185 | raise UserNotFound() 186 | return int(resp['data']['user_result_by_screen_name']['result']['rest_id']) 187 | 188 | try: 189 | return await self.request("GET", url, params=params, resp_handler=_handler) 190 | except Exception as e: 191 | raise Exception(f'Get user id error: {str(e)}') 192 | 193 | async def follow(self, username): 194 | user_id = await self.get_user_id(username) 195 | url = 'https://x.com/i/api/1.1/friendships/create.json' 196 | params = { 197 | 'include_profile_interstitial_type': '1', 198 | 
'include_blocking': '1', 199 | 'include_blocked_by': '1', 200 | 'include_followed_by': '1', 201 | 'include_want_retweets': '1', 202 | 'include_mute_edge': '1', 203 | 'include_can_dm': '1', 204 | 'include_can_media_tag': '1', 205 | 'include_ext_has_nft_avatar': '1', 206 | 'include_ext_is_blue_verified': '1', 207 | 'include_ext_verified_type': '1', 208 | 'include_ext_profile_image_shape': '1', 209 | 'skip_status': '1', 210 | 'user_id': user_id, 211 | } 212 | headers = { 213 | 'content-type': 'application/x-www-form-urlencoded' 214 | } 215 | try: 216 | await self.request('POST', url, params=params, headers=headers, resp_handler=lambda r: r['id']) 217 | except Exception as e: 218 | raise Exception(f'Follow error: {str(e)}') 219 | 220 | async def post_tweet(self, text, tweet_id=None) -> str: 221 | action = "CreateTweet" 222 | query_id = "xT36w0XM3A8jDynpkram2A" 223 | _json = dict( 224 | variables=dict( 225 | tweet_text=text, 226 | media=dict( 227 | media_entities=[], 228 | possibly_sensitive=False 229 | ), 230 | semantic_annotation_ids=[], 231 | dark_request=False 232 | ), 233 | features=dict( 234 | communities_web_enable_tweet_community_results_fetch=True, 235 | c9s_tweet_anatomy_moderator_badge_enabled=True, 236 | tweetypie_unmention_optimization_enabled=True, 237 | responsive_web_edit_tweet_api_enabled=True, 238 | graphql_is_translatable_rweb_tweet_is_translatable_enabled=True, 239 | view_counts_everywhere_api_enabled=True, 240 | longform_notetweets_consumption_enabled=True, 241 | responsive_web_twitter_article_tweet_consumption_enabled=True, 242 | tweet_awards_web_tipping_enabled=False, 243 | creator_subscriptions_quote_tweet_preview_enabled=False, 244 | longform_notetweets_rich_text_read_enabled=True, 245 | longform_notetweets_inline_media_enabled=True, 246 | articles_preview_enabled=True, 247 | rweb_video_timestamps_enabled=True, 248 | rweb_tipjar_consumption_enabled=True, 249 | responsive_web_graphql_exclude_directive_enabled=True, 250 | verified_phone_label_enabled=False, 251 | freedom_of_speech_not_reach_fetch_enabled=True, 252 | standardized_nudges_misinfo=True, 253 | tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled=True, 254 | responsive_web_graphql_skip_user_profile_image_extensions_enabled=False, 255 | responsive_web_graphql_timeline_navigation_enabled=True, 256 | responsive_web_enhance_cards_enabled=False, 257 | ), 258 | queryId=query_id, 259 | ) 260 | 261 | if tweet_id: 262 | _json['variables']['reply'] = dict( 263 | in_reply_to_tweet_id=tweet_id, 264 | exclude_reply_user_ids=[] 265 | ) 266 | 267 | url = f'https://x.com/i/api/graphql/{query_id}/{action}' 268 | 269 | def _handler(resp): 270 | _result = resp['data']['create_tweet']['tweet_results']['result'] 271 | _username = _result['core']['user_results']['result']['legacy']['screen_name'] 272 | _tweet_id = _result['rest_id'] 273 | _url = f'https://x.com/{_username}/status/{_tweet_id}' 274 | return _url 275 | 276 | try: 277 | return await self.request('POST', url, json=_json, resp_handler=_handler) 278 | except Exception as e: 279 | raise Exception(f'Post tweet error: {str(e)}') 280 | 281 | async def retweet(self, tweet_id): 282 | action = 'CreateRetweet' 283 | query_id = 'ojPdsZsimiJrUGLR1sjUtA' 284 | url = f'https://x.com/i/api/graphql/{query_id}/{action}' 285 | _json = { 286 | 'variables': { 287 | 'tweet_id': tweet_id, 288 | 'dark_request': False 289 | }, 290 | 'queryId': query_id 291 | } 292 | try: 293 | resp = await self.request('POST', url, json=_json, resp_handler=lambda r: r) 294 | return resp 295 | 
except Exception as e: 296 | raise Exception(f'Retweet error: {str(e)}') 297 | 298 | async def like(self, tweet_id) -> bool: 299 | action = 'FavoriteTweet' 300 | query_id = 'lI07N6Otwv1PhnEgXILM7A' 301 | url = f'https://x.com/i/api/graphql/{query_id}/{action}' 302 | _json = { 303 | 'variables': { 304 | 'tweet_id': tweet_id, 305 | 'dark_request': False 306 | }, 307 | 'queryId': query_id 308 | } 309 | try: 310 | return await self.request( 311 | 'POST', url, json=_json, 312 | resp_handler=lambda r: r['data']['favorite_tweet'] == 'Done' 313 | ) 314 | except Exception as e: 315 | raise Exception(f'Like error: {str(e)}') 316 | 317 | async def find_posted_tweet(self, text_condition_func, count=20) -> str: 318 | action = "UserTweets" 319 | query_id = "E3opETHurmVJflFsUBVuUQ" 320 | params = { 321 | 'variables': to_json({ 322 | "userId": self.my_user_id, 323 | "count": count, 324 | "includePromotedContent": False, 325 | "withQuickPromoteEligibilityTweetFields": False, 326 | "withVoice": False, 327 | "withV2Timeline": True, 328 | }), 329 | 'features': to_json({ 330 | "profile_label_improvements_pcf_label_in_post_enabled": False, 331 | "rweb_tipjar_consumption_enabled": True, 332 | "responsive_web_graphql_exclude_directive_enabled": True, 333 | "verified_phone_label_enabled": False, 334 | "creator_subscriptions_tweet_preview_api_enabled": True, 335 | "responsive_web_graphql_timeline_navigation_enabled": True, 336 | "responsive_web_graphql_skip_user_profile_image_extensions_enabled": False, 337 | "premium_content_api_read_enabled": False, 338 | "communities_web_enable_tweet_community_results_fetch": True, 339 | "c9s_tweet_anatomy_moderator_badge_enabled": True, 340 | "responsive_web_grok_analyze_button_fetch_trends_enabled": True, 341 | "responsive_web_grok_analyze_post_followups_enabled": False, 342 | "responsive_web_grok_share_attachment_enabled": False, 343 | "articles_preview_enabled": True, 344 | "responsive_web_edit_tweet_api_enabled": True, 345 | "graphql_is_translatable_rweb_tweet_is_translatable_enabled": True, 346 | "view_counts_everywhere_api_enabled": True, 347 | "longform_notetweets_consumption_enabled": True, 348 | "responsive_web_twitter_article_tweet_consumption_enabled": True, 349 | "tweet_awards_web_tipping_enabled": False, 350 | "creator_subscriptions_quote_tweet_preview_enabled": False, 351 | "freedom_of_speech_not_reach_fetch_enabled": True, 352 | "standardized_nudges_misinfo": True, 353 | "tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled": True, 354 | "rweb_video_timestamps_enabled": True, 355 | "longform_notetweets_rich_text_read_enabled": True, 356 | "longform_notetweets_inline_media_enabled": True, 357 | "responsive_web_enhance_cards_enabled": False, 358 | }), 359 | } 360 | 361 | url = f'https://x.com/i/api/graphql/{query_id}/{action}' 362 | 363 | def _handler(resp): 364 | instructions = resp['data']['user']['result']['timeline_v2']['timeline']['instructions'] 365 | entries = None 366 | for instruction in instructions: 367 | if instruction['type'] == 'TimelineAddEntries': 368 | entries = instruction['entries'] 369 | break 370 | if entries is None: 371 | return None 372 | for entry in entries: 373 | tweet_text = entry['content']['itemContent']['tweet_results']['result'] 374 | tweet_text = tweet_text['legacy']['full_text'] 375 | if not text_condition_func(tweet_text): 376 | continue 377 | tweet_id = entry['entryId'] 378 | if tweet_id.startswith('tweet-'): 379 | tweet_id = tweet_id[6:] 380 | _url = f'https://x.com/{self.my_username}/status/{tweet_id}' 381 | 
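                    # entryId comes back as "tweet-<rest_id>"; the prefix was stripped above,
                    # so _url points at the first recent tweet of this account whose text
                    # satisfies text_condition_func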
return _url 382 | return None 383 | 384 | try: 385 | return await self.request('GET', url, params=params, resp_handler=_handler) 386 | except Exception as e: 387 | raise Exception(f'Find posted tweet error: {str(e)}') 388 | 389 | 390 | def to_json(obj): 391 | return json.dumps(obj, separators=(',', ':'), ensure_ascii=True) 392 | -------------------------------------------------------------------------------- /internal/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from .utils import * 2 | -------------------------------------------------------------------------------- /internal/utils/async_web3.py: -------------------------------------------------------------------------------- 1 | from typing import Dict 2 | from web3.providers.async_rpc import AsyncHTTPProvider 3 | 4 | from ..vars import USER_AGENT 5 | 6 | 7 | class AsyncHTTPProviderWithUA(AsyncHTTPProvider): 8 | 9 | def __init__( 10 | self, 11 | endpoint_uri: str = None, 12 | request_kwargs=None, 13 | ) -> None: 14 | super().__init__(endpoint_uri, request_kwargs) 15 | 16 | @classmethod 17 | def get_request_headers(cls) -> Dict[str, str]: 18 | return { 19 | "Content-Type": "application/json", 20 | "User-Agent": USER_AGENT, 21 | } 22 | -------------------------------------------------------------------------------- /internal/utils/utils.py: -------------------------------------------------------------------------------- 1 | import random 2 | import asyncio 3 | import aiofiles 4 | from web3 import AsyncWeb3 5 | from retry import retry 6 | from loguru import logger 7 | from aiohttp import ClientResponse 8 | from datetime import datetime 9 | from aiohttp_socks import ProxyConnector 10 | from urllib.parse import urlparse, parse_qs 11 | 12 | from ..config import MAX_TRIES, DISABLE_SSL 13 | 14 | from .async_web3 import AsyncHTTPProviderWithUA 15 | 16 | 17 | def plural_str(cnt: int, name: str): 18 | return f'{cnt} {name}{"s" if cnt > 1 else ""}' 19 | 20 | 21 | def int_to_decimal(i, n): 22 | return i / (10 ** n) 23 | 24 | 25 | def decimal_to_int(d, n): 26 | return int(d * (10 ** n)) 27 | 28 | 29 | def is_empty(val): 30 | if val is None: 31 | return True 32 | if type(val) == str: 33 | return val == '' 34 | return False 35 | 36 | 37 | def get_proxy_url(proxy): 38 | if proxy and '|' in proxy: 39 | proxy = proxy.split('|')[0] 40 | return None if is_empty(proxy) else proxy 41 | 42 | 43 | async def wait_a_bit(x=1): 44 | await asyncio.sleep(random.uniform(0.5, 1) * x) 45 | 46 | 47 | async def handle_aio_response(resp_raw: ClientResponse, acceptable_statuses=None, resp_handler=None, with_text=False): 48 | if acceptable_statuses and len(acceptable_statuses) > 0: 49 | if resp_raw.status not in acceptable_statuses: 50 | raise Exception(f'Bad status code [{resp_raw.status}]: Response = {await resp_raw.text()}') 51 | try: 52 | if resp_handler is not None: 53 | if with_text: 54 | return resp_handler(await resp_raw.text()) 55 | else: 56 | return resp_handler(await resp_raw.json()) 57 | return 58 | except Exception as e: 59 | raise Exception(f'{str(e)}: Status = {resp_raw.status}. 
Response = {await resp_raw.text()}') 60 | 61 | 62 | def async_retry(async_func): 63 | async def wrapper(*args, **kwargs): 64 | tries, delay = MAX_TRIES, 1.5 65 | while tries > 0: 66 | try: 67 | return await async_func(*args, **kwargs) 68 | except Exception: 69 | tries -= 1 70 | if tries <= 0: 71 | raise 72 | await asyncio.sleep(delay) 73 | 74 | delay *= 2 75 | delay += random.uniform(0, 1) 76 | delay = min(delay, 10) 77 | 78 | return wrapper 79 | 80 | 81 | async def log_long_exc(idx, msg, exc, warning=False, to_file=True): 82 | e_msg = str(exc) 83 | if e_msg == '': 84 | e_msg = ' ' 85 | e_msg_lines = e_msg.splitlines() 86 | if warning: 87 | logger.warning(f'{idx}) {msg}: {e_msg_lines[0]}') 88 | else: 89 | logger.error(f'{idx}) {msg}: {e_msg_lines[0]}') 90 | if len(e_msg_lines) > 1 and to_file: 91 | async with aiofiles.open('logs/errors.txt', 'a', encoding='utf-8') as file: 92 | await file.write(f'{str(datetime.now())} | {idx}) {msg}: {e_msg}\n\n') 93 | await file.flush() 94 | 95 | 96 | def get_conn(proxy): 97 | return ProxyConnector.from_url(proxy) if proxy else None 98 | 99 | 100 | def get_query_param(url: str, name: str): 101 | values = parse_qs(urlparse(url).query).get(name) 102 | if values: 103 | return values[0] 104 | return None 105 | 106 | 107 | def to_bytes(hex_str): 108 | return AsyncWeb3.to_bytes(hexstr=hex_str) 109 | 110 | 111 | @retry(tries=MAX_TRIES, delay=1.5, max_delay=10, backoff=2, jitter=(0, 1)) 112 | def get_w3(rpc_url: str, proxy: str = None): 113 | proxy = get_proxy_url(proxy) 114 | req_kwargs = {} if proxy is None else {'proxy': proxy} 115 | if DISABLE_SSL: 116 | req_kwargs['ssl'] = False 117 | return AsyncWeb3(AsyncHTTPProviderWithUA(rpc_url, req_kwargs)) 118 | -------------------------------------------------------------------------------- /internal/vars/__init__.py: -------------------------------------------------------------------------------- 1 | from .vars import * 2 | -------------------------------------------------------------------------------- /internal/vars/vars.py: -------------------------------------------------------------------------------- 1 | import platform 2 | 3 | 4 | USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36' 5 | SEC_CH_UA = '"Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24"' 6 | SEC_CH_UA_PLATFORM = '"macOS"' 7 | 8 | 9 | if platform.system() == 'Windows': 10 | USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36' 11 | SEC_CH_UA = '"Chromium";v="124", "Google Chrome";v="124", "Not-A.Brand";v="99"' 12 | 13 | 14 | 15 | GALXE_CAPTCHA_ID = '244bcb8b9846215df5af4c624a750db4' 16 | -------------------------------------------------------------------------------- /logs/errors.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akasakaid/lazy-galxe/599ac9c40181471c17cf6280ffcd6c7da92d0f36/logs/errors.txt -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import csv 3 | import random 4 | import aiohttp 5 | import asyncio 6 | 7 | from termcolor import cprint 8 | from loguru import logger 9 | from datetime import datetime 10 | from typing import Tuple, Optional 11 | from eth_account import Account as EthAccount 12 | 13 | from internal.config import WAIT_BETWEEN_ACCOUNTS, 
THREADS_NUM, \ 14 | SKIP_FIRST_ACCOUNTS, RANDOM_ORDER, UPDATE_STORAGE_ACCOUNT_INFO, \ 15 | GALXE_CAMPAIGN_IDS, REFERRAL_LINKS, SURVEYS, SPACES_STATS 16 | from internal.utils import async_retry, wait_a_bit, log_long_exc, get_query_param 17 | from internal.galxe import GalxeAccount 18 | from internal.models import AccountInfo 19 | from internal.storage import AccountStorage 20 | 21 | 22 | logger.remove() 23 | logger.add(sys.stderr, format='{time:YYYY-MM-DD HH:mm:ss.SSS} | ' 24 | '{level: <7} | ' 25 | '{message}') 26 | 27 | 28 | @async_retry 29 | async def change_ip(idx, link: str): 30 | async with aiohttp.ClientSession() as sess: 31 | async with sess.get(link) as resp: 32 | if resp.status != 200: 33 | raise Exception(f'Failed to change ip: Status = {resp.status}. Response = {await resp.text()}') 34 | logger.info(f'{idx}) Successfully changed ip: {await resp.text()}') 35 | 36 | 37 | async def process_account(account_data: Tuple[int, Tuple[str, str, str, str, str]], 38 | storage: AccountStorage): 39 | 40 | idx, (evm_wallet, proxy, twitter_token, email, discord) = account_data 41 | 42 | evm_address = EthAccount().from_key(evm_wallet).address 43 | 44 | logger.info(f'{idx}) Processing: {evm_address}') 45 | 46 | if ':' in email: 47 | email_username, email_password = tuple(email.split(':')) 48 | else: 49 | email_username, email_password = email, '' 50 | 51 | account_info = await storage.get_account_info(evm_address) 52 | if account_info is None: 53 | logger.info(f'{idx}) Account info was not saved before') 54 | account_info = AccountInfo(idx=idx, evm_address=evm_address, evm_private_key=evm_wallet, 55 | proxy=proxy, twitter_auth_token=twitter_token, discord_token=discord, 56 | email_username=email_username, email_password=email_password) 57 | else: 58 | if account_info.discord_token == '': 59 | account_info.discord_token = discord 60 | if UPDATE_STORAGE_ACCOUNT_INFO: 61 | account_info.proxy = proxy 62 | account_info.twitter_auth_token = twitter_token 63 | account_info.email_username = email_username 64 | account_info.email_password = email_password 65 | account_info.discord_token = discord 66 | logger.info(f'{idx}) Saved account info restored') 67 | 68 | account_info.twitter_error = False 69 | account_info.discord_error = False 70 | account_info.actual_campaigns = [] 71 | 72 | if '|' in account_info.proxy: 73 | change_link = account_info.proxy.split('|')[1] 74 | await change_ip(idx, change_link) 75 | 76 | exc: Optional[Exception] = None 77 | 78 | try: 79 | async with GalxeAccount(idx, account_info, evm_wallet) as galxe_account: 80 | logger.info(f'{idx}) Galxe signing in') 81 | await galxe_account.login() 82 | logger.info(f'{idx}) Galxe signed in') 83 | 84 | await wait_a_bit() 85 | 86 | for campaign_id in GALXE_CAMPAIGN_IDS: 87 | await galxe_account.complete_campaign(campaign_id) 88 | await galxe_account.claim_campaign(campaign_id) 89 | 90 | await wait_a_bit() 91 | 92 | logger.info(f'{idx}) Checking spaces stats') 93 | await galxe_account.spaces_stats() 94 | 95 | except Exception as galxe_exc: 96 | exc = Exception(f'Galxe error: {galxe_exc}') 97 | 98 | logger.info(f'{idx}) Account stats:\n{account_info.str_stats()}') 99 | 100 | await storage.set_account_info(evm_address, account_info) 101 | 102 | await storage.async_save() 103 | 104 | if exc is not None: 105 | raise exc 106 | 107 | 108 | async def process_batch(bid: int, batch, storage: AccountStorage, async_func, sleep): 109 | await asyncio.sleep(WAIT_BETWEEN_ACCOUNTS[0] / THREADS_NUM * bid) 110 | failed = [] 111 | for idx, d in enumerate(batch): 
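        # accounts within a batch run strictly sequentially; a random delay inside the
        # WAIT_BETWEEN_ACCOUNTS range separates them, and failed entries are collected
        # so the caller can report their indices at the end of the run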
112 | if sleep and idx != 0: 113 | await asyncio.sleep(random.uniform(WAIT_BETWEEN_ACCOUNTS[0], WAIT_BETWEEN_ACCOUNTS[1])) 114 | try: 115 | await async_func(d, storage) 116 | except Exception as e: 117 | failed.append(d) 118 | await log_long_exc(d[0], 'Process account error', e) 119 | print() 120 | return failed 121 | 122 | 123 | async def process(batches, storage: AccountStorage, async_func, sleep=True): 124 | tasks = [] 125 | for idx, b in enumerate(batches): 126 | tasks.append(asyncio.create_task(process_batch(idx, b, storage, async_func, sleep))) 127 | return await asyncio.gather(*tasks) 128 | 129 | 130 | def main(): 131 | with open('files/evm_wallets.txt', 'r', encoding='utf-8') as file: 132 | evm_wallets = file.read().splitlines() 133 | evm_wallets = [w.strip() for w in evm_wallets] 134 | with open('files/proxies.txt', 'r', encoding='utf-8') as file: 135 | proxies = file.read().splitlines() 136 | proxies = [p.strip() for p in proxies] 137 | proxies = [p if '://' in p.split('|')[0] or p == '' else 'http://' + p for p in proxies] 138 | with open('files/twitters.txt', 'r', encoding='utf-8') as file: 139 | twitters = file.read().splitlines() 140 | twitters = [t.strip() for t in twitters] 141 | with open('files/emails.txt', 'r', encoding='utf-8') as file: 142 | emails = file.read().splitlines() 143 | emails = [e.strip() for e in emails] 144 | with open('files/discords.txt', 'r', encoding='utf-8') as file: 145 | discords = file.read().splitlines() 146 | discords = [d.strip() for d in discords] 147 | 148 | if len(discords) == 0: 149 | discords = ['' for _ in evm_wallets] 150 | if len(evm_wallets) != len(proxies): 151 | logger.error('Proxies count does not match wallets count') 152 | return 153 | if len(evm_wallets) != len(twitters): 154 | logger.error('Twitter count does not match wallets count') 155 | return 156 | if len(evm_wallets) != len(emails): 157 | logger.error('Emails count does not match wallets count') 158 | return 159 | if len(evm_wallets) != len(discords): 160 | logger.error('Discord count does not match wallets count') 161 | return 162 | 163 | for idx, w in enumerate(evm_wallets, start=1): 164 | try: 165 | _ = EthAccount().from_key(w).address 166 | except Exception as e: 167 | logger.error(f'Wrong EVM private key #{idx}: {str(e)}') 168 | return 169 | 170 | for i, ref_link in enumerate(REFERRAL_LINKS): 171 | try: 172 | if not ref_link.startswith('https://app.galxe.com/quest/'): 173 | raise Exception(f'should start with "https://app.galxe.com/quest/"') 174 | ref_campaign_id = ref_link.split('/')[-1].split('?')[0] 175 | referral_code = get_query_param(ref_link, 'referral_code') 176 | REFERRAL_LINKS[i] = (ref_campaign_id, referral_code) 177 | except Exception as e: 178 | logger.error(f'Incorrect referral link {ref_link}: {e}') 179 | return 180 | 181 | if 'address' not in SURVEYS: 182 | logger.error('Incorrect format of surveys.csv') 183 | return 184 | survey_campaign_ids = [c_id.strip() for c_id in SURVEYS['address']] 185 | for evm_address, answers in SURVEYS.items(): 186 | if evm_address == 'address': 187 | continue 188 | answers_by_ids = {campaign_id: answers[i] for i, campaign_id in enumerate(survey_campaign_ids)} 189 | SURVEYS[evm_address] = answers_by_ids 190 | 191 | want_only = [] 192 | 193 | def get_batches(skip: int = None, threads: int = THREADS_NUM): 194 | _data = list(enumerate(list(zip(evm_wallets, proxies, twitters, emails, discords)), start=1)) 195 | if skip is not None: 196 | _data = _data[skip:] 197 | if skip is not None and len(want_only) > 0: 198 | _data = [d for 
d in enumerate(list(zip(evm_wallets, proxies, twitters, emails, discords)), start=1) 199 | if d[0] in want_only] 200 | if RANDOM_ORDER: 201 | random.shuffle(_data) 202 | _batches = [[] for _ in range(threads)] 203 | for _idx, d in enumerate(_data): 204 | _batches[_idx % threads].append(d) 205 | return _batches 206 | 207 | storage = AccountStorage('storage/data.json') 208 | storage.init() 209 | 210 | loop = asyncio.new_event_loop() 211 | asyncio.set_event_loop(loop) 212 | results = loop.run_until_complete(process(get_batches(SKIP_FIRST_ACCOUNTS), storage, process_account)) 213 | 214 | failed = [f[0] for r in results for f in r] 215 | 216 | storage.save() 217 | 218 | print() 219 | logger.info('Finished') 220 | logger.info(f'Failed cnt: {len(failed)}') 221 | logger.info(f'Failed ids: {sorted(failed)}') 222 | print() 223 | 224 | campaigns = {} 225 | spaces = {} 226 | for w in evm_wallets: 227 | account = storage.get_final_account_info(EthAccount().from_key(w).address) 228 | if account is None: 229 | continue 230 | for c_id, value in account.actual_points.items(): 231 | if c_id not in campaigns: 232 | campaigns[c_id] = value[0] 233 | for alias, value in account.spaces_points.items(): 234 | if alias not in spaces: 235 | spaces[alias] = value[0] 236 | campaigns = list(campaigns.items()) 237 | spaces = [(alias, name) for alias, name in spaces.items() if not SPACES_STATS or alias in SPACES_STATS] 238 | 239 | csv_data = [['#', 'EVM Address', 'Total Points'] + [n for _, n in campaigns] + ['Twitter Error', 'Discord Error']] 240 | total = {'total_points': 0, 'twitter_error': 0, 'discord_error': 0} 241 | spaces_csv_data = [['#', 'EVM Address'], ['#', 'EVM Address']] 242 | for _, name in spaces: 243 | spaces_csv_data[0].extend([name, name]) 244 | spaces_csv_data[1].extend(['Points', 'Rank']) 245 | spaces_total = {alias: 0 for alias, _ in spaces} 246 | for idx, w in enumerate(evm_wallets, start=1): 247 | evm_address = EthAccount().from_key(w).address 248 | account = storage.get_final_account_info(evm_address) 249 | if account is None: 250 | csv_data.append([idx, evm_address]) 251 | spaces_csv_data.append([idx, evm_address]) 252 | continue 253 | 254 | points = [account.campaign_points_str(c_id) for c_id, _ in campaigns] 255 | total_points = sum(account.campaign_points(c_id) for c_id, _ in campaigns) 256 | 257 | total['total_points'] += total_points 258 | 259 | for c_id, _ in campaigns: 260 | if c_id not in total: 261 | total[c_id] = [0, None, None] 262 | if c_id not in account.points: 263 | continue 264 | total[c_id][0] += account.points[c_id][1] 265 | if account.points[c_id][2] is not None: 266 | if total[c_id][1] is None: 267 | total[c_id][1] = 0 268 | total[c_id][1] += 1 if account.points[c_id][2] else 0 269 | for c_id, _ in campaigns: 270 | nfts_cnt = account.nfts.get(c_id) 271 | if nfts_cnt is None: 272 | continue 273 | if total[c_id][2] is None: 274 | total[c_id][2] = 0 275 | total[c_id][2] += nfts_cnt 276 | 277 | total['twitter_error'] += 1 if account.twitter_error else 0 278 | total['discord_error'] += 1 if account.discord_error else 0 279 | 280 | csv_data.append([idx, evm_address, total_points] + points + 281 | [account.twitter_error_s, account.discord_error_s]) 282 | 283 | spaces_info = [] 284 | for alias, _ in spaces: 285 | space_points, space_rank = 0, None 286 | if alias in account.spaces_points: 287 | space_points, space_rank = account.spaces_points[alias][1], account.spaces_points[alias][2] 288 | spaces_info.extend([space_points, space_rank]) 289 | spaces_total[alias] += space_points 290 | 
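        # one spaces row per account: a points/rank pair for every tracked space,
        # in the same order as the two header rows built above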
291 | spaces_csv_data.append([idx, evm_address] + spaces_info) 292 | 293 | csv_data.append([]) 294 | csv_data.append(['', '', total['total_points']] + 295 | [(f'{total[c_id][0]} / {total[c_id][1]}' 296 | if total[c_id][1] else str(total[c_id][0])) + 297 | f'{" / " + str(total[c_id][2]) if total[c_id][2] is not None else ""}' 298 | for c_id, _ in campaigns] + [total['twitter_error'], total['discord_error']]) 299 | csv_data.append(['', '', 'Total Points'] + [n for _, n in campaigns] + ['Twitter Error', 'Discord Error']) 300 | 301 | spaces_csv_data.extend([[], ['', ''], ['', ''], ['', '']]) 302 | for alias, name in spaces: 303 | spaces_csv_data[-3].extend([spaces_total[alias], '']) 304 | spaces_csv_data[-2].extend(['Points', 'Rank']) 305 | spaces_csv_data[-1].extend([name, name]) 306 | 307 | run_timestamp = str(datetime.now()) 308 | csv_data.extend([[], ['', 'Timestamp', run_timestamp]]) 309 | spaces_csv_data.extend([[], ['', 'Timestamp', run_timestamp]]) 310 | 311 | with open('results/stats.csv', 'w', encoding='utf-8', newline='') as file: 312 | writer = csv.writer(file, delimiter=';') 313 | writer.writerows(csv_data) 314 | with open('results/spaces_stats.csv', 'w', encoding='utf-8', newline='') as file: 315 | writer = csv.writer(file, delimiter=';') 316 | writer.writerows(spaces_csv_data) 317 | 318 | logger.info('Campaigns stats are stored in results/stats.csv') 319 | logger.info('Spaces stats are stored in results/spaces_stats.csv') 320 | logger.info(f'Timestamp: {run_timestamp}') 321 | print() 322 | 323 | 324 | if __name__ == '__main__': 325 | cprint('###############################################################', 'cyan') 326 | cprint('#################', 'cyan', end='') 327 | cprint(' https://t.me/thelaziestcoder ', 'magenta', end='') 328 | cprint('################', 'cyan') 329 | cprint('#################', 'cyan', end='') 330 | cprint(' https://t.me/thelaziestcoder ', 'magenta', end='') 331 | cprint('################', 'cyan') 332 | cprint('#################', 'cyan', end='') 333 | cprint(' https://t.me/thelaziestcoder ', 'magenta', end='') 334 | cprint('################', 'cyan') 335 | cprint('###############################################################\n', 'cyan') 336 | main() 337 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | aiofiles==23.2.1 2 | aiohttp==3.10.11 3 | aiohttp_socks==0.8.4 4 | aioimaplib==1.0.1 5 | beautifulsoup4==4.12.3 6 | colorama==0.4.6 7 | curl_cffi==0.8.1b8 8 | dataclasses_json==0.6.3 9 | eth_account==0.10.0 10 | Faker==22.6.0 11 | loguru==0.7.0 12 | lxml==5.3.0 13 | playwright==1.40.0 14 | retry==0.9.2 15 | requests==2.32.3 16 | setuptools==75.6.0 17 | termcolor==2.4.0 18 | toml==0.10.2 19 | web3==6.13.0 20 | -------------------------------------------------------------------------------- /results/stats.csv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/akasakaid/lazy-galxe/599ac9c40181471c17cf6280ffcd6c7da92d0f36/results/stats.csv -------------------------------------------------------------------------------- /storage/quizzes.json: -------------------------------------------------------------------------------- 1 | {"367883082841890816": [2, 3, 3, 3, 0]} --------------------------------------------------------------------------------
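A closing note on two of the data files above, inferred from the code rather than stated anywhere in the repository, so treat the details as assumptions: `get_proxy_url` and `change_ip` in `internal/utils/utils.py`, together with the proxy normalization in `main.py`, suggest that each line of `files/proxies.txt` is a proxy URL (`http://` is assumed when no scheme is given) optionally followed by `|` and a link that is requested to rotate the IP before the account is processed; `storage/quizzes.json` looks like a map from a quiz identifier to the zero-based answer indices the quiz solver submits. Illustrative entries under those assumptions (addresses, credentials and the rotation URL are placeholders, not values from the repository):

files/proxies.txt (one line per wallet):
    http://user:pass@203.0.113.10:8000
    socks5://user:pass@203.0.113.11:1080|https://rotate.example.com/api?key=abc

storage/quizzes.json (quiz identifier -> answer indices; the single entry shipped above):
    {"367883082841890816": [2, 3, 3, 3, 0]}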