├── README.md ├── __init__.py ├── bbl ├── __init__.py ├── address.txt └── claim_sbtc.py ├── bevm ├── __init__.py ├── address.json ├── batch_create_bevm_node.py ├── config.ini ├── monitoring_bevm_node.py └── requirements.txt ├── carv ├── __init__.py ├── checkin │ ├── __init__.py │ ├── checkin.py │ ├── checkin_executor.py │ ├── gen_hex_value.py │ └── hex_values.json ├── claim │ ├── __init__.py │ ├── claim.py │ └── claim_executor.py ├── common │ ├── __init__.py │ ├── annotation.py │ ├── common_util.py │ └── resources │ │ ├── __init__.py │ │ ├── checkin_private_keys.txt │ │ ├── claim_private_keys.txt │ │ ├── invite_config.yaml │ │ └── socks5_proxys.txt └── requirements.txt ├── grass ├── README.md ├── common │ └── login.py ├── masterslave │ ├── connect_to_wss_util.py │ ├── master_slave_actuator.py │ ├── master_slave_config.py │ ├── master_slave_config.yaml │ └── master_slave_exec.py ├── register │ └── register_config.yaml └── requirements.txt ├── opi ├── config.json ├── config.txt ├── proxy_report_opi.py ├── report_opi.py └── requirements.txt ├── qna3 ├── README.md ├── __init__.py ├── checkin │ ├── __init__.py │ ├── check_in.py │ └── checkin_executor.py ├── claim │ ├── __init__.py │ ├── claim_executor.py │ └── claim_point.py ├── common │ ├── __init__.py │ ├── annotation.py │ ├── proxy_manager.py │ ├── qna3_util.py │ └── re_captcha_parser.py ├── invite │ ├── __init__.py │ └── invite_manager.py ├── requirements.txt └── resources │ ├── __init__.py │ ├── checkin_private_keys.txt │ ├── claim_private_keys.txt │ ├── config.ini │ ├── invite_config.yaml │ └── socks5_proxys.txt └── tool ├── README.md ├── __init__.py ├── requirements.txt ├── wallet ├── __init__.py └── create_wallet.py └── withdraw ├── __init__.py ├── config.ini ├── util.py ├── withdraw.py └── withdraw_addresses.json /README.md: -------------------------------------------------------------------------------- 1 | # 空投项目的研究脚本 2 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | ######################################################### 2 | #将根目录加入sys.path中,解决命令行找不到包的问题 3 | import sys 4 | import os 5 | curPath = os.path.abspath(os.path.dirname(__file__)) 6 | rootPath = os.path.split(curPath)[0] 7 | sys.path.append(rootPath) 8 | ######################################################### 9 | -------------------------------------------------------------------------------- /bbl/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/bbl/__init__.py -------------------------------------------------------------------------------- /bbl/address.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/bbl/address.txt -------------------------------------------------------------------------------- /bbl/claim_sbtc.py: -------------------------------------------------------------------------------- 1 | import os 2 | import random 3 | import string 4 | import time 5 | 6 | from loguru import logger 7 | 8 | import requests 9 | 10 | # 代理可以自己实现或者去买现成 11 | def get_proxy(nstproxy_channel="xx", nstproxy_password="xx"): 12 | session = ''.join(random.choice(string.digits + string.ascii_letters) for _ in range(10)) 13 | return 
f"http://{nstproxy_channel}-residential-country_ANY-r_5m-s_{session}:{nstproxy_password}@gw-us.nstproxy.com:24125" 14 | 15 | 16 | def parse_txt_file(file_path): 17 | if not os.path.exists(file_path): 18 | logger.error(f"file '{file_path}' not found.") 19 | exit(1) 20 | with open(file_path, 'r', encoding='utf-8') as file: 21 | datas = file.readlines() 22 | 23 | datas = [data.strip() for data in datas if data.strip()] 24 | if len(datas) == 0: 25 | raise Exception("file data not found.") 26 | return datas 27 | 28 | 29 | if __name__ == '__main__': 30 | address_list = parse_txt_file("./address.txt") 31 | headers = { 32 | "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36", 33 | "Accept": "*/*", 34 | "Accept-Encoding": "gzip, deflate, br", 35 | "Accept-Language": "en-US,en;q=0", 36 | "Connection": "keep-alive", 37 | "X-Requested-With": "XMLHttpRequest", 38 | "authority": "alt.signetfaucet.com", 39 | "method": "GET", 40 | "scheme": "https", 41 | "Referer": "https://alt.signetfaucet.com/", 42 | "Sec-Ch-Ua-Mobile": "?0", 43 | "Sec-Ch-Ua-Platform": "\"Windows\"", 44 | "Sec-Fetch-Dest": "empty", 45 | "Sec-Fetch-Mode": "cors", 46 | "Sec-Fetch-Site": "same-origin" 47 | } 48 | for address in address_list: 49 | logger.info(f"加载钱包:{address} .....") 50 | try: 51 | headers["path"] = f"/claim/?address={address}" 52 | proxy_url = get_proxy() 53 | proxies = { 54 | "http": proxy_url, 55 | "https": proxy_url, 56 | } 57 | url = f"https://alt.signetfaucet.com/claim/?address={address}" 58 | resp = requests.get(url=url, headers=headers, proxies=proxies) 59 | if resp.ok: 60 | resp_text = resp.text 61 | if "sent with txid" in resp_text: 62 | tx_id = resp_text.split(" ")[-1] 63 | logger.success( 64 | f"地址:{address} 领取成功,txId:{tx_id}, 区块链浏览器:https://ex.signet.bublina.eu.org/t/{tx_id}") 65 | continue 66 | logger.error(f"地址:{address} 领取失败,原因:{resp_text}") 67 | else: 68 | logger.error(f"地址:{address} 发送领取请求失败,原因: {resp.text}") 69 | except Exception as e: 70 | logger.error(f"发生异常:{e}") 71 | -------------------------------------------------------------------------------- /bevm/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/bevm/__init__.py -------------------------------------------------------------------------------- /bevm/address.json: -------------------------------------------------------------------------------- 1 | { 2 | 3 | "0x..": "ip" 4 | } -------------------------------------------------------------------------------- /bevm/batch_create_bevm_node.py: -------------------------------------------------------------------------------- 1 | import json 2 | import time 3 | 4 | from fabric import Connection, task 5 | 6 | from configparser import ConfigParser 7 | 8 | 9 | @task 10 | def uname(c): 11 | result = c.run('uname -a') 12 | print_result(result) 13 | 14 | 15 | def print_result(result): 16 | print("服务器 {} 返回结果:\n{}".format(c.host, result.stdout.strip())) 17 | 18 | 19 | # 安装docker环境 20 | @task 21 | def install_docker(con): 22 | filename = "docker_install.sh" 23 | 24 | print(f"开始创建 {filename} 文件...") 25 | 26 | file_content = """ 27 | #!/bin/bash 28 | sudo apt-get update 29 | sudo apt-get install -y docker.io 30 | sudo systemctl start docker 31 | sudo systemctl enable docker 32 | sudo usermod -aG docker $USER 33 | """ 34 | 35 | crate_file_result = con.run(f'echo "{file_content}" > 
{filename}') 36 | print(crate_file_result) 37 | 38 | print(f"文件 {filename} 已经创建...") 39 | 40 | print("开始安装docker...") 41 | con.run(f'chmod +x {filename}') 42 | time.sleep(2) 43 | con.run(f'sh {filename}') 44 | print("安装docker完毕...") 45 | 46 | 47 | # 启动bevm docker 容器 48 | @task 49 | def start_bevm_node(con, address): 50 | # --bootnodes /ip4/18.222.166.234/tcp/10000/ws/p2p/12D3KooWR1DNEVVWMaRJVfAkXTyZAZgnN159hNcPTooCSwMv4zbx 51 | print("开始启动docker bevm node节点...") 52 | command = ( 53 | f'sudo docker run -d --name "{address}" -v $HOME/node_bevm_test_storage:/root/.local/share/bevm btclayer2/bevm:v0.1.1 bevm "--chain=testnet" "--name={address}" ' 54 | f'"--pruning=archive" --telemetry-url "wss://telemetry.bevm.io/submit 0" --bootnodes /ip4/18.222.166.234/tcp/10000/ws/p2p/12D3KooWR1DNEVVWMaRJVfAkXTyZAZgnN159hNcPTooCSwMv4zbx') 55 | print(f"启动命令为:{command}") 56 | con.run(command) 57 | print("docker容器已经启动...") 58 | 59 | 60 | # 查看docker日志 61 | def showlog(con, address): 62 | command = f"docker logs -f {address}" 63 | con.run(command) 64 | 65 | 66 | def getdata(): 67 | with open('./address.json', 'r', encoding='utf-8') as file: 68 | return json.load(file) 69 | 70 | 71 | def do_exec(): 72 | data, password, username = get_infos() 73 | for address, host in data.items(): 74 | con = Connection(host=host, 75 | user=username, 76 | connect_kwargs={"password": password}) 77 | print(f">> 开始处理地址:{address}") 78 | install_docker(con) 79 | start_bevm_node(con, address) 80 | # showlog(con, address) 81 | con.close() 82 | 83 | 84 | def get_infos(): 85 | config = ConfigParser() 86 | config_file_path = './config.ini' 87 | config.read(config_file_path, encoding='utf-8') 88 | username = config['user_config'].get('username') 89 | password = config['user_config'].get('password') 90 | data = getdata() 91 | return data, password, username 92 | 93 | 94 | def do_start(): 95 | data, password, username = get_infos() 96 | for address, host in data.items(): 97 | con = Connection(host=host, 98 | user=username, 99 | connect_kwargs={"password": password}) 100 | start_bevm_node(con, address) 101 | 102 | 103 | def do_showlog(): 104 | data, password, username = get_infos() 105 | for address, host in data.items(): 106 | con = Connection(host=host, 107 | user=username, 108 | connect_kwargs={"password": password}) 109 | showlog(con, address) 110 | 111 | 112 | 113 | if __name__ == '__main__': 114 | do_exec() 115 | #do_start() 116 | #do_showlog() 117 | pass 118 | 119 | 120 | -------------------------------------------------------------------------------- /bevm/config.ini: -------------------------------------------------------------------------------- 1 | [user_config] 2 | username = 3 | password = 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /bevm/monitoring_bevm_node.py: -------------------------------------------------------------------------------- 1 | import websocket 2 | import json 3 | import threading 4 | 5 | 6 | def on_message(ws, message): 7 | data = json.loads(message) 8 | print("Received data:") 9 | print(data) 10 | # 在这里添加您的逻辑来处理数据并监控您的节点 11 | 12 | 13 | def on_error(ws, error): 14 | print("Error:", error) 15 | 16 | 17 | def on_close(ws): 18 | print("### Connection closed ###") 19 | 20 | 21 | def on_open(ws): 22 | def run(*args): 23 | print("Connected to the Websocket service.") 24 | # 在这里可以发送任何必要的消息来初始化连接或订阅特定的数据 25 | # 例如: ws.send(json.dumps({"action": "subscribe", "channel": "node_updates"})) 26 | 27 | thread = threading.Thread(target=run) 28 | thread.start() 29 | 30 | 31 | 
if __name__ == "__main__": 32 | websocket.enableTrace(True) 33 | ws = websocket.WebSocketApp("wss://telemetry.bevm.io/feed", 34 | on_message=on_message, 35 | on_error=on_error, 36 | on_close=on_close) 37 | ws.on_open = on_open 38 | ws.run_forever() 39 | -------------------------------------------------------------------------------- /bevm/requirements.txt: -------------------------------------------------------------------------------- 1 | fabric -------------------------------------------------------------------------------- /carv/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/carv/__init__.py -------------------------------------------------------------------------------- /carv/checkin/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/carv/checkin/__init__.py -------------------------------------------------------------------------------- /carv/checkin/checkin.py: -------------------------------------------------------------------------------- 1 | from web3 import Web3 2 | import json 3 | import logging 4 | 5 | from carv.common.common_util import CommonUtil, ProxyPoolManager 6 | from carv.common.annotation import retry, capture_error, to_async 7 | 8 | # ronin,opbnb,zkSync,linea 9 | chain_arr = [ 10 | { 11 | "id": 2020, 12 | "url": "", 13 | "contract": "", 14 | "name": "Ronin" 15 | }, 16 | { 17 | "id": 204, 18 | "url": "https://opbnb-mainnet-rpc.bnbchain.org", 19 | "contract": "0xc32338e7f84f4c01864c1d5b2b0c0c7c697c25dc", 20 | "name": "opBNB" 21 | }, 22 | # { 23 | # "id": 324, 24 | # "url": "https://mainnet.era.zksync.io", 25 | # "contract": "0x5155704bb41fde152ad3e1ae402e8e8b9ba335d3", 26 | # "name": "zkSync Era" 27 | # }, 28 | # { 29 | # "id": 59144, 30 | # "url": "https://binance.llamarpc.com", 31 | # "contract": "0xc32338e7f84f4c01864c1d5b2b0c0c7c697c25dc", 32 | # "name": "Linea" 33 | # } 34 | ] 35 | 36 | logging.basicConfig(level=logging.INFO) 37 | 38 | 39 | # 检查 40 | def check_status(proxy: ProxyPoolManager, trak_id: str, chain_id: str, headers: dict): 41 | url = f"https://interface.carv.io/airdrop/check_carv_status?chain_id={chain_id}" 42 | resp = proxy.get(url=url, trak_id=trak_id, headers=headers) 43 | if resp.status_code in [200, 201]: 44 | data = resp.json() 45 | # logging.info(f'req check_status successful, resp : {data}') 46 | if data['code'] == 0 and data['data']['status'] == 'not_started': 47 | return True 48 | logging.error(f'链ID:{chain_id} 已经签到') 49 | return False 50 | 51 | 52 | ### input_data的参数参考 53 | 54 | # 0xa2a9539c - functionName 55 | # 00000000000000000000000004babbd2cb77bc47ff32a78396d70bb33d26dbf3 - 钱包地址 56 | # 0000000000000000000000000000000000000000000000000000000000000032 - 金额 57 | # 000000000000000000000000000000000000000000000000000000000134d6f0 - 每天的日期 58 | # 0000000000000000000000000000000000000000000000000000000000000080 - 固定 59 | # 0000000000000000000000000000000000000000000000000000000000000041 - 固定 60 | # 550dd7a34150c3d9d4a9948a5001214906eb0854c85dcf2acae85a5debd04bd67f174e358552c47008524b0fba693d1537700b6f1b877acf2a58197d20eb0d0f1c - 签名 61 | # 00000000000000000000000000000000000000000000000000000000000000 - 固定 62 | 63 | 64 | def build_input_data(amount, address, signature, ymd): 65 | # 获取今天的动态部分 66 | return ("0xa2a9539c" 67 | 
f"{address[2:].zfill(64)}" 68 | f"{f'{hex(amount)[2:].zfill(64)}'}" 69 | f"{hex(ymd)[2:].zfill(64)}" 70 | f"0000000000000000000000000000000000000000000000000000000000000080" 71 | f"0000000000000000000000000000000000000000000000000000000000000041" 72 | f"{signature}" 73 | f"00000000000000000000000000000000000000000000000000000000000000") 74 | 75 | 76 | @retry(delay_between_attempts=3) 77 | def checkin_carv_soul(proxy: ProxyPoolManager, trak_id: str, chain_info: dict, private_key: str, dynamic_hex: str, 78 | address, headers): 79 | chain_id = chain_info.get("id") 80 | 81 | # 检查是否能领取 82 | can_checkin = check_status(proxy=proxy, trak_id=trak_id, chain_id=chain_id, headers=headers) 83 | if can_checkin is False: 84 | return address, "", private_key 85 | 86 | carv_soul_url = 'https://interface.carv.io/airdrop/mint/carv_soul' 87 | body = { 88 | 'chain_id': chain_id 89 | } 90 | 91 | # 获取签到需要的信息 92 | resp = proxy.post(url=carv_soul_url, headers=headers, data=json.dumps(body)) 93 | amount = None 94 | signature = None 95 | ymd = None 96 | if resp.status_code in [200, 201]: 97 | json_data = resp.json() 98 | amount = json_data.get("data").get("permit").get("amount") 99 | ymd = json_data.get("data").get("permit").get("ymd") 100 | signature = json_data.get("data").get("signature") 101 | 102 | if chain_id == 2020: 103 | # Ronin不需要和链上交互 104 | logging.info(f'Ronin skip tx ......') 105 | return address, "", private_key 106 | 107 | if amount is None or signature is None or ymd is None: 108 | raise Exception("signature and amount can't be None") 109 | 110 | tmp_input = build_input_data(amount=amount, signature=signature, 111 | address=address, ymd=ymd) 112 | input_data = CommonUtil().check_and_reset_input_data(tmp_input) 113 | web3 = Web3(Web3.HTTPProvider(chain_info.get("url"))) 114 | temp_address = Web3.to_checksum_address(address) 115 | nonce = web3.eth.get_transaction_count(temp_address) 116 | 117 | tx_hash_id = CommonUtil().exec_tx(_from=address, contract=chain_info.get("contract"), input_data=input_data, 118 | nonce=nonce, 119 | chain_id=chain_id, 120 | private_key=private_key, web3=web3) 121 | return address, tx_hash_id, private_key 122 | 123 | 124 | @to_async(max_workers=3) 125 | @capture_error(error_type="carv-ceckin") 126 | def checkin_all(proxy: ProxyPoolManager, trak_id: str, private_key: str, dynamic_hex: str): 127 | logging.info( 128 | f" ========================================= 开始签到: privateKey {private_key} ========================================= ") 129 | logging.info(f" ") 130 | logging.info(f" ") 131 | logging.info(f" ") 132 | # 登录放到外边,不然循环登录没必要 133 | address, headers = CommonUtil().login_with_retry(proxy=proxy, trak_id=trak_id, private_key=private_key, 134 | chain_url="https://opbnb-mainnet-rpc.bnbchain.org") 135 | logging.info(f">>>>>>>>>>> 完成登录 privateKey: {private_key}, address: {address}") 136 | for china_info in chain_arr: 137 | address, tx_hash_id, _private_key = checkin_carv_soul(proxy=proxy, trak_id=trak_id, chain_info=china_info, 138 | private_key=private_key, dynamic_hex=dynamic_hex, 139 | address=address, headers=headers) 140 | logging.info( 141 | f">>>>>>>>>> address: {address} chinaName: {china_info.get('name')}, tx: {tx_hash_id} checkin successful <<<<<<<<<<<<<") 142 | logging.info(f" ") 143 | logging.info(f" ") 144 | logging.info(f" ") 145 | logging.info( 146 | f" ========================================= 完成签到: privateKey {private_key} ========================================= ") 147 | 148 | 149 | if __name__ == '__main__': 150 | pass 151 | 
-------------------------------------------------------------------------------- /carv/checkin/checkin_executor.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import uuid 4 | ######################################################### 5 | # 将根目录加入sys.path中,解决命令行找不到包的问题 6 | import sys 7 | import os 8 | 9 | curPath = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))) 10 | sys.path.append(curPath) 11 | ######################################################### 12 | from carv.common.common_util import CommonUtil, ProxyPoolManager 13 | from carv.checkin import checkin, gen_hex_value 14 | 15 | logging.basicConfig(level=logging.DEBUG) 16 | 17 | file_path = os.path.join(curPath, 'carv', 'common', 'resources', 'checkin_private_keys.txt') 18 | abs_file_path = os.path.abspath(file_path) 19 | private_keys = CommonUtil().parse_txt_file(abs_file_path) 20 | 21 | proxy_manager = ProxyPoolManager() 22 | # 初始化今天的hexValue 23 | dynamic_hex = gen_hex_value.gen() 24 | 25 | 26 | async def main(): 27 | tasks = [] 28 | for private_key in private_keys: 29 | trak_id = str(uuid.uuid4()) 30 | task = checkin.checkin_all(proxy=proxy_manager, trak_id=trak_id, private_key=private_key, 31 | dynamic_hex=dynamic_hex) 32 | tasks.append(task) 33 | 34 | await asyncio.gather(*tasks) 35 | 36 | logging.info(" ALL EXEC IN SUCCESSFUL !") 37 | 38 | 39 | if __name__ == '__main__': 40 | asyncio.run(main()) 41 | -------------------------------------------------------------------------------- /carv/checkin/gen_hex_value.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | from datetime import datetime, timezone 4 | 5 | # 获取今天的日期 6 | today = datetime.now().strftime('%Y-%m-%d') 7 | 8 | # 文件路径 9 | file_path = 'hex_values.json' 10 | 11 | BASE_HEX = '000000000000000000000000000000000000000000000000000000000134d6f1' 12 | 13 | 14 | def get_or_increment_value(date, file_path): 15 | # 检查文件是否存在 16 | if os.path.exists(file_path): 17 | # 读取JSON文件 18 | with open(file_path, 'r') as file: 19 | data = json.load(file) 20 | else: 21 | # 文件不存在,创建一个空字典 22 | data = {} 23 | 24 | # 检查指定日期是否已经在数据中 25 | if date in data: 26 | # 日期存在,返回对应的值 27 | return data[date] 28 | else: 29 | # 获取最近的日期的字符串,如果数据为空,则使用当前日期 30 | if data: 31 | last_date_str = max(data.keys()) 32 | last_hex = data[last_date_str] 33 | else: 34 | # 如果没有数据,那么我们无法比较日期,返回一个错误或默认值 35 | raise ValueError("No data available to increment from.") 36 | 37 | # 计算日期差异 38 | last_date = datetime.strptime(last_date_str, '%Y-%m-%d').replace(tzinfo=timezone.utc) 39 | now_date = datetime.strptime(date, '%Y-%m-%d').replace(tzinfo=timezone.utc) 40 | delta = (now_date - last_date).days 41 | 42 | # 定义一个函数来增加十六进制的值 43 | def increment_hex(hex_value, increment): 44 | # 移除前缀"0x"并转换为十进制,然后加上增量,再转换回十六进制 45 | new_value = hex(int(hex_value, 16) + increment).lstrip("0x") 46 | # 补充前导0以保持长度一致 47 | new_value = new_value.zfill(len(hex_value)) 48 | return new_value 49 | 50 | # 在之前的last_hex之上增加日期差值 51 | new_value = increment_hex(last_hex, delta) 52 | # 更新数据字典 53 | data[date] = new_value 54 | # 将更新后的数据写回文件 55 | with open(file_path, 'w') as file: 56 | json.dump(data, file, indent=4) 57 | return new_value 58 | 59 | 60 | def gen(): 61 | # 使用函数并打印结果 62 | value = get_or_increment_value(today, file_path) 63 | print(f"Gen hex value for {today}: {value}") 64 | return value 65 | 66 | 67 | if __name__ == '__main__': 68 | gen() 69 | 
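70 | # Illustrative example of the increment logic (values taken from the bundled hex_values.json):
71 | # if the latest entry is "2024-01-18" -> "...0134d6f5", running gen() on 2024-01-20 computes a
72 | # 2-day delta and stores/returns the value ending in "...0134d6f7".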
-------------------------------------------------------------------------------- /carv/checkin/hex_values.json: -------------------------------------------------------------------------------- 1 | { 2 | "2024-01-16": "000000000000000000000000000000000000000000000000000000000134d6f3", 3 | "2024-01-17": "000000000000000000000000000000000000000000000000000000000134d6f4", 4 | "2024-01-18": "000000000000000000000000000000000000000000000000000000000134d6f5" 5 | } -------------------------------------------------------------------------------- /carv/claim/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/carv/claim/__init__.py -------------------------------------------------------------------------------- /carv/claim/claim.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | 4 | from carv.common.common_util import CommonUtil, ProxyPoolManager 5 | 6 | 7 | def claim_point(proxy: ProxyPoolManager, private_key: str, trak_id: str): 8 | address, headers = CommonUtil().login_with_retry(proxy, trak_id=trak_id, private_key=private_key, 9 | chain_url="https://opbnb-mainnet-rpc.bnbchain.org") 10 | 11 | logging.info( 12 | f"========================================== privateKey: {private_key} 开始领取积分 ======================================== ") 13 | logging.info(" ") 14 | logging.info(" ") 15 | logging.info(" ") 16 | # 查询奖励列表 17 | query_url = "https://interface.carv.io/airdrop/data_rewards/list" 18 | wait_claim_ids = [] 19 | resp = proxy.get(url=query_url, headers=headers) 20 | if resp.status_code in [200, 201]: 21 | data = resp.json() 22 | if data['code'] == 0: 23 | data_rewards = data.get('data').get('data_rewards') 24 | if data_rewards: 25 | for reward in data_rewards: 26 | reward_id = reward.get('id') 27 | wait_claim_ids.append(reward_id) 28 | 29 | # 领取奖励 30 | claim_url = "https://interface.carv.io/airdrop/data_rewards/claim" 31 | if wait_claim_ids: 32 | for claim_id in wait_claim_ids: 33 | data = { 34 | "id": claim_id 35 | } 36 | claim_resp = proxy.post(url=claim_url, headers=headers, trak_id=trak_id, data=json.dumps(data)) 37 | if claim_resp.status_code in [200, 201]: 38 | logging.info(f" claim claim_id : {claim_id} successful") 39 | logging.info(" ") 40 | logging.info(" ") 41 | logging.info(" ") 42 | logging.info( 43 | f"========================================== privateKey: {private_key} 完成领取积分 ======================================== ") 44 | -------------------------------------------------------------------------------- /carv/claim/claim_executor.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import time 3 | import uuid 4 | 5 | ######################################################### 6 | #将根目录加入sys.path中,解决命令行找不到包的问题 7 | import sys 8 | import os 9 | curPath = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))) 10 | sys.path.append(curPath) 11 | ######################################################### 12 | 13 | from carv.claim import claim 14 | from carv.common.common_util import CommonUtil, ProxyPoolManager 15 | 16 | logging.basicConfig(level=logging.INFO) 17 | 18 | file_path = os.path.join(curPath, 'carv', 'common', 'resources', 'checkin_private_keys.txt') 19 | abs_file_path = os.path.abspath(file_path) 20 | private_keys = CommonUtil().parse_txt_file(abs_file_path) 21 | 22 | proxy_manager = 
ProxyPoolManager() 23 | retry_attempts = 3 24 | retry_delay = 5 25 | for private_key in private_keys: 26 | trak_id = str(uuid.uuid4()) 27 | attempt = 0 28 | while attempt < retry_attempts: 29 | try: 30 | logging.info(" ") 31 | logging.info(" ") 32 | logging.info(" ") 33 | claim.claim_point(proxy=proxy_manager, trak_id=trak_id, private_key=private_key) 34 | logging.info(" ") 35 | logging.info(" ") 36 | logging.info(" ") 37 | break 38 | except Exception as e: 39 | logging.error(f"attempt {attempt + 1} failed with error: {e}") 40 | attempt += 1 41 | if attempt < retry_attempts: 42 | time.sleep(retry_delay) 43 | else: 44 | logging.error(f"all attempts to checkin have failed for private_key: {private_key}") 45 | logging.info(" ALL EXEC IN SUCCESSFUL !") 46 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /carv/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/carv/common/__init__.py -------------------------------------------------------------------------------- /carv/common/annotation.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import json 3 | import logging 4 | import time 5 | from concurrent.futures import ThreadPoolExecutor 6 | from datetime import datetime 7 | from functools import wraps 8 | 9 | 10 | def retry(max_attempts=3, delay_between_attempts=1, exceptions=(Exception,)): 11 | """ 12 | 重试装饰器函数。 13 | 14 | 参数: 15 | max_attempts: 最大尝试次数。 16 | delay_between_attempts: 两次尝试之间的延迟(秒)。 17 | exceptions: 需要重试的异常类型。 18 | """ 19 | 20 | def decorator(func): 21 | @wraps(func) 22 | def wrapper(*args, **kwargs): 23 | attempts = 0 24 | while attempts < max_attempts: 25 | try: 26 | return func(*args, **kwargs) 27 | except exceptions as e: 28 | attempts += 1 29 | logging.warning(f'Attempt {attempts} failed with error: {e}') 30 | if attempts < max_attempts: 31 | time.sleep(delay_between_attempts) 32 | else: 33 | logging.error(f'All {max_attempts} attempts failed. 
No more retries.') 34 | raise 35 | 36 | return wrapper 37 | 38 | return decorator 39 | 40 | 41 | def capture_error(error_type): 42 | def decorator(func): 43 | @wraps(func) 44 | def wrapper(*args, **kwargs): 45 | try: 46 | return func(*args, **kwargs) 47 | except Exception as e: 48 | current_utc_str = datetime.now().strftime('fail_%Y-%m-%d_') + error_type + '.json' 49 | # 收集入参作为错误参数 50 | key = kwargs.get('private_key', 'private_key') 51 | # 构造要写入的数据 52 | data = {'private_key': key, "error": str(e)} 53 | # 读取现有文件内容,如果文件不存在则创建一个空列表 54 | try: 55 | with open(current_utc_str, 'r', encoding='utf-8') as file: 56 | errors_list = json.load(file) 57 | except FileNotFoundError: 58 | errors_list = [] 59 | except json.JSONDecodeError: 60 | errors_list = [] 61 | # 将新的错误数据追加到列表中 62 | errors_list.append(data) 63 | # 将更新后的列表写回文件 64 | with open(current_utc_str, 'w', encoding='utf-8') as file: 65 | json.dump(errors_list, file, ensure_ascii=False, indent=4) 66 | # 重新抛出异常 67 | raise 68 | 69 | return wrapper 70 | 71 | return decorator 72 | 73 | 74 | # 异步任务装饰器 75 | def _async(max_concurrency=3): 76 | semaphore = asyncio.Semaphore(max_concurrency) 77 | 78 | def decorator(func): 79 | @wraps(func) 80 | async def wrapper(*args, **kwargs): 81 | async with semaphore: 82 | return await func(*args, **kwargs) 83 | 84 | return wrapper 85 | 86 | return decorator 87 | 88 | 89 | def to_async(max_workers=2): 90 | executor = ThreadPoolExecutor(max_workers=max_workers) 91 | 92 | def decorator(func): 93 | @wraps(func) 94 | async def async_wrapper(*args, **kwargs): 95 | loop = asyncio.get_running_loop() 96 | # 使用 functools.partial 来传递额外参数 97 | from functools import partial 98 | func_partial = partial(func, *args, **kwargs) 99 | result = await loop.run_in_executor(executor, func_partial) 100 | return result 101 | 102 | return async_wrapper 103 | 104 | return decorator 105 | -------------------------------------------------------------------------------- /carv/common/common_util.py: -------------------------------------------------------------------------------- 1 | ######################################################### 2 | # 将根目录加入sys.path中,解决命令行找不到包的问题 3 | import sys 4 | import os 5 | 6 | from carv.common import annotation 7 | from carv.common.annotation import retry 8 | 9 | curPath = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))) 10 | sys.path.append(curPath) 11 | ######################################################### 12 | import base64 13 | import itertools 14 | import json 15 | import logging 16 | import os 17 | import random 18 | import time 19 | 20 | import requests 21 | from eth_account.messages import encode_defunct 22 | from web3 import Web3 23 | 24 | 25 | class CommonUtil: 26 | _data_cache = {} 27 | 28 | @staticmethod 29 | @retry(max_attempts=5, delay_between_attempts=2) 30 | def exec_tx(_from, contract, input_data, nonce, chain_id, private_key, web3: Web3): 31 | contract_address = Web3.to_checksum_address(contract) 32 | # 估计gas 33 | estimated_gas = web3.eth.estimate_gas({ 34 | 'from': _from, 35 | 'to': contract_address, 36 | 'data': input_data 37 | }) 38 | gas_limit = int(estimated_gas * 1.1) 39 | # gas_limit = estimated_gas 40 | logging.info(f'estimated gas: {estimated_gas}, with buffer: {gas_limit}') 41 | # 获取当前的gas价格 42 | gas_price = web3.eth.gas_price 43 | # logging.info(f'current gas price: {gas_price}') 44 | # 构造交易 45 | tx = { 46 | 'from': _from, 47 | 'to': contract_address, 48 | 'gas': gas_limit, 49 | 'gasPrice': gas_price, 50 | 'nonce': nonce, 51 | 'data': input_data, 52 | 
'chainId': chain_id 53 | } 54 | # 签名交易 55 | signed_tx = web3.eth.account.sign_transaction(tx, private_key) 56 | # 发送交易 57 | tx_hash = web3.eth.send_raw_transaction(signed_tx.rawTransaction) 58 | receipt = web3.eth.wait_for_transaction_receipt(transaction_hash=tx_hash, timeout=10) 59 | tx_hash_id = receipt.transactionHash.hex() 60 | logging.info(f'Transaction successful with txId: {tx_hash_id}') 61 | return tx_hash_id 62 | 63 | """ 检查并修复input data """ 64 | 65 | @staticmethod 66 | def check_and_reset_input_data(input_data): 67 | if not input_data.startswith('0x'): 68 | return '0x' + input_data 69 | elif input_data.count('0x') > 1: 70 | return '0x' + input_data.replace('0x', '') 71 | else: 72 | raise Exception("format input_data err") 73 | 74 | """ 解析文件 """ 75 | 76 | @staticmethod 77 | def parse_txt_file(file_path): 78 | if not os.path.exists(file_path): 79 | logging.error(f"file '{file_path}' not found.") 80 | exit(1) 81 | with open(file_path, 'r', encoding='utf-8') as file: 82 | datas = file.readlines() 83 | 84 | datas = [data.strip() for data in datas if data.strip()] 85 | if len(datas) == 0: 86 | raise Exception("file data not found.") 87 | return datas 88 | 89 | @staticmethod 90 | def parse_json_file(key, file_path): 91 | # 检查这个key对应的数据是否已经加载 92 | if key in CommonUtil._data_cache: 93 | return CommonUtil._data_cache[key] 94 | 95 | # 检查文件是否存在 96 | if not os.path.exists(file_path): 97 | logging.error(f"File '{file_path}' not found.") 98 | exit(1) 99 | 100 | # 加载文件数据 101 | with open(file_path, 'r', encoding='utf-8') as file: 102 | CommonUtil._data_cache[key] = json.load(file) 103 | 104 | return CommonUtil._data_cache[key] 105 | 106 | @staticmethod 107 | def login(proxy, trak_id: str, private_key: str, chain_url: str): 108 | logging.info(f'>>>>> 开始登录 privateKey in : {private_key}') 109 | 110 | # 获取unique_text 111 | time_resp = proxy.get(trak_id=trak_id, url="https://worldtimeapi.org/api/timezone/etc/UTC") 112 | if time_resp.status_code in [200, 201]: 113 | unique_text = f'{time_resp.json()["unixtime"]}000' 114 | else: 115 | raise Exception("get unique_text fail") 116 | message_text = f"Hello! Please sign this message to confirm your ownership of the address. This action will not cost any gas fee. Here is a unique text: {unique_text}" 117 | 118 | # get signature_hex 119 | web3 = Web3(Web3.HTTPProvider(chain_url)) 120 | message = encode_defunct(text=message_text) 121 | account = web3.eth.account 122 | signed_message = account.sign_message(message, private_key=private_key) 123 | 124 | # signature_hex and address 125 | signature_hex = signed_message.signature.hex() 126 | address = account.from_key(private_key).address 127 | 128 | # step2. 
get accessToken 129 | base_headers = { 130 | 'Content-Type': 'application/json', 131 | 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', 132 | 'X-App-Id': 'carv', 133 | 'Origin': 'https://protocol.carv.io', 134 | 'authority': 'interface.carv.io', 135 | 'Referer': 'https://protocol.carv.io/' 136 | } 137 | access_token_data = { 138 | "signature": signature_hex, 139 | 'wallet_addr': address, 140 | 'text': message_text 141 | } 142 | auth_response = proxy.post('https://interface.carv.io/protocol/login', 143 | data=json.dumps(access_token_data), 144 | headers=base_headers) 145 | if auth_response.ok: 146 | auth_response_data = auth_response.json() 147 | if auth_response_data['code'] == 0: 148 | token = auth_response_data['data']['token'] 149 | authorization = base64.b64encode(f"eoa:{token}".encode('utf-8')).decode('utf-8') 150 | base_headers["Authorization"] = "bearer " + authorization 151 | return [address, base_headers] 152 | else: 153 | raise Exception(f"login fail, resp: {auth_response_data}") 154 | 155 | @staticmethod 156 | def login_with_retry(proxy, trak_id: str = None, private_key: str = None, 157 | chain_url: str = None, max_retries=3, retry_delay=5): 158 | retries = 0 159 | while retries < max_retries: 160 | try: 161 | # 尝试执行 login 方法 162 | return CommonUtil().login(proxy, trak_id, private_key, chain_url) 163 | except Exception as e: 164 | retries += 1 165 | logging.error(f"Login attempt {retries} failed with error: {e}") 166 | if retries < max_retries: 167 | # 在重试之前等待一段时间 168 | time.sleep(retry_delay) 169 | else: 170 | # 所有重试尝试都失败,抛出最后一个异常 171 | raise 172 | 173 | 174 | class ProxyPoolManager: 175 | def __init__(self): 176 | """ 177 | 178 | :rtype: object 179 | """ 180 | proxy_list = [] 181 | file_path = os.path.join(curPath, 'carv', 'common', 'resources', 'socks5_proxys.txt') 182 | tmp_proxies = CommonUtil().parse_txt_file(file_path) 183 | # 解析成正确结构 184 | for proxy_str in tmp_proxies: 185 | arr = proxy_str.split('|') 186 | if len(arr) < 4: 187 | continue 188 | ip = arr[0] 189 | port = arr[1] 190 | username = arr[2] 191 | pwd = arr[3] 192 | proxy_list.append(f'socks5://{username}:{pwd}@{ip}:{port}') 193 | # 先打乱代理列表 194 | random.shuffle(proxy_list) 195 | # 然后创建迭代器 196 | self.proxy_pool = itertools.cycle(proxy_list) 197 | # 存储会话 198 | self.sessions = {} 199 | self.proxy_list = proxy_list 200 | 201 | """ 该方法会使用不同代理来发送请求 """ 202 | 203 | def get_proxy(self): 204 | return next(self.proxy_pool) # 获取下一个代理 205 | 206 | def exec(self, url, method, data=None, headers=None): 207 | proxy = self.get_proxy() 208 | logging.info(f'请求:{url} 使用代理: {proxy}') 209 | proxies = {'http': proxy, 'https': proxy} 210 | try: 211 | if method == "get": 212 | return requests.get(url, data=data, headers=headers, proxies=proxies) 213 | if method == "post": 214 | return requests.post(url, data=data, headers=headers, proxies=proxies) 215 | if method == "put": 216 | return requests.put(url, data=data, headers=headers, proxies=proxies) 217 | if method == "delete": 218 | return requests.delete(url, data=data, headers=headers, proxies=proxies) 219 | except requests.exceptions.ProxyError as e: 220 | logging.info(f'代理错误: {e}') 221 | return None 222 | 223 | """ 该方法会使用同一个代理来发送请求 """ 224 | 225 | def session_exec(self, trak_id, url, method, data=None, headers=None): 226 | # 获取或创建session 227 | session = self.get_session(trak_id) 228 | logging.info(f'任务ID: {trak_id} 请求:{url} 使用代理: {session.proxies["http"]}') 229 | try: 230 | if method == "get": 231 | return 
session.get(url, data=data, headers=headers) 232 | if method == "post": 233 | return session.post(url, data=data, headers=headers) 234 | if method == "put": 235 | return session.put(url, data=data, headers=headers) 236 | if method == "delete": 237 | return session.delete(url, data=data, headers=headers) 238 | except requests.exceptions.ProxyError as e: 239 | logging.info(f'任务ID: {trak_id} 代理错误: {e}') 240 | return None 241 | 242 | def get_session(self, tark_id): 243 | if tark_id is None: 244 | raise Exception("tark_id cannot be None") 245 | if tark_id not in self.sessions: 246 | # 如果tark_id不存在,创建一个新的session与之关联 247 | session = requests.Session() 248 | proxy = self.get_proxy() 249 | proxies = {'http': proxy, 'https': proxy} 250 | logging.debug(f">>> 请求执行代理为: {proxy}") 251 | session.proxies.update(proxies) 252 | self.sessions[tark_id] = session 253 | return self.sessions[tark_id] 254 | 255 | def get(self, url, trak_id=None, data=None, headers=None): 256 | if trak_id is None: 257 | return self.exec(url, "get", data, headers) 258 | return self.session_exec(trak_id, url, "get", data, headers) 259 | 260 | def post(self, url, trak_id=None, data=None, headers=None): 261 | if trak_id is None: 262 | return self.exec(url, "post", data, headers) 263 | return self.session_exec(trak_id, url, "post", data, headers) 264 | 265 | def put(self, url, trak_id=None, data=None, headers=None): 266 | if trak_id is None: 267 | return self.exec(url, "put", data, headers) 268 | return self.session_exec(trak_id, url, "put", data, headers) 269 | 270 | def delete(self, url, trak_id=None, data=None, headers=None): 271 | if trak_id is None: 272 | return self.session_exec(trak_id, url, "delete", data, headers) 273 | return self.exec(url, "delete", data, headers) 274 | 275 | 276 | if __name__ == '__main__': 277 | pass 278 | -------------------------------------------------------------------------------- /carv/common/resources/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/carv/common/resources/__init__.py -------------------------------------------------------------------------------- /carv/common/resources/checkin_private_keys.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/carv/common/resources/checkin_private_keys.txt -------------------------------------------------------------------------------- /carv/common/resources/claim_private_keys.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/carv/common/resources/claim_private_keys.txt -------------------------------------------------------------------------------- /carv/common/resources/invite_config.yaml: -------------------------------------------------------------------------------- 1 | # 邀请用户配置 2 | invite: 3 | infos: 4 | - 5 | invite_private_key: xx # 邀请人私钥 6 | # 接受邀请人私钥,建议20个,因为邀请上限是20,多了好像领取不到多的邀请分了 7 | accept_private_keys: 8 | - xx 9 | - xx 10 | - xx -------------------------------------------------------------------------------- /carv/common/resources/socks5_proxys.txt: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/carv/common/resources/socks5_proxys.txt -------------------------------------------------------------------------------- /carv/requirements.txt: -------------------------------------------------------------------------------- 1 | web3==6.13.0 2 | eth-account==0.10.0 3 | requests==2.31.0 4 | setuptools==69.0.3 5 | PySocks==1.7.1 6 | pyyaml==6.0.1 -------------------------------------------------------------------------------- /grass/README.md: -------------------------------------------------------------------------------- 1 | # grassScript 2 | 3 | forked from : https://github.com/ymmmmmmmm/getgrass_bot 4 | 5 | 优化了一下使用流程,增加了多socks5与多uid的支持 6 | 7 | ## 1:安装依赖 8 | `pip3 install -r requirements.txt` 9 | 10 | ## 2:修改配置文件 11 | 12 | s_config_yaml (智能模式) 13 | config.yaml (指定模式) 14 | 15 | ## 3:启动 16 | - 主从模式,配置好master_slave_config.yaml文件,run:python master_slave_exec.py 17 | 18 | ## 邀请链接 19 | 20 | https://app.getgrass.io/register/?referralCode=59lUb0S1wUNZwZG 21 | 22 | ## user_id获取方法 23 | 24 | ### 方法一: 25 | 1.打开链接登录https://app.getgrass.io/dashboard 26 | 27 | 2.页面按F12打开控制台 输入代码 28 | 29 | `console.log(localStorage.getItem('userId'))` 30 | 31 | 打印的就是当前用户的user_id 32 | 33 | ![0001](https://github.com/ymmmmmmmm/getgrass_bot/assets/51306299/31d0e16e-df2f-443a-a141-910d16052ed9) 34 | 35 | ### 方法二 36 | 修改login.py中的username,password,运行login.py, 会打印出当前账号uid 37 | 38 | 39 | ## Feature 40 | 41 | todo 42 | 43 | -------------------------------------------------------------------------------- /grass/common/login.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import json 3 | import logging 4 | 5 | logging.basicConfig(level=logging.INFO) 6 | 7 | 8 | class LoginManager: 9 | def __init__(self, url='https://api.getgrass.io/auth/login'): 10 | self.log_url = url 11 | self.uid = None 12 | self.username = None 13 | self.password = None 14 | 15 | def do_login(self, username, password): 16 | body = { 17 | "user": username, 18 | "password": password 19 | } 20 | headers = { 21 | 'Content-Type': 'application/json', 22 | 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', 23 | 'Origin': 'https://app.getgrass.io' 24 | } 25 | try: 26 | login_response = requests.post(self.log_url, data=json.dumps(body), headers=headers) 27 | login_info = login_response.json() 28 | logging.info(f"login info: {login_info}") 29 | if login_response.status_code in [200, 201] and login_info["status"] == "success": 30 | logging.info("login successful") 31 | self.username = username 32 | self.uid = login_info["data"]["id"] 33 | else: 34 | logging.error("login failed") 35 | except requests.RequestException as e: 36 | logging.error(f"An error occurred while trying to login: {e}") 37 | 38 | 39 | if __name__ == '__main__': 40 | login = LoginManager() 41 | login.do_login('', '') 42 | print(login.uid) 43 | -------------------------------------------------------------------------------- /grass/masterslave/connect_to_wss_util.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import asyncio 4 | import random 5 | import ssl 6 | import json 7 | import uuid 8 | import time 9 | 10 | from loguru import logger 11 | from websockets_proxy import Proxy, proxy_connect 12 | 13 | 14 | async def send_ping(websocket, device_id, user_id, socks5_proxy): 15 | while True: 16 
| try: 17 | send_message = json.dumps({ 18 | "device_id": device_id, 19 | "uid": user_id, 20 | "id": str(uuid.uuid4()), 21 | "version": "1.0.0", 22 | "action": "PING", 23 | "data": {} 24 | }) 25 | logger.info(f"send_ping : {send_message}") 26 | await websocket.send(send_message) 27 | await asyncio.sleep(20) 28 | except asyncio.CancelledError: 29 | # exit the loop if the task was cancelled 30 | break 31 | except Exception as e: 32 | logger.error(f"error occurred in send_ping: {e}, proxy: {socks5_proxy}, uid: {user_id}", exc_info=True) 33 | # if the WebSocket connection is closed, leave this function so the caller can reconnect 34 | if e.__class__.__name__ == 'ConnectionClosedError': 35 | logger.error("ConnectionClosedError") 36 | await websocket.close() 37 | break 38 | await asyncio.sleep(3) 39 | 40 | 41 | async def send_pong(device_id, message, user_id, websocket): 42 | await websocket.send(json.dumps({"device_id": device_id, "uid": user_id, "id": message["id"], 43 | "origin_action": "PONG"})) 44 | 45 | 46 | async def connect_to_wss(socks5_proxy, user_id): 47 | device_id = str(uuid.uuid3(uuid.NAMESPACE_DNS, socks5_proxy + ":" + user_id)) 48 | logger.info(f"Load uid: {user_id}, proxy: {socks5_proxy}, device_id : {device_id}") 49 | 50 | while True: 51 | try: 52 | await asyncio.sleep(random.randint(1, 10) / 10) 53 | custom_headers = { 54 | "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" 55 | } 56 | ssl_context = ssl.create_default_context() 57 | ssl_context.check_hostname = False 58 | ssl_context.verify_mode = ssl.CERT_NONE 59 | uri = "wss://proxy.wynd.network:4650/" 60 | server_hostname = "proxy.wynd.network" 61 | proxy = Proxy.from_url(socks5_proxy) 62 | async with proxy_connect(uri, proxy=proxy, ssl=ssl_context, server_hostname=server_hostname, 63 | extra_headers=custom_headers) as websocket: 64 | ping_task = asyncio.create_task(send_ping(websocket, device_id, user_id, socks5_proxy)) 65 | while True: 66 | response = await websocket.recv() 67 | message = json.loads(response) 68 | logger.debug(f"received server message -> message: {message}") 69 | if message.get("action") == "AUTH": 70 | 71 | await send_auth(custom_headers, device_id, message, user_id, websocket) 72 | 73 | elif message.get("action") == "PONG": 74 | 75 | await send_pong(device_id, message, user_id, websocket) 76 | 77 | await ping_task 78 | 79 | except Exception as e: 80 | logger.error(f"An error occurred: {e}, proxy: {socks5_proxy}, uid: {user_id}", exc_info=True) 81 | finally: 82 | await asyncio.sleep(5) # retry after 5 seconds when an error occurs 83 | 84 | 85 | async def send_auth(custom_headers, device_id, message, user_id, websocket): 86 | await websocket.send(json.dumps({ 87 | "id": message["id"], 88 | "origin_action": "AUTH", 89 | "result": { 90 | "browser_id": device_id, 91 | "user_id": user_id, 92 | "user_agent": custom_headers['User-Agent'], 93 | "timestamp": int(time.time()), 94 | "device_type": "extension", 95 | "version": "2.5.0" 96 | } 97 | })) 98 | -------------------------------------------------------------------------------- /grass/masterslave/master_slave_actuator.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | import asyncio 5 | 6 | from connect_to_wss_util import connect_to_wss 7 | from master_slave_config import MasterSlaveConfigManager 8 | 9 | 10 | def do_exec(config): 11 | socks5_proxys = config.socks5_proxys 12 | user_ids = config.user_ids 13 | 14 | # check 15 | if not socks5_proxys or not user_ids: 16 | raise ValueError("socks5_proxys or user_ids len is zero") 17 | 18 | tasks = [] 19 | # 
确定较大和较小的列表 20 | larger, smaller = (socks5_proxys, user_ids) if len(socks5_proxys) > len(user_ids) else (user_ids, socks5_proxys) 21 | multiple = len(larger) // len(smaller) 22 | remainder = len(larger) % len(smaller) 23 | 24 | def distribute_items(larger_list, smaller_list): 25 | dict = {} 26 | start_idx = 0 27 | for s_item in smaller_list: 28 | assigned_items = larger_list[start_idx:start_idx + multiple] 29 | dict[s_item] = assigned_items 30 | start_idx += multiple 31 | # 处理余数的情况 32 | for i in range(remainder): 33 | dict[smaller_list[i]].append(larger_list[start_idx + i]) 34 | return dict 35 | 36 | mapping = distribute_items(larger, smaller) 37 | 38 | # 创建代理任务 39 | if len(socks5_proxys) > len(user_ids): 40 | # 代理数量超过用户ID数量的情况 41 | for user_id, proxy_list in mapping.items(): 42 | for proxy in proxy_list: 43 | tasks.append(asyncio.create_task(connect_to_wss(proxy, user_id))) 44 | else: 45 | # 用户ID数量超过代理的情况 46 | for proxy, user_id_list in mapping.items(): 47 | for user_id in user_id_list: 48 | tasks.append(asyncio.create_task(connect_to_wss(proxy, user_id))) 49 | 50 | return tasks 51 | 52 | 53 | async def execute_tasks(config): 54 | tasks = do_exec(config) 55 | await asyncio.gather(*tasks) 56 | 57 | 58 | async def main(): 59 | await execute_tasks(MasterSlaveConfigManager.get_config("slave")) 60 | 61 | 62 | if __name__ == '__main__': 63 | asyncio.run(main()) 64 | -------------------------------------------------------------------------------- /grass/masterslave/master_slave_config.py: -------------------------------------------------------------------------------- 1 | import yaml 2 | import threading 3 | 4 | 5 | class MaterSlaveConfig: 6 | def __init__(self, real_config): 7 | self.socks5_proxys = [] 8 | self.user_ids = [] 9 | self.parse(real_config) 10 | 11 | def parse(self, real_config): 12 | self.user_ids = real_config.get('user_ids', []) 13 | simple_arrays = real_config.get('socks5_proxys_simple', []) 14 | tmp_socks5_proxys = [] 15 | if simple_arrays: 16 | for proxy_str in simple_arrays: 17 | arr = proxy_str.split('|') 18 | if len(arr) < 4: 19 | continue 20 | ip = arr[0] 21 | port = arr[1] 22 | username = arr[2] 23 | pwd = arr[3] 24 | tmp_socks5_proxys.append(f'socks5://{username}:{pwd}@{ip}:{port}') 25 | self.socks5_proxys = tmp_socks5_proxys 26 | else: 27 | self.socks5_proxys = real_config.get('socks5_proxys', []) 28 | 29 | 30 | class MasterSlaveConfigManager: 31 | _configs = {} 32 | _lock = threading.Lock() 33 | 34 | @classmethod 35 | def get_config(cls, _type): 36 | with cls._lock: 37 | if _type not in cls._configs: 38 | try: 39 | with open('master_slave_config.yaml', 'r', encoding='utf-8') as file: 40 | config = yaml.safe_load(file) 41 | real_config = config.get(_type) 42 | if real_config is None: 43 | raise ValueError("_type not found") 44 | cls._configs[_type] = MaterSlaveConfig(real_config) 45 | except FileNotFoundError: 46 | print("配置文件未找到") 47 | raise 48 | except yaml.YAMLError as exc: 49 | print("加载配置文件发生错误:", exc) 50 | raise 51 | return cls._configs[_type] 52 | 53 | 54 | # 使用示例 55 | if __name__ == '__main__': 56 | def thread_function(_type): 57 | try: 58 | config = MasterSlaveConfigManager.get_config(_type) 59 | print(f"Config for {_type}: {config}") 60 | except Exception as e: 61 | print(f"Error in thread {_type}: {e}") 62 | 63 | 64 | thread1 = threading.Thread(target=thread_function, args=('one_to_one',)) 65 | thread2 = threading.Thread(target=thread_function, args=('many_to_many',)) 66 | thread3 = threading.Thread(target=thread_function, args=('many_to_many',)) 67 | 68 | 
thread1.start() 69 | thread2.start() 70 | thread3.start() 71 | 72 | thread1.join() 73 | thread2.join() 74 | thread3.join() 75 | -------------------------------------------------------------------------------- /grass/masterslave/master_slave_config.yaml: -------------------------------------------------------------------------------- 1 | 2 | # 小号配置,会根据uid与proxy均匀绑定代理与uid,最好是uid=proxy 3 | slave: 4 | # 你的uid 5 | user_ids: 6 | 7 | # 你的代理 8 | socks5_proxys_simple: 9 | 10 | # 主号配置,主号会均匀分配配置的代理地址 11 | master: 12 | # 你的uid 13 | user_ids: 14 | 15 | # 你的代理 16 | socks5_proxys_simple: 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /grass/masterslave/master_slave_exec.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from master_slave_actuator import execute_tasks 4 | from master_slave_config import MasterSlaveConfigManager 5 | 6 | 7 | async def exec_master(): 8 | print("=========== executor master =============") 9 | await execute_tasks(MasterSlaveConfigManager.get_config("master")) 10 | 11 | 12 | async def exec_slave(): 13 | print("=========== executor slave =============") 14 | await execute_tasks(MasterSlaveConfigManager.get_config("slave")) 15 | 16 | 17 | async def process(): 18 | tasks = [asyncio.ensure_future(exec_master()), asyncio.ensure_future(exec_slave())] 19 | await asyncio.gather(*tasks) 20 | 21 | 22 | if __name__ == '__main__': 23 | asyncio.run(process()) 24 | -------------------------------------------------------------------------------- /grass/register/register_config.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/grass/register/register_config.yaml -------------------------------------------------------------------------------- /grass/requirements.txt: -------------------------------------------------------------------------------- 1 | loguru==0.7.2 2 | websockets_proxy==0.1.0 3 | websockets==12.0 4 | PyYAML==6.0.1 5 | async_timeout==4.0.3 6 | -------------------------------------------------------------------------------- /opi/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ip|port|username|password" 3 | } -------------------------------------------------------------------------------- /opi/config.txt: -------------------------------------------------------------------------------- 1 | opi上报名称 -------------------------------------------------------------------------------- /opi/proxy_report_opi.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import requests 4 | from apscheduler.schedulers.blocking import BlockingScheduler 5 | from loguru import logger 6 | 7 | 8 | 9 | def get_proxyip(proxy_str: str): 10 | arr = proxy_str.split('|') 11 | if len(arr) < 4: 12 | raise Exception("> 代理格式错误, 请检查格式是否为:ip|port|username|pwd") 13 | 14 | ip = arr[0] 15 | port = arr[1] 16 | username = arr[2] 17 | pwd = arr[3] 18 | 19 | proxy = f"socks5://{username}:{pwd}@{ip}:{port}" 20 | 21 | result = { 22 | "https": proxy, 23 | "http:": proxy 24 | } 25 | return result 26 | 27 | 28 | def report(): 29 | logger.debug("================================= 开始上报OPI数据 ================================ ") 30 | with open('./config.json', 'r', encoding='utf-8') as f: 31 | data = json.load(f) 32 | logger.debug(f"> 
等待上报数据数量: {len(data)}") 33 | for name, proxyip in data.items(): 34 | proxy = get_proxyip(proxyip) 35 | logger.debug(f"> 开始上报 name: {name}, 代理: {proxy}") 36 | do_report(name, proxy) 37 | logger.debug(f"> 上报完成 name: {name}, IP: {proxy}") 38 | 39 | logger.debug("================================= 结束上报OPI数据 ================================ ") 40 | 41 | 42 | def do_report(name, proxies): 43 | try: 44 | last_report_data = None 45 | # 获取区块上报信息 46 | last_reports_resp = requests.get(url="https://opi.network/api/get_last_reports", proxies=proxies) 47 | if last_reports_resp.ok: 48 | last_reports_json = last_reports_resp.json() 49 | if last_reports_json["error"] is False: 50 | last_report_data = last_reports_json.get("data")[0].get("report") 51 | 52 | # 替换name 53 | last_report_data["name"] = name 54 | r = requests.post(url="https://api.opi.network/report_block", json=last_report_data, proxies=proxies) 55 | if r.status_code == 200: 56 | logger.debug("> 上报成功!") 57 | return 58 | else: 59 | print("上报发生错误, status code: " + str(r.status_code)) 60 | except Exception as e: 61 | logger.error(f"> 上报发生错误: {e}, name: {name}, IP: {proxies}") 62 | 63 | 64 | def scheduler_report(): 65 | logger.debug( 66 | "================================= 启动上报OPI调度任务,将会在每10分钟执行一次 ================================ ") 67 | scheduler = BlockingScheduler() 68 | scheduler.add_job(report, 'cron', minute='*/10') 69 | scheduler.start() 70 | 71 | 72 | if __name__ == '__main__': 73 | scheduler_report() 74 | -------------------------------------------------------------------------------- /opi/report_opi.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | 4 | import requests 5 | from apscheduler.schedulers.blocking import BlockingScheduler 6 | from loguru import logger 7 | 8 | REPORT_NAME = "asdfund00" 9 | 10 | 11 | def get_proxyip(proxy_str: str): 12 | arr = proxy_str.split('|') 13 | if len(arr) < 4: 14 | raise Exception("> 代理格式错误, 请检查格式是否为:ip|port|username|pwd") 15 | 16 | ip = arr[0] 17 | port = arr[1] 18 | username = arr[2] 19 | pwd = arr[3] 20 | 21 | proxy = f"socks5://{username}:{pwd}@{ip}:{port}" 22 | 23 | result = { 24 | "https": proxy, 25 | "http:": proxy 26 | } 27 | return result 28 | 29 | 30 | def parse_txt_file(file_path): 31 | if not os.path.exists(file_path): 32 | logger.error(f"file '{file_path}' not found.") 33 | exit(1) 34 | with open(file_path, 'r', encoding='utf-8') as file: 35 | datas = file.readlines() 36 | 37 | datas = [data.strip() for data in datas if data.strip()] 38 | if len(datas) == 0: 39 | raise Exception("file data not found.") 40 | return datas 41 | 42 | 43 | def report(): 44 | logger.debug("================================= 开始上报OPI数据 ================================ ") 45 | names = parse_txt_file("./config.txt") 46 | for name in names: 47 | logger.debug(f"> 开始上报 name: {name}") 48 | do_report(name) 49 | logger.debug(f"> 上报完成 name: {name}") 50 | 51 | logger.debug("================================= 结束上报OPI数据 ================================ ") 52 | 53 | 54 | def do_report(name): 55 | try: 56 | last_report_data = None 57 | # 获取区块上报信息 58 | last_reports_resp = requests.get(url="https://opi.network/api/get_last_reports") 59 | if last_reports_resp.ok: 60 | last_reports_json = last_reports_resp.json() 61 | if last_reports_json["error"] is False: 62 | last_report_data = last_reports_json.get("data")[0].get("report") 63 | 64 | # 替换name 65 | last_report_data["name"] = name 66 | r = requests.post(url="https://api.opi.network/report_block", json=last_report_data) 
67 | if r.status_code == 200: 68 | logger.debug("> 上报成功!") 69 | return 70 | else: 71 | print("上报发生错误, status code: " + str(r.status_code)) 72 | except Exception as e: 73 | logger.error(f"> 上报发生错误: {e}, name: {name}") 74 | 75 | 76 | def scheduler_report(): 77 | logger.debug( 78 | "================================= 启动上报OPI调度任务,将会在每10分钟执行一次 ================================ ") 79 | scheduler = BlockingScheduler() 80 | scheduler.add_job(report, 'cron', minute='*/10') 81 | scheduler.start() 82 | 83 | 84 | if __name__ == '__main__': 85 | scheduler_report() 86 | -------------------------------------------------------------------------------- /opi/requirements.txt: -------------------------------------------------------------------------------- 1 | requests==2.31.0 2 | loguru>=0.7.0 3 | apscheduler -------------------------------------------------------------------------------- /qna3/README.md: -------------------------------------------------------------------------------- 1 | 2 | # 先决条件 3 | 4 | ### 签到 5 | 需要opbnb链上有bnb,不用太多,一两刀就行 6 | ### 领取积分 7 | 需要bsc链上有bnb 8 | 9 | # 安装依赖 10 | pip3 install -r requirements.txt 11 | 12 | # 运行 13 | 需要先进行配置: 14 | 15 | 1: checkin_private_keys.txt - 签到私钥 16 | 17 | 2: claim_private_keys.txt - 领取积分的私钥 18 | 19 | 3: socks5_proxys.txt - 代理配置,格式: 20 | ``` 21 | ip|port|username|password 22 | ip|port|username|password 23 | ``` 24 | 25 | 4: 配置invite_config.yaml(注意配置私钥得时候用引号引起来,否则会被当成数字解析) 邀请人,如果需要的话。 26 | ## 签到 27 | 28 | python checkin_executor.py 29 | 30 | ## 领取积分 31 | 32 | python claim_executor.py 33 | 34 | ## 邀请人 35 | python invite_manager.py 36 | 37 | # 查看结果 38 | 去浏览器上看看成功没 39 | 40 | 41 | # Feature 42 | - [x] 签到 43 | - [x] 领取积分 44 | - [x] 邀请注册 45 | - [ ] 点赞回答 -------------------------------------------------------------------------------- /qna3/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/qna3/__init__.py -------------------------------------------------------------------------------- /qna3/checkin/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/qna3/checkin/__init__.py -------------------------------------------------------------------------------- /qna3/checkin/check_in.py: -------------------------------------------------------------------------------- 1 | ######################################################### 2 | # 将根目录加入sys.path中,解决命令行找不到包的问题 3 | import sys 4 | import os 5 | 6 | curPath = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))) 7 | sys.path.append(curPath) 8 | ######################################################### 9 | from web3 import Web3 10 | import json 11 | import logging 12 | from qna3.common import qna3_util 13 | from qna3.common.proxy_manager import ProxyPoolManager 14 | from qna3.common.qna3_util import get_base_info 15 | from qna3.common.re_captcha_parser import ReCaptchaParser 16 | 17 | logging.basicConfig(level=logging.INFO) 18 | 19 | CONTRACT = "0xb342e7d33b806544609370271a8d074313b7bc30" 20 | INPUT_DATA = "0xe95a644f0000000000000000000000000000000000000000000000000000000000000001" 21 | 22 | 23 | def retry_check_in(proxy_manager: ProxyPoolManager = None, captcha_parser: ReCaptchaParser = None, trak_id: str = None, 24 | private_key: str = None): 25 | return qna3_util.retry_operation_with_logging( 26 | 
function=do_check_in, 27 | proxy_manager=proxy_manager, 28 | trak_id=trak_id, 29 | private_key=private_key, 30 | captcha_parser=captcha_parser 31 | ) 32 | 33 | 34 | # 签到 35 | def do_check_in(proxy_manager: ProxyPoolManager, captcha_parser: ReCaptchaParser, trak_id: str, private_key: str): 36 | logging.info(f'======================= start check in privateKey in : {private_key} ============================') 37 | 38 | # 获取登录的人机验证token 39 | login_recaptcha = get_captcha_parser(captcha_parser) 40 | # step1,构造公共请求头 41 | web3 = Web3(Web3.WebsocketProvider('wss://opbnb.publicnode.com')) 42 | address, headers, _ = get_base_info(proxy_manager=proxy_manager, trak_id=trak_id, 43 | private_key=private_key, recaptcha=login_recaptcha) 44 | 45 | # step2 检查是否已经签到,如果已经签到就不用再次签到了 46 | checked = is_checkin(proxy_manager=proxy_manager, trak_id=trak_id, private_key=private_key, headers=headers) 47 | if checked: 48 | return address, "", private_key 49 | 50 | # 签到之前需要先调用验证接口(签到前置检查) 51 | checkin_verify(headers, proxy_manager) 52 | 53 | # step3. 交互合约签到,返回txId 54 | chain_id = web3.eth.chain_id 55 | nonce = web3.eth.get_transaction_count(Web3.to_checksum_address(address)) 56 | 57 | # 签到的检查,需要调用validate接口(谷歌图形验证) 58 | checkin_recaptcha = validate_checkin(captcha_parser, headers, proxy_manager) 59 | 60 | tx_hash_id = qna3_util.exec_tx(address, CONTRACT, INPUT_DATA, nonce, chain_id, private_key, web3) 61 | # step4. 上报得分 62 | report_point(proxy_manager=proxy_manager, trak_id=trak_id, headers=headers, tx_hash_id=tx_hash_id, 63 | private_key=private_key, checkin_recaptcha=checkin_recaptcha) 64 | 65 | return [address, tx_hash_id, private_key] 66 | 67 | 68 | def checkin_verify(headers, proxy_manager): 69 | checkin_verify_resp = proxy_manager.post(url='https://api.qna3.ai/api/v2/my/check-in/verify', headers=headers, 70 | data=json.dumps({"via": "opbnb"})) 71 | if not checkin_verify_resp.ok or checkin_verify_resp.json()['data']['action'] != 'passed': 72 | err_msg = f'> 调用 check-in verify 接口失败..., resp: {checkin_verify_resp.json()}' 73 | logging.error(err_msg) 74 | raise Exception(err_msg) 75 | 76 | 77 | def get_captcha_parser(captcha_parser): 78 | login_recaptcha = None 79 | if captcha_parser.enable: 80 | login_recaptcha = captcha_parser.get_captcha_token("login") 81 | return login_recaptcha 82 | 83 | 84 | def validate_checkin(captcha_parser, headers, proxy_manager): 85 | checkin_recaptcha = None 86 | if captcha_parser.enable: 87 | checkin_recaptcha = captcha_parser.get_captcha_token("checkin") 88 | checkin_validate_resp = proxy_manager.post(url=f'https://api.qna3.ai/api/v2/my/validate', headers=headers, 89 | data=json.dumps({ 90 | "action": "checkin", 91 | "recaptcha": checkin_recaptcha 92 | })) 93 | if not checkin_validate_resp.ok: 94 | raise Exception(f"> 请求检查签到接口失败... resp: {checkin_validate_resp.text}") 95 | checkin_status_code = checkin_validate_resp.json().get("statusCode") 96 | if checkin_status_code != 200: 97 | raise Exception(f"> 验证签到接口失败... resp: {checkin_validate_resp.text}") 98 | return checkin_recaptcha 99 | 100 | 101 | def is_checkin(proxy_manager: ProxyPoolManager, trak_id: str, private_key: str, headers: dict) -> bool: 102 | query_checkin_body = { 103 | "query": "query loadUserDetail($cursored: CursoredRequestInput!) 
{\n userDetail {\n checkInStatus {\n checkInDays\n todayCount\n }\n credit\n creditHistories(cursored: $cursored) {\n cursorInfo {\n endCursor\n hasNextPage\n }\n items {\n claimed\n extra\n id\n score\n signDay\n signInId\n txHash\n typ\n }\n total\n }\n invitation {\n code\n inviteeCount\n leftCount\n }\n origin {\n email\n id\n internalAddress\n userWalletAddress\n }\n voteHistoryOfCurrentActivity {\n created_at\n query\n }\n ambassadorProgram {\n bonus\n claimed\n family {\n checkedInUsers\n totalUsers\n }\n }\n }\n}", 104 | "variables": { 105 | "cursored": { 106 | "after": "", 107 | "first": 20 108 | } 109 | } 110 | } 111 | query_checkin_resp = proxy_manager.post('https://api.qna3.ai/api/v2/graphql', 112 | trak_id, 113 | data=json.dumps(query_checkin_body), 114 | headers=headers) 115 | if query_checkin_resp.ok: 116 | today_check_in_count = query_checkin_resp.json().get("data").get("userDetail").get("checkInStatus").get( 117 | "todayCount") 118 | if today_check_in_count > 0: 119 | logging.info(f" >>>>>>>>>>>>>>>> 今日已经签到, 本次签到跳过 , private_key:{private_key} ") 120 | return True 121 | else: 122 | raise Exception(f"查询签到信息失败!private key : {private_key}") 123 | return False 124 | 125 | 126 | # 上报得分 127 | def report_point(proxy_manager: ProxyPoolManager, trak_id: str, headers: dict, tx_hash_id, private_key: str, 128 | checkin_recaptcha: str): 129 | body = { 130 | 'hash': tx_hash_id, 131 | 'via': 'opbnb' 132 | } 133 | if checkin_recaptcha: 134 | body['recaptcha']: checkin_recaptcha 135 | 136 | check_sign_response = proxy_manager.post('https://api.qna3.ai/api/v2/my/check-in', trak_id, data=json.dumps(body), 137 | headers=headers) 138 | if check_sign_response.ok: 139 | logging.info(f' >>>>>>>>签到成功, json : {check_sign_response.json()}') 140 | elif check_sign_response.status_code == 409 and 'already checked' in check_sign_response.json().get('message'): 141 | logging.info(f' >>>>>>>>签到成功, json : {check_sign_response.json()}') 142 | elif check_sign_response.status_code == 422 and 'already checked' in check_sign_response.json().get('message'): 143 | logging.info(f' >>>>>>>>签到成功, json : {check_sign_response.json()}') 144 | else: 145 | fail_msg = f' >>>>>>>> 签到失败, trak_id: {trak_id}, private_key:{private_key} response : {check_sign_response.json()}' 146 | logging.error(fail_msg) 147 | qna3_util.record_failure_to_json(private_key=private_key, fail_msg=fail_msg) 148 | -------------------------------------------------------------------------------- /qna3/checkin/checkin_executor.py: -------------------------------------------------------------------------------- 1 | ######################################################### 2 | # 将根目录加入sys.path中,解决命令行找不到包的问题 3 | import sys 4 | import os 5 | 6 | curPath = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))) 7 | sys.path.append(curPath) 8 | ######################################################### 9 | import asyncio 10 | import uuid 11 | from concurrent.futures import ThreadPoolExecutor 12 | import check_in 13 | import logging 14 | from qna3.common import qna3_util 15 | from qna3.common.proxy_manager import ProxyPoolManager 16 | from configparser import ConfigParser 17 | from qna3.common.re_captcha_parser import ReCaptchaParser 18 | 19 | logging.basicConfig(level=logging.DEBUG) 20 | 21 | executor = ThreadPoolExecutor() 22 | 23 | init_config = ConfigParser() 24 | init_config_path = '../resources/config.ini' 25 | init_config.read(init_config_path, encoding='utf-8') 26 | 27 | 28 | async def run_blocking_io(func, *args): 29 | loop = 
asyncio.get_event_loop()
30 | return await loop.run_in_executor(executor, func, *args)
31 | 
32 | 
33 | async def check_in_coroutine(semaphore, proxy_manager, private_key, captcha_parser):
34 | async with semaphore:
35 | trak_id = uuid.uuid4()
36 | try:
37 | logging.info(f"executing check in private key: '{private_key}'")
38 | # 在线程池中运行同步函数
39 | address, tx_hash_id, new_private_key = await run_blocking_io(
40 | check_in.retry_check_in, proxy_manager, captcha_parser, str(trak_id), private_key
41 | )
42 | logging.info(
43 | "==================================== CHECK IN SUCCESS ===============================================")
44 | logging.info(
45 | f'CHECK IN SUCCESSFUL, ADDRESS: {address}, PRIVATE_KEY: {private_key}, TX_HASH_ID: {tx_hash_id}')
46 | logging.info(
47 | "==================================== CHECK IN SUCCESS ===============================================")
48 | except Exception as e:
49 | logging.error(f"> check in fail, private key is :{private_key}, exception is: {e}")
50 | 
51 | 
52 | async def main():
53 | file_path = os.path.join(curPath, 'qna3', 'resources', 'checkin_private_keys.txt')
54 | abs_file_path = os.path.abspath(file_path)
55 | private_keys = qna3_util.parse_txt_file(abs_file_path)
56 | proxy_manager = ProxyPoolManager()
57 | captcha_parser = ReCaptchaParser()
58 | 
59 | async_num = 2
60 | customer_async_num = init_config['checkin']['async_num']
61 | if customer_async_num:
62 | async_num = int(customer_async_num)
63 | 
64 | logging.info(f"> 即将开始执行签到,执行签到任务异步数量为:{async_num}")
65 | semaphore = asyncio.Semaphore(async_num)
66 | 
67 | # 创建并启动协程列表
68 | tasks = [check_in_coroutine(semaphore=semaphore, proxy_manager=proxy_manager, private_key=private_key,
69 | captcha_parser=captcha_parser) for private_key in private_keys]
70 | await asyncio.gather(*tasks)
71 | 
72 | logging.info(" ALL EXEC SUCCESSFUL !")
73 | 
74 | 
75 | if __name__ == '__main__':
76 | asyncio.run(main())
77 | 
-------------------------------------------------------------------------------- /qna3/claim/__init__.py: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/qna3/claim/__init__.py
-------------------------------------------------------------------------------- /qna3/claim/claim_executor.py: --------------------------------------------------------------------------------
1 | import logging
2 | import uuid
3 | #########################################################
4 | #将根目录加入sys.path中,解决命令行找不到包的问题
5 | import sys
6 | import os
7 | curPath = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
8 | sys.path.append(curPath)
9 | #########################################################
10 | from qna3.claim import claim_point
11 | from qna3.common import qna3_util
12 | from qna3.common.proxy_manager import ProxyPoolManager
13 | 
14 | logging.basicConfig(level=logging.DEBUG)
15 | 
16 | 
17 | file_path = os.path.join(curPath, 'qna3', 'resources', 'claim_private_keys.txt')
18 | abs_file_path = os.path.abspath(file_path)
19 | private_keys = qna3_util.parse_txt_file(abs_file_path)
20 | 
21 | proxy_manager = ProxyPoolManager()
22 | for private_key in private_keys:
23 | trak_id = uuid.uuid4()
24 | logging.info(f"executing claim in private key: '{private_key}'")
25 | (address, new_private_key, tx_hash_id) = claim_point.claim_point(proxy_manager, str(trak_id), private_key)
26 | logging.info("==================================== CLAIM IN SUCCESS 
===============================================") 27 | logging.info(" ") 28 | logging.info(" ") 29 | logging.info(" ") 30 | logging.info(f'CLAIM SUCCESSFUL, ADDRESS: {address}, PRIVATE_KEY: {private_key}, TX_HASH_ID: {tx_hash_id}') 31 | logging.info(" ") 32 | logging.info(" ") 33 | logging.info(" ") 34 | logging.info("==================================== CLAIM IN SUCCESS ===============================================") 35 | 36 | 37 | logging.info(" ALL EXEC IN SUCCESSFUL !") -------------------------------------------------------------------------------- /qna3/claim/claim_point.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | ######################################################### 4 | #将根目录加入sys.path中,解决命令行找不到包的问题 5 | import sys 6 | import os 7 | curPath = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))) 8 | sys.path.append(curPath) 9 | ######################################################### 10 | from web3 import Web3 11 | 12 | from qna3.common import qna3_util 13 | from qna3.common.proxy_manager import ProxyPoolManager 14 | 15 | CONTRACT = "0xb342e7d33b806544609370271a8d074313b7bc30" 16 | 17 | 18 | def claim_point(proxy_manager: ProxyPoolManager, trak_id: str, private_key: str): 19 | # step1: 查询领取积分方法,查看我能领取多少分 20 | query_claim_point_url = "https://api.qna3.ai/api/v2/my/claim-all" 21 | address, headers, _ = qna3_util.get_base_info(proxy_manager, trak_id, private_key) 22 | query_claim_point_response = proxy_manager.post(url=query_claim_point_url, trak_id=trak_id, headers=headers, data=json.dumps({})) 23 | logging.info(f'query claim point successful, json : {query_claim_point_response.json()}') 24 | 25 | nonce = None 26 | amount = None 27 | history_id = None 28 | signature = None 29 | if query_claim_point_response.ok: 30 | claim_response_data = query_claim_point_response.json() 31 | status_code = claim_response_data['statusCode'] 32 | if status_code == 200: 33 | data = claim_response_data.get("data") 34 | if data: 35 | nonce = data['signature']['nonce'] 36 | signature = data['signature']['signature'] 37 | amount = data['amount'] 38 | history_id = data['history_id'] 39 | else: 40 | raise Exception("claim-all response is null, skip....") 41 | if nonce is None or amount is None or history_id is None or signature is None: 42 | logging.error("====== no points available for redemption were found,skip task!=======") 43 | return 44 | 45 | # step2:调用合约,领取积分 46 | # 签名数据 47 | web3 = Web3(Web3.HTTPProvider('https://bsc-mainnet.nodereal.io/v1/132d52c330424e7896bdc25a5d6ef5fc')) 48 | chain_id = web3.eth.chain_id 49 | input_data = qna3_util.check_and_reset_input_data(build_input_data(amount, nonce, signature)) 50 | tx_nonce = web3.eth.get_transaction_count(address) 51 | tx_id = qna3_util.exec_tx(address, CONTRACT, input_data, tx_nonce, chain_id, private_key, web3) 52 | 53 | # step3:上报合约领取积分成功 54 | report_claim_point_url = f"https://api.qna3.ai/api/v2/my/claim/{history_id}" 55 | body = { 56 | "hash": tx_id 57 | } 58 | report_claim_point_response = proxy_manager.put(url=report_claim_point_url, trak_id=trak_id, data=json.dumps(body), 59 | headers=headers) 60 | report_point_json = report_claim_point_response.json() 61 | logging.info(f'exec claim point successful, json : {report_point_json}') 62 | if report_claim_point_response.status_code in [200, 201]: 63 | if report_point_json["statusCode"] == 200: 64 | logging.info(f'claim point successful') 65 | else: 66 | raise Exception("claim point fail") 67 | 
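    # NOTE: the returned tuple order is (address, private_key, tx_hash_id); claim_executor.py unpacks it in this order.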
return address, private_key, tx_id 68 | 69 | 70 | ### input_data的参数参考 71 | # 0x624f82f5 - functionName 72 | # 000000000000000000000000000000000000000000000000000000000000000e -金额(64位) 73 | # 00000000000000000000000000000000000000000000000000000000000f5836 - nonce(64位) 74 | # 0000000000000000000000000000000000000000000000000000000000000060 - 固定(64位) 75 | # 0000000000000000000000000000000000000000000000000000000000000041 - 固定(64位) 76 | # 9d7738b50579134ee69d7a578942dcd8bb5bd00f9d2359439bc7034d61c6c37a34f8f9d997303be9271e7b52f0953aa36dfb06c07dd44707a30c0c786f2033641c - 签名(128位) 77 | # 00000000000000000000000000000000000000000000000000000000000000 - 固定 78 | 79 | 80 | # 0x624f82f5 81 | # 00000000000000000000000000000000000000000000000000000000000000b3 82 | # 00000000000000000000000000000000000000000000000000000000001b3eba 83 | # 0000000000000000000000000000000000000000000000000000000000000060 84 | # 0000000000000000000000000000000000000000000000000000000000000041 85 | # eca77245f8b48b9d21ae304f6784e31b4107ce504be3d234d247fee4afb04274528ce93747c43c23aac0535bb347848ff8b45bf17983db28188e21b0c7f749231b 86 | # 00000000000000000000000000000000000000000000000000000000000000 87 | 88 | def build_input_data(amount, nonce, signature): 89 | return (f"0x624f82f5{f'{hex(amount)[2:].zfill(64)}'}" 90 | f"{f'{hex(nonce)[2:].zfill(64)}'}" 91 | f"0000000000000000000000000000000000000000000000000000000000000060" 92 | f"0000000000000000000000000000000000000000000000000000000000000041" 93 | f"{signature}" 94 | f"00000000000000000000000000000000000000000000000000000000000000") 95 | 96 | 97 | if __name__ == '__main__': 98 | print() 99 | -------------------------------------------------------------------------------- /qna3/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/qna3/common/__init__.py -------------------------------------------------------------------------------- /qna3/common/annotation.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /qna3/common/proxy_manager.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | import random 3 | ######################################################### 4 | #将根目录加入sys.path中,解决命令行找不到包的问题 5 | import sys 6 | import os 7 | curPath = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))) 8 | sys.path.append(curPath) 9 | ######################################################### 10 | 11 | import requests 12 | import logging 13 | 14 | from qna3.common import qna3_util 15 | 16 | logging.basicConfig(level=logging.INFO) 17 | 18 | """ 请求代理池 """ 19 | 20 | 21 | class ProxyPoolManager: 22 | def __init__(self): 23 | """ 24 | 25 | :rtype: object 26 | """ 27 | proxy_list = [] 28 | file_path = os.path.join(curPath, 'qna3', 'resources', 'socks5_proxys.txt') 29 | abs_file_path = os.path.abspath(file_path) 30 | tmp_proxies = qna3_util.parse_txt_file(abs_file_path) 31 | # 解析成正确结构 32 | for proxy_str in tmp_proxies: 33 | arr = proxy_str.split('|') 34 | if len(arr) < 4: 35 | continue 36 | ip = arr[0] 37 | port = arr[1] 38 | username = arr[2] 39 | pwd = arr[3] 40 | proxy_list.append(f'socks5://{username}:{pwd}@{ip}:{port}') 41 | # 先打乱代理列表 42 | random.shuffle(proxy_list) 43 | # 然后创建迭代器 44 | self.proxy_pool = itertools.cycle(proxy_list) 45 | 
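        # Note: exec() rotates to the next proxy on every call, while session_exec() reuses one
        # requests.Session (and therefore one proxy / exit IP) per trak_id.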
# 存储会话
46 | self.sessions = {}
47 | 
48 | """ 该方法会使用不同代理来发送请求 """
49 | 
50 | def get_proxy(self):
51 | return next(self.proxy_pool) # 获取下一个代理
52 | 
53 | def exec(self, url, method, data=None, headers=None):
54 | proxy = self.get_proxy()
55 | logging.info(f'请求:{url} 使用代理: {proxy}')
56 | proxies = {'http': proxy, 'https': proxy}
57 | try:
58 | if method == "get":
59 | return requests.get(url, data=data, headers=headers, proxies=proxies)
60 | if method == "post":
61 | return requests.post(url, data=data, headers=headers, proxies=proxies)
62 | if method == "put":
63 | return requests.put(url, data=data, headers=headers, proxies=proxies)
64 | if method == "delete":
65 | return requests.delete(url, data=data, headers=headers, proxies=proxies)
66 | except requests.exceptions.ProxyError as e:
67 | logging.info(f'代理错误: {e}')
68 | return None
69 | 
70 | """ 该方法会使用同一个代理来发送请求 """
71 | 
72 | def session_exec(self, trak_id, url, method, data=None, headers=None):
73 | # 获取或创建session
74 | session = self.get_session(trak_id)
75 | logging.info(f'任务ID: {trak_id} 请求:{url} 使用代理: {session.proxies["http"]}')
76 | try:
77 | if method == "get":
78 | return session.get(url, data=data, headers=headers)
79 | if method == "post":
80 | return session.post(url, data=data, headers=headers)
81 | if method == "put":
82 | return session.put(url, data=data, headers=headers)
83 | if method == "delete":
84 | return session.delete(url, data=data, headers=headers)
85 | except requests.exceptions.ProxyError as e:
86 | logging.info(f'任务ID: {trak_id} 代理错误: {e}')
87 | return None
88 | 
89 | def get_session(self, tark_id):
90 | if tark_id is None:
91 | raise Exception("tark_id cannot be None")
92 | if tark_id not in self.sessions:
93 | # 如果tark_id不存在,创建一个新的session与之关联
94 | session = requests.Session()
95 | proxy = self.get_proxy()
96 | proxies = {'http': proxy, 'https': proxy}
97 | logging.debug(f">>> 请求执行代理为: {proxy}")
98 | session.proxies.update(proxies)
99 | self.sessions[tark_id] = session
100 | return self.sessions[tark_id]
101 | 
102 | def get(self, url, trak_id=None, data=None, headers=None):
103 | if trak_id is None:
104 | return self.exec(url, "get", data, headers)
105 | return self.session_exec(trak_id, url, "get", data, headers)
106 | 
107 | def post(self, url, trak_id=None, data=None, headers=None):
108 | if trak_id is None:
109 | return self.exec(url, "post", data, headers)
110 | return self.session_exec(trak_id, url, "post", data, headers)
111 | 
112 | def put(self, url, trak_id=None, data=None, headers=None):
113 | if trak_id is None:
114 | return self.exec(url, "put", data, headers)
115 | return self.session_exec(trak_id, url, "put", data, headers)
116 | 
117 | def delete(self, url, trak_id=None, data=None, headers=None):
118 | if trak_id is None:
119 | return self.exec(url, "delete", data, headers)
120 | return self.session_exec(trak_id, url, "delete", data, headers)
121 | 
122 | if __name__ == '__main__':
123 | proxy_manager = ProxyPoolManager()
124 | for _ in range(3):
125 | response = proxy_manager.get(trak_id=1, url='https://www.baidu.com')
126 | if response:
127 | print(response.status_code)
128 | else:
129 | print("请求失败")
130 | 
-------------------------------------------------------------------------------- /qna3/common/qna3_util.py: --------------------------------------------------------------------------------
1 | #########################################################
2 | # 将根目录加入sys.path中,解决命令行找不到包的问题
3 | import sys
4 | import os
5 | from datetime import datetime
6 | 
7 | 
8 | curPath = 
os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))) 9 | sys.path.append(curPath) 10 | ######################################################### 11 | from eth_account.messages import encode_defunct 12 | import json 13 | import logging 14 | import time 15 | 16 | from web3 import Web3 17 | 18 | from qna3.common.proxy_manager import ProxyPoolManager 19 | 20 | logging.basicConfig(level=logging.INFO) 21 | 22 | record_fail_file_path = f'./failures_{datetime.now().strftime("%Y-%m-%d")}.json' 23 | 24 | 25 | def get_base_info(proxy_manager: ProxyPoolManager = None, trak_id: str = None, private_key=None, 26 | invite_code: str = None, 27 | recaptcha: str = None): 28 | message = encode_defunct(text="AI + DYOR = Ultimate Answer to Unlock Web3 Universe") 29 | web3 = Web3(Web3.WebsocketProvider('wss://opbnb.publicnode.com')) 30 | account = web3.eth.account 31 | signed_message = account.sign_message(message, private_key=private_key) 32 | # signature_hex and address 33 | signature_hex = signed_message.signature.hex() 34 | address = account.from_key(private_key).address 35 | # step2. get accessToken 36 | access_token_data = { 37 | 'signature': signature_hex, 38 | 'wallet_address': address 39 | } 40 | # 如果有邀请码,就填充邀请码 41 | if invite_code: 42 | access_token_data['invite_code'] = invite_code 43 | # 是否需要人机验证码 44 | if recaptcha: 45 | access_token_data["recaptcha"] = recaptcha 46 | 47 | headers = { 48 | 'Content-Type': 'application/json; charset=utf-8', 49 | 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', 50 | 'authority': 'api.qna3.ai', 51 | 'Origin': 'https://qna3.ai' 52 | } 53 | access_token = '' 54 | user_id = '' 55 | auth_response = proxy_manager.post('https://api.qna3.ai/api/v2/auth/login?via=wallet', 56 | trak_id, 57 | data=json.dumps(access_token_data), 58 | headers=headers) 59 | if auth_response.ok: 60 | auth_response_data = auth_response.json() 61 | # logging.info(f'req auth successful, json : {auth_response_data}') 62 | access_token = auth_response_data['data']['accessToken'] 63 | user_id = auth_response_data['data']['user']['id'] 64 | invite_code = auth_response_data['data']['user']['invite_code'] 65 | # step3. 
get me info
66 | headers['Authorization'] = 'Bearer ' + access_token
67 | headers['X-Id'] = user_id
68 | return address, headers, invite_code
69 | 
70 | 
71 | # 执行签到合约交互
72 | def exec_tx(_from, contract, input_data, nonce, chain_id, private_key, web3):
73 | contract_address = Web3.to_checksum_address(contract)
74 | # 估计gas
75 | estimated_gas = web3.eth.estimate_gas({
76 | 'from': _from,
77 | 'to': contract_address,
78 | 'data': input_data
79 | })
80 | gas_limit = int(estimated_gas * 1.1)
81 | # gas_limit = estimated_gas
82 | logging.info(f'estimated gas: {estimated_gas}, with buffer: {gas_limit}')
83 | # 获取当前的gas价格
84 | gas_price = web3.eth.gas_price
85 | # logging.info(f'current gas price: {gas_price}')
86 | # 构造交易
87 | tx = {
88 | 'from': _from,
89 | 'to': contract_address,
90 | 'gas': gas_limit,
91 | 'gasPrice': gas_price,
92 | 'nonce': nonce,
93 | 'data': input_data,
94 | 'chainId': chain_id
95 | }
96 | # 签名交易
97 | signed_tx = web3.eth.account.sign_transaction(tx, private_key)
98 | # 发送交易
99 | tx_hash = web3.eth.send_raw_transaction(signed_tx.rawTransaction)
100 | receipt = None
101 | max_polling_attempts = 5
102 | delay_between_attempts = 3
103 | for attempt in range(max_polling_attempts):
104 | try:
105 | # 尝试获取交易收据
106 | time.sleep(delay_between_attempts)
107 | receipt = web3.eth.get_transaction_receipt(tx_hash)
108 | if receipt:
109 | # 如果收据存在,则打印信息并退出循环
110 | logging.info(f"transaction receipt found in attempt {attempt + 1}")
111 | break
112 | except Exception as e:
113 | # 如果发生异常,则打印错误消息
114 | logging.info(f"attempt {attempt + 1} failed: {e}")
115 | # 如果没有找到收据,则等待指定的延迟时间
116 | logging.error(f"waiting for {delay_between_attempts} seconds before next attempt...")
117 | time.sleep(delay_between_attempts)
118 | tx_hash_id = receipt.transactionHash.hex()
119 | logging.info(f'transaction successful txId: {tx_hash_id}')
120 | return tx_hash_id
121 | 
122 | 
123 | # 检查并修复input data
124 | def check_and_reset_input_data(input_data):
125 | if not input_data.startswith('0x'):
126 | return '0x' + input_data
127 | elif input_data.count('0x') > 1:
128 | return '0x' + input_data.replace('0x', '')
129 | else:
130 | return input_data  # already a single '0x'-prefixed payload, nothing to fix
131 | 
132 | 
133 | def parse_txt_file(file_path):
134 | if not os.path.exists(file_path):
135 | logging.error(f"file '{file_path}' not found.")
136 | exit(1)
137 | with open(file_path, 'r', encoding='utf-8') as file:
138 | datas = file.readlines()
139 | 
140 | datas = [data.strip() for data in datas if data.strip()]
141 | if len(datas) == 0:
142 | raise Exception("file data not found.")
143 | return datas
144 | 
145 | 
146 | max_retries = 3
147 | retry_delay = 5
148 | 
149 | 
150 | def retry_operation_with_logging(function, **kwargs):
151 | retries = 0
152 | while retries < max_retries:
153 | try:
154 | return function(**kwargs)
155 | except Exception as e:
156 | logging.error(f"尝试 {retries + 1} 失败,错误: {e}")
157 | time.sleep(retry_delay)
158 | retries += 1
159 | 
160 | # 如果达到最大重试次数仍然失败,记录失败的 privateKey
161 | fail_msg = f"签到操作在 {max_retries} 次尝试后失败。"
162 | logging.error(fail_msg)
163 | if 'private_key' in kwargs:
164 | record_failure_to_json(kwargs['private_key'], fail_msg)
165 | raise Exception(fail_msg)
166 | 
167 | 
168 | def log_failed_private_key(private_key):
169 | current_dir = os.path.dirname(os.path.realpath(__file__))
170 | failed_keys_file = os.path.join(current_dir, 'failed_private_keys.txt')
171 | with open(failed_keys_file, 'a') as file:
172 | file.write(f"{private_key}\n")
173 | 
174 | 
175 | def record_failure_to_json(private_key, fail_msg):
176 | # 确保存储失败的目录存在 177
| 178 | os.makedirs(os.path.dirname(record_fail_file_path), exist_ok=True) 179 | 180 | # 确保文件存在 181 | if not os.path.isfile(record_fail_file_path): 182 | with open(record_fail_file_path, "w", encoding="utf-8") as f: 183 | f.write('{}') # 写入一个空的JSON对象 184 | 185 | # 打开文件进行读写 186 | with open(record_fail_file_path, "r+", encoding="utf-8") as file: 187 | try: 188 | data = json.load(file) 189 | except json.JSONDecodeError: 190 | data = {} 191 | data[private_key] = fail_msg 192 | file.seek(0) # 移动到文件开头以覆盖写入 193 | json.dump(data, file, indent=2, ensure_ascii=False) 194 | file.truncate() # 删除现有文件内容的剩余部分 195 | -------------------------------------------------------------------------------- /qna3/common/re_captcha_parser.py: -------------------------------------------------------------------------------- 1 | ######################################################### 2 | # 将根目录加入sys.path中,解决命令行找不到包的问题 3 | import sys 4 | import os 5 | 6 | curPath = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))) 7 | sys.path.append(curPath) 8 | ######################################################### 9 | import json 10 | import logging 11 | import time 12 | from configparser import ConfigParser 13 | 14 | import requests 15 | 16 | logging.basicConfig(level=logging.INFO) 17 | 18 | 19 | class ReCaptchaParser: 20 | def __init__(self): 21 | config = ConfigParser() 22 | config_file_path = '../resources/config.ini' 23 | config.read(config_file_path, encoding='utf-8') 24 | 25 | # 配置信息 26 | re_captcha_config = config["re_captcha"] 27 | self.site_key = re_captcha_config["site_key"] 28 | self.captcha_url = re_captcha_config["captcha_url"] 29 | self.api_key = re_captcha_config["api_key"] 30 | self.enable = bool(re_captcha_config.getboolean("enable", fallback=False)) 31 | 32 | """ 33 | 获取token reCaptcha token 34 | @param action: 动作类型 35 | """ 36 | 37 | def get_captcha_token(self, action: str): 38 | if not self.enable: 39 | return None 40 | create_task_body = { 41 | "clientKey": self.api_key, 42 | "task": { 43 | "type": "ReCaptchaV3TaskProxyless", 44 | "websiteURL": self.captcha_url, 45 | "websiteKey": self.site_key, 46 | "pageAction": action 47 | } 48 | } 49 | 50 | headers = { 51 | "Content-Type": "application/json" 52 | } 53 | 54 | logging.info(f"> 开始解析 ReCaptcha, action: [{action}] .....") 55 | try: 56 | create_task_resp = requests.post(url="https://api.ez-captcha.com/createTask", 57 | data=json.dumps(create_task_body), headers=headers) 58 | create_task_resp.raise_for_status() 59 | except requests.RequestException as e: 60 | raise Exception(f"> 创建ReCaptcha解析任务失败: {e}") 61 | 62 | if not create_task_resp.ok: 63 | raise Exception(f"> 创建ReCaptcha解析任务失败, 响应: {create_task_resp.text}") 64 | 65 | if create_task_resp.ok: 66 | print(create_task_resp.json()) 67 | task_id = create_task_resp.json().get("taskId") 68 | 69 | time.sleep(3) 70 | 71 | status = "" 72 | headers = {'Content-Type': 'application/json'} 73 | max_retries = 10 74 | retry_count = 0 75 | 76 | while status != "ready": 77 | if retry_count >= max_retries: 78 | raise Exception("> 解析ReCaptcha失败, 已经达到最大尝试失败次数.....") 79 | 80 | get_task_body = { 81 | "clientKey": self.api_key, 82 | "taskId": task_id 83 | } 84 | get_task_resp = requests.post(url="https://api.ez-captcha.com/getTaskResult", 85 | data=json.dumps(get_task_body), 86 | headers=headers) 87 | 88 | if get_task_resp.ok: 89 | get_task_json = get_task_resp.json() 90 | error_id = get_task_json.get("errorId") 91 | status = get_task_json.get("status") 92 | 93 | if status == "ready" and error_id == 0: 94 
| token = get_task_json.get("solution").get("gRecaptchaResponse") 95 | logging.info(f"> 解析ReCaptcha成功,token: {token}") 96 | return token 97 | else: 98 | logging.warning( 99 | f"> 解析ReCaptcha任务尚未完成,将在1s后重试 Status: {status}, Error ID: {error_id}") 100 | 101 | else: 102 | logging.error(f"> 获取ReCaptcha解析任务失败,Status Code: {get_task_resp.status_code}") 103 | time.sleep(1) 104 | retry_count += 1 105 | raise Exception(f"> 创建ReCaptcha解析任务失败, resp: {create_task_resp}") 106 | 107 | 108 | if __name__ == '__main__': 109 | parser = ReCaptchaParser() 110 | token = parser.get_captcha_token("login") 111 | print(token) -------------------------------------------------------------------------------- /qna3/invite/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/qna3/invite/__init__.py -------------------------------------------------------------------------------- /qna3/invite/invite_manager.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import uuid 3 | import yaml 4 | ######################################################### 5 | # 将根目录加入sys.path中,解决命令行找不到包的问题 6 | import sys 7 | import os 8 | 9 | curPath = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))) 10 | sys.path.append(curPath) 11 | ######################################################### 12 | from qna3.common import qna3_util 13 | from qna3.common.proxy_manager import ProxyPoolManager 14 | 15 | logging.basicConfig(level=logging.INFO) 16 | 17 | file_path = os.path.join(curPath, 'qna3', 'resources', 'invite_config.yaml') 18 | abs_file_path = os.path.abspath(file_path) 19 | 20 | 21 | class InviteManager: 22 | def __init__(self, config_path=abs_file_path): 23 | self.config_path = config_path 24 | self.proxy = ProxyPoolManager() 25 | 26 | def get_config_data(self): 27 | with open(self.config_path, 'r', encoding='utf-8') as file: 28 | return yaml.safe_load(file) 29 | 30 | def invite(self): 31 | # 邀请信息 32 | infos = self.get_config_data().get("invite").get("infos") 33 | if infos is None or len(infos) == 0: 34 | raise Exception("invite infos can't be empty") 35 | 36 | for info in infos: 37 | self.process_invitation(info) 38 | 39 | def process_invitation(self, info): 40 | invite_private_key = info.get("invite_private_key") 41 | logging.info(f"====================== private key:{invite_private_key} 开始邀请 =======================") 42 | logging.info(f"= ") 43 | logging.info(f"= ") 44 | logging.info(f"= ") 45 | 46 | # 邀请人信息 47 | invite_address, _, invite_code = qna3_util.get_base_info( 48 | proxy_manager=self.proxy, trak_id=str(uuid.uuid4()), private_key=invite_private_key 49 | ) 50 | 51 | if invite_code is None or len(invite_code) == 0: 52 | raise Exception("invite_code can't be None") 53 | 54 | # 接受邀请 55 | accept_private_keys = info.get("accept_private_keys") 56 | if accept_private_keys is None or len(accept_private_keys) == 0: 57 | raise Exception("accept_private_keys can't be empty") 58 | 59 | for accept_private_key in accept_private_keys: 60 | logging.info( 61 | f">>>> 邀请人 address: {invite_address}, privateKey: {invite_private_key}, inviteCode: {invite_code}") 62 | accept_address, _, _ = qna3_util.get_base_info( 63 | proxy_manager=self.proxy, trak_id=str(uuid.uuid4()), private_key=accept_private_key, 64 | invite_code=invite_code 65 | ) 66 | logging.info(f">>>> 受邀请成功 接受人 address : {accept_address}, privateKey: 
{accept_private_key}") 67 | 68 | logging.info(f"= ") 69 | logging.info(f"= ") 70 | logging.info(f"= ") 71 | logging.info(f"====================== private key:{invite_private_key} 完成邀请 =======================") 72 | 73 | 74 | if __name__ == '__main__': 75 | invite_manager = InviteManager() 76 | invite_manager.invite() 77 | -------------------------------------------------------------------------------- /qna3/requirements.txt: -------------------------------------------------------------------------------- 1 | web3==6.13.0 2 | eth-account==0.10.0 3 | requests==2.31.0 4 | setuptools==69.0.3 5 | PySocks==1.7.1 6 | pyyaml==6.0.1 -------------------------------------------------------------------------------- /qna3/resources/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/qna3/resources/__init__.py -------------------------------------------------------------------------------- /qna3/resources/checkin_private_keys.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/qna3/resources/checkin_private_keys.txt -------------------------------------------------------------------------------- /qna3/resources/claim_private_keys.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/qna3/resources/claim_private_keys.txt -------------------------------------------------------------------------------- /qna3/resources/config.ini: -------------------------------------------------------------------------------- 1 | # re_captcha 配置 2 | [re_captcha] 3 | enable = false 4 | api_key = 5 | captcha_url = https://qna3.ai/vote 6 | site_key = 6Lcq80spAAAAADGCu_fvSx3EG46UubsLeaXczBat 7 | # ReCaptchaV3TaskProxyless - 一般分数处理(可能会失败,价格低),ReCaptchaV3TaskProxylessS9 - 高分处理,ReCaptchaV3EnterpriseTaskProxyless - 企业级处理 8 | #process_type = ReCaptchaV3TaskProxyless 9 | 10 | # 签到相关配置 11 | [checkin] 12 | # 启用签到的异步执行数量, 默认值为2 13 | async_num = 2 -------------------------------------------------------------------------------- /qna3/resources/invite_config.yaml: -------------------------------------------------------------------------------- 1 | # 邀请用户配置 2 | invite: 3 | infos: 4 | - 5 | invite_private_key: xx # 邀请人私钥 6 | # 接受邀请人私钥,建议20个,因为邀请上限是20,多了好像领取不到多的邀请分了 7 | accept_private_keys: 8 | - xx 9 | - xx 10 | - xx -------------------------------------------------------------------------------- /qna3/resources/socks5_proxys.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/qna3/resources/socks5_proxys.txt -------------------------------------------------------------------------------- /tool/README.md: -------------------------------------------------------------------------------- 1 | # 常用工具 2 | 3 | ## wallet 4 | 包含的钱包相关的工具 5 | 6 | - 创建钱包 7 | - 根据提示操作即可,成功后会在目录下生成evm_wallet.xlsx文件 8 | 9 | ## withdraw 10 | 包含提现相关工具 11 | 12 | - 币安提现 13 | 1. 修改config.ini文件 14 | 2. 修改withdraw_addresses.json文件 15 | 3. 
运行脚本 python withdraw.py 16 | 17 | -------------------------------------------------------------------------------- /tool/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/tool/__init__.py -------------------------------------------------------------------------------- /tool/requirements.txt: -------------------------------------------------------------------------------- 1 | binance-connector 2 | python-dotenv 3 | eth-account==0.9.0 4 | openpyxl==3.1.0 -------------------------------------------------------------------------------- /tool/wallet/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/tool/wallet/__init__.py -------------------------------------------------------------------------------- /tool/wallet/create_wallet.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from eth_account import Account 4 | from openpyxl import Workbook 5 | 6 | 7 | def get_input_wallet_count(): 8 | return input("输入的钱包数量为:") 9 | 10 | 11 | def generate_wallet_info(): 12 | global mnemonic, address, privateKey 13 | Account.enable_unaudited_hdwallet_features() 14 | # 创建账户,助记词 15 | account, mnemonic = Account.create_with_mnemonic() 16 | # 钱包地址 17 | address = account.address 18 | # 私钥 19 | privateKey = account.key.hex() 20 | 21 | return address, privateKey, mnemonic 22 | 23 | 24 | if __name__ == '__main__': 25 | 26 | while True: 27 | walletCount = int(get_input_wallet_count()) 28 | if walletCount < 1: 29 | print('>>> 钱包数量不能小于 1') 30 | else: 31 | break 32 | 33 | # 创建excel 34 | wb = Workbook() 35 | ws = wb.active 36 | 37 | # ws.append(['address', 'privateKey', 'mnemonic']) 38 | ws.append(['地址', '私钥', '助记词']) 39 | 40 | for i in range(walletCount): 41 | address, privateKey, mnemonic = generate_wallet_info() 42 | row = [address, privateKey, mnemonic] 43 | ws.append(row) 44 | 45 | # 保存文件 46 | wb.save('evm_wallet.xlsx') 47 | 48 | # 结束程序 49 | countdown = 3 50 | print('>>> 生成成功, 即将自动退出') 51 | while countdown > 0: 52 | print(f'{countdown} ...') 53 | time.sleep(1) 54 | countdown -= 1 -------------------------------------------------------------------------------- /tool/withdraw/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Web5DayDayStudy/airdrop_scripts/241960d4bc9d0ab44eb274897342959caae6163a/tool/withdraw/__init__.py -------------------------------------------------------------------------------- /tool/withdraw/config.ini: -------------------------------------------------------------------------------- 1 | # 启用环境,对应的keys 2 | [profile] 3 | enable = binance_keys 4 | 5 | # binance apiKey配置 6 | [binance_keys] 7 | api_key = xxx 8 | api_secret = xxx 9 | 10 | 11 | # 提现配置 12 | [withdraw] 13 | # 代币符号 14 | coin = BNB 15 | # 提现网络,不配置使用默认的网络 16 | network = OPBNB 17 | # 提现数量 18 | amount = 0.005 19 | # 每个账号提现间隔时间(秒) 20 | interval_time = 5 21 | -------------------------------------------------------------------------------- /tool/withdraw/util.py: -------------------------------------------------------------------------------- 1 | from configparser import ConfigParser, NoSectionError 2 | 3 | 4 | def get_api_key(): 5 | config = ConfigParser() 6 | config_file_path = 'config.ini' 7 | 
config.read(config_file_path, encoding='utf-8') 8 | 9 | # 检查 'profile' 部分和 'enable' 键是否存在 10 | if 'profile' not in config or 'enable' not in config['profile']: 11 | raise Exception("Missing 'enable' in 'profile' section in config.ini") 12 | 13 | enable_profile = config['profile']['enable'] 14 | 15 | # 检查激活的配置是否存在 16 | if enable_profile not in config: 17 | raise NoSectionError(f"The profile '{enable_profile}' is not defined in config.ini") 18 | 19 | # 检查必要的键是否存在于选定的配置 20 | if "api_key" not in config[enable_profile] or "api_secret" not in config[enable_profile]: 21 | raise Exception(f"Missing 'api_key' or 'api_secret' in '{enable_profile}' profile in config.ini") 22 | 23 | return config[enable_profile]["api_key"], config[enable_profile]["api_secret"] 24 | 25 | 26 | def get_withdrawal(): 27 | config = ConfigParser() 28 | config_file_path = 'config.ini' 29 | config.read(config_file_path, encoding='utf-8') 30 | return config["withdraw"]["coin"], config["withdraw"]["network"], config["withdraw"]["amount"], config["withdraw"][ 31 | "interval_time"] 32 | 33 | 34 | if __name__ == '__main__': 35 | print(f"Api keys: {get_api_key()}") 36 | print(f"Withdrawal : {get_withdrawal()}") 37 | -------------------------------------------------------------------------------- /tool/withdraw/withdraw.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | ######################################################### 4 | # 将根目录加入sys.path中,解决命令行找不到包的问题 5 | import sys 6 | import os 7 | 8 | curPath = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))) 9 | sys.path.append(curPath) 10 | ######################################################### 11 | import json 12 | import logging 13 | import time 14 | from binance.spot import Spot as Client 15 | from binance.lib.utils import config_logging 16 | 17 | from tool.withdraw.util import get_api_key, get_withdrawal 18 | 19 | config_logging(logging, logging.INFO) 20 | api_key, api_secret = get_api_key() 21 | coin, network, amount, interval_time = get_withdrawal() 22 | spot_client = Client(api_key, api_secret, show_header=True) 23 | 24 | 25 | def do_withdraw(): 26 | # read addresses 27 | with open("withdraw_addresses.json", "r") as fp: 28 | list_addresses = json.load(fp) 29 | logging.info(f"Finished reading list of addresses.") 30 | 31 | for address in list_addresses: 32 | logging.info(f"================== 开始提现 address: {address} ===========================") 33 | logging.info(" ") 34 | logging.info(" ") 35 | logging.info(" ") 36 | resp = spot_client.withdraw(coin=coin, amount=amount, network=network, 37 | address=address) 38 | data = resp.get("data") 39 | logging.info(f"resp: {data}") 40 | logging.info(" ") 41 | logging.info(" ") 42 | logging.info(" ") 43 | logging.info(f"================== 提现成功 address: {address} ===========================") 44 | time.sleep(int(interval_time)) 45 | 46 | logging.info(f" 所有地址提现成功! ") 47 | 48 | 49 | if __name__ == '__main__': 50 | do_withdraw() 51 | -------------------------------------------------------------------------------- /tool/withdraw/withdraw_addresses.json: -------------------------------------------------------------------------------- 1 | [ 2 | "0x...." 3 | ] --------------------------------------------------------------------------------