├── src ├── __init__.py ├── cleanup │ ├── __init__.py │ ├── modes.py │ └── manager.py ├── core │ ├── __init__.py │ ├── priority_fee │ │ ├── __init__.py │ │ ├── fixed_fee.py │ │ ├── dynamic_fee.py │ │ └── manager.py │ ├── pubkeys.py │ ├── wallet.py │ ├── curve.py │ └── client.py ├── trading │ ├── __init__.py │ ├── base.py │ ├── seller.py │ ├── buyer.py │ └── trader.py ├── utils │ ├── __init__.py │ └── logger.py ├── monitoring │ ├── __init__.py │ ├── base_listener.py │ ├── logs_event_processor.py │ ├── block_event_processor.py │ ├── logs_listener.py │ └── block_listener.py ├── cli.py └── config.py ├── .env.example ├── learning-examples ├── raw_bondingCurve_from_getAccountInfo.json ├── calculate_discriminator.py ├── compute_associated_bonding_curve.py ├── decode_from_getAccountInfo.py ├── fetch_price.py ├── listen_new_portal.py ├── get_pumpswap_pools.py ├── cleanup_accounts.py ├── track_bonding_curve_progress.py ├── decode_from_getTransaction.py ├── blockSubscribe_extract_transactions.py ├── check_boding_curve_status.py ├── listen_to_raydium_migration.py ├── listen_new_direct.py ├── decode_from_blockSubscribe.py ├── listen_to_logs_migration.py ├── listen_create_from_blocksubscribe.py ├── listen_new_direct_full_details.py ├── decoded_buy_tx_from_getTransaction.json ├── manual_sell.py └── decoded_create_tx_from_getTransaction.json ├── pyproject.toml ├── tests ├── test_logs_listener.py ├── test_block_listener.py └── compare_listeners.py ├── .gitignore └── README.md /src/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/cleanup/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/core/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/trading/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/monitoring/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | SOLANA_NODE_RPC_ENDPOINT=... 2 | SOLANA_NODE_WSS_ENDPOINT=... 3 | SOLANA_PRIVATE_KEY=... 
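For orientation, the three variables in .env.example are read from the process environment at startup: src/cli.py looks each of them up with os.environ.get, and python-dotenv (a declared dependency) is used in the learning examples to load the .env file. A minimal, hypothetical sketch of that pattern (illustrative only, not a file in this repository) could look like this:

import os

from dotenv import load_dotenv

# Load variables from a local .env file (copied from .env.example) into the environment.
load_dotenv()

REQUIRED_VARS = (
    "SOLANA_NODE_RPC_ENDPOINT",
    "SOLANA_NODE_WSS_ENDPOINT",
    "SOLANA_PRIVATE_KEY",
)

# Fail fast if any required value is missing, mirroring the validation done in src/cli.py.
missing = [name for name in REQUIRED_VARS if not os.environ.get(name)]
if missing:
    raise SystemExit(f"Missing environment variables: {', '.join(missing)}")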
-------------------------------------------------------------------------------- /learning-examples/raw_bondingCurve_from_getAccountInfo.json: -------------------------------------------------------------------------------- 1 | {"jsonrpc":"2.0","result":{"context":{"apiVersion":"1.18.22","slot":285247740},"value":{"data":["F7f4N2DYrGD99rN6is0DALwvcwAHAAAA/V6hLvnOAgC8g08EAAAAAACAxqR+jQMAAA==","base64"],"executable":false,"lamports":73551852,"owner":"6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P","rentEpoch":18446744073709551615,"space":49}},"id":1} 2 | -------------------------------------------------------------------------------- /src/core/priority_fee/__init__.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class PriorityFeePlugin(ABC): 5 | """Base class for priority fee calculation plugins.""" 6 | 7 | @abstractmethod 8 | async def get_priority_fee(self) -> int | None: 9 | """ 10 | Calculate the priority fee. 11 | 12 | Returns: 13 | Optional[int]: Priority fee in microlamports, or None if no fee should be applied. 14 | """ 15 | pass 16 | -------------------------------------------------------------------------------- /src/core/priority_fee/fixed_fee.py: -------------------------------------------------------------------------------- 1 | from . import PriorityFeePlugin 2 | 3 | 4 | class FixedPriorityFee(PriorityFeePlugin): 5 | """Fixed priority fee plugin.""" 6 | 7 | def __init__(self, fixed_fee: int): 8 | """ 9 | Initialize the fixed fee plugin. 10 | 11 | Args: 12 | fixed_fee: Fixed priority fee in microlamports. 13 | """ 14 | self.fixed_fee = fixed_fee 15 | 16 | async def get_priority_fee(self) -> int | None: 17 | """ 18 | Return the fixed priority fee. 19 | 20 | Returns: 21 | Optional[int]: Fixed priority fee in microlamports, or None if fixed_fee is 0. 22 | """ 23 | if self.fixed_fee == 0: 24 | return None 25 | return self.fixed_fee 26 | -------------------------------------------------------------------------------- /src/monitoring/base_listener.py: -------------------------------------------------------------------------------- 1 | """ 2 | Base class for WebSocket token listeners. 3 | """ 4 | 5 | from abc import ABC, abstractmethod 6 | from collections.abc import Awaitable, Callable 7 | 8 | from trading.base import TokenInfo 9 | 10 | 11 | class BaseTokenListener(ABC): 12 | """Base abstract class for token listeners.""" 13 | 14 | @abstractmethod 15 | async def listen_for_tokens( 16 | self, 17 | token_callback: Callable[[TokenInfo], Awaitable[None]], 18 | match_string: str | None = None, 19 | creator_address: str | None = None, 20 | ) -> None: 21 | """ 22 | Listen for new token creations. 
23 | 24 | Args: 25 | token_callback: Callback function for new tokens 26 | match_string: Optional string to match in token name/symbol 27 | creator_address: Optional creator address to filter by 28 | """ 29 | pass 30 | -------------------------------------------------------------------------------- /learning-examples/calculate_discriminator.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import struct 3 | 4 | # https://book.anchor-lang.com/anchor_bts/discriminator.html 5 | # Set the instruction name here 6 | instruction_name = "account:BondingCurve" 7 | 8 | 9 | def calculate_discriminator(instruction_name): 10 | # Create a SHA256 hash object 11 | sha = hashlib.sha256() 12 | 13 | # Update the hash with the instruction name 14 | sha.update(instruction_name.encode("utf-8")) 15 | 16 | # Get the first 8 bytes of the hash 17 | discriminator_bytes = sha.digest()[:8] 18 | 19 | # Convert the bytes to a 64-bit unsigned integer (little-endian) 20 | discriminator = struct.unpack(" bool: 9 | return CLEANUP_MODE == "on_fail" 10 | 11 | 12 | def should_cleanup_after_sell() -> bool: 13 | return CLEANUP_MODE == "after_sell" 14 | 15 | 16 | def should_cleanup_post_session() -> bool: 17 | return CLEANUP_MODE == "post_session" 18 | 19 | 20 | async def handle_cleanup_after_failure(client, wallet, mint, priority_fee_manager): 21 | if should_cleanup_after_failure(): 22 | logger.info("[Cleanup] Triggered by failed buy transaction.") 23 | manager = AccountCleanupManager(client, wallet, priority_fee_manager, CLEANUP_WITH_PRIORITY_FEE) 24 | await manager.cleanup_ata(mint) 25 | 26 | async def handle_cleanup_after_sell(client, wallet, mint, priority_fee_manager): 27 | if should_cleanup_after_sell(): 28 | logger.info("[Cleanup] Triggered after token sell.") 29 | manager = AccountCleanupManager(client, wallet, priority_fee_manager, CLEANUP_WITH_PRIORITY_FEE) 30 | await manager.cleanup_ata(mint) 31 | 32 | async def handle_cleanup_post_session(client, wallet, mints, priority_fee_manager): 33 | if should_cleanup_post_session(): 34 | logger.info("[Cleanup] Triggered post trading session.") 35 | manager = AccountCleanupManager(client, wallet, priority_fee_manager, CLEANUP_WITH_PRIORITY_FEE) 36 | for mint in mints: 37 | await manager.cleanup_ata(mint) 38 | -------------------------------------------------------------------------------- /src/core/pubkeys.py: -------------------------------------------------------------------------------- 1 | """ 2 | System and program addresses for Solana and pump.fun interactions. 
3 | """ 4 | 5 | from dataclasses import dataclass 6 | from typing import Final 7 | 8 | from solders.pubkey import Pubkey 9 | 10 | LAMPORTS_PER_SOL: Final[int] = 1_000_000_000 11 | TOKEN_DECIMALS: Final[int] = 6 12 | 13 | 14 | @dataclass 15 | class SystemAddresses: 16 | """System-level Solana addresses.""" 17 | 18 | PROGRAM: Final[Pubkey] = Pubkey.from_string("11111111111111111111111111111111") 19 | TOKEN_PROGRAM: Final[Pubkey] = Pubkey.from_string( 20 | "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" 21 | ) 22 | ASSOCIATED_TOKEN_PROGRAM: Final[Pubkey] = Pubkey.from_string( 23 | "ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL" 24 | ) 25 | RENT: Final[Pubkey] = Pubkey.from_string( 26 | "SysvarRent111111111111111111111111111111111" 27 | ) 28 | SOL: Final[Pubkey] = Pubkey.from_string( 29 | "So11111111111111111111111111111111111111112" 30 | ) 31 | 32 | 33 | @dataclass 34 | class PumpAddresses: 35 | """Pump.fun program addresses.""" 36 | 37 | PROGRAM: Final[Pubkey] = Pubkey.from_string( 38 | "6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P" 39 | ) 40 | GLOBAL: Final[Pubkey] = Pubkey.from_string( 41 | "4wTV1YmiEkRvAtNtsSGPtUrqRYQMe5SKy2uB4Jjaxnjf" 42 | ) 43 | EVENT_AUTHORITY: Final[Pubkey] = Pubkey.from_string( 44 | "Ce6TQqeHC9p8KetsN6JsjHK7UTZk7nasjjnr7XxXp9F1" 45 | ) 46 | FEE: Final[Pubkey] = Pubkey.from_string( 47 | "CebN5WGQ4jvEPvsVU4EoHEpgzq1VV7AbicfhtW4xC9iM" 48 | ) 49 | LIQUIDITY_MIGRATOR: Final[Pubkey] = Pubkey.from_string( 50 | "39azUYFWPz3VHgKCf3VChUwbpURdCHRxjWVowf5jUJjg" 51 | ) 52 | -------------------------------------------------------------------------------- /src/core/wallet.py: -------------------------------------------------------------------------------- 1 | """ 2 | Wallet management for Solana transactions. 3 | """ 4 | 5 | import base58 6 | from solders.keypair import Keypair 7 | from solders.pubkey import Pubkey 8 | from spl.token.instructions import get_associated_token_address 9 | 10 | 11 | class Wallet: 12 | """Manages a Solana wallet for trading operations.""" 13 | 14 | def __init__(self, private_key: str): 15 | """Initialize wallet from private key. 16 | 17 | Args: 18 | private_key: Base58 encoded private key 19 | """ 20 | self._private_key = private_key 21 | self._keypair = self._load_keypair(private_key) 22 | 23 | @property 24 | def pubkey(self) -> Pubkey: 25 | """Get the public key of the wallet.""" 26 | return self._keypair.pubkey() 27 | 28 | @property 29 | def keypair(self) -> Keypair: 30 | """Get the keypair for signing transactions.""" 31 | return self._keypair 32 | 33 | def get_associated_token_address(self, mint: Pubkey) -> Pubkey: 34 | """Get the associated token account address for a mint. 35 | 36 | Args: 37 | mint: Token mint address 38 | 39 | Returns: 40 | Associated token account address 41 | """ 42 | return get_associated_token_address(self.pubkey, mint) 43 | 44 | @staticmethod 45 | def _load_keypair(private_key: str) -> Keypair: 46 | """Load keypair from private key. 47 | 48 | Args: 49 | private_key: Base58 encoded private key 50 | 51 | Returns: 52 | Solana keypair 53 | """ 54 | private_key_bytes = base58.b58decode(private_key) 55 | return Keypair.from_bytes(private_key_bytes) 56 | -------------------------------------------------------------------------------- /src/utils/logger.py: -------------------------------------------------------------------------------- 1 | """ 2 | Logging utilities for the pump.fun trading bot. 
3 | """ 4 | 5 | import logging 6 | import sys 7 | 8 | # Global dict to store loggers 9 | _loggers: dict[str, logging.Logger] = {} 10 | 11 | 12 | def get_logger(name: str, level: int = logging.INFO) -> logging.Logger: 13 | """Get or create a logger with the given name. 14 | 15 | Args: 16 | name: Logger name, typically __name__ 17 | level: Logging level 18 | 19 | Returns: 20 | Configured logger 21 | """ 22 | global _loggers 23 | 24 | if name in _loggers: 25 | return _loggers[name] 26 | 27 | logger = logging.getLogger(name) 28 | logger.setLevel(level) 29 | 30 | if not logger.handlers: 31 | formatter = logging.Formatter( 32 | "%(asctime)s - %(name)s - %(levelname)s - %(message)s", 33 | datefmt="%Y-%m-%d %H:%M:%S", 34 | ) 35 | 36 | console_handler = logging.StreamHandler(sys.stdout) 37 | console_handler.setFormatter(formatter) 38 | logger.addHandler(console_handler) 39 | 40 | _loggers[name] = logger 41 | return logger 42 | 43 | 44 | def setup_file_logging( 45 | filename: str = "pump_trading.log", level: int = logging.INFO 46 | ) -> None: 47 | """Set up file logging for all loggers. 48 | 49 | Args: 50 | filename: Log file path 51 | level: Logging level for file handler 52 | """ 53 | root_logger = logging.getLogger() 54 | 55 | formatter = logging.Formatter( 56 | "%(asctime)s - %(name)s - %(levelname)s - %(message)s", 57 | datefmt="%Y-%m-%d %H:%M:%S", 58 | ) 59 | 60 | file_handler = logging.FileHandler(filename) 61 | file_handler.setLevel(level) 62 | file_handler.setFormatter(formatter) 63 | 64 | root_logger.addHandler(file_handler) 65 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "pump_bot" 3 | version = "2.0" 4 | description = "Trade tokens on pump.fun" 5 | readme = "README.md" 6 | requires-python = ">=3.9" 7 | 8 | dependencies = [ 9 | "base58>=2.1.1", 10 | "borsh-construct>=0.1.0", 11 | "construct>=2.10.67", 12 | "construct-typing>=0.5.2", 13 | "solana==0.36.6", 14 | "solders>=0.26.0", 15 | "websockets>=15.0", 16 | "python-dotenv>=1.0.1", 17 | "aiohttp>=3.11.13", 18 | ] 19 | 20 | [project.optional-dependencies] 21 | dev = [ 22 | "ruff>=0.10.0" 23 | ] 24 | 25 | [project.scripts] 26 | pump_bot = "cli:sync_main" 27 | 28 | [tool.ruff] 29 | exclude = [ 30 | ".bzr", 31 | ".direnv", 32 | ".eggs", 33 | ".git", 34 | ".git-rewrite", 35 | ".hg", 36 | ".ipynb_checkpoints", 37 | ".mypy_cache", 38 | ".nox", 39 | ".pants.d", 40 | ".pyenv", 41 | ".pytest_cache", 42 | ".pytype", 43 | ".ruff_cache", 44 | ".svn", 45 | ".tox", 46 | ".venv", 47 | ".vscode", 48 | "__pypackages__", 49 | "_build", 50 | "buck-out", 51 | "build", 52 | "dist", 53 | "node_modules", 54 | "site-packages", 55 | "venv", 56 | ] 57 | 58 | line-length = 88 59 | indent-width = 4 60 | target-version = "py311" 61 | 62 | [tool.ruff.lint] 63 | select = [ 64 | "E", "F", "I", "UP", "N", "B", "A", "C4", "T10", "ARG", "PTH", 65 | "ANN", # type annotations 66 | "S", # security best practices 67 | "BLE", # blind except statements 68 | "FBT", # boolean trap parameters 69 | "C90", # complexity metrics 70 | "TRY", # exception handling best practices 71 | "SLF", # private member access 72 | "TCH", # type checking issues 73 | "RUF", # Ruff-specific rules 74 | "ERA", # eradicate commented-out code 75 | "PL", # pylint conventions 76 | ] 77 | ignore = ["E501"] 78 | 79 | [tool.ruff.format] 80 | quote-style = "double" 81 | indent-style = "space" 82 | line-ending = "auto" 
-------------------------------------------------------------------------------- /learning-examples/compute_associated_bonding_curve.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | from solders.pubkey import Pubkey 5 | 6 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) 7 | 8 | from core.pubkeys import PumpAddresses, SystemAddresses 9 | 10 | 11 | def get_bonding_curve_address(mint: Pubkey, program_id: Pubkey) -> tuple[Pubkey, int]: 12 | """ 13 | Derives the bonding curve address for a given mint 14 | """ 15 | return Pubkey.find_program_address([b"bonding-curve", bytes(mint)], program_id) 16 | 17 | 18 | def find_associated_bonding_curve(mint: Pubkey, bonding_curve: Pubkey) -> Pubkey: 19 | """ 20 | Find the associated bonding curve for a given mint and bonding curve. 21 | This uses the standard ATA derivation. 22 | """ 23 | 24 | derived_address, _ = Pubkey.find_program_address( 25 | [ 26 | bytes(bonding_curve), 27 | bytes(SystemAddresses.TOKEN_PROGRAM), 28 | bytes(mint), 29 | ], 30 | SystemAddresses.ASSOCIATED_TOKEN_PROGRAM, 31 | ) 32 | return derived_address 33 | 34 | 35 | def main(): 36 | mint_address = input("Enter the token mint address: ") 37 | 38 | try: 39 | mint = Pubkey.from_string(mint_address) 40 | 41 | bonding_curve_address, bump = get_bonding_curve_address( 42 | mint, PumpAddresses.PROGRAM 43 | ) 44 | 45 | # Calculate the associated bonding curve 46 | associated_bonding_curve = find_associated_bonding_curve( 47 | mint, bonding_curve_address 48 | ) 49 | 50 | print("\nResults:") 51 | print("-" * 50) 52 | print(f"Token Mint: {mint}") 53 | print(f"Bonding Curve: {bonding_curve_address}") 54 | print(f"Associated Bonding Curve: {associated_bonding_curve}") 55 | print(f"Bonding Curve Bump: {bump}") 56 | print("-" * 50) 57 | 58 | except ValueError as e: 59 | print(f"Error: Invalid address format - {str(e)}") 60 | 61 | 62 | if __name__ == "__main__": 63 | main() 64 | -------------------------------------------------------------------------------- /learning-examples/decode_from_getAccountInfo.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import json 3 | import struct 4 | 5 | from construct import Flag, Int64ul, Struct 6 | 7 | LAMPORTS_PER_SOL = 1_000_000_000 8 | TOKEN_DECIMALS = 6 9 | EXPECTED_DISCRIMINATOR = struct.pack(" None: 23 | parsed = self._STRUCT.parse(data[8:]) 24 | self.__dict__.update(parsed) 25 | 26 | 27 | def calculate_bonding_curve_price(curve_state: BondingCurveState) -> float: 28 | if curve_state.virtual_token_reserves <= 0 or curve_state.virtual_sol_reserves <= 0: 29 | raise ValueError("Invalid reserve state") 30 | 31 | return (curve_state.virtual_sol_reserves / LAMPORTS_PER_SOL) / ( 32 | curve_state.virtual_token_reserves / 10**TOKEN_DECIMALS 33 | ) 34 | 35 | 36 | def decode_bonding_curve_data(raw_data: str) -> BondingCurveState: 37 | decoded_data = base64.b64decode(raw_data) 38 | if decoded_data[:8] != EXPECTED_DISCRIMINATOR: 39 | raise ValueError("Invalid curve state discriminator") 40 | return BondingCurveState(decoded_data) 41 | 42 | 43 | # Load the JSON data 44 | with open("learning-examples/raw_bondingCurve_from_getAccountInfo.json", "r") as file: 45 | json_data = json.load(file) 46 | 47 | # Extract the base64 encoded data 48 | encoded_data = json_data["result"]["value"]["data"][0] 49 | 50 | # Decode the data 51 | bonding_curve_state = decode_bonding_curve_data(encoded_data) 52 | 53 | # Calculate and 
print the token price 54 | token_price_sol = calculate_bonding_curve_price(bonding_curve_state) 55 | 56 | print("Bonding Curve State:") 57 | print(f" Virtual Token Reserves: {bonding_curve_state.virtual_token_reserves}") 58 | print(f" Virtual SOL Reserves: {bonding_curve_state.virtual_sol_reserves}") 59 | print(f" Real Token Reserves: {bonding_curve_state.real_token_reserves}") 60 | print(f" Real SOL Reserves: {bonding_curve_state.real_sol_reserves}") 61 | print(f" Token Total Supply: {bonding_curve_state.token_total_supply}") 62 | print(f" Complete: {bonding_curve_state.complete}") 63 | print(f"\nToken Price: {token_price_sol:.10f} SOL") 64 | -------------------------------------------------------------------------------- /src/core/priority_fee/dynamic_fee.py: -------------------------------------------------------------------------------- 1 | import statistics 2 | 3 | from solders.pubkey import Pubkey 4 | 5 | from core.client import SolanaClient 6 | from core.priority_fee import PriorityFeePlugin 7 | from utils.logger import get_logger 8 | 9 | logger = get_logger(__name__) 10 | 11 | 12 | class DynamicPriorityFee(PriorityFeePlugin): 13 | """Dynamic priority fee plugin using getRecentPrioritizationFees.""" 14 | 15 | def __init__(self, client: SolanaClient): 16 | """ 17 | Initialize the dynamic fee plugin. 18 | 19 | Args: 20 | client: Solana RPC client for network requests. 21 | """ 22 | self.client = client 23 | 24 | async def get_priority_fee( 25 | self, accounts: list[Pubkey] | None = None 26 | ) -> int | None: 27 | """ 28 | Fetch the recent priority fee using getRecentPrioritizationFees. 29 | 30 | Args: 31 | accounts: List of accounts to consider for the fee calculation. 32 | If None, the fee is calculated without specific account constraints. 33 | 34 | Returns: 35 | Optional[int]: Median priority fee in microlamports, or None if the request fails. 
36 | """ 37 | try: 38 | body = { 39 | "jsonrpc": "2.0", 40 | "id": 1, 41 | "method": "getRecentPrioritizationFees", 42 | "params": [[str(account) for account in accounts]] if accounts else [], 43 | } 44 | 45 | response = await self.client.post_rpc(body) 46 | if not response or "result" not in response: 47 | logger.error( 48 | "Failed to fetch recent prioritization fees: invalid response" 49 | ) 50 | return None 51 | 52 | fees = [fee["prioritizationFee"] for fee in response["result"]] 53 | if not fees: 54 | logger.warning("No prioritization fees found in the response") 55 | return None 56 | 57 | # Get the 70th percentile of fees for faster processing 58 | # It means you're paying a fee that's higher than 70% of other transactions 59 | # Higher percentile = faster transactions but more expensive 60 | # Lower percentile = cheaper but slower transactions 61 | prior_fee = int(statistics.quantiles(fees, n=10)[-3]) # 70th percentile 62 | 63 | return prior_fee 64 | 65 | except Exception as e: 66 | logger.error( 67 | f"Failed to fetch recent priority fee: {str(e)}", exc_info=True 68 | ) 69 | return None 70 | -------------------------------------------------------------------------------- /learning-examples/fetch_price.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import struct 4 | import sys 5 | from typing import Final 6 | 7 | from construct import Flag, Int64ul, Struct 8 | from solana.rpc.async_api import AsyncClient 9 | from solders.pubkey import Pubkey 10 | 11 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) 12 | 13 | LAMPORTS_PER_SOL: Final[int] = 1_000_000_000 14 | TOKEN_DECIMALS: Final[int] = 6 15 | CURVE_ADDRESS: Final[str] = "6GXfUqrmPM4VdN1NoDZsE155jzRegJngZRjMkGyby7do" 16 | 17 | # Here and later all the discriminators are precalculated. 
See learning-examples/discriminator.py 18 | EXPECTED_DISCRIMINATOR: Final[bytes] = struct.pack(" None: 34 | parsed = self._STRUCT.parse(data[8:]) 35 | self.__dict__.update(parsed) 36 | 37 | 38 | async def get_bonding_curve_state( 39 | conn: AsyncClient, curve_address: Pubkey 40 | ) -> BondingCurveState: 41 | response = await conn.get_account_info(curve_address, encoding="base64") 42 | if not response.value or not response.value.data: 43 | raise ValueError("Invalid curve state: No data") 44 | 45 | data = response.value.data 46 | if data[:8] != EXPECTED_DISCRIMINATOR: 47 | raise ValueError("Invalid curve state discriminator") 48 | 49 | return BondingCurveState(data) 50 | 51 | 52 | def calculate_bonding_curve_price(curve_state: BondingCurveState) -> float: 53 | if curve_state.virtual_token_reserves <= 0 or curve_state.virtual_sol_reserves <= 0: 54 | raise ValueError("Invalid reserve state") 55 | 56 | return (curve_state.virtual_sol_reserves / LAMPORTS_PER_SOL) / ( 57 | curve_state.virtual_token_reserves / 10**TOKEN_DECIMALS 58 | ) 59 | 60 | 61 | async def main() -> None: 62 | try: 63 | async with AsyncClient(RPC_ENDPOINT) as conn: 64 | curve_address = Pubkey.from_string(CURVE_ADDRESS) 65 | bonding_curve_state = await get_bonding_curve_state(conn, curve_address) 66 | token_price_sol = calculate_bonding_curve_price(bonding_curve_state) 67 | 68 | print("Token price:") 69 | print(f" {token_price_sol:.10f} SOL") 70 | except ValueError as e: 71 | print(f"Error: {e}") 72 | except Exception as e: 73 | print(f"An unexpected error occurred: {e}") 74 | 75 | 76 | if __name__ == "__main__": 77 | asyncio.run(main()) 78 | -------------------------------------------------------------------------------- /learning-examples/listen_new_portal.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import json 3 | from datetime import datetime 4 | 5 | import websockets 6 | 7 | # PumpPortal WebSocket URL 8 | WS_URL = "wss://pumpportal.fun/api/data" 9 | 10 | 11 | def format_sol(value): 12 | return f"{value:.6f} SOL" 13 | 14 | 15 | def format_timestamp(timestamp): 16 | return datetime.fromtimestamp(timestamp / 1000).strftime("%Y-%m-%d %H:%M:%S") 17 | 18 | 19 | async def listen_for_new_tokens(): 20 | async with websockets.connect(WS_URL) as websocket: 21 | # Subscribe to new token events 22 | await websocket.send(json.dumps({"method": "subscribeNewToken", "params": []})) 23 | 24 | print("Listening for new token creations...") 25 | 26 | while True: 27 | try: 28 | message = await websocket.recv() 29 | data = json.loads(message) 30 | 31 | if "method" in data and data["method"] == "newToken": 32 | token_info = data.get("params", [{}])[0] 33 | elif "signature" in data and "mint" in data: 34 | token_info = data 35 | else: 36 | continue 37 | 38 | print("\n" + "=" * 50) 39 | print( 40 | f"New token created: {token_info.get('name')} ({token_info.get('symbol')})" 41 | ) 42 | print("=" * 50) 43 | print(f"Address: {token_info.get('mint')}") 44 | print(f"Creator: {token_info.get('traderPublicKey')}") 45 | print(f"Initial Buy: {format_sol(token_info.get('initialBuy', 0))}") 46 | print( 47 | f"Market Cap: {format_sol(token_info.get('marketCapSol', 0))}" 48 | ) 49 | print(f"Bonding Curve: {token_info.get('bondingCurveKey')}") 50 | print( 51 | f"Virtual SOL: {format_sol(token_info.get('vSolInBondingCurve', 0))}" 52 | ) 53 | print( 54 | f"Virtual Tokens: {token_info.get('vTokensInBondingCurve', 0):,.0f}" 55 | ) 56 | print(f"Metadata URI: {token_info.get('uri')}") 57 | 
print(f"Signature: {token_info.get('signature')}") 58 | print("=" * 50) 59 | except websockets.exceptions.ConnectionClosed: 60 | print("\nWebSocket connection closed. Reconnecting...") 61 | break 62 | except json.JSONDecodeError: 63 | print(f"\nReceived non-JSON message: {message}") 64 | except Exception as e: 65 | print(f"\nAn error occurred: {e}") 66 | 67 | 68 | async def main(): 69 | while True: 70 | try: 71 | await listen_for_new_tokens() 72 | except Exception as e: 73 | print(f"\nAn error occurred: {e}") 74 | print("Reconnecting in 5 seconds...") 75 | await asyncio.sleep(5) 76 | 77 | 78 | if __name__ == "__main__": 79 | asyncio.run(main()) 80 | -------------------------------------------------------------------------------- /src/trading/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Base interfaces for trading operations. 3 | """ 4 | 5 | from abc import ABC, abstractmethod 6 | from dataclasses import dataclass 7 | from typing import Any 8 | 9 | from solders.pubkey import Pubkey 10 | 11 | from core.pubkeys import PumpAddresses 12 | 13 | 14 | @dataclass 15 | class TokenInfo: 16 | """Token information.""" 17 | 18 | name: str 19 | symbol: str 20 | uri: str 21 | mint: Pubkey 22 | bonding_curve: Pubkey 23 | associated_bonding_curve: Pubkey 24 | user: Pubkey 25 | 26 | @classmethod 27 | def from_dict(cls, data: dict[str, Any]) -> "TokenInfo": 28 | """Create TokenInfo from dictionary. 29 | 30 | Args: 31 | data: Dictionary with token data 32 | 33 | Returns: 34 | TokenInfo instance 35 | """ 36 | return cls( 37 | name=data["name"], 38 | symbol=data["symbol"], 39 | uri=data["uri"], 40 | mint=Pubkey.from_string(data["mint"]), 41 | bonding_curve=Pubkey.from_string(data["bondingCurve"]), 42 | associated_bonding_curve=Pubkey.from_string(data["associatedBondingCurve"]), 43 | user=Pubkey.from_string(data["user"]), 44 | ) 45 | 46 | def to_dict(self) -> dict[str, str]: 47 | """Convert to dictionary. 48 | 49 | Returns: 50 | Dictionary representation 51 | """ 52 | return { 53 | "name": self.name, 54 | "symbol": self.symbol, 55 | "uri": self.uri, 56 | "mint": str(self.mint), 57 | "bondingCurve": str(self.bonding_curve), 58 | "associatedBondingCurve": str(self.associated_bonding_curve), 59 | "user": str(self.user), 60 | } 61 | 62 | 63 | @dataclass 64 | class TradeResult: 65 | """Result of a trading operation.""" 66 | 67 | success: bool 68 | tx_signature: str | None = None 69 | error_message: str | None = None 70 | amount: float | None = None 71 | price: float | None = None 72 | 73 | 74 | class Trader(ABC): 75 | """Base interface for trading operations.""" 76 | 77 | @abstractmethod 78 | async def execute(self, *args, **kwargs) -> TradeResult: 79 | """Execute trading operation. 80 | 81 | Returns: 82 | TradeResult with operation outcome 83 | """ 84 | pass 85 | 86 | def _get_relevant_accounts(self, token_info: TokenInfo) -> list[Pubkey]: 87 | """ 88 | Get the list of accounts relevant for calculating the priority fee. 89 | 90 | Args: 91 | token_info: Token information for the buy/sell operation. 92 | 93 | Returns: 94 | list[Pubkey]: List of relevant accounts. 
95 | """ 96 | return [ 97 | token_info.mint, # Token mint address 98 | token_info.bonding_curve, # Bonding curve address 99 | PumpAddresses.PROGRAM, # Pump.fun program address 100 | PumpAddresses.FEE, # Pump.fun fee account 101 | ] 102 | -------------------------------------------------------------------------------- /learning-examples/get_pumpswap_pools.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import struct 4 | 5 | import base58 6 | from dotenv import load_dotenv 7 | from solana.rpc.async_api import AsyncClient 8 | from solana.rpc.types import MemcmpOpts 9 | from solders.pubkey import Pubkey 10 | 11 | load_dotenv() 12 | 13 | RPC_ENDPOINT = os.environ.get("SOLANA_NODE_RPC_ENDPOINT") 14 | PUMP_AMM_PROGRAM_ID = Pubkey.from_string("pAMMBay6oceH9fJKBRHGP5D4bD4sWpmSwMn52FMfXEA") 15 | TOKEN_MINT = Pubkey.from_string("35ySx7Rt3RqeTp75QB81FgRvPT5yDY2m5BupsUYDpump") 16 | 17 | 18 | async def get_market_address_by_base_mint(base_mint_address: Pubkey, amm_program_id: Pubkey): 19 | async with AsyncClient(RPC_ENDPOINT) as client: 20 | base_mint_bytes = bytes(base_mint_address) 21 | 22 | # Define the offset for base_mint field 23 | offset = 43 24 | 25 | # Create the filter to match the base_mint 26 | filters = [ 27 | MemcmpOpts(offset=offset, bytes=base_mint_bytes) 28 | ] 29 | 30 | # Retrieve the accounts that match the filter 31 | response = await client.get_program_accounts( 32 | amm_program_id, # AMM program ID 33 | encoding="base64", 34 | filters=filters 35 | ) 36 | 37 | pool_addresses = [account.pubkey for account in response.value] 38 | return pool_addresses[0] 39 | 40 | async def get_market_data(market_address: Pubkey): 41 | async with AsyncClient(RPC_ENDPOINT) as client: 42 | response = await client.get_account_info(market_address, encoding="base64") 43 | data = response.value.data 44 | parsed_data = {} 45 | 46 | offset = 8 47 | fields = [ 48 | ("pool_bump", "u8"), 49 | ("index", "u16"), 50 | ("creator", "pubkey"), 51 | ("base_mint", "pubkey"), 52 | ("quote_mint", "pubkey"), 53 | ("lp_mint", "pubkey"), 54 | ("pool_base_token_account", "pubkey"), 55 | ("pool_quote_token_account", "pubkey"), 56 | ("lp_supply", "u64"), 57 | ] 58 | 59 | for field_name, field_type in fields: 60 | if field_type == "pubkey": 61 | value = data[offset:offset + 32] 62 | parsed_data[field_name] = base58.b58encode(value).decode("utf-8") 63 | offset += 32 64 | elif field_type in {"u64", "i64"}: 65 | value = struct.unpack(" 0: 33 | logger.info(f"Burning {balance} tokens from account {account}...") 34 | burn_ix = burn( 35 | BurnParams( 36 | account=account, 37 | mint=mint, 38 | owner=wallet.pubkey, 39 | amount=balance, 40 | program_id=SystemAddresses.TOKEN_PROGRAM, 41 | ) 42 | ) 43 | await client.build_and_send_transaction([burn_ix], wallet.keypair) 44 | logger.info(f"Burned tokens from {account}") 45 | 46 | # If account exists, attempt to close it 47 | if info.value: 48 | logger.info(f"Closing account: {account}") 49 | close_params = CloseAccountParams( 50 | account=account, 51 | dest=wallet.pubkey, 52 | owner=wallet.pubkey, 53 | program_id=SystemAddresses.TOKEN_PROGRAM, 54 | ) 55 | ix = close_account(close_params) 56 | 57 | tx_sig = await client.build_and_send_transaction( 58 | [ix], 59 | wallet.keypair, 60 | skip_preflight=True, 61 | ) 62 | await client.confirm_transaction(tx_sig) 63 | logger.info(f"Closed successfully: {account}") 64 | else: 65 | logger.info(f"Account does not exist or already closed: {account}") 66 | 67 | except Exception as 
e: 68 | logger.error(f"Error while processing account {account}: {e}") 69 | 70 | 71 | async def main(): 72 | try: 73 | client = SolanaClient(RPC_ENDPOINT) 74 | wallet = Wallet(PRIVATE_KEY) 75 | 76 | # Get user's ATA for the token 77 | ata = wallet.get_associated_token_address(MINT_ADDRESS) 78 | await close_account_if_exists(client, wallet, ata, MINT_ADDRESS) 79 | 80 | except Exception as e: 81 | logger.error(f"Unexpected error: {e}") 82 | finally: 83 | await client.close() 84 | 85 | 86 | if __name__ == "__main__": 87 | asyncio.run(main()) 88 | -------------------------------------------------------------------------------- /src/core/priority_fee/manager.py: -------------------------------------------------------------------------------- 1 | from solders.pubkey import Pubkey 2 | 3 | from core.client import SolanaClient 4 | from core.priority_fee.dynamic_fee import DynamicPriorityFee 5 | from core.priority_fee.fixed_fee import FixedPriorityFee 6 | from utils.logger import get_logger 7 | 8 | logger = get_logger(__name__) 9 | 10 | 11 | class PriorityFeeManager: 12 | """Manager for priority fee calculation and validation.""" 13 | 14 | def __init__( 15 | self, 16 | client: SolanaClient, 17 | enable_dynamic_fee: bool, 18 | enable_fixed_fee: bool, 19 | fixed_fee: int, 20 | extra_fee: float, 21 | hard_cap: int, 22 | ): 23 | """ 24 | Initialize the priority fee manager. 25 | 26 | Args: 27 | client: Solana RPC client for dynamic fee calculation. 28 | enable_dynamic_fee: Whether to enable dynamic fee calculation. 29 | enable_fixed_fee: Whether to enable fixed fee. 30 | fixed_fee: Fixed priority fee in microlamports. 31 | extra_fee: Percentage increase to apply to the base fee. 32 | hard_cap: Maximum allowed priority fee in microlamports. 33 | """ 34 | self.client = client 35 | self.enable_dynamic_fee = enable_dynamic_fee 36 | self.enable_fixed_fee = enable_fixed_fee 37 | self.fixed_fee = fixed_fee 38 | self.extra_fee = extra_fee 39 | self.hard_cap = hard_cap 40 | 41 | # Initialize plugins 42 | self.dynamic_fee_plugin = DynamicPriorityFee(client) 43 | self.fixed_fee_plugin = FixedPriorityFee(fixed_fee) 44 | 45 | async def calculate_priority_fee( 46 | self, accounts: list[Pubkey] | None = None 47 | ) -> int | None: 48 | """ 49 | Calculate the priority fee based on the configuration. 50 | 51 | Args: 52 | accounts: List of accounts to consider for dynamic fee calculation. 53 | If None, the fee is calculated without specific account constraints. 54 | 55 | Returns: 56 | Optional[int]: Calculated priority fee in microlamports, or None if no fee should be applied. 57 | """ 58 | base_fee = await self._get_base_fee(accounts) 59 | if base_fee is None: 60 | return None 61 | 62 | # Apply extra fee (percentage increase) 63 | final_fee = int(base_fee * (1 + self.extra_fee)) 64 | 65 | # Enforce hard cap 66 | if final_fee > self.hard_cap: 67 | logger.warning( 68 | f"Calculated priority fee {final_fee} exceeds hard cap {self.hard_cap}. Applying hard cap." 69 | ) 70 | final_fee = self.hard_cap 71 | 72 | return final_fee 73 | 74 | async def _get_base_fee(self, accounts: list[Pubkey] | None = None) -> int | None: 75 | """ 76 | Determine the base fee based on the configuration. 77 | 78 | Returns: 79 | Optional[int]: Base fee in microlamports, or None if no fee should be applied. 
80 | """ 81 | # Prefer dynamic fee if both are enabled 82 | if self.enable_dynamic_fee: 83 | dynamic_fee = await self.dynamic_fee_plugin.get_priority_fee(accounts) 84 | if dynamic_fee is not None: 85 | return dynamic_fee 86 | 87 | # Fall back to fixed fee if enabled 88 | if self.enable_fixed_fee: 89 | return await self.fixed_fee_plugin.get_priority_fee() 90 | 91 | # No priority fee if both are disabled 92 | return None 93 | -------------------------------------------------------------------------------- /tests/test_logs_listener.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test script for LogsListener 3 | Tests websocket monitoring for new pump.fun tokens using logsSubscribe 4 | """ 5 | 6 | import asyncio 7 | import logging 8 | import os 9 | import sys 10 | from pathlib import Path 11 | 12 | sys.path.append(str(Path(__file__).parent.parent / "src")) 13 | 14 | from core.pubkeys import PumpAddresses 15 | from monitoring.logs_listener import LogsListener 16 | from trading.base import TokenInfo 17 | 18 | logging.basicConfig( 19 | level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" 20 | ) 21 | logger = logging.getLogger("logs-listener-test") 22 | 23 | 24 | class TestTokenCallback: 25 | def __init__(self): 26 | self.detected_tokens = [] 27 | 28 | async def on_token_created(self, token_info: TokenInfo) -> None: 29 | """Process detected token""" 30 | logger.info(f"New token detected: {token_info.name} ({token_info.symbol})") 31 | logger.info(f"Mint: {token_info.mint}") 32 | self.detected_tokens.append(token_info) 33 | print(f"\n{'=' * 50}") 34 | print(f"NEW TOKEN: {token_info.name}") 35 | print(f"Symbol: {token_info.symbol}") 36 | print(f"Mint: {token_info.mint}") 37 | print(f"URI: {token_info.uri}") 38 | print(f"Creator: {token_info.user}") 39 | print(f"Bonding Curve: {token_info.bonding_curve}") 40 | print(f"Associated Bonding Curve: {token_info.associated_bonding_curve}") 41 | print(f"{'=' * 50}\n") 42 | 43 | 44 | async def test_logs_listener( 45 | match_string: str | None = None, 46 | creator_address: str | None = None, 47 | test_duration: int = 60, 48 | ): 49 | """Test the logs listener functionality""" 50 | wss_endpoint = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 51 | if not wss_endpoint: 52 | logger.error("SOLANA_NODE_WSS_ENDPOINT environment variable is not set") 53 | return [] 54 | 55 | logger.info(f"Connecting to WebSocket: {wss_endpoint}") 56 | listener = LogsListener(wss_endpoint, PumpAddresses.PROGRAM) 57 | callback = TestTokenCallback() 58 | 59 | if match_string: 60 | logger.info(f"Filtering tokens matching: {match_string}") 61 | if creator_address: 62 | logger.info(f"Filtering tokens by creator: {creator_address}") 63 | 64 | listen_task = asyncio.create_task( 65 | listener.listen_for_tokens( 66 | callback.on_token_created, 67 | match_string=match_string, 68 | creator_address=creator_address, 69 | ) 70 | ) 71 | 72 | logger.info(f"Listening for {test_duration} seconds...") 73 | try: 74 | await asyncio.sleep(test_duration) 75 | except KeyboardInterrupt: 76 | logger.info("Test interrupted by user") 77 | finally: 78 | listen_task.cancel() 79 | try: 80 | await listen_task 81 | except asyncio.CancelledError: 82 | pass 83 | 84 | logger.info(f"Detected {len(callback.detected_tokens)} tokens") 85 | for token in callback.detected_tokens: 86 | logger.info(f" - {token.name} ({token.symbol}): {token.mint}") 87 | 88 | return callback.detected_tokens 89 | 90 | 91 | if __name__ == "__main__": 92 | match_string = None # Update if 
you want to filter tokens by name/symbol 93 | creator_address = None # Update if you want to filter tokens by creator address 94 | test_duration = 15 95 | 96 | logger.info("Starting logs listener test (using logsSubscribe)") 97 | asyncio.run(test_logs_listener(match_string, creator_address, test_duration)) 98 | -------------------------------------------------------------------------------- /tests/test_block_listener.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test script for BlockListener 3 | Tests websocket monitoring for new pump.fun tokens using blockSubscribe 4 | """ 5 | 6 | import asyncio 7 | import logging 8 | import os 9 | import sys 10 | from pathlib import Path 11 | 12 | sys.path.append(str(Path(__file__).parent.parent / "src")) 13 | 14 | from core.pubkeys import PumpAddresses 15 | from monitoring.block_listener import BlockListener 16 | from trading.base import TokenInfo 17 | 18 | logging.basicConfig( 19 | level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" 20 | ) 21 | logger = logging.getLogger("block-listener-test") 22 | 23 | 24 | class TestTokenCallback: 25 | def __init__(self): 26 | self.detected_tokens = [] 27 | 28 | async def on_token_created(self, token_info: TokenInfo) -> None: 29 | """Process detected token""" 30 | logger.info(f"New token detected: {token_info.name} ({token_info.symbol})") 31 | logger.info(f"Mint: {token_info.mint}") 32 | self.detected_tokens.append(token_info) 33 | print(f"\n{'=' * 50}") 34 | print(f"NEW TOKEN: {token_info.name}") 35 | print(f"Symbol: {token_info.symbol}") 36 | print(f"Mint: {token_info.mint}") 37 | print(f"URI: {token_info.uri}") 38 | print(f"Creator: {token_info.user}") 39 | print(f"Bonding Curve: {token_info.bonding_curve}") 40 | print(f"Associated Bonding Curve: {token_info.associated_bonding_curve}") 41 | print(f"{'=' * 50}\n") 42 | 43 | 44 | async def test_block_listener( 45 | match_string: str | None = None, 46 | creator_address: str | None = None, 47 | test_duration: int = 60, 48 | ): 49 | """Test the block listener functionality""" 50 | wss_endpoint = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 51 | if not wss_endpoint: 52 | logger.error("SOLANA_NODE_WSS_ENDPOINT environment variable is not set") 53 | return [] 54 | 55 | logger.info(f"Connecting to WebSocket: {wss_endpoint}") 56 | listener = BlockListener(wss_endpoint, PumpAddresses.PROGRAM) 57 | callback = TestTokenCallback() 58 | 59 | if match_string: 60 | logger.info(f"Filtering tokens matching: {match_string}") 61 | if creator_address: 62 | logger.info(f"Filtering tokens by creator: {creator_address}") 63 | 64 | listen_task = asyncio.create_task( 65 | listener.listen_for_tokens( 66 | callback.on_token_created, 67 | match_string=match_string, 68 | creator_address=creator_address, 69 | ) 70 | ) 71 | 72 | logger.info(f"Listening for {test_duration} seconds...") 73 | try: 74 | await asyncio.sleep(test_duration) 75 | except KeyboardInterrupt: 76 | logger.info("Test interrupted by user") 77 | finally: 78 | listen_task.cancel() 79 | try: 80 | await listen_task 81 | except asyncio.CancelledError: 82 | pass 83 | 84 | logger.info(f"Detected {len(callback.detected_tokens)} tokens") 85 | for token in callback.detected_tokens: 86 | logger.info(f" - {token.name} ({token.symbol}): {token.mint}") 87 | 88 | return callback.detected_tokens 89 | 90 | 91 | if __name__ == "__main__": 92 | match_string = None # Update if you want to filter tokens by name/symbol 93 | creator_address = None # Update if you want to filter 
tokens by creator address 94 | test_duration = 15 95 | 96 | logger.info("Starting block listener test (using blockSubscribe)") 97 | asyncio.run(test_block_listener(match_string, creator_address, test_duration)) 98 | -------------------------------------------------------------------------------- /learning-examples/track_bonding_curve_progress.py: -------------------------------------------------------------------------------- 1 | """ 2 | Track bonding curve progress for a pump.fun token by mint address. 3 | """ 4 | 5 | import asyncio 6 | import os 7 | import struct 8 | 9 | from dotenv import load_dotenv 10 | from solana.rpc.async_api import AsyncClient 11 | from solders.pubkey import Pubkey 12 | 13 | # Import pump.fun program address 14 | from core.pubkeys import PumpAddresses 15 | 16 | load_dotenv() 17 | 18 | RPC_URL = os.getenv("SOLANA_NODE_RPC_ENDPOINT") 19 | TOKEN_MINT = "xWrzYY4c1LnbSkLrd2LDUg9vw7YtVyJhGmw7MABpump" 20 | 21 | LAMPORTS_PER_SOL = 1_000_000_000 22 | TOKEN_DECIMALS = 6 23 | EXPECTED_DISCRIMINATOR = struct.pack(" Pubkey: 28 | """Derive the bonding curve PDA address from mint.""" 29 | return Pubkey.find_program_address([b"bonding-curve", bytes(mint)], program_id)[0] 30 | 31 | 32 | async def get_account_data(client: AsyncClient, pubkey: Pubkey) -> bytes: 33 | """Fetch raw account data for a given public key.""" 34 | resp = await client.get_account_info(pubkey, encoding="base64") 35 | if not resp.value or not resp.value.data: 36 | raise ValueError(f"Account {pubkey} not found or has no data") 37 | 38 | return resp.value.data 39 | 40 | 41 | def parse_curve_state(data: bytes) -> dict: 42 | """Decode bonding curve account data.""" 43 | if data[:8] != EXPECTED_DISCRIMINATOR: 44 | raise ValueError("Invalid discriminator for bonding curve") 45 | 46 | fields = struct.unpack_from(" 0 64 | ): 65 | tx_signature = tx["transaction"][0] 66 | elif ( 67 | isinstance(tx["transaction"], dict) 68 | and "signatures" in tx["transaction"] 69 | ): 70 | tx_signature = tx["transaction"][ 71 | "signatures" 72 | ][0] 73 | else: 74 | continue 75 | await save_transaction(tx, tx_signature) 76 | elif "result" in data: 77 | print("Subscription confirmed") 78 | except Exception as e: 79 | print(f"An error occurred: {e!s}") 80 | 81 | 82 | if __name__ == "__main__": 83 | asyncio.run(listen_for_transactions()) 84 | -------------------------------------------------------------------------------- /learning-examples/check_boding_curve_status.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import asyncio 3 | import os 4 | import struct 5 | import sys 6 | from typing import Final 7 | 8 | from construct import Flag, Int64ul, Struct 9 | from solana.rpc.async_api import AsyncClient 10 | from solders.pubkey import Pubkey 11 | 12 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) 13 | 14 | from core.pubkeys import PumpAddresses 15 | 16 | # Constants 17 | EXPECTED_DISCRIMINATOR: Final[bytes] = struct.pack(" None: 33 | parsed = self._STRUCT.parse(data[8:]) 34 | self.__dict__.update(parsed) 35 | 36 | 37 | def get_associated_bonding_curve_address( 38 | mint: Pubkey, program_id: Pubkey 39 | ) -> tuple[Pubkey, int]: 40 | """ 41 | Derives the associated bonding curve address for a given mint 42 | """ 43 | return Pubkey.find_program_address([b"bonding-curve", bytes(mint)], program_id) 44 | 45 | 46 | async def get_bonding_curve_state( 47 | conn: AsyncClient, curve_address: Pubkey 48 | ) -> BondingCurveState: 49 | response = await 
conn.get_account_info(curve_address, encoding="base64") 50 | if not response.value or not response.value.data: 51 | raise ValueError("Invalid curve state: No data") 52 | 53 | data = response.value.data 54 | if data[:8] != EXPECTED_DISCRIMINATOR: 55 | raise ValueError("Invalid curve state discriminator") 56 | 57 | return BondingCurveState(data) 58 | 59 | 60 | async def check_token_status(mint_address: str) -> None: 61 | try: 62 | mint = Pubkey.from_string(mint_address) 63 | 64 | # Get the associated bonding curve address 65 | bonding_curve_address, bump = get_associated_bonding_curve_address( 66 | mint, PumpAddresses.PROGRAM 67 | ) 68 | 69 | print("\nToken Status:") 70 | print("-" * 50) 71 | print(f"Token Mint: {mint}") 72 | print(f"Associated Bonding Curve: {bonding_curve_address}") 73 | print(f"Bump Seed: {bump}") 74 | print("-" * 50) 75 | 76 | # Check completion status 77 | async with AsyncClient(RPC_ENDPOINT) as client: 78 | try: 79 | curve_state = await get_bonding_curve_state( 80 | client, bonding_curve_address 81 | ) 82 | 83 | print("\nBonding Curve Status:") 84 | print("-" * 50) 85 | print( 86 | f"Completion Status: {'Completed' if curve_state.complete else 'Not Completed'}" 87 | ) 88 | if curve_state.complete: 89 | print( 90 | "\nNote: This bonding curve has completed and liquidity has been migrated to Raydium." 91 | ) 92 | print("-" * 50) 93 | 94 | except ValueError as e: 95 | print(f"\nError accessing bonding curve: {e}") 96 | 97 | except ValueError as e: 98 | print(f"\nError: Invalid address format - {e}") 99 | except Exception as e: 100 | print(f"\nUnexpected error: {e}") 101 | 102 | 103 | def main(): 104 | parser = argparse.ArgumentParser(description="Check token bonding curve status") 105 | parser.add_argument("mint_address", help="The token mint address") 106 | 107 | args = parser.parse_args() 108 | asyncio.run(check_token_status(args.mint_address)) 109 | 110 | 111 | if __name__ == "__main__": 112 | main() 113 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | trades/* 2 | 3 | .vscode 4 | .pylintrc 5 | .ruff_cache 6 | 7 | # Byte-compiled / optimized / DLL files 8 | __pycache__/ 9 | *.py[cod] 10 | *$py.class 11 | 12 | # C extensions 13 | *.so 14 | 15 | # Distribution / packaging 16 | .Python 17 | build/ 18 | develop-eggs/ 19 | dist/ 20 | downloads/ 21 | eggs/ 22 | .eggs/ 23 | lib/ 24 | lib64/ 25 | parts/ 26 | sdist/ 27 | var/ 28 | wheels/ 29 | share/python-wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | MANIFEST 34 | 35 | # PyInstaller 36 | # Usually these files are written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .nox/ 49 | .coverage 50 | .coverage.* 51 | .cache 52 | nosetests.xml 53 | coverage.xml 54 | *.cover 55 | *.py,cover 56 | .hypothesis/ 57 | .pytest_cache/ 58 | cover/ 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | .pybuilder/ 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # IPython 88 | profile_default/ 89 | ipython_config.py 90 | 91 | # pyenv 92 | # For a library or package, you might want to ignore these files since the code is 93 | # intended to run in multiple environments; otherwise, check them in: 94 | # .python-version 95 | 96 | # pipenv 97 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 100 | # install all needed dependencies. 101 | #Pipfile.lock 102 | 103 | # poetry 104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 105 | # This is especially recommended for binary packages to ensure reproducibility, and is more 106 | # commonly ignored for libraries. 107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 108 | #poetry.lock 109 | 110 | # pdm 111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 112 | #pdm.lock 113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 114 | # in version control. 115 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 116 | .pdm.toml 117 | .pdm-python 118 | .pdm-build/ 119 | 120 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 121 | __pypackages__/ 122 | 123 | # Celery stuff 124 | celerybeat-schedule 125 | celerybeat.pid 126 | 127 | # SageMath parsed files 128 | *.sage.py 129 | 130 | # Environments 131 | .env 132 | .venv 133 | env/ 134 | venv/ 135 | ENV/ 136 | env.bak/ 137 | venv.bak/ 138 | 139 | # Spyder project settings 140 | .spyderproject 141 | .spyproject 142 | 143 | # Rope project settings 144 | .ropeproject 145 | 146 | # mkdocs documentation 147 | /site 148 | 149 | # mypy 150 | .mypy_cache/ 151 | .dmypy.json 152 | dmypy.json 153 | 154 | # Pyre type checker 155 | .pyre/ 156 | 157 | # pytype static type analyzer 158 | .pytype/ 159 | 160 | # Cython debug symbols 161 | cython_debug/ 162 | 163 | # PyCharm 164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 165 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 166 | # and can be added to the global gitignore or merged into this file. For a more nuclear 167 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
168 | #.idea/ 169 | trades/trades.log 170 | -------------------------------------------------------------------------------- /src/cleanup/manager.py: -------------------------------------------------------------------------------- 1 | from solders.pubkey import Pubkey 2 | from spl.token.instructions import BurnParams, CloseAccountParams, burn, close_account 3 | 4 | from config import CLEANUP_FORCE_CLOSE_WITH_BURN 5 | from core.client import SolanaClient 6 | from core.priority_fee.manager import PriorityFeeManager 7 | from core.pubkeys import SystemAddresses 8 | from core.wallet import Wallet 9 | from utils.logger import get_logger 10 | 11 | logger = get_logger(__name__) 12 | 13 | 14 | class AccountCleanupManager: 15 | """Handles safe cleanup of token accounts (ATA) after trading sessions.""" 16 | def __init__( 17 | self, 18 | client: SolanaClient, 19 | wallet: Wallet, 20 | priority_fee_manager: PriorityFeeManager, 21 | use_priority_fee: bool = False, 22 | ): 23 | """ 24 | Args: 25 | client: Solana RPC client 26 | wallet: Wallet for signing transactions 27 | """ 28 | self.client = client 29 | self.wallet = wallet 30 | self.priority_fee_manager = priority_fee_manager 31 | self.use_priority_fee = use_priority_fee 32 | 33 | async def cleanup_ata(self, mint: Pubkey) -> None: 34 | """ 35 | Attempt to burn any remaining tokens and close the ATA. 36 | Skips if account doesn't exist or is already empty/closed. 37 | """ 38 | ata = self.wallet.get_associated_token_address(mint) 39 | solana_client = await self.client.get_client() 40 | 41 | priority_fee = ( 42 | await self.priority_fee_manager.calculate_priority_fee([ata]) 43 | if self.use_priority_fee 44 | else None 45 | ) 46 | 47 | try: 48 | info = await solana_client.get_account_info(ata, encoding="base64") 49 | if not info.value: 50 | logger.info(f"ATA {ata} does not exist or already closed.") 51 | return 52 | 53 | balance = await self.client.get_token_account_balance(ata) 54 | instructions = [] 55 | 56 | if balance > 0 and CLEANUP_FORCE_CLOSE_WITH_BURN: 57 | logger.info(f"Burning {balance} tokens from ATA {ata} (mint: {mint})...") 58 | burn_ix = burn( 59 | BurnParams( 60 | account=ata, 61 | mint=mint, 62 | owner=self.wallet.pubkey, 63 | amount=balance, 64 | program_id=SystemAddresses.TOKEN_PROGRAM, 65 | ) 66 | ) 67 | instructions.append(burn_ix) 68 | 69 | elif balance > 0: 70 | logger.info( 71 | f"Skipping ATA {ata} with non-zero balance ({balance} tokens) " 72 | f"because CLEANUP_FORCE_CLOSE_WITH_BURN is disabled." 
73 | ) 74 | return 75 | 76 | # Include close account instruction 77 | logger.info(f"Closing ATA: {ata}") 78 | close_ix = close_account( 79 | CloseAccountParams( 80 | account=ata, 81 | dest=self.wallet.pubkey, 82 | owner=self.wallet.pubkey, 83 | program_id=SystemAddresses.TOKEN_PROGRAM, 84 | ) 85 | ) 86 | instructions.append(close_ix) 87 | 88 | # Send both burn and close instructions in the same transaction 89 | if instructions: 90 | tx_sig = await self.client.build_and_send_transaction( 91 | instructions, 92 | self.wallet.keypair, 93 | skip_preflight=True, 94 | priority_fee=priority_fee, 95 | ) 96 | await self.client.confirm_transaction(tx_sig) 97 | logger.info(f"Closed successfully: {ata}") 98 | 99 | except Exception as e: 100 | logger.warning(f"Cleanup failed for ATA {ata}: {e!s}") 101 | -------------------------------------------------------------------------------- /src/cli.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Command-line interface for the pump.fun trading bot. 4 | """ 5 | 6 | import argparse 7 | import asyncio 8 | import os 9 | import sys 10 | 11 | import config 12 | from trading.trader import PumpTrader 13 | from utils.logger import get_logger, setup_file_logging 14 | 15 | logger = get_logger(__name__) 16 | 17 | 18 | def parse_args() -> argparse.Namespace: 19 | """Parse command line arguments. 20 | 21 | Returns: 22 | Parsed arguments 23 | """ 24 | parser = argparse.ArgumentParser(description="Trade tokens on pump.fun.") 25 | parser.add_argument( 26 | "--yolo", action="store_true", help="Run in YOLO mode (continuous trading)" 27 | ) 28 | parser.add_argument( 29 | "--match", 30 | type=str, 31 | help="Only trade tokens with names or symbols matching this string", 32 | ) 33 | parser.add_argument( 34 | "--bro", type=str, help="Only trade tokens created by this user address" 35 | ) 36 | parser.add_argument( 37 | "--marry", action="store_true", help="Only buy tokens, skip selling" 38 | ) 39 | parser.add_argument( 40 | "--amount", 41 | type=float, 42 | help=f"Amount of SOL to spend on each buy (default: {config.BUY_AMOUNT})", 43 | ) 44 | parser.add_argument( 45 | "--buy-slippage", 46 | type=float, 47 | help=f"Buy slippage tolerance (default: {config.BUY_SLIPPAGE})", 48 | ) 49 | parser.add_argument( 50 | "--sell-slippage", 51 | type=float, 52 | help=f"Sell slippage tolerance (default: {config.SELL_SLIPPAGE})", 53 | ) 54 | 55 | return parser.parse_args() 56 | 57 | 58 | async def main() -> None: 59 | """Main entry point for the CLI.""" 60 | setup_file_logging("pump_trading.log") 61 | 62 | args = parse_args() 63 | 64 | # Get configuration values, preferring command line args over config.py 65 | rpc_endpoint: str | None = os.environ.get("SOLANA_NODE_RPC_ENDPOINT") 66 | wss_endpoint: str | None = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 67 | private_key: str | None = os.environ.get("SOLANA_PRIVATE_KEY") 68 | 69 | # Validate configuration values 70 | if not rpc_endpoint or not rpc_endpoint.startswith(("http://", "https://")): 71 | logger.error("Invalid RPC endpoint. Must start with http:// or https://") 72 | sys.exit(1) 73 | 74 | if not wss_endpoint or not wss_endpoint.startswith(("ws://", "wss://")): 75 | logger.error("Invalid WebSocket endpoint. Must start with ws:// or wss://") 76 | sys.exit(1) 77 | 78 | if not private_key or len(private_key) < 80: 79 | logger.error("Invalid private key. 
Key appears to be missing or too short") 80 | sys.exit(1) 81 | 82 | # Get trading parameters 83 | buy_amount: float = args.amount if args.amount is not None else config.BUY_AMOUNT 84 | buy_slippage: float = ( 85 | args.buy_slippage if args.buy_slippage is not None else config.BUY_SLIPPAGE 86 | ) 87 | sell_slippage: float = ( 88 | args.sell_slippage if args.sell_slippage is not None else config.SELL_SLIPPAGE 89 | ) 90 | 91 | trader: PumpTrader = PumpTrader( 92 | rpc_endpoint=rpc_endpoint, # type: ignore 93 | wss_endpoint=wss_endpoint, # type: ignore 94 | private_key=private_key, 95 | buy_amount=buy_amount, 96 | buy_slippage=buy_slippage, 97 | sell_slippage=sell_slippage, 98 | max_retries=config.MAX_RETRIES, 99 | listener_type=config.LISTENER_TYPE, 100 | ) 101 | 102 | try: 103 | await trader.start( 104 | match_string=args.match, 105 | bro_address=args.bro, 106 | marry_mode=args.marry, 107 | yolo_mode=args.yolo, 108 | ) 109 | except KeyboardInterrupt: 110 | logger.info("Trading stopped by user") 111 | except Exception as e: 112 | logger.error(f"Trading stopped due to error: {e!s}") 113 | finally: 114 | try: 115 | await trader.solana_client.close() 116 | except Exception: 117 | pass 118 | 119 | 120 | def sync_main() -> None: 121 | asyncio.run(main()) 122 | 123 | 124 | if __name__ == "__main__": 125 | asyncio.run(main()) 126 | -------------------------------------------------------------------------------- /src/core/curve.py: -------------------------------------------------------------------------------- 1 | """ 2 | Bonding curve operations for pump.fun tokens. 3 | """ 4 | 5 | import struct 6 | from typing import Final 7 | 8 | from construct import Flag, Int64ul, Struct 9 | from solders.pubkey import Pubkey 10 | 11 | from core.client import SolanaClient 12 | from core.pubkeys import LAMPORTS_PER_SOL, TOKEN_DECIMALS 13 | from utils.logger import get_logger 14 | 15 | logger = get_logger(__name__) 16 | 17 | # Discriminator for the bonding curve account 18 | EXPECTED_DISCRIMINATOR: Final[bytes] = struct.pack("<Q", 6966180631402821399) 19 | 20 | 21 | class BondingCurveState: 22 | """Represents the on-chain state of a pump.fun bonding curve account.""" 23 | 24 | _STRUCT = Struct( 25 | "virtual_token_reserves" / Int64ul, 26 | "virtual_sol_reserves" / Int64ul, 27 | "real_token_reserves" / Int64ul, 28 | "real_sol_reserves" / Int64ul, 29 | "token_total_supply" / Int64ul, 30 | "complete" / Flag, 31 | ) 32 | 33 | def __init__(self, data: bytes) -> None: 34 | """Parse bonding curve data. 35 | 36 | Args: 37 | data: Raw account data 38 | 39 | Raises: 40 | ValueError: If data cannot be parsed 41 | """ 42 | if data[:8] != EXPECTED_DISCRIMINATOR: 43 | raise ValueError("Invalid curve state discriminator") 44 | 45 | parsed = self._STRUCT.parse(data[8:]) 46 | self.__dict__.update(parsed) 47 | 48 | def calculate_price(self) -> float: 49 | """Calculate token price in SOL. 50 | 51 | Returns: 52 | Token price in SOL 53 | 54 | Raises: 55 | ValueError: If reserve state is invalid 56 | """ 57 | if self.virtual_token_reserves <= 0 or self.virtual_sol_reserves <= 0: 58 | raise ValueError("Invalid reserve state") 59 | 60 | return (self.virtual_sol_reserves / LAMPORTS_PER_SOL) / ( 61 | self.virtual_token_reserves / 10**TOKEN_DECIMALS 62 | ) 63 | 64 | @property 65 | def token_reserves(self) -> float: 66 | """Get token reserves in decimal form.""" 67 | return self.virtual_token_reserves / 10**TOKEN_DECIMALS 68 | 69 | @property 70 | def sol_reserves(self) -> float: 71 | """Get SOL reserves in decimal form.""" 72 | return self.virtual_sol_reserves / LAMPORTS_PER_SOL 73 | 74 | 75 | class BondingCurveManager: 76 | """Manager for bonding curve operations.""" 77 | 78 | def __init__(self, client: SolanaClient): 79 | """Initialize with Solana client.
80 | 81 | Args: 82 | client: Solana client for RPC calls 83 | """ 84 | self.client = client 85 | 86 | async def get_curve_state(self, curve_address: Pubkey) -> BondingCurveState: 87 | """Get the state of a bonding curve. 88 | 89 | Args: 90 | curve_address: Address of the bonding curve account 91 | 92 | Returns: 93 | Bonding curve state 94 | 95 | Raises: 96 | ValueError: If curve data is invalid 97 | """ 98 | try: 99 | account = await self.client.get_account_info(curve_address) 100 | if not account.data: 101 | raise ValueError(f"No data in bonding curve account {curve_address}") 102 | 103 | return BondingCurveState(account.data) 104 | 105 | except Exception as e: 106 | logger.error(f"Failed to get curve state: {str(e)}") 107 | raise ValueError(f"Invalid curve state: {str(e)}") 108 | 109 | async def calculate_price(self, curve_address: Pubkey) -> float: 110 | """Calculate the current price of a token. 111 | 112 | Args: 113 | curve_address: Address of the bonding curve account 114 | 115 | Returns: 116 | Token price in SOL 117 | """ 118 | curve_state = await self.get_curve_state(curve_address) 119 | return curve_state.calculate_price() 120 | 121 | async def calculate_expected_tokens( 122 | self, curve_address: Pubkey, sol_amount: float 123 | ) -> float: 124 | """Calculate the expected token amount for a given SOL input. 125 | 126 | Args: 127 | curve_address: Address of the bonding curve account 128 | sol_amount: Amount of SOL to spend 129 | 130 | Returns: 131 | Expected token amount 132 | """ 133 | curve_state = await self.get_curve_state(curve_address) 134 | price = curve_state.calculate_price() 135 | return sol_amount / price 136 | -------------------------------------------------------------------------------- /learning-examples/listen_to_raydium_migration.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import json 3 | import os 4 | import sys 5 | 6 | import websockets 7 | 8 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) 9 | 10 | from core.pubkeys import PumpAddresses 11 | 12 | WSS_ENDPOINT = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 13 | 14 | 15 | def process_initialize2_transaction(data): 16 | """Process and decode an initialize2 transaction""" 17 | try: 18 | signature = data["transaction"]["signatures"][0] 19 | account_keys = data["transaction"]["message"]["accountKeys"] 20 | 21 | # Check raydium_amm_idl.json for the account keys 22 | # The token address is typically the 19th account (index 18) 23 | # The liquidity pool address is typically the 3rd account (index 2) 24 | if len(account_keys) > 18: 25 | token_address = account_keys[18] 26 | liquidity_address = account_keys[2] 27 | 28 | print(f"\nSignature: {signature}") 29 | print(f"Token Address: {token_address}") 30 | print(f"Liquidity Address: {liquidity_address}") 31 | print("=" * 50) 32 | else: 33 | print(f"\nError: Not enough account keys (found {len(account_keys)})") 34 | 35 | except Exception as e: 36 | print(f"\nError: {e!s}") 37 | 38 | 39 | async def listen_for_events(): 40 | while True: 41 | try: 42 | async with websockets.connect(WSS_ENDPOINT) as websocket: 43 | subscription_message = json.dumps( 44 | { 45 | "jsonrpc": "2.0", 46 | "id": 1, 47 | "method": "blockSubscribe", 48 | "params": [ 49 | { 50 | "mentionsAccountOrProgram": str( 51 | PumpAddresses.LIQUIDITY_MIGRATOR 52 | ) 53 | }, 54 | { 55 | "commitment": "confirmed", 56 | "encoding": "json", 57 | "showRewards": False, 58 | "transactionDetails": "full", 59 | 
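# "maxSupportedTransactionVersion": 0 is required so that blocks containing versioned (v0) transactions are returned instead of producing an error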
"maxSupportedTransactionVersion": 0, 60 | }, 61 | ], 62 | } 63 | ) 64 | 65 | await websocket.send(subscription_message) 66 | response = await websocket.recv() 67 | print(f"Subscription response: {response}") 68 | print("\nListening for Raydium pool initialization events...") 69 | 70 | while True: 71 | try: 72 | response = await asyncio.wait_for(websocket.recv(), timeout=30) 73 | data = json.loads(response) 74 | 75 | if "method" in data and data["method"] == "blockNotification": 76 | if "params" in data and "result" in data["params"]: 77 | block_data = data["params"]["result"] 78 | if ( 79 | "value" in block_data 80 | and "block" in block_data["value"] 81 | ): 82 | block = block_data["value"]["block"] 83 | if "transactions" in block: 84 | for tx in block["transactions"]: 85 | logs = tx.get("meta", {}).get( 86 | "logMessages", [] 87 | ) 88 | 89 | # Check for initialize2 instruction 90 | for log in logs: 91 | if ( 92 | "Program log: initialize2: InitializeInstruction2" 93 | in log 94 | ): 95 | print( 96 | "Found initialize2 instruction!" 97 | ) 98 | process_initialize2_transaction(tx) 99 | break 100 | 101 | except TimeoutError: 102 | print("\nChecking connection...") 103 | print("Connection alive") 104 | continue 105 | 106 | except Exception as e: 107 | print(f"\nConnection error: {e!s}") 108 | print("Retrying in 5 seconds...") 109 | await asyncio.sleep(5) 110 | 111 | 112 | if __name__ == "__main__": 113 | asyncio.run(listen_for_events()) 114 | -------------------------------------------------------------------------------- /src/config.py: -------------------------------------------------------------------------------- 1 | """ 2 | Configuration for the pump.fun trading bot. 3 | 4 | This file defines comprehensive parameters and settings for the trading bot. 5 | Carefully review and adjust values to match your trading strategy and risk tolerance. 6 | """ 7 | 8 | # Trading parameters 9 | # Control trade execution: amount of SOL per trade and acceptable price deviation 10 | BUY_AMOUNT: int | float = 0.000_001 # Amount of SOL to spend when buying 11 | BUY_SLIPPAGE: float = 0.4 # Maximum acceptable price deviation (0.4 = 40%) 12 | SELL_SLIPPAGE: float = 0.4 # Consistent slippage tolerance to maintain trading strategy 13 | 14 | 15 | # EXTREME FAST Mode configuration 16 | # When enabled, skips waiting for the bonding curve to stabilize and RPC price check. 17 | # The bot buys the specified number of tokens directly, making the process faster but less precise. 18 | EXTREME_FAST_MODE: bool = False 19 | # Amount of tokens to buy in EXTREME FAST mode. No price calculation is done; the bot buys exactly this amount. 
20 | EXTREME_FAST_TOKEN_AMOUNT: int = 30 21 | 22 | 23 | # Priority fee configuration 24 | # Manage transaction speed and cost on the Solana network 25 | ENABLE_DYNAMIC_PRIORITY_FEE: bool = False # Adaptive fee calculation 26 | ENABLE_FIXED_PRIORITY_FEE: bool = True # Use consistent, predictable fee 27 | FIXED_PRIORITY_FEE: int = 2_000 # Base fee in microlamports 28 | EXTRA_PRIORITY_FEE: float = 0.0 # Percentage increase on priority fee (0.1 = 10%) 29 | HARD_CAP_PRIOR_FEE: int = 200_000 # Maximum allowable fee to prevent excessive spending in microlamports 30 | 31 | 32 | # Listener configuration 33 | # Choose method for detecting new tokens on the network 34 | # "logs": Recommended for more stable token detection 35 | # "blocks": Unstable method, potentially less reliable 36 | LISTENER_TYPE = "logs" 37 | 38 | 39 | # Retry and timeout settings 40 | # Control bot resilience and transaction handling 41 | MAX_RETRIES: int = 10 # Number of attempts for transaction submission 42 | 43 | # Waiting periods in seconds between actions (TODO: to be replaced with retry mechanism) 44 | WAIT_TIME_AFTER_CREATION: int | float = 15 # Seconds to wait after token creation 45 | WAIT_TIME_AFTER_BUY: int | float = 15 # Holding period after buy transaction 46 | WAIT_TIME_BEFORE_NEW_TOKEN: int | float = 15 # Pause between token trades 47 | 48 | 49 | # Token and account management 50 | # Control token processing and account cleanup strategies 51 | MAX_TOKEN_AGE: int | float = 0.1 # Maximum token age in seconds for processing 52 | 53 | # Cleanup mode determines when to manage token accounts. Options: 54 | # "disabled": No cleanup will occur. 55 | # "on_fail": Only clean up if a buy transaction fails. 56 | # "after_sell": Clean up after selling, but only if the balance is zero. 57 | # "post_session": Clean up all empty accounts after a trading session ends. 58 | CLEANUP_MODE: str = "disabled" 59 | CLEANUP_FORCE_CLOSE_WITH_BURN: bool = False # Burn remaining tokens before closing account, else skip ATA with non-zero balances 60 | CLEANUP_WITH_PRIORITY_FEE: bool = False # Use priority fees for cleanup transactions 61 | 62 | 63 | # Node provider configuration (TODO: to be implemented) 64 | # Manage RPC node interaction to prevent rate limiting 65 | MAX_RPS: int = 25 # Maximum requests per second 66 | 67 | 68 | def validate_configuration() -> None: 69 | """ 70 | Comprehensive validation of bot configuration. 
71 | 72 | Checks: 73 | - Type correctness 74 | - Value ranges 75 | - Logical consistency of settings 76 | """ 77 | # Configuration validation checks 78 | config_checks = [ 79 | # (value, type, min_value, max_value, error_message) 80 | (BUY_AMOUNT, (int, float), 0, float('inf'), "BUY_AMOUNT must be a positive number"), 81 | (BUY_SLIPPAGE, float, 0, 1, "BUY_SLIPPAGE must be between 0 and 1"), 82 | (SELL_SLIPPAGE, float, 0, 1, "SELL_SLIPPAGE must be between 0 and 1"), 83 | (FIXED_PRIORITY_FEE, int, 0, float('inf'), "FIXED_PRIORITY_FEE must be a non-negative integer"), 84 | (EXTRA_PRIORITY_FEE, float, 0, 1, "EXTRA_PRIORITY_FEE must be between 0 and 1"), 85 | (HARD_CAP_PRIOR_FEE, int, 0, float('inf'), "HARD_CAP_PRIOR_FEE must be a non-negative integer"), 86 | (MAX_RETRIES, int, 0, 100, "MAX_RETRIES must be between 0 and 100") 87 | ] 88 | 89 | for value, expected_type, min_val, max_val, error_msg in config_checks: 90 | if not isinstance(value, expected_type): 91 | raise ValueError(f"Type error: {error_msg}") 92 | 93 | if isinstance(value, (int, float)) and not (min_val <= value <= max_val): 94 | raise ValueError(f"Range error: {error_msg}") 95 | 96 | # Logical consistency checks 97 | if ENABLE_DYNAMIC_PRIORITY_FEE and ENABLE_FIXED_PRIORITY_FEE: 98 | raise ValueError("Cannot enable both dynamic and fixed priority fees simultaneously") 99 | 100 | # Validate listener type 101 | if LISTENER_TYPE not in ["logs", "blocks"]: 102 | raise ValueError("LISTENER_TYPE must be either 'logs' or 'blocks'") 103 | 104 | # Validate cleanup mode 105 | valid_cleanup_modes = ["disabled", "on_fail", "after_sell", "post_session"] 106 | if CLEANUP_MODE not in valid_cleanup_modes: 107 | raise ValueError(f"CLEANUP_MODE must be one of {valid_cleanup_modes}") 108 | 109 | 110 | # Validate configuration on import 111 | validate_configuration() -------------------------------------------------------------------------------- /tests/compare_listeners.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test script to compare BlockListener and LogsListener 3 | Runs both listeners simultaneously to compare their performance 4 | """ 5 | 6 | import asyncio 7 | import logging 8 | import os 9 | import sys 10 | import time 11 | from pathlib import Path 12 | 13 | sys.path.append(str(Path(__file__).parent.parent / "src")) 14 | 15 | from core.pubkeys import PumpAddresses 16 | from monitoring.block_listener import BlockListener 17 | from monitoring.logs_listener import LogsListener 18 | from trading.base import TokenInfo 19 | 20 | logging.basicConfig( 21 | level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" 22 | ) 23 | logger = logging.getLogger("listener-comparison") 24 | 25 | 26 | class TimingTokenCallback: 27 | def __init__(self, name: str): 28 | self.name = name 29 | self.detected_tokens = [] 30 | self.detection_times = {} 31 | 32 | async def on_token_created(self, token_info: TokenInfo) -> None: 33 | """Process detected token with timing information""" 34 | token_key = str(token_info.mint) 35 | detection_time = time.time() 36 | 37 | self.detected_tokens.append(token_info) 38 | self.detection_times[token_key] = detection_time 39 | 40 | logger.info(f"[{self.name}] Detected: {token_info.name} ({token_info.symbol})") 41 | print(f"\n{'=' * 50}") 42 | print(f"[{self.name}] NEW TOKEN: {token_info.name}") 43 | print(f"Symbol: {token_info.symbol}") 44 | print(f"Mint: {token_info.mint}") 45 | print(f"Detection time: {detection_time}") 46 | print(f"{'=' * 50}\n") 47 | 48 | 49 | async 
def run_comparison(test_duration: int = 300): 50 | """Run both listeners and compare their performance""" 51 | wss_endpoint = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 52 | if not wss_endpoint: 53 | logger.error("SOLANA_NODE_WSS_ENDPOINT environment variable is not set") 54 | return 55 | 56 | logger.info(f"Connecting to WebSocket: {wss_endpoint}") 57 | 58 | block_listener = BlockListener(wss_endpoint, PumpAddresses.PROGRAM) 59 | logs_listener = LogsListener(wss_endpoint, PumpAddresses.PROGRAM) 60 | 61 | block_callback = TimingTokenCallback("BlockListener") 62 | logs_callback = TimingTokenCallback("LogsListener") 63 | 64 | logger.info("Starting both listeners...") 65 | block_task = asyncio.create_task( 66 | block_listener.listen_for_tokens(block_callback.on_token_created) 67 | ) 68 | logs_task = asyncio.create_task( 69 | logs_listener.listen_for_tokens(logs_callback.on_token_created) 70 | ) 71 | 72 | logger.info(f"Comparison running for {test_duration} seconds...") 73 | try: 74 | await asyncio.sleep(test_duration) 75 | except KeyboardInterrupt: 76 | logger.info("Test interrupted by user") 77 | finally: 78 | block_task.cancel() 79 | logs_task.cancel() 80 | try: 81 | await asyncio.gather(block_task, logs_task, return_exceptions=True) 82 | except asyncio.CancelledError: 83 | pass 84 | 85 | logger.info(f"BlockListener detected {len(block_callback.detected_tokens)} tokens") 86 | logger.info(f"LogsListener detected {len(logs_callback.detected_tokens)} tokens") 87 | 88 | # Find tokens detected by both listeners 89 | block_mints = {str(token.mint) for token in block_callback.detected_tokens} 90 | logs_mints = {str(token.mint) for token in logs_callback.detected_tokens} 91 | common_mints = block_mints.intersection(logs_mints) 92 | 93 | logger.info(f"Tokens detected by both listeners: {len(common_mints)}") 94 | 95 | # Compare detection times for common tokens 96 | if common_mints: 97 | logger.info("\nPerformance comparison for tokens detected by both listeners:") 98 | logger.info("Token Mint | BlockListener Time | LogsListener Time | Difference (ms)") 99 | logger.info("-" * 80) 100 | 101 | for mint in common_mints: 102 | block_time = block_callback.detection_times.get(mint) 103 | logs_time = logs_callback.detection_times.get(mint) 104 | 105 | if block_time and logs_time: 106 | diff_ms = abs(block_time - logs_time) * 1000 # Convert to milliseconds 107 | faster = "BlockListener" if block_time < logs_time else "LogsListener" 108 | 109 | logger.info(f"{mint[:10]}... | {block_time:.6f} | {logs_time:.6f} | {diff_ms:.2f}ms ({faster} faster)") 110 | 111 | # Report tokens only detected by one listener 112 | block_only = block_mints - logs_mints 113 | logs_only = logs_mints - block_mints 114 | 115 | if block_only: 116 | logger.info(f"\nTokens only detected by BlockListener: {len(block_only)}") 117 | for mint in block_only: 118 | logger.info(f" - {mint}") 119 | 120 | if logs_only: 121 | logger.info(f"\nTokens only detected by LogsListener: {len(logs_only)}") 122 | for mint in logs_only: 123 | logger.info(f" - {mint}") 124 | 125 | 126 | if __name__ == "__main__": 127 | test_duration = 30 # seconds 128 | 129 | if len(sys.argv) > 1: 130 | try: 131 | test_duration = int(sys.argv[1]) 132 | except ValueError: 133 | logger.error(f"Invalid test duration: {sys.argv[1]}. 
Using default of {test_duration} seconds.") 134 | 135 | logger.info("Starting listener comparison test") 136 | logger.info(f"Will run for {test_duration} seconds") 137 | asyncio.run(run_comparison(test_duration)) 138 | -------------------------------------------------------------------------------- /src/monitoring/logs_event_processor.py: -------------------------------------------------------------------------------- 1 | """ 2 | Event processing for pump.fun tokens using logsSubscribe data. 3 | """ 4 | 5 | import base64 6 | import struct 7 | from typing import Final 8 | 9 | import base58 10 | from solders.pubkey import Pubkey 11 | 12 | from core.pubkeys import SystemAddresses 13 | from trading.base import TokenInfo 14 | from utils.logger import get_logger 15 | 16 | logger = get_logger(__name__) 17 | 18 | 19 | class LogsEventProcessor: 20 | """Processes events from pump.fun program logs.""" 21 | 22 | # Discriminator for create instruction to avoid non-create transactions 23 | CREATE_DISCRIMINATOR: Final[int] = 8530921459188068891 24 | 25 | def __init__(self, pump_program: Pubkey): 26 | """Initialize event processor. 27 | 28 | Args: 29 | pump_program: Pump.fun program address 30 | """ 31 | self.pump_program = pump_program 32 | 33 | def process_program_logs(self, logs: list[str], signature: str) -> TokenInfo | None: 34 | """Process program logs and extract token info. 35 | 36 | Args: 37 | logs: List of log strings from the notification 38 | signature: Transaction signature 39 | 40 | Returns: 41 | TokenInfo if a token creation is found, None otherwise 42 | """ 43 | # Check if this is a token creation 44 | if not any("Program log: Instruction: Create" in log for log in logs): 45 | return None 46 | 47 | # Skip swaps as the first condition may pass them 48 | if any("Program log: Instruction: CreateTokenAccount" in log for log in logs): 49 | return None 50 | 51 | # Find and process program data 52 | for log in logs: 53 | if "Program data:" in log: 54 | try: 55 | encoded_data = log.split(": ")[1] 56 | decoded_data = base64.b64decode(encoded_data) 57 | parsed_data = self._parse_create_instruction(decoded_data) 58 | 59 | if parsed_data and "name" in parsed_data: 60 | mint = Pubkey.from_string(parsed_data["mint"]) 61 | bonding_curve = Pubkey.from_string(parsed_data["bondingCurve"]) 62 | associated_curve = self._find_associated_bonding_curve( 63 | mint, bonding_curve 64 | ) 65 | 66 | return TokenInfo( 67 | name=parsed_data["name"], 68 | symbol=parsed_data["symbol"], 69 | uri=parsed_data["uri"], 70 | mint=mint, 71 | bonding_curve=bonding_curve, 72 | associated_bonding_curve=associated_curve, 73 | user=Pubkey.from_string(parsed_data["user"]), 74 | ) 75 | except Exception as e: 76 | logger.error(f"Failed to process log data: {e}") 77 | 78 | return None 79 | 80 | def _parse_create_instruction(self, data: bytes) -> dict | None: 81 | """Parse the create instruction data. 82 | 83 | Args: 84 | data: Raw instruction data 85 | 86 | Returns: 87 | Dictionary of parsed data or None if parsing fails 88 | """ 89 | if len(data) < 8: 90 | return None 91 | 92 | # Check for the correct instruction discriminator 93 | discriminator = struct.unpack(" Pubkey: 132 | """ 133 | Find the associated bonding curve for a given mint and bonding curve. 134 | This uses the standard ATA derivation. 
135 | 136 | Args: 137 | mint: Token mint address 138 | bonding_curve: Bonding curve address 139 | 140 | Returns: 141 | Associated bonding curve address 142 | """ 143 | derived_address, _ = Pubkey.find_program_address( 144 | [ 145 | bytes(bonding_curve), 146 | bytes(SystemAddresses.TOKEN_PROGRAM), 147 | bytes(mint), 148 | ], 149 | SystemAddresses.ASSOCIATED_TOKEN_PROGRAM, 150 | ) 151 | return derived_address 152 | -------------------------------------------------------------------------------- /learning-examples/listen_new_direct.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import base64 3 | import json 4 | import os 5 | import struct 6 | import sys 7 | 8 | import base58 9 | import websockets 10 | 11 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) 12 | from core.pubkeys import PumpAddresses 13 | 14 | WSS_ENDPOINT = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 15 | 16 | 17 | # Load the IDL JSON file 18 | with open("idl/pump_fun_idl.json") as f: 19 | idl = json.load(f) 20 | 21 | # Extract the "create" instruction definition 22 | create_instruction = next( 23 | instr for instr in idl["instructions"] if instr["name"] == "create" 24 | ) 25 | 26 | 27 | def parse_create_instruction(data): 28 | if len(data) < 8: 29 | return None 30 | offset = 8 31 | parsed_data = {} 32 | 33 | # Parse fields based on CreateEvent structure 34 | fields = [ 35 | ("name", "string"), 36 | ("symbol", "string"), 37 | ("uri", "string"), 38 | ("mint", "publicKey"), 39 | ("bondingCurve", "publicKey"), 40 | ("user", "publicKey"), 41 | ] 42 | 43 | try: 44 | for field_name, field_type in fields: 45 | if field_type == "string": 46 | length = struct.unpack(" dict[str, Any]: 35 | """Load IDL from file. 36 | 37 | Returns: 38 | IDL as dictionary 39 | """ 40 | try: 41 | with open("idl/pump_fun_idl.json") as f: 42 | return json.load(f) 43 | except Exception as e: 44 | logger.error(f"Failed to load IDL: {str(e)}") 45 | # Create a minimal IDL with just what we need 46 | return { 47 | "instructions": [ 48 | { 49 | "name": "create", 50 | "args": [ 51 | {"name": "name", "type": "string"}, 52 | {"name": "symbol", "type": "string"}, 53 | {"name": "uri", "type": "string"}, 54 | ], 55 | } 56 | ] 57 | } 58 | 59 | def process_transaction(self, tx_data: str) -> TokenInfo | None: 60 | """Process a transaction and extract token info. 61 | 62 | Args: 63 | tx_data: Base64 encoded transaction data 64 | 65 | Returns: 66 | TokenInfo if a token creation is found, None otherwise 67 | """ 68 | try: 69 | tx_data_decoded = base64.b64decode(tx_data) 70 | transaction = VersionedTransaction.from_bytes(tx_data_decoded) 71 | 72 | for ix in transaction.message.instructions: 73 | # Check if instruction is from pump.fun program 74 | program_id_index = ix.program_id_index 75 | if program_id_index >= len(transaction.message.account_keys): 76 | continue 77 | 78 | program_id = transaction.message.account_keys[program_id_index] 79 | 80 | if str(program_id) != str(self.pump_program): 81 | continue 82 | 83 | ix_data = bytes(ix.data) 84 | 85 | # Check if it's a create instruction 86 | if len(ix_data) < 8: 87 | continue 88 | 89 | discriminator = struct.unpack(" dict[str, Any]: 135 | """Decode create instruction data. 
136 | 137 | Args: 138 | ix_data: Instruction data bytes 139 | ix_def: Instruction definition from IDL 140 | accounts: List of account pubkeys 141 | 142 | Returns: 143 | Decoded instruction arguments 144 | """ 145 | args = {} 146 | offset = 8 # Skip 8-byte discriminator 147 | 148 | for arg in ix_def["args"]: 149 | if arg["type"] == "string": 150 | length = struct.unpack_from(" If Python is already installed, `uv` will detect and use it automatically. 31 | 32 | ### Installation 33 | 34 | #### 1️⃣ Install Python (if needed) 35 | ```bash 36 | uv python install 37 | ``` 38 | > **Why?** `uv` will fetch and install the required Python version for your system. 39 | 40 | #### 2️⃣ Clone the repository 41 | ```bash 42 | git clone https://github.com/chainstacklabs/pump-fun-bot.git 43 | cd pump-fun-bot 44 | ``` 45 | 46 | #### 3️⃣ Set up a virtual environment 47 | ```bash 48 | # Create virtual environment 49 | uv venv 50 | 51 | # Activate (Unix/macOS) 52 | source .venv/bin/activate 53 | 54 | # Activate (Windows) 55 | .venv\Scripts\activate 56 | ``` 57 | > Virtual environments help keep dependencies isolated and prevent conflicts. 58 | 59 | #### 4️⃣ Install dependencies 60 | ```bash 61 | uv pip install -e . 62 | ``` 63 | > **Why `-e` (editable mode)?** Lets you modify the code without reinstalling the package—useful for development! 64 | 65 | #### 5️⃣ Configure the bot 66 | ```bash 67 | # Copy example config 68 | cp .env.example .env # Unix/macOS 69 | 70 | # Windows 71 | copy .env.example .env 72 | ``` 73 | Edit the `.env` file and add your **Solana RPC endpoints** and **private key**. 74 | 75 | ### Running the bot 76 | 77 | ```bash 78 | # Option 1: run as installed package 79 | pump_bot --help 80 | 81 | # Option 2: run directly 82 | python -m src.cli --help 83 | ``` 84 | 85 | > **You're all set! 🎉** Now you can start using the bot. Check `--help` for available commands. 🚀 86 | 87 | ## Bonding curve state check 88 | 89 | `check_boding_curve_status.py` — checks the state of the bonding curve associated with a token. When the bonding curve state is completed, the token is migrated to Raydium. 90 | 91 | To run: 92 | 93 | `python check_boding_curve_status.py TOKEN_ADDRESS` 94 | 95 | ## Listening to the Raydium migration 96 | 97 | When the bonding curve state completes, the liquidity and the token graduate to Raydium. 98 | 99 | `listen_to_raydium_migration.py` — listens to the migration events of the tokens from pump_fun to Raydium and prints the signature of the migration, the token address, and the liquidity pool address on Raydium. 100 | 101 | To run: 102 | 103 | `python listen_to_raydium_migration.py` 104 | 105 | You can compute the associatedBondingCurve address following the [Solana docs PDA](https://solana.com/docs/core/pda) derivation logic. Take the following as inputs *as seeds* (order matters): 106 | 107 | - bondingCurve address 108 | - the Solana system token program address: `TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA` 109 | - the token mint address 110 | 111 | And compute against the Solana system associated token account program address: `ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL`. 112 | 113 | The implications of this are kinda huge: 114 | * you can now use `logsSubscribe` to snipe the tokens and you are not limited to the `blockSubscribe` method 115 | * see which one is faster 116 | * not every provider supports `blockSubscribe` on lower tier plans or at all, but everyone supports `logsSubscribe` 117 | 118 | The following script showcases the implementation.
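As a quick illustration, here is a minimal sketch of that derivation using `solders` (the helper name is illustrative; `mint` and `bonding_curve` are assumed to be `Pubkey` values you already have):

```python
from solders.pubkey import Pubkey

# Program addresses listed above
TOKEN_PROGRAM = Pubkey.from_string("TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA")
ASSOCIATED_TOKEN_PROGRAM = Pubkey.from_string("ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL")


def derive_associated_bonding_curve(mint: Pubkey, bonding_curve: Pubkey) -> Pubkey:
    # Standard ATA derivation: seeds are [owner, token program, mint],
    # computed against the associated token account program
    derived_address, _bump = Pubkey.find_program_address(
        [bytes(bonding_curve), bytes(TOKEN_PROGRAM), bytes(mint)],
        ASSOCIATED_TOKEN_PROGRAM,
    )
    return derived_address
```

This is the same derivation the bot performs on the fly in `src/monitoring/logs_event_processor.py`.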
119 | 120 | ## Compute associated bonding curve 121 | 122 | `compute_associated_bonding_curve.py` — computes the associated bonding curve for a given token. 123 | 124 | To run: 125 | 126 | `python compute_associated_bonding_curve.py` and then enter the token mint address. 127 | 128 | ## Listen to new direct full details 129 | 130 | `listen_new_direct_full_details.py` — listens to the new direct full details events and prints the signature, the token address, the user, the bonding curve address, and the associated bonding curve address using just the `logsSubscribe` method. Basically everything you need for sniping using just `logsSubscribe` and no extra calls like doing `getTransaction` to get the missing data. It's just computed on the fly now. 131 | 132 | To run: 133 | 134 | `python listen_new_direct_full_details.py` 135 | 136 | So now you can run `listen_create_from_blocksubscribe.py` and `listen_new_direct_full_details.py` at the same time and see which one is faster. 137 | 138 | ## Reference Link 139 | [ChainStack Doc](https://docs.chainstack.com/docs/solana-creating-a-pumpfun-bot) -------------------------------------------------------------------------------- /src/monitoring/logs_listener.py: -------------------------------------------------------------------------------- 1 | """ 2 | WebSocket monitoring for pump.fun tokens using logsSubscribe. 3 | """ 4 | 5 | import asyncio 6 | import json 7 | from collections.abc import Awaitable, Callable 8 | 9 | import websockets 10 | from solders.pubkey import Pubkey 11 | 12 | from monitoring.base_listener import BaseTokenListener 13 | from monitoring.logs_event_processor import LogsEventProcessor 14 | from trading.base import TokenInfo 15 | from utils.logger import get_logger 16 | 17 | logger = get_logger(__name__) 18 | 19 | 20 | class LogsListener(BaseTokenListener): 21 | """WebSocket listener for pump.fun token creation events using logsSubscribe.""" 22 | 23 | def __init__(self, wss_endpoint: str, pump_program: Pubkey): 24 | """Initialize token listener. 25 | 26 | Args: 27 | wss_endpoint: WebSocket endpoint URL 28 | pump_program: Pump.fun program address 29 | """ 30 | self.wss_endpoint = wss_endpoint 31 | self.pump_program = pump_program 32 | self.event_processor = LogsEventProcessor(pump_program) 33 | self.ping_interval = 20 # seconds 34 | 35 | async def listen_for_tokens( 36 | self, 37 | token_callback: Callable[[TokenInfo], Awaitable[None]], 38 | match_string: str | None = None, 39 | creator_address: str | None = None, 40 | ) -> None: 41 | """Listen for new token creations using logsSubscribe. 42 | 43 | Args: 44 | token_callback: Callback function for new tokens 45 | match_string: Optional string to match in token name/symbol 46 | creator_address: Optional creator address to filter by 47 | """ 48 | while True: 49 | try: 50 | async with websockets.connect(self.wss_endpoint) as websocket: 51 | await self._subscribe_to_logs(websocket) 52 | ping_task = asyncio.create_task(self._ping_loop(websocket)) 53 | 54 | try: 55 | while True: 56 | token_info = await self._wait_for_token_creation(websocket) 57 | if not token_info: 58 | continue 59 | 60 | logger.info( 61 | f"New token detected: {token_info.name} ({token_info.symbol})" 62 | ) 63 | 64 | if match_string and not ( 65 | match_string.lower() in token_info.name.lower() 66 | or match_string.lower() in token_info.symbol.lower() 67 | ): 68 | logger.info( 69 | f"Token does not match filter '{match_string}'. Skipping..." 
70 | ) 71 | continue 72 | 73 | if ( 74 | creator_address 75 | and str(token_info.user) != creator_address 76 | ): 77 | logger.info( 78 | f"Token not created by {creator_address}. Skipping..." 79 | ) 80 | continue 81 | 82 | await token_callback(token_info) 83 | 84 | except websockets.exceptions.ConnectionClosed: 85 | logger.warning("WebSocket connection closed. Reconnecting...") 86 | ping_task.cancel() 87 | 88 | except Exception as e: 89 | logger.error(f"WebSocket connection error: {str(e)}") 90 | logger.info("Reconnecting in 5 seconds...") 91 | await asyncio.sleep(5) 92 | 93 | async def _subscribe_to_logs(self, websocket) -> None: 94 | """Subscribe to logs mentioning the pump.fun program. 95 | 96 | Args: 97 | websocket: Active WebSocket connection 98 | """ 99 | subscription_message = json.dumps( 100 | { 101 | "jsonrpc": "2.0", 102 | "id": 1, 103 | "method": "logsSubscribe", 104 | "params": [ 105 | {"mentions": [str(self.pump_program)]}, 106 | {"commitment": "processed"}, 107 | ], 108 | } 109 | ) 110 | 111 | await websocket.send(subscription_message) 112 | logger.info(f"Subscribed to logs mentioning program: {self.pump_program}") 113 | 114 | # Wait for subscription confirmation 115 | response = await websocket.recv() 116 | response_data = json.loads(response) 117 | if "result" in response_data: 118 | logger.info(f"Subscription confirmed with ID: {response_data['result']}") 119 | else: 120 | logger.warning(f"Unexpected subscription response: {response}") 121 | 122 | async def _ping_loop(self, websocket) -> None: 123 | """Keep connection alive with pings. 124 | 125 | Args: 126 | websocket: Active WebSocket connection 127 | """ 128 | try: 129 | while True: 130 | await asyncio.sleep(self.ping_interval) 131 | try: 132 | pong_waiter = await websocket.ping() 133 | await asyncio.wait_for(pong_waiter, timeout=10) 134 | except asyncio.TimeoutError: 135 | logger.warning("Ping timeout - server not responding") 136 | # Force reconnection 137 | await websocket.close() 138 | return 139 | except asyncio.CancelledError: 140 | pass 141 | except Exception as e: 142 | logger.error(f"Ping error: {str(e)}") 143 | 144 | async def _wait_for_token_creation(self, websocket) -> TokenInfo | None: 145 | try: 146 | response = await asyncio.wait_for(websocket.recv(), timeout=30) 147 | data = json.loads(response) 148 | 149 | if "method" not in data or data["method"] != "logsNotification": 150 | return None 151 | 152 | log_data = data["params"]["result"]["value"] 153 | logs = log_data.get("logs", []) 154 | signature = log_data.get("signature", "unknown") 155 | 156 | # Use the processor to extract token info 157 | return self.event_processor.process_program_logs(logs, signature) 158 | 159 | except asyncio.TimeoutError: 160 | logger.debug("No data received for 30 seconds") 161 | except websockets.exceptions.ConnectionClosed: 162 | logger.warning("WebSocket connection closed") 163 | raise 164 | except Exception as e: 165 | logger.error(f"Error processing WebSocket message: {str(e)}") 166 | 167 | return None 168 | -------------------------------------------------------------------------------- /learning-examples/listen_create_from_blocksubscribe.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import base64 3 | import json 4 | import os 5 | import struct 6 | import sys 7 | 8 | import websockets 9 | from solders.transaction import VersionedTransaction 10 | 11 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) 12 | from core.pubkeys import 
PumpAddresses 13 | 14 | WSS_ENDPOINT = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 15 | 16 | 17 | def load_idl(file_path): 18 | with open(file_path) as f: 19 | return json.load(f) 20 | 21 | 22 | def decode_create_instruction(ix_data, ix_def, accounts): 23 | args = {} 24 | offset = 8 # Skip 8-byte discriminator 25 | 26 | for arg in ix_def["args"]: 27 | if arg["type"] == "string": 28 | length = struct.unpack_from(" None: 41 | """Listen for new token creations. 42 | 43 | Args: 44 | token_callback: Callback function for new tokens 45 | match_string: Optional string to match in token name/symbol 46 | creator_address: Optional creator address to filter by 47 | """ 48 | while True: 49 | try: 50 | async with websockets.connect(self.wss_endpoint) as websocket: 51 | await self._subscribe_to_program(websocket) 52 | ping_task = asyncio.create_task(self._ping_loop(websocket)) 53 | 54 | try: 55 | while True: 56 | token_info = await self._wait_for_token_creation(websocket) 57 | if not token_info: 58 | continue 59 | 60 | logger.info( 61 | f"New token detected: {token_info.name} ({token_info.symbol})" 62 | ) 63 | 64 | if match_string and not ( 65 | match_string.lower() in token_info.name.lower() 66 | or match_string.lower() in token_info.symbol.lower() 67 | ): 68 | logger.info( 69 | f"Token does not match filter '{match_string}'. Skipping..." 70 | ) 71 | continue 72 | 73 | if ( 74 | creator_address 75 | and str(token_info.user) != creator_address 76 | ): 77 | logger.info( 78 | f"Token not created by {creator_address}. Skipping..." 79 | ) 80 | continue 81 | 82 | await token_callback(token_info) 83 | 84 | except websockets.exceptions.ConnectionClosed: 85 | logger.warning("WebSocket connection closed. Reconnecting...") 86 | ping_task.cancel() 87 | 88 | except Exception as e: 89 | logger.error(f"WebSocket connection error: {e!s}") 90 | logger.info("Reconnecting in 5 seconds...") 91 | await asyncio.sleep(5) 92 | 93 | async def _subscribe_to_program(self, websocket) -> None: 94 | """Subscribe to blocks mentioning the pump.fun program. 95 | 96 | Args: 97 | websocket: Active WebSocket connection 98 | """ 99 | subscription_message = json.dumps( 100 | { 101 | "jsonrpc": "2.0", 102 | "id": 1, 103 | "method": "blockSubscribe", 104 | "params": [ 105 | {"mentionsAccountOrProgram": str(self.pump_program)}, 106 | { 107 | "commitment": "confirmed", 108 | "encoding": "base64", # base64 is faster than other encoding options 109 | "showRewards": False, 110 | "transactionDetails": "full", 111 | "maxSupportedTransactionVersion": 0, 112 | }, 113 | ], 114 | } 115 | ) 116 | 117 | await websocket.send(subscription_message) 118 | logger.info(f"Subscribed to blocks mentioning program: {self.pump_program}") 119 | 120 | async def _ping_loop(self, websocket) -> None: 121 | """Keep connection alive with pings. 122 | 123 | Args: 124 | websocket: Active WebSocket connection 125 | """ 126 | try: 127 | while True: 128 | await asyncio.sleep(self.ping_interval) 129 | try: 130 | pong_waiter = await websocket.ping() 131 | await asyncio.wait_for(pong_waiter, timeout=10) 132 | except TimeoutError: 133 | logger.warning("Ping timeout - server not responding") 134 | # Force reconnection 135 | await websocket.close() 136 | return 137 | except asyncio.CancelledError: 138 | pass 139 | except Exception as e: 140 | logger.error(f"Ping error: {e!s}") 141 | 142 | async def _wait_for_token_creation(self, websocket) -> TokenInfo | None: 143 | """Wait for token creation event. 
144 | 145 | Args: 146 | websocket: Active WebSocket connection 147 | 148 | Returns: 149 | TokenInfo if a token creation is found, None otherwise 150 | """ 151 | try: 152 | response = await asyncio.wait_for(websocket.recv(), timeout=30) 153 | data = json.loads(response) 154 | 155 | if "method" not in data or data["method"] != "blockNotification": 156 | return None 157 | 158 | if "params" not in data or "result" not in data["params"]: 159 | return None 160 | 161 | block_data = data["params"]["result"] 162 | if "value" not in block_data or "block" not in block_data["value"]: 163 | return None 164 | 165 | block = block_data["value"]["block"] 166 | if "transactions" not in block: 167 | return None 168 | 169 | for tx in block["transactions"]: 170 | if not isinstance(tx, dict) or "transaction" not in tx: 171 | continue 172 | 173 | token_info = self.event_processor.process_transaction( 174 | tx["transaction"][0] 175 | ) 176 | if token_info: 177 | return token_info 178 | 179 | except TimeoutError: 180 | logger.debug("No data received for 30 seconds") 181 | except websockets.exceptions.ConnectionClosed: 182 | logger.warning("WebSocket connection closed") 183 | raise 184 | except Exception as e: 185 | logger.error(f"Error processing WebSocket message: {e!s}") 186 | 187 | return None 188 | -------------------------------------------------------------------------------- /learning-examples/listen_new_direct_full_details.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import base64 3 | import json 4 | import os 5 | import struct 6 | import sys 7 | 8 | import base58 9 | import websockets 10 | from solders.pubkey import Pubkey 11 | 12 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) 13 | from core.pubkeys import PumpAddresses, SystemAddresses 14 | 15 | WSS_ENDPOINT = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 16 | 17 | 18 | def find_associated_bonding_curve(mint: Pubkey, bonding_curve: Pubkey) -> Pubkey: 19 | """ 20 | Find the associated bonding curve for a given mint and bonding curve. 21 | This uses the standard ATA derivation. 22 | """ 23 | derived_address, _ = Pubkey.find_program_address( 24 | [ 25 | bytes(bonding_curve), 26 | bytes(SystemAddresses.TOKEN_PROGRAM), 27 | bytes(mint), 28 | ], 29 | SystemAddresses.ASSOCIATED_TOKEN_PROGRAM, 30 | ) 31 | return derived_address 32 | 33 | 34 | # Load the IDL JSON file 35 | with open("idl/pump_fun_idl.json") as f: 36 | idl = json.load(f) 37 | 38 | # Extract the "create" instruction definition 39 | create_instruction = next( 40 | instr for instr in idl["instructions"] if instr["name"] == "create" 41 | ) 42 | 43 | 44 | def parse_create_instruction(data): 45 | if len(data) < 8: 46 | return None 47 | offset = 8 48 | parsed_data = {} 49 | 50 | # Parse fields based on CreateEvent structure 51 | fields = [ 52 | ("name", "string"), 53 | ("symbol", "string"), 54 | ("uri", "string"), 55 | ("mint", "publicKey"), 56 | ("bondingCurve", "publicKey"), 57 | ("user", "publicKey"), 58 | ] 59 | 60 | try: 61 | for field_name, field_type in fields: 62 | if field_type == "string": 63 | length = struct.unpack(" TradeResult: 59 | """Execute sell operation. 
60 | 61 | Args: 62 | token_info: Token information 63 | 64 | Returns: 65 | TradeResult with sell outcome 66 | """ 67 | try: 68 | # Get associated token account 69 | associated_token_account = self.wallet.get_associated_token_address( 70 | token_info.mint 71 | ) 72 | 73 | # Get token balance 74 | token_balance = await self.client.get_token_account_balance( 75 | associated_token_account 76 | ) 77 | token_balance_decimal = token_balance / 10**TOKEN_DECIMALS 78 | 79 | logger.info(f"Token balance: {token_balance_decimal}") 80 | 81 | if token_balance == 0: 82 | logger.info("No tokens to sell.") 83 | return TradeResult(success=False, error_message="No tokens to sell") 84 | 85 | # Fetch token price 86 | curve_state = await self.curve_manager.get_curve_state( 87 | token_info.bonding_curve 88 | ) 89 | token_price_sol = curve_state.calculate_price() 90 | 91 | logger.info(f"Price per Token: {token_price_sol:.8f} SOL") 92 | 93 | # Calculate minimum SOL output with slippage 94 | amount = token_balance 95 | expected_sol_output = float(token_balance_decimal) * float(token_price_sol) 96 | slippage_factor = 1 - self.slippage 97 | min_sol_output = int( 98 | (expected_sol_output * slippage_factor) * LAMPORTS_PER_SOL 99 | ) 100 | 101 | logger.info(f"Selling {token_balance_decimal} tokens") 102 | logger.info(f"Expected SOL output: {expected_sol_output:.8f} SOL") 103 | logger.info( 104 | f"Minimum SOL output (with {self.slippage * 100}% slippage): {min_sol_output / LAMPORTS_PER_SOL:.8f} SOL" 105 | ) 106 | 107 | tx_signature = await self._send_sell_transaction( 108 | token_info, 109 | associated_token_account, 110 | amount, 111 | min_sol_output, 112 | ) 113 | 114 | success = await self.client.confirm_transaction(tx_signature) 115 | 116 | if success: 117 | logger.info(f"Sell transaction confirmed: {tx_signature}") 118 | return TradeResult( 119 | success=True, 120 | tx_signature=tx_signature, 121 | amount=token_balance_decimal, 122 | price=token_price_sol, 123 | ) 124 | else: 125 | return TradeResult( 126 | success=False, 127 | error_message=f"Transaction failed to confirm: {tx_signature}", 128 | ) 129 | 130 | except Exception as e: 131 | logger.error(f"Sell operation failed: {str(e)}") 132 | return TradeResult(success=False, error_message=str(e)) 133 | 134 | async def _send_sell_transaction( 135 | self, 136 | token_info: TokenInfo, 137 | associated_token_account: Pubkey, 138 | token_amount: int, 139 | min_sol_output: int, 140 | ) -> str: 141 | """Send sell transaction. 
142 | 143 | Args: 144 | mint: Token information 145 | associated_token_account: User's token account 146 | token_amount: Amount of tokens to sell in raw units 147 | min_sol_output: Minimum SOL to receive in lamports 148 | 149 | Returns: 150 | Transaction signature 151 | 152 | Raises: 153 | Exception: If transaction fails after all retries 154 | """ 155 | # Prepare sell instruction accounts 156 | accounts = [ 157 | AccountMeta( 158 | pubkey=PumpAddresses.GLOBAL, is_signer=False, is_writable=False 159 | ), 160 | AccountMeta(pubkey=PumpAddresses.FEE, is_signer=False, is_writable=True), 161 | AccountMeta(pubkey=token_info.mint, is_signer=False, is_writable=False), 162 | AccountMeta( 163 | pubkey=token_info.bonding_curve, is_signer=False, is_writable=True 164 | ), 165 | AccountMeta( 166 | pubkey=token_info.associated_bonding_curve, 167 | is_signer=False, 168 | is_writable=True, 169 | ), 170 | AccountMeta( 171 | pubkey=associated_token_account, is_signer=False, is_writable=True 172 | ), 173 | AccountMeta(pubkey=self.wallet.pubkey, is_signer=True, is_writable=True), 174 | AccountMeta( 175 | pubkey=SystemAddresses.PROGRAM, is_signer=False, is_writable=False 176 | ), 177 | AccountMeta( 178 | pubkey=SystemAddresses.ASSOCIATED_TOKEN_PROGRAM, 179 | is_signer=False, 180 | is_writable=False, 181 | ), 182 | AccountMeta( 183 | pubkey=SystemAddresses.TOKEN_PROGRAM, is_signer=False, is_writable=False 184 | ), 185 | AccountMeta( 186 | pubkey=PumpAddresses.EVENT_AUTHORITY, is_signer=False, is_writable=False 187 | ), 188 | AccountMeta( 189 | pubkey=PumpAddresses.PROGRAM, is_signer=False, is_writable=False 190 | ), 191 | ] 192 | 193 | # Prepare sell instruction data 194 | data = ( 195 | EXPECTED_DISCRIMINATOR 196 | + struct.pack(" TradeResult: 69 | """Execute buy operation. 
70 | 71 | Args: 72 | token_info: Token information 73 | 74 | Returns: 75 | TradeResult with buy outcome 76 | """ 77 | try: 78 | # Convert amount to lamports 79 | amount_lamports = int(self.amount * LAMPORTS_PER_SOL) 80 | 81 | if self.extreme_fast_mode: 82 | # Skip the wait and directly calculate the amount 83 | token_amount = self.extreme_fast_token_amount 84 | token_price_sol = self.amount / token_amount 85 | logger.info(f"EXTREME FAST Mode: Buying {token_amount} tokens.") 86 | else: 87 | # Regular behavior with RPC call 88 | curve_state = await self.curve_manager.get_curve_state(token_info.bonding_curve) 89 | token_price_sol = curve_state.calculate_price() 90 | token_amount = self.amount / token_price_sol 91 | 92 | # Calculate maximum SOL to spend with slippage 93 | max_amount_lamports = int(amount_lamports * (1 + self.slippage)) 94 | 95 | logger.info( 96 | f"Buying {token_amount:.6f} tokens at {token_price_sol:.8f} SOL per token" 97 | ) 98 | logger.info( 99 | f"Total cost: {self.amount:.6f} SOL (max: {max_amount_lamports / LAMPORTS_PER_SOL:.6f} SOL)" 100 | ) 101 | 102 | associated_token_account = self.wallet.get_associated_token_address( 103 | token_info.mint 104 | ) 105 | 106 | tx_signature = await self._send_buy_transaction( 107 | token_info, 108 | associated_token_account, 109 | token_amount, 110 | max_amount_lamports, 111 | ) 112 | 113 | success = await self.client.confirm_transaction(tx_signature) 114 | 115 | if success: 116 | logger.info(f"Buy transaction confirmed: {tx_signature}") 117 | return TradeResult( 118 | success=True, 119 | tx_signature=tx_signature, 120 | amount=token_amount, 121 | price=token_price_sol, 122 | ) 123 | else: 124 | return TradeResult( 125 | success=False, 126 | error_message=f"Transaction failed to confirm: {tx_signature}", 127 | ) 128 | 129 | except Exception as e: 130 | logger.error(f"Buy operation failed: {e!s}") 131 | return TradeResult(success=False, error_message=str(e)) 132 | 133 | async def _send_buy_transaction( 134 | self, 135 | token_info: TokenInfo, 136 | associated_token_account: Pubkey, 137 | token_amount: float, 138 | max_amount_lamports: int, 139 | ) -> str: 140 | """Send buy transaction. 
141 | 142 | Args: 143 | token_info: Token information 144 | associated_token_account: User's token account 145 | token_amount: Amount of tokens to buy 146 | max_amount_lamports: Maximum SOL to spend in lamports 147 | 148 | Returns: 149 | Transaction signature 150 | 151 | Raises: 152 | Exception: If transaction fails after all retries 153 | """ 154 | accounts = [ 155 | AccountMeta( 156 | pubkey=PumpAddresses.GLOBAL, is_signer=False, is_writable=False 157 | ), 158 | AccountMeta(pubkey=PumpAddresses.FEE, is_signer=False, is_writable=True), 159 | AccountMeta(pubkey=token_info.mint, is_signer=False, is_writable=False), 160 | AccountMeta( 161 | pubkey=token_info.bonding_curve, is_signer=False, is_writable=True 162 | ), 163 | AccountMeta( 164 | pubkey=token_info.associated_bonding_curve, 165 | is_signer=False, 166 | is_writable=True, 167 | ), 168 | AccountMeta( 169 | pubkey=associated_token_account, is_signer=False, is_writable=True 170 | ), 171 | AccountMeta(pubkey=self.wallet.pubkey, is_signer=True, is_writable=True), 172 | AccountMeta( 173 | pubkey=SystemAddresses.PROGRAM, is_signer=False, is_writable=False 174 | ), 175 | AccountMeta( 176 | pubkey=SystemAddresses.TOKEN_PROGRAM, is_signer=False, is_writable=False 177 | ), 178 | AccountMeta( 179 | pubkey=SystemAddresses.RENT, is_signer=False, is_writable=False 180 | ), 181 | AccountMeta( 182 | pubkey=PumpAddresses.EVENT_AUTHORITY, is_signer=False, is_writable=False 183 | ), 184 | AccountMeta( 185 | pubkey=PumpAddresses.PROGRAM, is_signer=False, is_writable=False 186 | ), 187 | ] 188 | 189 | # Prepare idempotent create ATA instruction: it will not fail if ATA already exists 190 | idempotent_ata_ix = create_idempotent_associated_token_account( 191 | self.wallet.pubkey, 192 | self.wallet.pubkey, 193 | token_info.mint, 194 | SystemAddresses.TOKEN_PROGRAM 195 | ) 196 | 197 | # Prepare buy instruction data 198 | token_amount_raw = int(token_amount * 10**TOKEN_DECIMALS) 199 | data = ( 200 | EXPECTED_DISCRIMINATOR 201 | + struct.pack(" Hash: 53 | """Return the most recently cached blockhash.""" 54 | async with self._blockhash_lock: 55 | if self._cached_blockhash is None: 56 | raise RuntimeError("No cached blockhash available yet") 57 | return self._cached_blockhash 58 | 59 | async def get_client(self) -> AsyncClient: 60 | """Get or create the AsyncClient instance. 61 | 62 | Returns: 63 | AsyncClient instance 64 | """ 65 | if self._client is None: 66 | self._client = AsyncClient(self.rpc_endpoint) 67 | return self._client 68 | 69 | async def close(self): 70 | """Close the client connection and stop the blockhash updater.""" 71 | if self._blockhash_updater_task: 72 | self._blockhash_updater_task.cancel() 73 | try: 74 | await self._blockhash_updater_task 75 | except asyncio.CancelledError: 76 | pass 77 | 78 | if self._client: 79 | await self._client.close() 80 | self._client = None 81 | 82 | async def get_account_info(self, pubkey: Pubkey) -> dict[str, Any]: 83 | """Get account info from the blockchain. 
84 | 85 | Args: 86 | pubkey: Public key of the account 87 | 88 | Returns: 89 | Account info response 90 | 91 | Raises: 92 | ValueError: If account doesn't exist or has no data 93 | """ 94 | client = await self.get_client() 95 | response = await client.get_account_info(pubkey, encoding="base64") # base64 encoding for account data by default 96 | if not response.value: 97 | raise ValueError(f"Account {pubkey} not found") 98 | return response.value 99 | 100 | async def get_token_account_balance(self, token_account: Pubkey) -> int: 101 | """Get token balance for an account. 102 | 103 | Args: 104 | token_account: Token account address 105 | 106 | Returns: 107 | Token balance as integer 108 | """ 109 | client = await self.get_client() 110 | response = await client.get_token_account_balance(token_account) 111 | if response.value: 112 | return int(response.value.amount) 113 | return 0 114 | 115 | async def get_latest_blockhash(self) -> Hash: 116 | """Get the latest blockhash. 117 | 118 | Returns: 119 | Recent blockhash as string 120 | """ 121 | client = await self.get_client() 122 | response = await client.get_latest_blockhash() 123 | return response.value.blockhash 124 | 125 | async def build_and_send_transaction( 126 | self, 127 | instructions: list[Instruction], 128 | signer_keypair: Keypair, 129 | skip_preflight: bool = True, 130 | max_retries: int = 3, 131 | priority_fee: int | None = None, 132 | ) -> str: 133 | """ 134 | Send a transaction with optional priority fee. 135 | 136 | Args: 137 | instructions: List of instructions to include in the transaction. 138 | skip_preflight: Whether to skip preflight checks. 139 | max_retries: Maximum number of retry attempts. 140 | priority_fee: Optional priority fee in microlamports. 141 | 142 | Returns: 143 | Transaction signature. 144 | """ 145 | client = await self.get_client() 146 | 147 | logger.info( 148 | f"Priority fee in microlamports: {priority_fee if priority_fee else 0}" 149 | ) 150 | 151 | # Add priority fee instructions if applicable 152 | if priority_fee is not None: 153 | fee_instructions = [ 154 | set_compute_unit_limit(100_000), # Default compute unit limit 155 | set_compute_unit_price(priority_fee), 156 | ] 157 | instructions = fee_instructions + instructions 158 | 159 | recent_blockhash = await self.get_cached_blockhash() 160 | message = Message(instructions, signer_keypair.pubkey()) 161 | transaction = Transaction([signer_keypair], message, recent_blockhash) 162 | 163 | for attempt in range(max_retries): 164 | try: 165 | tx_opts = TxOpts( 166 | skip_preflight=skip_preflight, preflight_commitment=Confirmed 167 | ) 168 | response = await client.send_transaction(transaction, tx_opts) 169 | return response.value 170 | 171 | except Exception as e: 172 | if attempt == max_retries - 1: 173 | logger.error( 174 | f"Failed to send transaction after {max_retries} attempts" 175 | ) 176 | raise 177 | 178 | wait_time = 2**attempt 179 | logger.warning( 180 | f"Transaction attempt {attempt + 1} failed: {e!s}, retrying in {wait_time}s" 181 | ) 182 | await asyncio.sleep(wait_time) 183 | 184 | async def confirm_transaction( 185 | self, signature: str, commitment: str = "confirmed" 186 | ) -> bool: 187 | """Wait for transaction confirmation. 
188 | 189 | Args: 190 | signature: Transaction signature 191 | commitment: Confirmation commitment level 192 | 193 | Returns: 194 | Whether transaction was confirmed 195 | """ 196 | client = await self.get_client() 197 | try: 198 | await client.confirm_transaction(signature, commitment=commitment) 199 | return True 200 | except Exception as e: 201 | logger.error(f"Failed to confirm transaction {signature}: {e!s}") 202 | return False 203 | 204 | async def post_rpc(self, body: dict[str, Any]) -> dict[str, Any] | None: 205 | """ 206 | Send a raw RPC request to the Solana node. 207 | 208 | Args: 209 | body: JSON-RPC request body. 210 | 211 | Returns: 212 | Optional[Dict[str, Any]]: Parsed JSON response, or None if the request fails. 213 | """ 214 | try: 215 | async with aiohttp.ClientSession() as session: 216 | async with session.post( 217 | self.rpc_endpoint, 218 | json=body, 219 | timeout=aiohttp.ClientTimeout(10), # 10-second timeout 220 | ) as response: 221 | response.raise_for_status() 222 | return await response.json() 223 | except aiohttp.ClientError as e: 224 | logger.error(f"RPC request failed: {e!s}", exc_info=True) 225 | return None 226 | except json.JSONDecodeError as e: 227 | logger.error(f"Failed to decode RPC response: {e!s}", exc_info=True) 228 | return None 229 | -------------------------------------------------------------------------------- /learning-examples/manual_sell.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import struct 4 | 5 | import base58 6 | from construct import Flag, Int64ul, Struct 7 | from solana.rpc.async_api import AsyncClient 8 | from solana.rpc.commitment import Confirmed 9 | from solana.rpc.types import TxOpts 10 | from solders.compute_budget import set_compute_unit_price 11 | from solders.instruction import AccountMeta, Instruction 12 | from solders.keypair import Keypair 13 | from solders.message import Message 14 | from solders.pubkey import Pubkey 15 | from solders.transaction import Transaction 16 | from spl.token.instructions import get_associated_token_address 17 | 18 | # Here and later all the discriminators are precalculated. 
See learning-examples/calculate_discriminator.py 19 | EXPECTED_DISCRIMINATOR = struct.pack(" None: 55 | parsed = self._STRUCT.parse(data[8:]) 56 | self.__dict__.update(parsed) 57 | 58 | 59 | async def get_pump_curve_state( 60 | conn: AsyncClient, curve_address: Pubkey 61 | ) -> BondingCurveState: 62 | response = await conn.get_account_info(curve_address, encoding="base64") 63 | if not response.value or not response.value.data: 64 | raise ValueError("Invalid curve state: No data") 65 | 66 | data = response.value.data 67 | if data[:8] != EXPECTED_DISCRIMINATOR: 68 | raise ValueError("Invalid curve state discriminator") 69 | 70 | return BondingCurveState(data) 71 | 72 | 73 | def calculate_pump_curve_price(curve_state: BondingCurveState) -> float: 74 | if curve_state.virtual_token_reserves <= 0 or curve_state.virtual_sol_reserves <= 0: 75 | raise ValueError("Invalid reserve state") 76 | 77 | return (curve_state.virtual_sol_reserves / LAMPORTS_PER_SOL) / ( 78 | curve_state.virtual_token_reserves / 10**TOKEN_DECIMALS 79 | ) 80 | 81 | 82 | async def get_token_balance(conn: AsyncClient, associated_token_account: Pubkey) -> int: 83 | response = await conn.get_token_account_balance(associated_token_account) 84 | if response.value: 85 | return int(response.value.amount) 86 | return 0 87 | 88 | 89 | async def sell_token( 90 | mint: Pubkey, 91 | bonding_curve: Pubkey, 92 | associated_bonding_curve: Pubkey, 93 | slippage: float = 0.25, 94 | max_retries: int = 5, 95 | ): 96 | private_key = base58.b58decode(os.environ.get("SOLANA_PRIVATE_KEY")) 97 | payer = Keypair.from_bytes(private_key) 98 | 99 | async with AsyncClient(RPC_ENDPOINT) as client: 100 | associated_token_account = get_associated_token_address(payer.pubkey(), mint) 101 | 102 | # Get token balance 103 | token_balance = await get_token_balance(client, associated_token_account) 104 | token_balance_decimal = token_balance / 10**TOKEN_DECIMALS 105 | print(f"Token balance: {token_balance_decimal}") 106 | if token_balance == 0: 107 | print("No tokens to sell.") 108 | return 109 | 110 | # Fetch the token price 111 | curve_state = await get_pump_curve_state(client, bonding_curve) 112 | token_price_sol = calculate_pump_curve_price(curve_state) 113 | print(f"Price per Token: {token_price_sol:.20f} SOL") 114 | 115 | # Calculate minimum SOL output 116 | amount = token_balance 117 | min_sol_output = float(token_balance_decimal) * float(token_price_sol) 118 | slippage_factor = 1 - slippage 119 | min_sol_output = int((min_sol_output * slippage_factor) * LAMPORTS_PER_SOL) 120 | 121 | print(f"Selling {token_balance_decimal} tokens") 122 | print(f"Minimum SOL output: {min_sol_output / LAMPORTS_PER_SOL:.10f} SOL") 123 | 124 | # Continue with the sell transaction 125 | for attempt in range(max_retries): 126 | try: 127 | accounts = [ 128 | AccountMeta(pubkey=PUMP_GLOBAL, is_signer=False, is_writable=False), 129 | AccountMeta(pubkey=PUMP_FEE, is_signer=False, is_writable=True), 130 | AccountMeta(pubkey=mint, is_signer=False, is_writable=False), 131 | AccountMeta( 132 | pubkey=bonding_curve, is_signer=False, is_writable=True 133 | ), 134 | AccountMeta( 135 | pubkey=associated_bonding_curve, 136 | is_signer=False, 137 | is_writable=True, 138 | ), 139 | AccountMeta( 140 | pubkey=associated_token_account, 141 | is_signer=False, 142 | is_writable=True, 143 | ), 144 | AccountMeta( 145 | pubkey=payer.pubkey(), is_signer=True, is_writable=True 146 | ), 147 | AccountMeta( 148 | pubkey=SYSTEM_PROGRAM, is_signer=False, is_writable=False 149 | ), 150 | AccountMeta( 151 |
pubkey=SYSTEM_ASSOCIATED_TOKEN_ACCOUNT_PROGRAM, 152 | is_signer=False, 153 | is_writable=False, 154 | ), 155 | AccountMeta( 156 | pubkey=SYSTEM_TOKEN_PROGRAM, is_signer=False, is_writable=False 157 | ), 158 | AccountMeta( 159 | pubkey=PUMP_EVENT_AUTHORITY, is_signer=False, is_writable=False 160 | ), 161 | AccountMeta( 162 | pubkey=PUMP_PROGRAM, is_signer=False, is_writable=False 163 | ), 164 | ] 165 | 166 | discriminator = struct.pack(" None: 123 | """Start the trading bot. 124 | 125 | Args: 126 | match_string: Optional string to match in token name/symbol 127 | bro_address: Optional creator address to filter by 128 | marry_mode: If True, only buy tokens and skip selling 129 | yolo_mode: If True, trade continuously 130 | """ 131 | logger.info("Starting pump.fun trader") 132 | logger.info(f"Match filter: {match_string if match_string else 'None'}") 133 | logger.info(f"Creator filter: {bro_address if bro_address else 'None'}") 134 | logger.info(f"Marry mode: {marry_mode}") 135 | logger.info(f"YOLO mode: {yolo_mode}") 136 | logger.info(f"Max token age: {self.max_token_age} seconds") 137 | 138 | # Start processor task 139 | processor_task = asyncio.create_task( 140 | self._process_token_queue(marry_mode, yolo_mode) 141 | ) 142 | 143 | try: 144 | await self.token_listener.listen_for_tokens( 145 | lambda token: self._queue_token(token), 146 | match_string, 147 | bro_address, 148 | ) 149 | 150 | except Exception as e: 151 | logger.error(f"Trading stopped due to error: {e!s}") 152 | processor_task.cancel() 153 | # Post-session cleanup and client shutdown are handled in the finally block 154 | 155 | finally: 156 | processor_task.cancel() 157 | if self.traded_mints: 158 | # Close ATA if enabled 159 | await handle_cleanup_post_session(self.solana_client, self.wallet, list(self.traded_mints), self.priority_fee_manager) 160 | await self.solana_client.close() 161 | 162 | async def _queue_token(self, token_info: TokenInfo) -> None: 163 | """Queue a token for processing if not already processed.""" 164 | token_key = str(token_info.mint) 165 | 166 | if token_key in self.processed_tokens: 167 | logger.debug(f"Token {token_info.symbol} already processed. Skipping...") 168 | return 169 | 170 | # Record timestamp when token was discovered 171 | self.token_timestamps[token_key] = asyncio.get_event_loop().time() 172 | 173 | await self.token_queue.put(token_info) 174 | logger.info(f"Queued new token: {token_info.symbol} ({token_info.mint})") 175 | 176 | async def _process_token_queue(self, marry_mode: bool, yolo_mode: bool) -> None: 177 | """Continuously process tokens from the queue, only if they're fresh.""" 178 | while True: 179 | token_info = await self.token_queue.get() 180 | token_key = str(token_info.mint) 181 | 182 | # Check if token is still "fresh" 183 | current_time = asyncio.get_event_loop().time() 184 | token_age = current_time - self.token_timestamps.get( 185 | token_key, current_time 186 | ) 187 | 188 | if token_age > self.max_token_age: 189 | logger.info( 190 | f"Skipping token {token_info.symbol} - too old ({token_age:.1f}s > {self.max_token_age}s)" 191 | ) 192 | self.token_queue.task_done() 193 | continue 194 | 195 | self.processed_tokens.add(token_key) 196 | 197 | logger.info( 198 | f"Processing fresh token: {token_info.symbol} (age: {token_age:.1f}s)" 199 | ) 200 | await self._handle_token(token_info, marry_mode, yolo_mode) 201 | 202 | self.token_queue.task_done() 203 | 204 | async def _handle_token( 205 | self, token_info: TokenInfo, marry_mode: bool, yolo_mode: bool 206 | ) -> None: 207 | """Handle a new token creation event.
208 | 209 | Args: 210 | token_info: Token information 211 | marry_mode: If True, only buy tokens and skip selling 212 | yolo_mode: If True, continue trading after this token 213 | """ 214 | try: 215 | await self._save_token_info(token_info) 216 | 217 | if not config.EXTREME_FAST_MODE: 218 | logger.info( 219 | f"Waiting for {config.WAIT_TIME_AFTER_CREATION} seconds for the bonding curve to stabilize..." 220 | ) 221 | await asyncio.sleep(config.WAIT_TIME_AFTER_CREATION) 222 | 223 | logger.info( 224 | f"Buying {self.buy_amount:.6f} SOL worth of {token_info.symbol}..." 225 | ) 226 | buy_result: TradeResult = await self.buyer.execute(token_info) 227 | 228 | if buy_result.success: 229 | logger.info(f"Successfully bought {token_info.symbol}") 230 | self._log_trade( 231 | "buy", 232 | token_info, 233 | buy_result.price, # type: ignore 234 | buy_result.amount, # type: ignore 235 | buy_result.tx_signature, 236 | ) 237 | self.traded_mints.add(token_info.mint) 238 | else: 239 | logger.error( 240 | f"Failed to buy {token_info.symbol}: {buy_result.error_message}" 241 | ) 242 | # Close ATA if enabled 243 | await handle_cleanup_after_failure(self.solana_client, self.wallet, token_info.mint, self.priority_fee_manager) 244 | 245 | # Sell token if not in marry mode 246 | if not marry_mode and buy_result.success: 247 | logger.info( 248 | f"Waiting for {config.WAIT_TIME_AFTER_BUY} seconds before selling..." 249 | ) 250 | await asyncio.sleep(config.WAIT_TIME_AFTER_BUY) 251 | 252 | logger.info(f"Selling {token_info.symbol}...") 253 | sell_result: TradeResult = await self.seller.execute(token_info) 254 | 255 | if sell_result.success: 256 | logger.info(f"Successfully sold {token_info.symbol}") 257 | self._log_trade( 258 | "sell", 259 | token_info, 260 | sell_result.price, # type: ignore 261 | sell_result.amount, # type: ignore 262 | sell_result.tx_signature, 263 | ) 264 | # Close ATA if enabled 265 | await handle_cleanup_after_sell(self.solana_client, self.wallet, token_info.mint, self.priority_fee_manager) 266 | else: 267 | logger.error( 268 | f"Failed to sell {token_info.symbol}: {sell_result.error_message}" 269 | ) 270 | elif marry_mode: 271 | logger.info("Marry mode enabled. Skipping sell operation.") 272 | 273 | # Wait before looking for the next token 274 | if yolo_mode: 275 | logger.info( 276 | f"YOLO mode enabled. Waiting {config.WAIT_TIME_BEFORE_NEW_TOKEN} seconds before looking for next token..." 277 | ) 278 | await asyncio.sleep(config.WAIT_TIME_BEFORE_NEW_TOKEN) 279 | 280 | except Exception as e: 281 | logger.error(f"Error handling token {token_info.symbol}: {e!s}") 282 | 283 | async def _save_token_info(self, token_info: TokenInfo) -> None: 284 | """Save token information to a file. 285 | 286 | Args: 287 | token_info: Token information 288 | """ 289 | os.makedirs("trades", exist_ok=True) 290 | file_name = os.path.join("trades", f"{token_info.mint}.txt") 291 | 292 | with open(file_name, "w") as file: 293 | file.write(json.dumps(token_info.to_dict(), indent=2)) 294 | 295 | logger.info(f"Token information saved to {file_name}") 296 | 297 | def _log_trade( 298 | self, 299 | action: str, 300 | token_info: TokenInfo, 301 | price: float, 302 | amount: float, 303 | tx_hash: str | None, 304 | ) -> None: 305 | """Log trade information. 
306 | 307 | Args: 308 | action: Trade action (buy/sell) 309 | token_info: Token information 310 | price: Token price in SOL 311 | amount: Trade amount in SOL 312 | tx_hash: Transaction signature, or None if unavailable 313 | """ 314 | os.makedirs("trades", exist_ok=True) 315 | 316 | log_entry = { 317 | "timestamp": datetime.utcnow().isoformat(), 318 | "action": action, 319 | "token_address": str(token_info.mint), 320 | "symbol": token_info.symbol, 321 | "price": price, 322 | "amount": amount, 323 | "tx_hash": str(tx_hash) if tx_hash else None, 324 | } 325 | 326 | with open("trades/trades.log", "a") as log_file: 327 | log_file.write(json.dumps(log_entry) + "\n") 328 | --------------------------------------------------------------------------------
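A note on pricing: calculate_pump_curve_price in learning-examples/manual_sell.py derives the token price in SOL as the ratio of the two virtual reserves after normalizing each to whole units. The quick check below is illustrative only; the reserve values are hypothetical round numbers, and LAMPORTS_PER_SOL and TOKEN_DECIMALS are assumed here (1 SOL is defined as 10**9 lamports; 6 decimals is a common choice for these tokens), not values copied from the dump above.

LAMPORTS_PER_SOL = 1_000_000_000  # assumed: 1 SOL = 10**9 lamports
TOKEN_DECIMALS = 6                # assumed token decimals for this illustration

virtual_sol_reserves = 30 * LAMPORTS_PER_SOL                  # hypothetical: 30 SOL
virtual_token_reserves = 1_000_000_000 * 10**TOKEN_DECIMALS   # hypothetical: 1B tokens

# Same shape as the formula in calculate_pump_curve_price
price_sol = (virtual_sol_reserves / LAMPORTS_PER_SOL) / (
    virtual_token_reserves / 10**TOKEN_DECIMALS
)
print(price_sol)  # 3e-08 SOL per token

With these numbers the price works out to 30 / 1,000,000,000 = 3e-08 SOL per token, which is why the script prints the price with 20 decimal places.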
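A note on the trade log: _log_trade appends one JSON object per line to trades/trades.log, so a session can be summarized by reading the file back line by line. The sketch below is an illustrative reader and is not part of the bot; the path and field names (timestamp, action, token_address, symbol, price, amount, tx_hash) match the log entries written above, while the function name and summary logic are assumptions.

import json
from collections import Counter
from pathlib import Path

LOG_PATH = Path("trades/trades.log")  # same path _log_trade() writes to


def summarize_trades(log_path: Path = LOG_PATH) -> None:
    """Print per-action counts and the number of distinct tokens in the JSON-lines log."""
    actions: Counter[str] = Counter()
    tokens: set[str] = set()
    for line in log_path.read_text().splitlines():
        if not line.strip():
            continue
        entry = json.loads(line)
        actions[entry["action"]] += 1
        tokens.add(entry["token_address"])
    print(f"Tokens traded: {len(tokens)}")
    for action, count in sorted(actions.items()):
        print(f"  {action}: {count}")


if __name__ == "__main__":
    summarize_trades()

Because each entry is a single JSON object per line, the log stays easy to tail during a session and to post-process afterwards without a database.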