├── src ├── __init__.py ├── cleanup │ ├── __init__.py │ ├── modes.py │ └── manager.py ├── core │ ├── __init__.py │ ├── priority_fee │ │ ├── __init__.py │ │ ├── fixed_fee.py │ │ ├── dynamic_fee.py │ │ └── manager.py │ ├── wallet.py │ └── pubkeys.py ├── trading │ ├── __init__.py │ └── position.py ├── utils │ ├── __init__.py │ └── logger.py ├── monitoring │ ├── __init__.py │ ├── base_listener.py │ └── listener_factory.py ├── platforms │ ├── pumpfun │ │ ├── __init__.py │ │ └── pumpportal_processor.py │ └── letsbonk │ │ ├── __init__.py │ │ └── pumpportal_processor.py └── geyser │ ├── generated │ └── solana_storage_pb2_grpc.py │ └── proto │ ├── solana-storage.proto │ └── geyser.proto ├── learning-examples ├── listen-new-tokens │ ├── generated │ │ ├── __init__.py │ │ └── solana_storage_pb2_grpc.py │ ├── proto │ │ ├── solana-storage.proto │ │ └── geyser.proto │ └── listen_pumpportal.py ├── raw_bondingCurve_from_getAccountInfo.json ├── calculate_discriminator.py ├── compute_associated_bonding_curve.py ├── cleanup_accounts.py ├── blockSubscribe_extract_transactions.py ├── pumpswap │ └── get_pumpswap_pools.py ├── decode_from_getAccountInfo.py ├── fetch_price.py ├── listen-migrations │ └── listen_blocksubscribe_old_raydium.py ├── decode_from_getTransaction.py ├── decode_from_blockSubscribe.py └── bonding-curve-progress │ ├── poll_bonding_curve_progress.py │ ├── get_graduating_tokens.py │ └── get_bonding_curve_status.py ├── MAINTAINERS.md ├── .env.example ├── .github └── workflows │ ├── auto-assign.yml │ └── spam-detection.yml ├── trades └── trades.log ├── pyproject.toml ├── .gitignore ├── .cursor └── rules │ ├── python-style.mdc │ ├── architecture.mdc │ └── trading-bot.mdc ├── .kiro └── steering │ ├── python-style.mdc │ ├── architecture.mdc │ └── trading-bot.mdc ├── .windsurf └── rules │ ├── python-style.mdc │ └── architecture.mdc ├── bots ├── bot-sniper-2-logs.yaml ├── bot-sniper-1-geyser.yaml ├── bot-sniper-3-blocks.yaml └── bot-sniper-4-pp.yaml ├── AGENTS.md └── CLAUDE.md 
/src/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/cleanup/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/core/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/trading/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/monitoring/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /learning-examples/listen-new-tokens/generated/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /MAINTAINERS.md: -------------------------------------------------------------------------------- 1 | The maintainers are: 2 | * [@akegaviar](https://github.com/akegaviar) (primary contact, issue manager) 3 | * [@smypmsa](https://github.com/smypmsa) 4 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | SOLANA_NODE_RPC_ENDPOINT=... 2 | SOLANA_NODE_WSS_ENDPOINT=... 
3 | 4 | GEYSER_ENDPOINT= 5 | GEYSER_API_TOKEN= 6 | GEYSER_AUTH_TYPE=x-token or basic 7 | 8 | SOLANA_PRIVATE_KEY= -------------------------------------------------------------------------------- /learning-examples/raw_bondingCurve_from_getAccountInfo.json: -------------------------------------------------------------------------------- 1 | {"jsonrpc":"2.0","result":{"context":{"apiVersion":"1.18.22","slot":285247740},"value":{"data":["F7f4N2DYrGD99rN6is0DALwvcwAHAAAA/V6hLvnOAgC8g08EAAAAAACAxqR+jQMAAA==","base64"],"executable":false,"lamports":73551852,"owner":"6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P","rentEpoch":18446744073709551615,"space":49}},"id":1} 2 | -------------------------------------------------------------------------------- /src/core/priority_fee/__init__.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class PriorityFeePlugin(ABC): 5 | """Base class for priority fee calculation plugins.""" 6 | 7 | @abstractmethod 8 | async def get_priority_fee(self) -> int | None: 9 | """ 10 | Calculate the priority fee. 11 | 12 | Returns: 13 | Optional[int]: Priority fee in lamports, or None if no fee should be applied. 
14 | """ 15 | pass 16 | -------------------------------------------------------------------------------- /.github/workflows/auto-assign.yml: -------------------------------------------------------------------------------- 1 | name: Auto Assign Issues 2 | 3 | permissions: 4 | issues: write 5 | 6 | on: 7 | issues: 8 | types: [opened] 9 | 10 | jobs: 11 | assign_issue: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Assign issue to maintainer 15 | uses: actions/github-script@v6 16 | with: 17 | github-token: ${{ secrets.GITHUB_TOKEN }} 18 | script: | 19 | await github.rest.issues.addAssignees({ 20 | owner: context.repo.owner, 21 | repo: context.repo.repo, 22 | issue_number: context.issue.number, 23 | assignees: ['akegaviar'] 24 | }); -------------------------------------------------------------------------------- /src/core/priority_fee/fixed_fee.py: -------------------------------------------------------------------------------- 1 | from . import PriorityFeePlugin 2 | 3 | 4 | class FixedPriorityFee(PriorityFeePlugin): 5 | """Fixed priority fee plugin.""" 6 | 7 | def __init__(self, fixed_fee: int): 8 | """ 9 | Initialize the fixed fee plugin. 10 | 11 | Args: 12 | fixed_fee: Fixed priority fee in microlamports. 13 | """ 14 | self.fixed_fee = fixed_fee 15 | 16 | async def get_priority_fee(self) -> int | None: 17 | """ 18 | Return the fixed priority fee. 19 | 20 | Returns: 21 | Optional[int]: Fixed priority fee in microlamports, or None if fixed_fee is 0. 22 | """ 23 | if self.fixed_fee == 0: 24 | return None 25 | return self.fixed_fee 26 | -------------------------------------------------------------------------------- /src/platforms/pumpfun/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pump.Fun platform exports. 3 | 4 | This module provides convenient imports for the pump.fun platform implementations. 5 | Platform registration is now handled by the main platform factory. 
6 | """ 7 | 8 | from .address_provider import PumpFunAddressProvider 9 | from .curve_manager import PumpFunCurveManager 10 | from .event_parser import PumpFunEventParser 11 | from .instruction_builder import PumpFunInstructionBuilder 12 | from .pumpportal_processor import PumpFunPumpPortalProcessor 13 | 14 | # Export implementations for direct use if needed 15 | __all__ = [ 16 | "PumpFunAddressProvider", 17 | "PumpFunCurveManager", 18 | "PumpFunEventParser", 19 | "PumpFunInstructionBuilder", 20 | "PumpFunPumpPortalProcessor", 21 | ] 22 | -------------------------------------------------------------------------------- /src/platforms/letsbonk/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | LetsBonk platform exports. 3 | 4 | This module provides convenient imports for the LetsBonk platform implementations. 5 | Platform registration is now handled by the main platform factory. 6 | """ 7 | 8 | from .address_provider import LetsBonkAddressProvider 9 | from .curve_manager import LetsBonkCurveManager 10 | from .event_parser import LetsBonkEventParser 11 | from .instruction_builder import LetsBonkInstructionBuilder 12 | from .pumpportal_processor import LetsBonkPumpPortalProcessor 13 | 14 | # Export implementations for direct use if needed 15 | __all__ = [ 16 | "LetsBonkAddressProvider", 17 | "LetsBonkCurveManager", 18 | "LetsBonkEventParser", 19 | "LetsBonkInstructionBuilder", 20 | "LetsBonkPumpPortalProcessor", 21 | ] 22 | -------------------------------------------------------------------------------- /src/geyser/generated/solana_storage_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | 4 | import grpc 5 | import warnings 6 | 7 | 8 | GRPC_GENERATED_VERSION = "1.71.0" 9 | GRPC_VERSION = grpc.__version__ 10 | _version_not_supported = False 11 | 12 | try: 13 | from grpc._utilities import first_version_is_lower 14 | 15 | _version_not_supported = first_version_is_lower( 16 | GRPC_VERSION, GRPC_GENERATED_VERSION 17 | ) 18 | except ImportError: 19 | _version_not_supported = True 20 | 21 | if _version_not_supported: 22 | raise RuntimeError( 23 | f"The grpc package installed is at version {GRPC_VERSION}," 24 | + f" but the generated code in solana_storage_pb2_grpc.py depends on" 25 | + f" grpcio>={GRPC_GENERATED_VERSION}." 26 | + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" 27 | + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." 28 | ) 29 | -------------------------------------------------------------------------------- /learning-examples/listen-new-tokens/generated/solana_storage_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | 4 | import grpc 5 | import warnings 6 | 7 | 8 | GRPC_GENERATED_VERSION = "1.71.0" 9 | GRPC_VERSION = grpc.__version__ 10 | _version_not_supported = False 11 | 12 | try: 13 | from grpc._utilities import first_version_is_lower 14 | 15 | _version_not_supported = first_version_is_lower( 16 | GRPC_VERSION, GRPC_GENERATED_VERSION 17 | ) 18 | except ImportError: 19 | _version_not_supported = True 20 | 21 | if _version_not_supported: 22 | raise RuntimeError( 23 | f"The grpc package installed is at version {GRPC_VERSION}," 24 | + f" but the generated code in solana_storage_pb2_grpc.py depends on" 25 | + f" grpcio>={GRPC_GENERATED_VERSION}." 
26 | + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" 27 | + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." 28 | ) 29 | -------------------------------------------------------------------------------- /trades/trades.log: -------------------------------------------------------------------------------- 1 | {"timestamp": "2025-04-24T20:30:13.087092", "action": "buy", "token_address": "DWMUmRQUZPCBA1gwdDxTJuz6JHnQkREiWMyQpsKWGp9v", "symbol": "U8", "price": 5e-06, "amount": 20, "tx_hash": "3JvdfCep45PUB6rCcH4dB2NuwvFP8n67SCUxqJMt4MuN5ekHYc6J27aCUfwNUK3hh5rSyKNYAWXya5vQAT2qQivB"} 2 | {"timestamp": "2025-04-24T20:30:32.759177", "action": "sell", "token_address": "DWMUmRQUZPCBA1gwdDxTJuz6JHnQkREiWMyQpsKWGp9v", "symbol": "U8", "price": 3.805530050663904e-08, "amount": 20.0, "tx_hash": "5cveLfU7XhPNCPMCZfTXyugJpmAQNmi7zr81PSqs8DsP1T2swYFjJwaB5hNSf3kFPfRzgzd7QZBVaZLd5MqsJevB"} 3 | {"timestamp": "2025-08-02T15:37:04.403139", "action": "buy", "platform": "lets_bonk", "token_address": "7o5FtYXxpX6sqtcJJ3ES4DiWt4C9HnHzKuHrZPpjbonk", "symbol": "pants", "price": 5e-06, "amount": 20, "tx_hash": "26Uu4rZ1PcnzioHwWAh3Reca4MMNZjgiQbmihjisiurpv4xcHFtvSptZoV3zUkS5bdqZ4zpLeGb46J4bWyczu6FY"} 4 | {"timestamp": "2025-08-02T15:37:21.984944", "action": "sell", "platform": "lets_bonk", "token_address": "7o5FtYXxpX6sqtcJJ3ES4DiWt4C9HnHzKuHrZPpjbonk", "symbol": "pants", "price": 2.7959121193874663e-08, "amount": 3712.914779, "tx_hash": "5zXxjxHZuWXxGih1Aca6HoFkybiyHT4jPghhXCg3NbkyD3nuzzaJtmjg4mnoEQDeTn64b1cUZMawhmYbeq3cwjfj"} 5 | -------------------------------------------------------------------------------- /learning-examples/calculate_discriminator.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import struct 3 | 4 | # https://book.anchor-lang.com/anchor_bts/discriminator.html 5 | # Set the instruction name here 6 | instruction_name = "account:BondingCurve" 7 | 8 | 9 | def 
calculate_discriminator(instruction_name): 10 | # Create a SHA256 hash object 11 | sha = hashlib.sha256() 12 | 13 | # Update the hash with the instruction name 14 | sha.update(instruction_name.encode("utf-8")) 15 | 16 | # Get the first 8 bytes of the hash 17 | discriminator_bytes = sha.digest()[:8] 18 | 19 | # Convert the bytes to a 64-bit unsigned integer (little-endian) 20 | discriminator = struct.unpack(" logging.Logger: 12 | """Get or create a logger with the given name. 13 | 14 | Args: 15 | name: Logger name, typically __name__ 16 | level: Logging level 17 | 18 | Returns: 19 | Configured logger 20 | """ 21 | global _loggers 22 | 23 | if name in _loggers: 24 | return _loggers[name] 25 | 26 | logger = logging.getLogger(name) 27 | logger.setLevel(level) 28 | 29 | _loggers[name] = logger 30 | return logger 31 | 32 | 33 | def setup_file_logging( 34 | filename: str = "pump_trading.log", level: int = logging.INFO 35 | ) -> None: 36 | """Set up file logging for all loggers. 37 | 38 | Args: 39 | filename: Log file path 40 | level: Logging level for file handler 41 | """ 42 | root_logger = logging.getLogger() 43 | 44 | # Check if file handler with same filename already exists 45 | for handler in root_logger.handlers: 46 | if ( 47 | isinstance(handler, logging.FileHandler) 48 | and handler.baseFilename == filename 49 | ): 50 | return # File handler already added 51 | 52 | formatter = logging.Formatter( 53 | "%(asctime)s - %(name)s - %(levelname)s - %(message)s", 54 | datefmt="%Y-%m-%d %H:%M:%S", 55 | ) 56 | 57 | file_handler = logging.FileHandler(filename) 58 | file_handler.setLevel(level) 59 | file_handler.setFormatter(formatter) 60 | 61 | root_logger.addHandler(file_handler) 62 | -------------------------------------------------------------------------------- /src/monitoring/base_listener.py: -------------------------------------------------------------------------------- 1 | """ 2 | Base class for WebSocket token listeners - now platform-agnostic. 
3 | """ 4 | 5 | from abc import ABC, abstractmethod 6 | from collections.abc import Awaitable, Callable 7 | 8 | from interfaces.core import Platform, TokenInfo 9 | 10 | 11 | class BaseTokenListener(ABC): 12 | """Base abstract class for token listeners - now platform-agnostic.""" 13 | 14 | def __init__(self, platform: Platform | None = None): 15 | """Initialize the listener with optional platform specification. 16 | 17 | Args: 18 | platform: Platform to monitor (if None, monitor all platforms) 19 | """ 20 | self.platform = platform 21 | 22 | @abstractmethod 23 | async def listen_for_tokens( 24 | self, 25 | token_callback: Callable[[TokenInfo], Awaitable[None]], 26 | match_string: str | None = None, 27 | creator_address: str | None = None, 28 | ) -> None: 29 | """ 30 | Listen for new token creations. 31 | 32 | Args: 33 | token_callback: Callback function for new tokens 34 | match_string: Optional string to match in token name/symbol 35 | creator_address: Optional creator address to filter by 36 | """ 37 | pass 38 | 39 | def should_process_token(self, token_info: TokenInfo) -> bool: 40 | """Check if a token should be processed based on platform filter. 41 | 42 | Args: 43 | token_info: Token information 44 | 45 | Returns: 46 | True if token should be processed 47 | """ 48 | if self.platform is None: 49 | return True # Process all platforms 50 | return token_info.platform == self.platform 51 | -------------------------------------------------------------------------------- /src/core/wallet.py: -------------------------------------------------------------------------------- 1 | """ 2 | Wallet management for Solana transactions. 
3 | """ 4 | 5 | import base58 6 | from solders.keypair import Keypair 7 | from solders.pubkey import Pubkey 8 | from spl.token.instructions import get_associated_token_address 9 | 10 | from core.pubkeys import SystemAddresses 11 | 12 | 13 | class Wallet: 14 | """Manages a Solana wallet for trading operations.""" 15 | 16 | def __init__(self, private_key: str): 17 | """Initialize wallet from private key. 18 | 19 | Args: 20 | private_key: Base58 encoded private key 21 | """ 22 | self._private_key = private_key 23 | self._keypair = self._load_keypair(private_key) 24 | 25 | @property 26 | def pubkey(self) -> Pubkey: 27 | """Get the public key of the wallet.""" 28 | return self._keypair.pubkey() 29 | 30 | @property 31 | def keypair(self) -> Keypair: 32 | """Get the keypair for signing transactions.""" 33 | return self._keypair 34 | 35 | def get_associated_token_address( 36 | self, mint: Pubkey, token_program_id: Pubkey | None = None 37 | ) -> Pubkey: 38 | """Get the associated token account address for a mint. 39 | 40 | Args: 41 | mint: Token mint address 42 | token_program_id: Token program (TOKEN or TOKEN_2022). Defaults to TOKEN_2022_PROGRAM 43 | 44 | Returns: 45 | Associated token account address 46 | """ 47 | if token_program_id is None: 48 | token_program_id = SystemAddresses.TOKEN_2022_PROGRAM 49 | return get_associated_token_address(self.pubkey, mint, token_program_id) 50 | 51 | @staticmethod 52 | def _load_keypair(private_key: str) -> Keypair: 53 | """Load keypair from private key. 
54 | 55 | Args: 56 | private_key: Base58 encoded private key 57 | 58 | Returns: 59 | Solana keypair 60 | """ 61 | private_key_bytes = base58.b58decode(private_key) 62 | return Keypair.from_bytes(private_key_bytes) 63 | -------------------------------------------------------------------------------- /learning-examples/compute_associated_bonding_curve.py: -------------------------------------------------------------------------------- 1 | from solders.pubkey import Pubkey 2 | 3 | # Global constants 4 | PUMP_PROGRAM = Pubkey.from_string("6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P") 5 | SYSTEM_TOKEN_PROGRAM = Pubkey.from_string("TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA") 6 | SYSTEM_ASSOCIATED_TOKEN_ACCOUNT_PROGRAM = Pubkey.from_string( 7 | "ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL" 8 | ) 9 | 10 | 11 | def get_bonding_curve_address(mint: Pubkey, program_id: Pubkey) -> tuple[Pubkey, int]: 12 | """ 13 | Derives the bonding curve address for a given mint 14 | """ 15 | return Pubkey.find_program_address([b"bonding-curve", bytes(mint)], program_id) 16 | 17 | 18 | def find_associated_bonding_curve(mint: Pubkey, bonding_curve: Pubkey) -> Pubkey: 19 | """ 20 | Find the associated bonding curve for a given mint and bonding curve. 21 | This uses the standard ATA derivation. 
22 | """ 23 | 24 | derived_address, _ = Pubkey.find_program_address( 25 | [ 26 | bytes(bonding_curve), 27 | bytes(SYSTEM_TOKEN_PROGRAM), 28 | bytes(mint), 29 | ], 30 | SYSTEM_ASSOCIATED_TOKEN_ACCOUNT_PROGRAM, 31 | ) 32 | return derived_address 33 | 34 | 35 | def main(): 36 | mint_address = input("Enter the token mint address: ") 37 | 38 | try: 39 | mint = Pubkey.from_string(mint_address) 40 | 41 | bonding_curve_address, bump = get_bonding_curve_address(mint, PUMP_PROGRAM) 42 | 43 | # Calculate the associated bonding curve 44 | associated_bonding_curve = find_associated_bonding_curve( 45 | mint, bonding_curve_address 46 | ) 47 | 48 | print("\nResults:") 49 | print("-" * 50) 50 | print(f"Token Mint: {mint}") 51 | print(f"Bonding Curve: {bonding_curve_address}") 52 | print(f"Associated Bonding Curve: {associated_bonding_curve}") 53 | print(f"Bonding Curve Bump: {bump}") 54 | print("-" * 50) 55 | 56 | except ValueError as e: 57 | print(f"Error: Invalid address format - {e!s}") 58 | 59 | 60 | if __name__ == "__main__": 61 | main() 62 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "pump_bot" 3 | version = "2.0" 4 | description = "Trade tokens on pump.fun" 5 | readme = "README.md" 6 | requires-python = ">=3.9" 7 | 8 | dependencies = [ 9 | "base58>=2.1.1", 10 | "borsh-construct>=0.1.0", 11 | "construct>=2.10.67", 12 | "construct-typing>=0.5.2", 13 | "solana==0.36.6", 14 | "solders>=0.26.0", 15 | "websockets>=15.0", 16 | "python-dotenv>=1.0.1", 17 | "aiohttp>=3.11.13", 18 | "grpcio>=1.71.0", 19 | "grpcio-tools>=1.71.0", 20 | "protobuf>=5.29.4", 21 | "pyyaml>=6.0.2", 22 | "uvloop>=0.21.0", 23 | ] 24 | 25 | [project.optional-dependencies] 26 | dev = [ 27 | "ruff>=0.10.0" 28 | ] 29 | 30 | [project.scripts] 31 | pump_bot = "bot_runner:main" 32 | 33 | [build-system] 34 | requires = ["setuptools>=61.0"] 35 | build-backend = 
"setuptools.build_meta" 36 | 37 | [tool.ruff] 38 | exclude = [ 39 | ".bzr", 40 | ".direnv", 41 | ".eggs", 42 | ".git", 43 | ".git-rewrite", 44 | ".hg", 45 | ".ipynb_checkpoints", 46 | ".mypy_cache", 47 | ".nox", 48 | ".pants.d", 49 | ".pyenv", 50 | ".pytest_cache", 51 | ".pytype", 52 | ".ruff_cache", 53 | ".svn", 54 | ".tox", 55 | ".venv", 56 | ".vscode", 57 | "__pypackages__", 58 | "_build", 59 | "buck-out", 60 | "build", 61 | "dist", 62 | "node_modules", 63 | "site-packages", 64 | "venv", 65 | ] 66 | 67 | line-length = 88 68 | indent-width = 4 69 | target-version = "py311" 70 | 71 | [tool.ruff.lint] 72 | select = [ 73 | "E", "F", "I", "UP", "N", "B", "A", "C4", "T10", "ARG", "PTH", 74 | "ANN", # type annotations 75 | "S", # security best practices 76 | "BLE", # blind except statements 77 | "FBT", # boolean trap parameters 78 | "C90", # complexity metrics 79 | "TRY", # exception handling best practices 80 | "SLF", # private member access 81 | "TCH", # type checking issues 82 | "RUF", # Ruff-specific rules 83 | "ERA", # eradicate commented-out code 84 | "PL", # pylint conventions 85 | ] 86 | ignore = ["E501"] 87 | 88 | [tool.ruff.format] 89 | quote-style = "double" 90 | indent-style = "space" 91 | line-ending = "auto" 92 | -------------------------------------------------------------------------------- /src/cleanup/modes.py: -------------------------------------------------------------------------------- 1 | from cleanup.manager import AccountCleanupManager 2 | from utils.logger import get_logger 3 | 4 | logger = get_logger(__name__) 5 | 6 | 7 | def should_cleanup_after_failure(cleanup_mode) -> bool: 8 | return cleanup_mode == "on_fail" 9 | 10 | 11 | def should_cleanup_after_sell(cleanup_mode) -> bool: 12 | return cleanup_mode == "after_sell" 13 | 14 | 15 | def should_cleanup_post_session(cleanup_mode) -> bool: 16 | return cleanup_mode == "post_session" 17 | 18 | 19 | async def handle_cleanup_after_failure( 20 | client, 21 | wallet, 22 | mint, 23 | 
token_program_id, 24 | priority_fee_manager, 25 | cleanup_mode, 26 | cleanup_with_prior_fee, 27 | force_burn, 28 | ): 29 | if should_cleanup_after_failure(cleanup_mode): 30 | logger.info("[Cleanup] Triggered by failed buy transaction.") 31 | manager = AccountCleanupManager( 32 | client, wallet, priority_fee_manager, cleanup_with_prior_fee, force_burn 33 | ) 34 | await manager.cleanup_ata(mint, token_program_id) 35 | 36 | 37 | async def handle_cleanup_after_sell( 38 | client, 39 | wallet, 40 | mint, 41 | token_program_id, 42 | priority_fee_manager, 43 | cleanup_mode, 44 | cleanup_with_prior_fee, 45 | force_burn, 46 | ): 47 | if should_cleanup_after_sell(cleanup_mode): 48 | logger.info("[Cleanup] Triggered after token sell.") 49 | manager = AccountCleanupManager( 50 | client, wallet, priority_fee_manager, cleanup_with_prior_fee, force_burn 51 | ) 52 | await manager.cleanup_ata(mint, token_program_id) 53 | 54 | 55 | async def handle_cleanup_post_session( 56 | client, 57 | wallet, 58 | mints, 59 | token_program_ids, 60 | priority_fee_manager, 61 | cleanup_mode, 62 | cleanup_with_prior_fee, 63 | force_burn, 64 | ): 65 | if should_cleanup_post_session(cleanup_mode): 66 | logger.info("[Cleanup] Triggered post trading session.") 67 | manager = AccountCleanupManager( 68 | client, wallet, priority_fee_manager, cleanup_with_prior_fee, force_burn 69 | ) 70 | for mint, token_program_id in zip(mints, token_program_ids): 71 | await manager.cleanup_ata(mint, token_program_id) 72 | -------------------------------------------------------------------------------- /src/core/pubkeys.py: -------------------------------------------------------------------------------- 1 | """ 2 | System addresses and constants for Solana blockchain operations. 3 | This module contains only system-level addresses that are shared across all platforms. 4 | Platform-specific addresses are handled by their respective AddressProvider implementations. 
5 | """ 6 | 7 | from typing import Final 8 | 9 | from solders.pubkey import Pubkey 10 | 11 | # Constants 12 | LAMPORTS_PER_SOL: Final[int] = 1_000_000_000 13 | TOKEN_DECIMALS: Final[int] = 6 14 | 15 | # Token account constants 16 | TOKEN_ACCOUNT_SIZE: Final[int] = 165 # Size of a token account in bytes 17 | TOKEN_ACCOUNT_RENT_EXEMPT_RESERVE: Final[int] = ( 18 | 2_039_280 # Rent-exempt minimum for token accounts 19 | ) 20 | 21 | # Core system programs 22 | SYSTEM_PROGRAM: Final[Pubkey] = Pubkey.from_string("11111111111111111111111111111111") 23 | TOKEN_PROGRAM: Final[Pubkey] = Pubkey.from_string( 24 | "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" 25 | ) 26 | TOKEN_2022_PROGRAM: Final[Pubkey] = Pubkey.from_string( 27 | "TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb" 28 | ) 29 | ASSOCIATED_TOKEN_PROGRAM: Final[Pubkey] = Pubkey.from_string( 30 | "ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL" 31 | ) 32 | 33 | # System accounts 34 | RENT: Final[Pubkey] = Pubkey.from_string("SysvarRent111111111111111111111111111111111") 35 | 36 | # Native SOL token 37 | SOL_MINT: Final[Pubkey] = Pubkey.from_string( 38 | "So11111111111111111111111111111111111111112" 39 | ) 40 | 41 | 42 | class SystemAddresses: 43 | """System-level Solana addresses shared across all platforms.""" 44 | 45 | # Reference the module-level constants 46 | SYSTEM_PROGRAM = SYSTEM_PROGRAM 47 | TOKEN_PROGRAM = TOKEN_PROGRAM 48 | TOKEN_2022_PROGRAM = TOKEN_2022_PROGRAM 49 | ASSOCIATED_TOKEN_PROGRAM = ASSOCIATED_TOKEN_PROGRAM 50 | RENT = RENT 51 | SOL_MINT = SOL_MINT 52 | 53 | @classmethod 54 | def get_all_system_addresses(cls) -> dict[str, Pubkey]: 55 | """Get all system addresses as a dictionary. 
56 | 57 | Returns: 58 | Dictionary mapping address names to Pubkey objects 59 | """ 60 | return { 61 | "system_program": cls.SYSTEM_PROGRAM, 62 | "token_program": cls.TOKEN_PROGRAM, 63 | "token_2022_program": cls.TOKEN_2022_PROGRAM, 64 | "associated_token_program": cls.ASSOCIATED_TOKEN_PROGRAM, 65 | "rent": cls.RENT, 66 | "sol_mint": cls.SOL_MINT, 67 | } 68 | -------------------------------------------------------------------------------- /src/core/priority_fee/dynamic_fee.py: -------------------------------------------------------------------------------- 1 | import statistics 2 | 3 | from solders.pubkey import Pubkey 4 | 5 | from core.client import SolanaClient 6 | from core.priority_fee import PriorityFeePlugin 7 | from utils.logger import get_logger 8 | 9 | logger = get_logger(__name__) 10 | 11 | 12 | class DynamicPriorityFee(PriorityFeePlugin): 13 | """Dynamic priority fee plugin using getRecentPrioritizationFees.""" 14 | 15 | def __init__(self, client: SolanaClient): 16 | """ 17 | Initialize the dynamic fee plugin. 18 | 19 | Args: 20 | client: Solana RPC client for network requests. 21 | """ 22 | self.client = client 23 | 24 | async def get_priority_fee( 25 | self, accounts: list[Pubkey] | None = None 26 | ) -> int | None: 27 | """ 28 | Fetch the recent priority fee using getRecentPrioritizationFees. 29 | 30 | Args: 31 | accounts: List of accounts to consider for the fee calculation. 32 | If None, the fee is calculated without specific account constraints. 33 | 34 | Returns: 35 | Optional[int]: Median priority fee in microlamports, or None if the request fails. 
36 | """ 37 | try: 38 | body = { 39 | "jsonrpc": "2.0", 40 | "id": 1, 41 | "method": "getRecentPrioritizationFees", 42 | "params": [[str(account) for account in accounts]] if accounts else [], 43 | } 44 | 45 | response = await self.client.post_rpc(body) 46 | if not response or "result" not in response: 47 | logger.error( 48 | "Failed to fetch recent prioritization fees: invalid response" 49 | ) 50 | return None 51 | 52 | fees = [fee["prioritizationFee"] for fee in response["result"]] 53 | if not fees: 54 | logger.warning("No prioritization fees found in the response") 55 | return None 56 | 57 | # Get the 70th percentile of fees for faster processing 58 | # It means you're paying a fee that's higher than 70% of other transactions 59 | # Higher percentile = faster transactions but more expensive 60 | # Lower percentile = cheaper but slower transactions 61 | prior_fee = int(statistics.quantiles(fees, n=10)[-3]) # 70th percentile 62 | 63 | return prior_fee 64 | 65 | except Exception: 66 | logger.exception("Failed to fetch recent priority fee") 67 | return None 68 | -------------------------------------------------------------------------------- /src/core/priority_fee/manager.py: -------------------------------------------------------------------------------- 1 | from solders.pubkey import Pubkey 2 | 3 | from core.client import SolanaClient 4 | from core.priority_fee.dynamic_fee import DynamicPriorityFee 5 | from core.priority_fee.fixed_fee import FixedPriorityFee 6 | from utils.logger import get_logger 7 | 8 | logger = get_logger(__name__) 9 | 10 | 11 | class PriorityFeeManager: 12 | """Manager for priority fee calculation and validation.""" 13 | 14 | def __init__( 15 | self, 16 | client: SolanaClient, 17 | enable_dynamic_fee: bool, 18 | enable_fixed_fee: bool, 19 | fixed_fee: int, 20 | extra_fee: float, 21 | hard_cap: int, 22 | ): 23 | """ 24 | Initialize the priority fee manager. 25 | 26 | Args: 27 | client: Solana RPC client for dynamic fee calculation. 
28 | enable_dynamic_fee: Whether to enable dynamic fee calculation. 29 | enable_fixed_fee: Whether to enable fixed fee. 30 | fixed_fee: Fixed priority fee in microlamports. 31 | extra_fee: Percentage increase to apply to the base fee. 32 | hard_cap: Maximum allowed priority fee in microlamports. 33 | """ 34 | self.client = client 35 | self.enable_dynamic_fee = enable_dynamic_fee 36 | self.enable_fixed_fee = enable_fixed_fee 37 | self.fixed_fee = fixed_fee 38 | self.extra_fee = extra_fee 39 | self.hard_cap = hard_cap 40 | 41 | # Initialize plugins 42 | self.dynamic_fee_plugin = DynamicPriorityFee(client) 43 | self.fixed_fee_plugin = FixedPriorityFee(fixed_fee) 44 | 45 | async def calculate_priority_fee( 46 | self, accounts: list[Pubkey] | None = None 47 | ) -> int | None: 48 | """ 49 | Calculate the priority fee based on the configuration. 50 | 51 | Args: 52 | accounts: List of accounts to consider for dynamic fee calculation. 53 | If None, the fee is calculated without specific account constraints. 54 | 55 | Returns: 56 | Optional[int]: Calculated priority fee in microlamports, or None if no fee should be applied. 57 | """ 58 | base_fee = await self._get_base_fee(accounts) 59 | if base_fee is None: 60 | return None 61 | 62 | # Apply extra fee (percentage increase) 63 | final_fee = int(base_fee * (1 + self.extra_fee)) 64 | 65 | # Enforce hard cap 66 | if final_fee > self.hard_cap: 67 | logger.warning( 68 | f"Calculated priority fee {final_fee} exceeds hard cap {self.hard_cap}. Applying hard cap." 69 | ) 70 | final_fee = self.hard_cap 71 | 72 | return final_fee 73 | 74 | async def _get_base_fee(self, accounts: list[Pubkey] | None = None) -> int | None: 75 | """ 76 | Determine the base fee based on the configuration. 77 | 78 | Returns: 79 | Optional[int]: Base fee in microlamports, or None if no fee should be applied. 
80 | """ 81 | # Prefer dynamic fee if both are enabled 82 | if self.enable_dynamic_fee: 83 | dynamic_fee = await self.dynamic_fee_plugin.get_priority_fee(accounts) 84 | if dynamic_fee is not None: 85 | return dynamic_fee 86 | 87 | # Fall back to fixed fee if enabled 88 | if self.enable_fixed_fee: 89 | return await self.fixed_fee_plugin.get_priority_fee() 90 | 91 | # No priority fee if both are disabled 92 | return None 93 | -------------------------------------------------------------------------------- /learning-examples/cleanup_accounts.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | 4 | from dotenv import load_dotenv 5 | from solders.pubkey import Pubkey 6 | from spl.token.instructions import BurnParams, CloseAccountParams, burn, close_account 7 | 8 | from core.client import SolanaClient 9 | from core.pubkeys import SystemAddresses 10 | from core.wallet import Wallet 11 | from utils.logger import get_logger 12 | 13 | load_dotenv() 14 | logger = get_logger(__name__) 15 | 16 | RPC_ENDPOINT = os.getenv("SOLANA_NODE_RPC_ENDPOINT") 17 | PRIVATE_KEY = os.getenv("SOLANA_PRIVATE_KEY") 18 | 19 | # Update this address to MINT address of a token you want to close 20 | MINT_ADDRESS = Pubkey.from_string("9WHpYbqG6LJvfCYfMjvGbyo1wHXgroCrixPb33s2pump") 21 | 22 | # Token program for the mint - use TOKEN_PROGRAM for legacy SPL tokens, TOKEN_2022_PROGRAM for Token-2022 23 | # This must match the actual token's program to derive the correct ATA address 24 | TOKEN_PROGRAM = SystemAddresses.TOKEN_PROGRAM 25 | 26 | 27 | async def close_account_if_exists( 28 | client: SolanaClient, wallet: Wallet, account: Pubkey, mint: Pubkey 29 | ): 30 | """Safely close a token account if it exists and reclaim rent.""" 31 | try: 32 | solana_client = await client.get_client() 33 | info = await solana_client.get_account_info( 34 | account, encoding="base64" 35 | ) # base64 encoding for account data by deafult 36 | 37 | # 
WARNING: This will permanently burn all tokens in the account before closing it 38 | # Closing account is impossible if balance is positive 39 | balance = await client.get_token_account_balance(account) 40 | if balance > 0: 41 | logger.info(f"Burning {balance} tokens from account {account}...") 42 | burn_ix = burn( 43 | BurnParams( 44 | account=account, 45 | mint=mint, 46 | owner=wallet.pubkey, 47 | amount=balance, 48 | program_id=TOKEN_PROGRAM, 49 | ) 50 | ) 51 | await client.build_and_send_transaction([burn_ix], wallet.keypair) 52 | logger.info(f"Burned tokens from {account}") 53 | 54 | # If account exists, attempt to close it 55 | if info.value: 56 | logger.info(f"Closing account: {account}") 57 | close_params = CloseAccountParams( 58 | account=account, 59 | dest=wallet.pubkey, 60 | owner=wallet.pubkey, 61 | program_id=TOKEN_PROGRAM, 62 | ) 63 | ix = close_account(close_params) 64 | 65 | tx_sig = await client.build_and_send_transaction( 66 | [ix], 67 | wallet.keypair, 68 | skip_preflight=True, 69 | ) 70 | await client.confirm_transaction(tx_sig) 71 | logger.info(f"Closed successfully: {account}") 72 | else: 73 | logger.info(f"Account does not exist or already closed: {account}") 74 | 75 | except Exception as e: 76 | logger.error(f"Error while processing account {account}: {e}") 77 | 78 | 79 | async def main(): 80 | try: 81 | client = SolanaClient(RPC_ENDPOINT) 82 | wallet = Wallet(PRIVATE_KEY) 83 | 84 | # Get user's ATA for the token 85 | ata = wallet.get_associated_token_address(MINT_ADDRESS, TOKEN_PROGRAM) 86 | await close_account_if_exists(client, wallet, ata, MINT_ADDRESS) 87 | 88 | except Exception as e: 89 | logger.error(f"Unexpected error: {e}") 90 | finally: 91 | await client.close() 92 | 93 | 94 | if __name__ == "__main__": 95 | asyncio.run(main()) 96 | -------------------------------------------------------------------------------- /learning-examples/blockSubscribe_extract_transactions.py: 
-------------------------------------------------------------------------------- 1 | import asyncio 2 | import hashlib 3 | import json 4 | import os 5 | 6 | import websockets 7 | from solders.pubkey import Pubkey 8 | 9 | PUMP_PROGRAM = Pubkey.from_string("6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P") 10 | WSS_ENDPOINT = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 11 | 12 | 13 | async def save_transaction(tx_data, tx_signature): 14 | os.makedirs("blockSubscribe-transactions", exist_ok=True) 15 | hashed_signature = hashlib.sha256(tx_signature.encode()).hexdigest() 16 | file_path = os.path.join("blockSubscribe-transactions", f"{hashed_signature}.json") 17 | with open(file_path, "w") as f: 18 | json.dump(tx_data, f, indent=2) 19 | print(f"Saved transaction: {hashed_signature[:8]}...") 20 | 21 | 22 | async def listen_for_transactions(): 23 | async with websockets.connect(WSS_ENDPOINT) as websocket: 24 | subscription_message = json.dumps( 25 | { 26 | "jsonrpc": "2.0", 27 | "id": 1, 28 | "method": "blockSubscribe", 29 | "params": [ 30 | {"mentionsAccountOrProgram": str(PUMP_PROGRAM)}, 31 | { 32 | "commitment": "confirmed", 33 | "encoding": "base64", 34 | "showRewards": False, 35 | "transactionDetails": "full", 36 | "maxSupportedTransactionVersion": 0, 37 | }, 38 | ], 39 | }, 40 | ) 41 | await websocket.send(subscription_message) 42 | print(f"Subscribed to blocks mentioning program: {PUMP_PROGRAM}") 43 | 44 | while True: 45 | try: 46 | response = await websocket.recv() 47 | data = json.loads(response) 48 | 49 | if "method" in data and data["method"] == "blockNotification": 50 | if "params" in data and "result" in data["params"]: 51 | block_data = data["params"]["result"] 52 | if "value" in block_data and "block" in block_data["value"]: 53 | block = block_data["value"]["block"] 54 | if "transactions" in block: 55 | transactions = block["transactions"] 56 | for tx in transactions: 57 | if isinstance(tx, dict) and "transaction" in tx: 58 | if ( 59 | 
isinstance(tx["transaction"], list) 60 | and len(tx["transaction"]) > 0 61 | ): 62 | tx_signature = tx["transaction"][0] 63 | elif ( 64 | isinstance(tx["transaction"], dict) 65 | and "signatures" in tx["transaction"] 66 | ): 67 | tx_signature = tx["transaction"][ 68 | "signatures" 69 | ][0] 70 | else: 71 | continue 72 | await save_transaction(tx, tx_signature) 73 | elif "result" in data: 74 | print("Subscription confirmed") 75 | except Exception as e: 76 | print(f"An error occurred: {e!s}") 77 | 78 | 79 | if __name__ == "__main__": 80 | asyncio.run(listen_for_transactions()) 81 | -------------------------------------------------------------------------------- /learning-examples/pumpswap/get_pumpswap_pools.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides functionality to: 3 | 1. Find market addresses by base mint 4 | 2. Fetch and parse market data (including pool addresses) from Pump AMM program accounts 5 | """ 6 | 7 | import asyncio 8 | import os 9 | import struct 10 | 11 | import base58 12 | from dotenv import load_dotenv 13 | from solana.rpc.async_api import AsyncClient 14 | from solana.rpc.types import MemcmpOpts 15 | from solders.pubkey import Pubkey 16 | 17 | load_dotenv() 18 | 19 | RPC_ENDPOINT = os.environ.get("SOLANA_NODE_RPC_ENDPOINT") 20 | PUMP_AMM_PROGRAM_ID = Pubkey.from_string("pAMMBay6oceH9fJKBRHGP5D4bD4sWpmSwMn52FMfXEA") 21 | TOKEN_MINT = Pubkey.from_string("...") 22 | 23 | 24 | async def get_market_address_by_base_mint( 25 | base_mint_address: Pubkey, amm_program_id: Pubkey 26 | ): 27 | async with AsyncClient(RPC_ENDPOINT) as client: 28 | base_mint_bytes = bytes(base_mint_address) 29 | 30 | # Define the offset for base_mint field 31 | offset = 43 32 | 33 | # Create the filter to match the base_mint 34 | filters = [MemcmpOpts(offset=offset, bytes=base_mint_bytes)] 35 | 36 | # Retrieve the accounts that match the filter 37 | response = await client.get_program_accounts( 38 | 
amm_program_id, # AMM program ID 39 | encoding="base64", 40 | filters=filters, 41 | ) 42 | 43 | pool_addresses = [account.pubkey for account in response.value] 44 | return pool_addresses[0] 45 | 46 | 47 | async def get_market_data(market_address: Pubkey): 48 | async with AsyncClient(RPC_ENDPOINT) as client: 49 | response = await client.get_account_info(market_address, encoding="base64") 50 | data = response.value.data 51 | parsed_data = {} 52 | 53 | offset = 8 54 | fields = [ 55 | ("pool_bump", "u8"), 56 | ("index", "u16"), 57 | ("creator", "pubkey"), 58 | ("base_mint", "pubkey"), 59 | ("quote_mint", "pubkey"), 60 | ("lp_mint", "pubkey"), 61 | ("pool_base_token_account", "pubkey"), 62 | ("pool_quote_token_account", "pubkey"), 63 | ("lp_supply", "u64"), 64 | ("coin_creator", "pubkey"), 65 | ] 66 | 67 | for field_name, field_type in fields: 68 | if field_type == "pubkey": 69 | value = data[offset : offset + 32] 70 | parsed_data[field_name] = base58.b58encode(value).decode("utf-8") 71 | offset += 32 72 | elif field_type in {"u64", "i64"}: 73 | value = ( 74 | struct.unpack(" None: 26 | """Parse bonding curve data - supports all versions.""" 27 | if data[:8] != EXPECTED_DISCRIMINATOR: 28 | raise ValueError("Invalid curve state discriminator") 29 | 30 | # Required fields (always present) 31 | offset = 8 32 | self.virtual_token_reserves = int.from_bytes( 33 | data[offset : offset + 8], "little" 34 | ) 35 | offset += 8 36 | self.virtual_sol_reserves = int.from_bytes(data[offset : offset + 8], "little") 37 | offset += 8 38 | self.real_token_reserves = int.from_bytes(data[offset : offset + 8], "little") 39 | offset += 8 40 | self.real_sol_reserves = int.from_bytes(data[offset : offset + 8], "little") 41 | offset += 8 42 | self.token_total_supply = int.from_bytes(data[offset : offset + 8], "little") 43 | offset += 8 44 | self.complete = bool(data[offset]) 45 | offset += 1 46 | 47 | # Optional fields (may not be present in older versions) 48 | if len(data) >= offset + 32: 49 | 
self.creator = Pubkey.from_bytes(data[offset : offset + 32]) 50 | offset += 32 51 | 52 | if len(data) > offset: 53 | self.is_mayhem_mode = bool(data[offset]) 54 | else: 55 | self.is_mayhem_mode = None 56 | 57 | else: 58 | self.creator = None 59 | 60 | 61 | def calculate_bonding_curve_price(curve_state: BondingCurveState) -> float: 62 | if curve_state.virtual_token_reserves <= 0 or curve_state.virtual_sol_reserves <= 0: 63 | raise ValueError("Invalid reserve state") 64 | 65 | return (curve_state.virtual_sol_reserves / LAMPORTS_PER_SOL) / ( 66 | curve_state.virtual_token_reserves / 10**TOKEN_DECIMALS 67 | ) 68 | 69 | 70 | def decode_bonding_curve_data(raw_data: str) -> BondingCurveState: 71 | decoded_data = base64.b64decode(raw_data) 72 | if decoded_data[:8] != EXPECTED_DISCRIMINATOR: 73 | raise ValueError("Invalid curve state discriminator") 74 | return BondingCurveState(decoded_data) 75 | 76 | 77 | # Load the JSON data 78 | with open("learning-examples/raw_bondingCurve_from_getAccountInfo.json") as file: 79 | json_data = json.load(file) 80 | 81 | # Extract the base64 encoded data 82 | encoded_data = json_data["result"]["value"]["data"][0] 83 | 84 | # Decode the data 85 | bonding_curve_state = decode_bonding_curve_data(encoded_data) 86 | 87 | # Calculate and print the token price 88 | token_price_sol = calculate_bonding_curve_price(bonding_curve_state) 89 | 90 | print("Bonding Curve State:") 91 | print(f" Virtual Token Reserves: {bonding_curve_state.virtual_token_reserves}") 92 | print(f" Virtual SOL Reserves: {bonding_curve_state.virtual_sol_reserves}") 93 | print(f" Real Token Reserves: {bonding_curve_state.real_token_reserves}") 94 | print(f" Real SOL Reserves: {bonding_curve_state.real_sol_reserves}") 95 | print(f" Token Total Supply: {bonding_curve_state.token_total_supply}") 96 | print(f" Complete: {bonding_curve_state.complete}") 97 | print(f"\nToken Price: {token_price_sol:.10f} SOL") 98 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | trades/* 2 | 3 | .vscode 4 | .pylintrc 5 | .ruff_cache 6 | 7 | # Byte-compiled / optimized / DLL files 8 | __pycache__/ 9 | *.py[cod] 10 | *$py.class 11 | 12 | # C extensions 13 | *.so 14 | 15 | # Distribution / packaging 16 | .Python 17 | build/ 18 | develop-eggs/ 19 | dist/ 20 | downloads/ 21 | eggs/ 22 | .eggs/ 23 | lib/ 24 | lib64/ 25 | parts/ 26 | sdist/ 27 | var/ 28 | wheels/ 29 | share/python-wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | MANIFEST 34 | 35 | # PyInstaller 36 | # Usually these files are written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .nox/ 49 | .coverage 50 | .coverage.* 51 | .cache 52 | nosetests.xml 53 | coverage.xml 54 | *.cover 55 | *.py,cover 56 | .hypothesis/ 57 | .pytest_cache/ 58 | cover/ 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | .pybuilder/ 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # IPython 88 | profile_default/ 89 | ipython_config.py 90 | 91 | # pyenv 92 | # For a library or package, you might want to ignore these files since the code is 93 | # intended to run in multiple environments; otherwise, check them in: 94 | # .python-version 95 | 96 | # pipenv 97 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 100 | # install all needed dependencies. 101 | #Pipfile.lock 102 | 103 | # poetry 104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 105 | # This is especially recommended for binary packages to ensure reproducibility, and is more 106 | # commonly ignored for libraries. 107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 108 | #poetry.lock 109 | 110 | # pdm 111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 112 | #pdm.lock 113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 114 | # in version control. 115 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 116 | .pdm.toml 117 | .pdm-python 118 | .pdm-build/ 119 | 120 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 121 | __pypackages__/ 122 | 123 | # Celery stuff 124 | celerybeat-schedule 125 | celerybeat.pid 126 | 127 | # SageMath parsed files 128 | *.sage.py 129 | 130 | # Environments 131 | .env 132 | .venv 133 | env/ 134 | venv/ 135 | ENV/ 136 | env.bak/ 137 | venv.bak/ 138 | 139 | # Spyder project settings 140 | .spyderproject 141 | .spyproject 142 | 143 | # Rope project settings 144 | .ropeproject 145 | 146 | # mkdocs documentation 147 | /site 148 | 149 | # mypy 150 | .mypy_cache/ 151 | .dmypy.json 152 | dmypy.json 153 | 154 | # Pyre type checker 155 | .pyre/ 156 | 157 | # pytype static type analyzer 158 | .pytype/ 159 | 160 | # Cython debug symbols 161 | cython_debug/ 162 | 163 | # PyCharm 164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 165 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 166 | # and can be added to the global gitignore or merged into this file. For a more nuclear 167 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
168 | #.idea/ 169 | trades/trades.log 170 | # Mac OS X 171 | .DS_Store -------------------------------------------------------------------------------- /src/geyser/proto/solana-storage.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package solana.storage.ConfirmedBlock; 4 | 5 | option go_package = "github.com/rpcpool/yellowstone-grpc/examples/golang/proto"; 6 | 7 | message ConfirmedBlock { 8 | string previous_blockhash = 1; 9 | string blockhash = 2; 10 | uint64 parent_slot = 3; 11 | repeated ConfirmedTransaction transactions = 4; 12 | repeated Reward rewards = 5; 13 | UnixTimestamp block_time = 6; 14 | BlockHeight block_height = 7; 15 | NumPartitions num_partitions = 8; 16 | } 17 | 18 | message ConfirmedTransaction { 19 | Transaction transaction = 1; 20 | TransactionStatusMeta meta = 2; 21 | } 22 | 23 | message Transaction { 24 | repeated bytes signatures = 1; 25 | Message message = 2; 26 | } 27 | 28 | message Message { 29 | MessageHeader header = 1; 30 | repeated bytes account_keys = 2; 31 | bytes recent_blockhash = 3; 32 | repeated CompiledInstruction instructions = 4; 33 | bool versioned = 5; 34 | repeated MessageAddressTableLookup address_table_lookups = 6; 35 | } 36 | 37 | message MessageHeader { 38 | uint32 num_required_signatures = 1; 39 | uint32 num_readonly_signed_accounts = 2; 40 | uint32 num_readonly_unsigned_accounts = 3; 41 | } 42 | 43 | message MessageAddressTableLookup { 44 | bytes account_key = 1; 45 | bytes writable_indexes = 2; 46 | bytes readonly_indexes = 3; 47 | } 48 | 49 | message TransactionStatusMeta { 50 | TransactionError err = 1; 51 | uint64 fee = 2; 52 | repeated uint64 pre_balances = 3; 53 | repeated uint64 post_balances = 4; 54 | repeated InnerInstructions inner_instructions = 5; 55 | bool inner_instructions_none = 10; 56 | repeated string log_messages = 6; 57 | bool log_messages_none = 11; 58 | repeated TokenBalance pre_token_balances = 7; 59 | repeated 
TokenBalance post_token_balances = 8; 60 | repeated Reward rewards = 9; 61 | repeated bytes loaded_writable_addresses = 12; 62 | repeated bytes loaded_readonly_addresses = 13; 63 | ReturnData return_data = 14; 64 | bool return_data_none = 15; 65 | 66 | // Sum of compute units consumed by all instructions. 67 | // Available since Solana v1.10.35 / v1.11.6. 68 | // Set to `None` for txs executed on earlier versions. 69 | optional uint64 compute_units_consumed = 16; 70 | } 71 | 72 | message TransactionError { 73 | bytes err = 1; 74 | } 75 | 76 | message InnerInstructions { 77 | uint32 index = 1; 78 | repeated InnerInstruction instructions = 2; 79 | } 80 | 81 | message InnerInstruction { 82 | uint32 program_id_index = 1; 83 | bytes accounts = 2; 84 | bytes data = 3; 85 | 86 | // Invocation stack height of an inner instruction. 87 | // Available since Solana v1.14.6 88 | // Set to `None` for txs executed on earlier versions. 89 | optional uint32 stack_height = 4; 90 | } 91 | 92 | message CompiledInstruction { 93 | uint32 program_id_index = 1; 94 | bytes accounts = 2; 95 | bytes data = 3; 96 | } 97 | 98 | message TokenBalance { 99 | uint32 account_index = 1; 100 | string mint = 2; 101 | UiTokenAmount ui_token_amount = 3; 102 | string owner = 4; 103 | string program_id = 5; 104 | } 105 | 106 | message UiTokenAmount { 107 | double ui_amount = 1; 108 | uint32 decimals = 2; 109 | string amount = 3; 110 | string ui_amount_string = 4; 111 | } 112 | 113 | message ReturnData { 114 | bytes program_id = 1; 115 | bytes data = 2; 116 | } 117 | 118 | enum RewardType { 119 | Unspecified = 0; 120 | Fee = 1; 121 | Rent = 2; 122 | Staking = 3; 123 | Voting = 4; 124 | } 125 | 126 | message Reward { 127 | string pubkey = 1; 128 | int64 lamports = 2; 129 | uint64 post_balance = 3; 130 | RewardType reward_type = 4; 131 | string commission = 5; 132 | } 133 | 134 | message Rewards { 135 | repeated Reward rewards = 1; 136 | NumPartitions num_partitions = 2; 137 | } 138 | 139 | message 
UnixTimestamp { 140 | int64 timestamp = 1; 141 | } 142 | 143 | message BlockHeight { 144 | uint64 block_height = 1; 145 | } 146 | 147 | message NumPartitions { 148 | uint64 num_partitions = 1; 149 | } -------------------------------------------------------------------------------- /learning-examples/listen-new-tokens/proto/solana-storage.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package solana.storage.ConfirmedBlock; 4 | 5 | option go_package = "github.com/rpcpool/yellowstone-grpc/examples/golang/proto"; 6 | 7 | message ConfirmedBlock { 8 | string previous_blockhash = 1; 9 | string blockhash = 2; 10 | uint64 parent_slot = 3; 11 | repeated ConfirmedTransaction transactions = 4; 12 | repeated Reward rewards = 5; 13 | UnixTimestamp block_time = 6; 14 | BlockHeight block_height = 7; 15 | NumPartitions num_partitions = 8; 16 | } 17 | 18 | message ConfirmedTransaction { 19 | Transaction transaction = 1; 20 | TransactionStatusMeta meta = 2; 21 | } 22 | 23 | message Transaction { 24 | repeated bytes signatures = 1; 25 | Message message = 2; 26 | } 27 | 28 | message Message { 29 | MessageHeader header = 1; 30 | repeated bytes account_keys = 2; 31 | bytes recent_blockhash = 3; 32 | repeated CompiledInstruction instructions = 4; 33 | bool versioned = 5; 34 | repeated MessageAddressTableLookup address_table_lookups = 6; 35 | } 36 | 37 | message MessageHeader { 38 | uint32 num_required_signatures = 1; 39 | uint32 num_readonly_signed_accounts = 2; 40 | uint32 num_readonly_unsigned_accounts = 3; 41 | } 42 | 43 | message MessageAddressTableLookup { 44 | bytes account_key = 1; 45 | bytes writable_indexes = 2; 46 | bytes readonly_indexes = 3; 47 | } 48 | 49 | message TransactionStatusMeta { 50 | TransactionError err = 1; 51 | uint64 fee = 2; 52 | repeated uint64 pre_balances = 3; 53 | repeated uint64 post_balances = 4; 54 | repeated InnerInstructions inner_instructions = 5; 55 | bool inner_instructions_none 
= 10; 56 | repeated string log_messages = 6; 57 | bool log_messages_none = 11; 58 | repeated TokenBalance pre_token_balances = 7; 59 | repeated TokenBalance post_token_balances = 8; 60 | repeated Reward rewards = 9; 61 | repeated bytes loaded_writable_addresses = 12; 62 | repeated bytes loaded_readonly_addresses = 13; 63 | ReturnData return_data = 14; 64 | bool return_data_none = 15; 65 | 66 | // Sum of compute units consumed by all instructions. 67 | // Available since Solana v1.10.35 / v1.11.6. 68 | // Set to `None` for txs executed on earlier versions. 69 | optional uint64 compute_units_consumed = 16; 70 | } 71 | 72 | message TransactionError { 73 | bytes err = 1; 74 | } 75 | 76 | message InnerInstructions { 77 | uint32 index = 1; 78 | repeated InnerInstruction instructions = 2; 79 | } 80 | 81 | message InnerInstruction { 82 | uint32 program_id_index = 1; 83 | bytes accounts = 2; 84 | bytes data = 3; 85 | 86 | // Invocation stack height of an inner instruction. 87 | // Available since Solana v1.14.6 88 | // Set to `None` for txs executed on earlier versions. 
89 | optional uint32 stack_height = 4; 90 | } 91 | 92 | message CompiledInstruction { 93 | uint32 program_id_index = 1; 94 | bytes accounts = 2; 95 | bytes data = 3; 96 | } 97 | 98 | message TokenBalance { 99 | uint32 account_index = 1; 100 | string mint = 2; 101 | UiTokenAmount ui_token_amount = 3; 102 | string owner = 4; 103 | string program_id = 5; 104 | } 105 | 106 | message UiTokenAmount { 107 | double ui_amount = 1; 108 | uint32 decimals = 2; 109 | string amount = 3; 110 | string ui_amount_string = 4; 111 | } 112 | 113 | message ReturnData { 114 | bytes program_id = 1; 115 | bytes data = 2; 116 | } 117 | 118 | enum RewardType { 119 | Unspecified = 0; 120 | Fee = 1; 121 | Rent = 2; 122 | Staking = 3; 123 | Voting = 4; 124 | } 125 | 126 | message Reward { 127 | string pubkey = 1; 128 | int64 lamports = 2; 129 | uint64 post_balance = 3; 130 | RewardType reward_type = 4; 131 | string commission = 5; 132 | } 133 | 134 | message Rewards { 135 | repeated Reward rewards = 1; 136 | NumPartitions num_partitions = 2; 137 | } 138 | 139 | message UnixTimestamp { 140 | int64 timestamp = 1; 141 | } 142 | 143 | message BlockHeight { 144 | uint64 block_height = 1; 145 | } 146 | 147 | message NumPartitions { 148 | uint64 num_partitions = 1; 149 | } -------------------------------------------------------------------------------- /learning-examples/listen-new-tokens/listen_pumpportal.py: -------------------------------------------------------------------------------- 1 | """ 2 | Listens for new Pump.fun token creations via PumpPortal WebSocket. 3 | 4 | Performance: Fast, real-time data via third-party API. 5 | 6 | This script uses PumpPortal's WebSocket API, a third-party service that aggregates 7 | and provides real-time Pump.fun token creation events. This provides additional 8 | market data like initial buy amounts and market cap that aren't available in 9 | raw blockchain data. 
10 | 11 | PumpPortal API: https://pumpportal.fun/ 12 | 13 | Note: This is a third-party service and requires trust in the data provider. 14 | For trustless monitoring, use the direct blockchain listeners (logs, block, geyser). 15 | """ 16 | 17 | import asyncio 18 | import json 19 | from datetime import datetime 20 | 21 | import websockets 22 | 23 | # PumpPortal WebSocket URL 24 | WS_URL = "wss://pumpportal.fun/api/data" 25 | 26 | 27 | def print_token_info(token_data): 28 | """ 29 | Print token information in a consistent, user-friendly format. 30 | 31 | Args: 32 | token_data: Dictionary containing token fields from PumpPortal 33 | """ 34 | print("\n" + "=" * 80) 35 | print("🎯 NEW TOKEN DETECTED (via PumpPortal)") 36 | print("=" * 80) 37 | print(f"Name: {token_data.get('name', 'N/A')}") 38 | print(f"Symbol: {token_data.get('symbol', 'N/A')}") 39 | print(f"Mint: {token_data.get('mint', 'N/A')}") 40 | 41 | # PumpPortal-specific fields 42 | if "initialBuy" in token_data: 43 | initial_buy_sol = token_data['initialBuy'] 44 | print(f"Initial Buy: {initial_buy_sol:.6f} SOL") 45 | 46 | if "marketCapSol" in token_data: 47 | market_cap_sol = token_data['marketCapSol'] 48 | print(f"Market Cap: {market_cap_sol:.6f} SOL") 49 | 50 | if "bondingCurveKey" in token_data: 51 | print(f"Bonding Curve: {token_data['bondingCurveKey']}") 52 | 53 | if "traderPublicKey" in token_data: 54 | print(f"Creator: {token_data['traderPublicKey']}") 55 | 56 | # Virtual reserves 57 | if "vSolInBondingCurve" in token_data: 58 | v_sol = token_data['vSolInBondingCurve'] 59 | print(f"Virtual SOL: {v_sol:.6f} SOL") 60 | 61 | if "vTokensInBondingCurve" in token_data: 62 | v_tokens = token_data['vTokensInBondingCurve'] 63 | print(f"Virtual Tokens: {v_tokens:,.0f}") 64 | 65 | if "uri" in token_data: 66 | print(f"URI: {token_data['uri']}") 67 | 68 | if "signature" in token_data: 69 | print(f"Signature: {token_data['signature']}") 70 | 71 | print("=" * 80 + "\n") 72 | 73 | 74 | 75 | async def 
listen_for_new_tokens(): 76 | async with websockets.connect(WS_URL) as websocket: 77 | # Subscribe to new token events 78 | await websocket.send(json.dumps({"method": "subscribeNewToken", "params": []})) 79 | 80 | print("Listening for new token creations...") 81 | 82 | while True: 83 | try: 84 | message = await websocket.recv() 85 | data = json.loads(message) 86 | 87 | if "method" in data and data["method"] == "newToken": 88 | token_info = data.get("params", [{}])[0] 89 | elif "signature" in data and "mint" in data: 90 | token_info = data 91 | else: 92 | continue 93 | 94 | # Print token information in consistent format 95 | print_token_info(token_info) 96 | except websockets.exceptions.ConnectionClosed: 97 | print("\nWebSocket connection closed. Reconnecting...") 98 | break 99 | except json.JSONDecodeError: 100 | print(f"\nReceived non-JSON message: {message}") 101 | except Exception as e: 102 | print(f"\nAn error occurred: {e}") 103 | 104 | 105 | async def main(): 106 | while True: 107 | try: 108 | await listen_for_new_tokens() 109 | except Exception as e: 110 | print(f"\nAn error occurred: {e}") 111 | print("Reconnecting in 5 seconds...") 112 | await asyncio.sleep(5) 113 | 114 | 115 | if __name__ == "__main__": 116 | asyncio.run(main()) 117 | -------------------------------------------------------------------------------- /learning-examples/fetch_price.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import struct 4 | from typing import Final 5 | 6 | from construct import Bytes, Flag, Int64ul, Struct 7 | from solana.rpc.async_api import AsyncClient 8 | from solders.pubkey import Pubkey 9 | 10 | LAMPORTS_PER_SOL: Final[int] = 1_000_000_000 11 | TOKEN_DECIMALS: Final[int] = 6 12 | CURVE_ADDRESS: Final[str] = "..." # Replace with actual bonding curve address 13 | 14 | # Here and later all the discriminators are precalculated. 
See learning-examples/calculate_discriminator.py 15 | EXPECTED_DISCRIMINATOR: Final[bytes] = struct.pack(" None: 54 | """Parse bonding curve data - auto-detects version.""" 55 | data_length = len(data) - 8 56 | 57 | if data_length < 73: # V1: without creator and mayhem mode 58 | parsed = self._STRUCT_V1.parse(data[8:]) 59 | self.__dict__.update(parsed) 60 | self.creator = None 61 | self.is_mayhem_mode = False 62 | elif data_length == 73: # V2: with creator, without mayhem mode 63 | parsed = self._STRUCT_V2.parse(data[8:]) 64 | self.__dict__.update(parsed) 65 | if isinstance(self.creator, bytes): 66 | self.creator = Pubkey.from_bytes(self.creator) 67 | self.is_mayhem_mode = False 68 | else: # V3: with creator and mayhem mode 69 | parsed = self._STRUCT_V3.parse(data[8:]) 70 | self.__dict__.update(parsed) 71 | if isinstance(self.creator, bytes): 72 | self.creator = Pubkey.from_bytes(self.creator) 73 | 74 | 75 | async def get_bonding_curve_state( 76 | conn: AsyncClient, curve_address: Pubkey 77 | ) -> BondingCurveState: 78 | response = await conn.get_account_info(curve_address, encoding="base64") 79 | if not response.value or not response.value.data: 80 | raise ValueError("Invalid curve state: No data") 81 | 82 | data = response.value.data 83 | if data[:8] != EXPECTED_DISCRIMINATOR: 84 | raise ValueError("Invalid curve state discriminator") 85 | 86 | return BondingCurveState(data) 87 | 88 | 89 | def calculate_bonding_curve_price(curve_state: BondingCurveState) -> float: 90 | if curve_state.virtual_token_reserves <= 0 or curve_state.virtual_sol_reserves <= 0: 91 | raise ValueError("Invalid reserve state") 92 | 93 | return (curve_state.virtual_sol_reserves / LAMPORTS_PER_SOL) / ( 94 | curve_state.virtual_token_reserves / 10**TOKEN_DECIMALS 95 | ) 96 | 97 | 98 | async def main() -> None: 99 | try: 100 | async with AsyncClient(RPC_ENDPOINT) as conn: 101 | curve_address = Pubkey.from_string(CURVE_ADDRESS) 102 | bonding_curve_state = await get_bonding_curve_state(conn, 
class AccountCleanupManager:
    """Handles safe cleanup of token accounts (ATA) after trading sessions.

    Optionally burns any leftover token balance before closing the
    associated token account, reclaiming its rent to the wallet.
    """

    def __init__(
        self,
        client: SolanaClient,
        wallet: Wallet,
        priority_fee_manager: PriorityFeeManager,
        use_priority_fee: bool = False,
        force_burn: bool = False,
    ):
        """
        Args:
            client: Solana RPC client
            wallet: Wallet for signing transactions
            priority_fee_manager: Computes priority fees for cleanup txs
            use_priority_fee: Attach a priority fee to the cleanup tx
            force_burn: Burn a non-zero remaining balance before closing;
                when False, accounts holding tokens are skipped entirely
        """
        self.client = client
        self.wallet = wallet
        self.priority_fee_manager = priority_fee_manager
        self.use_priority_fee = use_priority_fee
        self.close_with_force_burn = force_burn

    async def cleanup_ata(self, mint: Pubkey, token_program_id: Pubkey | None = None) -> None:
        """
        Attempt to burn any remaining tokens and close the ATA.
        Skips if account doesn't exist or is already empty/closed.

        Args:
            mint: Token mint address
            token_program_id: Token program (TOKEN or TOKEN_2022). Defaults to TOKEN_2022_PROGRAM
        """
        # NOTE(review): defaulting to TOKEN_2022 assumes new pump.fun mints
        # use the Token-2022 program — confirm against the trading code's
        # token-program detection before relying on the default.
        if token_program_id is None:
            token_program_id = SystemAddresses.TOKEN_2022_PROGRAM

        ata = self.wallet.get_associated_token_address(mint, token_program_id)
        solana_client = await self.client.get_client()

        # Compute the fee up front so a fee-estimation failure aborts
        # before any on-chain action is attempted.
        priority_fee = (
            await self.priority_fee_manager.calculate_priority_fee([ata])
            if self.use_priority_fee
            else None
        )

        # Fixed grace period so the queried node reflects the just-finished
        # trading activity (balance/close state) before we inspect the ATA.
        logger.info("Waiting for 15 seconds for RPC node to synchronize...")
        await asyncio.sleep(15)

        try:
            info = await solana_client.get_account_info(ata, encoding="base64")
            if not info.value:
                logger.info(f"ATA {ata} does not exist or already closed.")
                return

            balance = await self.client.get_token_account_balance(ata)
            instructions = []

            # Burn path: only when explicitly enabled, since burning
            # destroys any residual tokens irrecoverably.
            if balance > 0 and self.close_with_force_burn:
                logger.info(
                    f"Burning {balance} tokens from ATA {ata} (mint: {mint})..."
                )
                burn_ix = burn(
                    BurnParams(
                        account=ata,
                        mint=mint,
                        owner=self.wallet.pubkey,
                        amount=balance,
                        program_id=token_program_id,
                    )
                )
                instructions.append(burn_ix)

            # Non-empty account without force-burn: closing would fail
            # on-chain, so skip instead of submitting a doomed tx.
            elif balance > 0:
                logger.info(
                    f"Skipping ATA {ata} with non-zero balance ({balance} tokens) "
                    f"because CLEANUP_FORCE_CLOSE_WITH_BURN is disabled."
                )
                return

            # Include close account instruction
            logger.info(f"Closing ATA: {ata}")
            close_ix = close_account(
                CloseAccountParams(
                    account=ata,
                    dest=self.wallet.pubkey,
                    owner=self.wallet.pubkey,
                    program_id=token_program_id,
                )
            )
            instructions.append(close_ix)

            # Send both burn and close instructions in the same transaction
            if instructions:
                tx_sig = await self.client.build_and_send_transaction(
                    instructions,
                    self.wallet.keypair,
                    skip_preflight=True,
                    priority_fee=priority_fee,
                )
                await self.client.confirm_transaction(tx_sig)
                logger.info(f"Closed successfully: {ata}")

        except Exception as e:
            # Best-effort cleanup: a failed close/burn must not crash the
            # session teardown, so log and continue.
            logger.warning(f"Cleanup failed for ATA {ata}: {e!s}")
`from typing import Any` for complex types 37 | 38 | ```python 39 | def process_transaction(tx_data: dict[str, Any]) -> bool: 40 | """Process transaction data and return success status.""" 41 | pass 42 | 43 | async def fetch_data(endpoint: str) -> dict[str, Any] | None: 44 | """Fetch data from endpoint, return None on failure.""" 45 | pass 46 | ``` 47 | 48 | ## Documentation Standards 49 | 50 | ### Docstring Format 51 | Use Google-style docstrings for all functions and classes: 52 | 53 | ```python 54 | def calculate_slippage(amount: float, slippage_percent: float) -> float: 55 | """Calculate slippage amount for a trade. 56 | 57 | Args: 58 | amount: The trade amount in SOL 59 | slippage_percent: Slippage percentage (0.1 = 10%) 60 | 61 | Returns: 62 | The calculated slippage amount 63 | 64 | Raises: 65 | ValueError: If slippage_percent is negative 66 | """ 67 | if slippage_percent < 0: 68 | raise ValueError("Slippage percentage cannot be negative") 69 | return amount * slippage_percent 70 | ``` 71 | 72 | ## Error Handling 73 | 74 | ### Comprehensive Exception Handling 75 | - Use try-catch blocks for all external operations (RPC calls, file I/O) 76 | - Log exceptions with context using `logging.exception()` 77 | - Provide meaningful error messages 78 | - Don't suppress exceptions without good reason 79 | 80 | ```python 81 | try: 82 | result = await client.get_account_info(address) 83 | logger.info(f"Successfully fetched account info for {address}") 84 | except Exception as e: 85 | logger.exception(f"Failed to fetch account info for {address}: {e}") 86 | raise 87 | ``` 88 | 89 | ## Logging Standards 90 | 91 | ### Logger Usage 92 | - Use `get_logger(__name__)` pattern consistently 93 | - Import from `utils.logger` 94 | - Use appropriate log levels (DEBUG, INFO, WARNING, ERROR) 95 | - Include context in log messages 96 | 97 | ```python 98 | from utils.logger import get_logger 99 | 100 | logger = get_logger(__name__) 101 | 102 | # Good logging examples 103 | 
logger.info(f"Starting bot '{bot_name}' with platform {platform.value}") 104 | logger.warning(f"Transaction failed, attempt {attempt}/{max_attempts}") 105 | logger.error(f"Platform {platform.value} is not supported") 106 | ``` 107 | 108 | ## Security Rules 109 | 110 | ### Sensitive Data 111 | - NEVER hardcode private keys, API tokens, or secrets 112 | - Use environment variables for all sensitive configuration 113 | - Don't log sensitive information 114 | - Validate all external inputs 115 | 116 | ### Safe Practices 117 | ```python 118 | # Good - using environment variables 119 | private_key = os.getenv("SOLANA_PRIVATE_KEY") 120 | if not private_key: 121 | raise ValueError("SOLANA_PRIVATE_KEY environment variable is required") 122 | 123 | # Bad - hardcoded secrets 124 | private_key = "your_secret_key_here" # NEVER DO THIS 125 | ``` 126 | 127 | ## Code Quality 128 | 129 | ### Linting Compliance 130 | Ensure code passes all enabled Ruff rules: 131 | - Security best practices (S) 132 | - Type annotations (ANN) 133 | - Exception handling (BLE, TRY) 134 | - Code complexity (C90) 135 | - Pylint conventions (PL) 136 | - No commented-out code (ERA) 137 | 138 | ### Performance Considerations 139 | - Use async/await for I/O operations 140 | - Implement proper connection pooling for HTTP clients 141 | - Cache expensive computations when appropriate 142 | - Use uvloop for better async performance 143 | 144 | ```python 145 | # Set uvloop policy at module level 146 | import asyncio 147 | import uvloop 148 | 149 | asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) 150 | ``` -------------------------------------------------------------------------------- /.kiro/steering/python-style.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | inclusion: always 3 | --- 4 | 5 | # Python Code Style Rules 6 | 7 | ## Formatting Standards 8 | 9 | ### Ruff Configuration Compliance 10 | - Use 88 character line length limit 11 | - Use 4 spaces for 
indentation (never tabs) 12 | - Use double quotes for strings consistently 13 | - Target Python 3.11+ features and syntax 14 | - Enable automatic import sorting and organization 15 | 16 | ### Import Organization 17 | ```python 18 | # Standard library imports first 19 | import asyncio 20 | import logging 21 | from pathlib import Path 22 | 23 | # Third-party imports second 24 | import aiohttp 25 | from solana.rpc.async_api import AsyncClient 26 | 27 | # Local imports last 28 | from config_loader import load_bot_config 29 | from utils.logger import get_logger 30 | ``` 31 | 32 | ### Type Annotations 33 | - Add type hints to ALL public functions and methods 34 | - Use modern typing syntax (Python 3.9+ union syntax where applicable) 35 | - Include return type annotations 36 | - Use `from typing import Any` for complex types 37 | 38 | ```python 39 | def process_transaction(tx_data: dict[str, Any]) -> bool: 40 | """Process transaction data and return success status.""" 41 | pass 42 | 43 | async def fetch_data(endpoint: str) -> dict[str, Any] | None: 44 | """Fetch data from endpoint, return None on failure.""" 45 | pass 46 | ``` 47 | 48 | ## Documentation Standards 49 | 50 | ### Docstring Format 51 | Use Google-style docstrings for all functions and classes: 52 | 53 | ```python 54 | def calculate_slippage(amount: float, slippage_percent: float) -> float: 55 | """Calculate slippage amount for a trade. 
56 | 57 | Args: 58 | amount: The trade amount in SOL 59 | slippage_percent: Slippage percentage (0.1 = 10%) 60 | 61 | Returns: 62 | The calculated slippage amount 63 | 64 | Raises: 65 | ValueError: If slippage_percent is negative 66 | """ 67 | if slippage_percent < 0: 68 | raise ValueError("Slippage percentage cannot be negative") 69 | return amount * slippage_percent 70 | ``` 71 | 72 | ## Error Handling 73 | 74 | ### Comprehensive Exception Handling 75 | - Use try-catch blocks for all external operations (RPC calls, file I/O) 76 | - Log exceptions with context using `logging.exception()` 77 | - Provide meaningful error messages 78 | - Don't suppress exceptions without good reason 79 | 80 | ```python 81 | try: 82 | result = await client.get_account_info(address) 83 | logger.info(f"Successfully fetched account info for {address}") 84 | except Exception as e: 85 | logger.exception(f"Failed to fetch account info for {address}: {e}") 86 | raise 87 | ``` 88 | 89 | ## Logging Standards 90 | 91 | ### Logger Usage 92 | - Use `get_logger(__name__)` pattern consistently 93 | - Import from `utils.logger` 94 | - Use appropriate log levels (DEBUG, INFO, WARNING, ERROR) 95 | - Include context in log messages 96 | 97 | ```python 98 | from utils.logger import get_logger 99 | 100 | logger = get_logger(__name__) 101 | 102 | # Good logging examples 103 | logger.info(f"Starting bot '{bot_name}' with platform {platform.value}") 104 | logger.warning(f"Transaction failed, attempt {attempt}/{max_attempts}") 105 | logger.error(f"Platform {platform.value} is not supported") 106 | ``` 107 | 108 | ## Security Rules 109 | 110 | ### Sensitive Data 111 | - NEVER hardcode private keys, API tokens, or secrets 112 | - Use environment variables for all sensitive configuration 113 | - Don't log sensitive information 114 | - Validate all external inputs 115 | 116 | ### Safe Practices 117 | ```python 118 | # Good - using environment variables 119 | private_key = os.getenv("SOLANA_PRIVATE_KEY") 120 | if 
not private_key: 121 | raise ValueError("SOLANA_PRIVATE_KEY environment variable is required") 122 | 123 | # Bad - hardcoded secrets 124 | private_key = "your_secret_key_here" # NEVER DO THIS 125 | ``` 126 | 127 | ## Code Quality 128 | 129 | ### Linting Compliance 130 | Ensure code passes all enabled Ruff rules: 131 | - Security best practices (S) 132 | - Type annotations (ANN) 133 | - Exception handling (BLE, TRY) 134 | - Code complexity (C90) 135 | - Pylint conventions (PL) 136 | - No commented-out code (ERA) 137 | 138 | ### Performance Considerations 139 | - Use async/await for I/O operations 140 | - Implement proper connection pooling for HTTP clients 141 | - Cache expensive computations when appropriate 142 | - Use uvloop for better async performance 143 | 144 | ```python 145 | # Set uvloop policy at module level 146 | import asyncio 147 | import uvloop 148 | 149 | asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) 150 | ``` -------------------------------------------------------------------------------- /.windsurf/rules/python-style.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | trigger: always_on 3 | --- 4 | 5 | # Python Code Style Rules 6 | 7 | ## Formatting Standards 8 | 9 | ### Ruff Configuration Compliance 10 | - Use 88 character line length limit 11 | - Use 4 spaces for indentation (never tabs) 12 | - Use double quotes for strings consistently 13 | - Target Python 3.11+ features and syntax 14 | - Enable automatic import sorting and organization 15 | 16 | ### Import Organization 17 | ```python 18 | # Standard library imports first 19 | import asyncio 20 | import logging 21 | from pathlib import Path 22 | 23 | # Third-party imports second 24 | import aiohttp 25 | from solana.rpc.async_api import AsyncClient 26 | 27 | # Local imports last 28 | from config_loader import load_bot_config 29 | from utils.logger import get_logger 30 | ``` 31 | 32 | ### Type Annotations 33 | - Add type hints to ALL public 
functions and methods 34 | - Use modern typing syntax (Python 3.9+ union syntax where applicable) 35 | - Include return type annotations 36 | - Use `from typing import Any` for complex types 37 | 38 | ```python 39 | def process_transaction(tx_data: dict[str, Any]) -> bool: 40 | """Process transaction data and return success status.""" 41 | pass 42 | 43 | async def fetch_data(endpoint: str) -> dict[str, Any] | None: 44 | """Fetch data from endpoint, return None on failure.""" 45 | pass 46 | ``` 47 | 48 | ## Documentation Standards 49 | 50 | ### Docstring Format 51 | Use Google-style docstrings for all functions and classes: 52 | 53 | ```python 54 | def calculate_slippage(amount: float, slippage_percent: float) -> float: 55 | """Calculate slippage amount for a trade. 56 | 57 | Args: 58 | amount: The trade amount in SOL 59 | slippage_percent: Slippage percentage (0.1 = 10%) 60 | 61 | Returns: 62 | The calculated slippage amount 63 | 64 | Raises: 65 | ValueError: If slippage_percent is negative 66 | """ 67 | if slippage_percent < 0: 68 | raise ValueError("Slippage percentage cannot be negative") 69 | return amount * slippage_percent 70 | ``` 71 | 72 | ## Error Handling 73 | 74 | ### Comprehensive Exception Handling 75 | - Use try-catch blocks for all external operations (RPC calls, file I/O) 76 | - Log exceptions with context using `logging.exception()` 77 | - Provide meaningful error messages 78 | - Don't suppress exceptions without good reason 79 | 80 | ```python 81 | try: 82 | result = await client.get_account_info(address) 83 | logger.info(f"Successfully fetched account info for {address}") 84 | except Exception as e: 85 | logger.exception(f"Failed to fetch account info for {address}: {e}") 86 | raise 87 | ``` 88 | 89 | ## Logging Standards 90 | 91 | ### Logger Usage 92 | - Use `get_logger(__name__)` pattern consistently 93 | - Import from `utils.logger` 94 | - Use appropriate log levels (DEBUG, INFO, WARNING, ERROR) 95 | - Include context in log messages 96 | 97 | 
```python 98 | from utils.logger import get_logger 99 | 100 | logger = get_logger(__name__) 101 | 102 | # Good logging examples 103 | logger.info(f"Starting bot '{bot_name}' with platform {platform.value}") 104 | logger.warning(f"Transaction failed, attempt {attempt}/{max_attempts}") 105 | logger.error(f"Platform {platform.value} is not supported") 106 | ``` 107 | 108 | ## Security Rules 109 | 110 | ### Sensitive Data 111 | - NEVER hardcode private keys, API tokens, or secrets 112 | - Use environment variables for all sensitive configuration 113 | - Don't log sensitive information 114 | - Validate all external inputs 115 | 116 | ### Safe Practices 117 | ```python 118 | # Good - using environment variables 119 | private_key = os.getenv("SOLANA_PRIVATE_KEY") 120 | if not private_key: 121 | raise ValueError("SOLANA_PRIVATE_KEY environment variable is required") 122 | 123 | # Bad - hardcoded secrets 124 | private_key = "your_secret_key_here" # NEVER DO THIS 125 | ``` 126 | 127 | ## Code Quality 128 | 129 | ### Linting Compliance 130 | Ensure code passes all enabled Ruff rules: 131 | - Security best practices (S) 132 | - Type annotations (ANN) 133 | - Exception handling (BLE, TRY) 134 | - Code complexity (C90) 135 | - Pylint conventions (PL) 136 | - No commented-out code (ERA) 137 | 138 | ### Performance Considerations 139 | - Use async/await for I/O operations 140 | - Implement proper connection pooling for HTTP clients 141 | - Cache expensive computations when appropriate 142 | - Use uvloop for better async performance 143 | 144 | ```python 145 | # Set uvloop policy at module level 146 | import asyncio 147 | import uvloop 148 | 149 | asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) 150 | ``` -------------------------------------------------------------------------------- /bots/bot-sniper-2-logs.yaml: -------------------------------------------------------------------------------- 1 | # This file defines comprehensive parameters and settings for the trading bot. 
2 | # Carefully review and adjust values to match your trading strategy and risk tolerance. 3 | 4 | # Bot identification and connection settings 5 | name: "bot-sniper-2" 6 | env_file: ".env" 7 | rpc_endpoint: "${SOLANA_NODE_RPC_ENDPOINT}" 8 | wss_endpoint: "${SOLANA_NODE_WSS_ENDPOINT}" 9 | private_key: "${SOLANA_PRIVATE_KEY}" 10 | 11 | enabled: false # You can turn off the bot w/o removing its config 12 | separate_process: true 13 | 14 | # Options: "pump_fun" (default), "lets_bonk" 15 | platform: "pump_fun" 16 | 17 | # Geyser configuration (fastest method for getting updates) 18 | geyser: 19 | endpoint: "${GEYSER_ENDPOINT}" 20 | api_token: "${GEYSER_API_TOKEN}" 21 | auth_type: "basic" # or "x-token" 22 | 23 | # Trading parameters 24 | # Control trade execution: amount of SOL per trade and acceptable price deviation 25 | trade: 26 | buy_amount: 0.0001 # Amount of SOL to spend when buying (in SOL) 27 | buy_slippage: 0.3 # Maximum acceptable price deviation (0.3 = 30%) 28 | sell_slippage: 0.3 29 | 30 | # Exit strategy configuration 31 | exit_strategy: "time_based" # Options: "time_based", "tp_sl", "manual" 32 | take_profit_percentage: 0.4 # Take profit at 40% gain (0.4 = 40%) 33 | stop_loss_percentage: 0.4 # Stop loss at 40% loss (0.4 = 40%) 34 | max_hold_time: 15 # Maximum hold time in seconds for TP/SL strategy, for time_based - see wait_after_buy 35 | price_check_interval: 2 # Check price every 2 seconds 36 | 37 | # EXTREME FAST mode configuration 38 | # When enabled, skips waiting for the bonding curve to stabilize and RPC price check. 39 | # The bot buys the specified number of tokens directly, making the process faster but less precise. 40 | extreme_fast_mode: true 41 | extreme_fast_token_amount: 20 # Amount of tokens to buy 42 | 43 | # Priority fee configuration 44 | # Manage transaction speed and cost on the Solana network. 45 | # Note: dynamic mode requires an additional RPC call, which slows down the buying process. 
46 | priority_fees: 47 | enable_dynamic: false # Use latest transactions to estimate required fee (getRecentPrioritizationFees) 48 | enable_fixed: true # Use fixed amount below 49 | fixed_amount: 200_000 # Base fee in microlamports 50 | extra_percentage: 0.0 # Percentage increase on priority fee regardless of the calculation method (0.1 = 10%) 51 | hard_cap: 200_000 # Maximum allowable fee in microlamports to prevent excessive spending 52 | 53 | # Compute unit limits for transaction processing 54 | # Operation-specific defaults are used if not specified: buy=100K, sell=60K 55 | compute_units: 56 | # Override default CU limits for this platform 57 | # buy: 100_000 # Buy operations (ATA creation + trading) 58 | # sell: 60_000 # Sell operations (just trading) 59 | 60 | # Account data size optimization (reduces CU cost and improves tx priority) 61 | # Reduces CU cost from 16k to ~128 CU by limiting loaded account data. 62 | # Default is 64MB (16k CU). Setting to 512KB significantly reduces overhead. 63 | # Note: Savings don't show in "consumed CU" but improve tx priority/cost. 64 | # Note (Nov 23, 2025): with data size set to 512KB, transactions fail - increasing to 12.5MB resolves the issue.
65 | # Reference: https://www.anza.xyz/blog/cu-optimization-with-setloadedaccountsdatasizelimit 66 | account_data_size: 12_500_000 67 | 68 | # Filters for token selection 69 | filters: 70 | match_string: null # Only process tokens with this string in name/symbol 71 | bro_address: null # Only trade tokens created by this user address 72 | listener_type: "logs" # Method for detecting new tokens: "logs", "blocks", or "geyser" 73 | max_token_age: 0.001 # Maximum token age in seconds for processing 74 | marry_mode: false # Only buy tokens, skip selling 75 | yolo_mode: false # Continuously trade tokens 76 | 77 | # Retry and timeout settings 78 | retries: 79 | max_attempts: 1 # Number of attempts for transaction submission 80 | wait_after_creation: 15 # Seconds to wait after token creation (only if EXTREME FAST is disabled) 81 | wait_after_buy: 15 # Holding period after buy transaction 82 | wait_before_new_token: 15 # Pause between token trades 83 | 84 | # Token and account management 85 | cleanup: 86 | # Cleanup mode determines when to manage token accounts. Options: 87 | # "disabled": no cleanup will occur. 88 | # "on_fail": only clean up if a buy transaction fails. 89 | # "after_sell": clean up after selling. 90 | # "post_session": clean up all empty accounts after a trading session ends. 91 | mode: "post_session" 92 | force_close_with_burn: false # Force burning remaining tokens before closing account 93 | with_priority_fee: false # Use priority fees for cleanup transactions 94 | 95 | # Node provider configuration (not implemented) 96 | node: 97 | max_rps: 25 # Maximum requests per second 98 | -------------------------------------------------------------------------------- /bots/bot-sniper-1-geyser.yaml: -------------------------------------------------------------------------------- 1 | # This file defines comprehensive parameters and settings for the trading bot. 2 | # Carefully review and adjust values to match your trading strategy and risk tolerance. 
3 | 4 | # Bot identification and connection settings 5 | name: "bot-sniper-1" 6 | env_file: ".env" 7 | rpc_endpoint: "${SOLANA_NODE_RPC_ENDPOINT}" 8 | wss_endpoint: "${SOLANA_NODE_WSS_ENDPOINT}" 9 | private_key: "${SOLANA_PRIVATE_KEY}" 10 | 11 | enabled: true # You can turn off the bot w/o removing its config 12 | separate_process: true 13 | 14 | # Options: "pump_fun" (default), "lets_bonk" 15 | platform: "pump_fun" 16 | 17 | # Geyser configuration (fastest method for getting updates) 18 | geyser: 19 | endpoint: "${GEYSER_ENDPOINT}" 20 | api_token: "${GEYSER_API_TOKEN}" 21 | auth_type: "x-token" # or "basic" 22 | 23 | # Trading parameters 24 | # Control trade execution: amount of SOL per trade and acceptable price deviation 25 | trade: 26 | buy_amount: 0.0001 # Amount of SOL to spend when buying (in SOL) 27 | buy_slippage: 0.3 # Maximum acceptable price deviation (0.3 = 30%) 28 | sell_slippage: 0.3 29 | 30 | # Exit strategy configuration 31 | exit_strategy: "time_based" # Options: "time_based", "tp_sl", "manual" 32 | #take_profit_percentage: 0.1 # Take profit at 10% gain (0.1 = 10%) 33 | #stop_loss_percentage: 0.1 # Stop loss at 10% loss (0.1 = 10%) 34 | max_hold_time: 5 # Maximum hold time in seconds for TP/SL strategy, for time_based - see wait_after_buy 35 | #price_check_interval: 2 # Check price every 2 seconds 36 | 37 | # EXTREME FAST mode configuration 38 | # When enabled, skips waiting for the bonding curve to stabilize and RPC price check. 39 | # The bot buys the specified number of tokens directly, making the process faster but less precise. 40 | extreme_fast_mode: true 41 | extreme_fast_token_amount: 20 # Amount of tokens to buy 42 | 43 | # Priority fee configuration 44 | # Manage transaction speed and cost on the Solana network. 45 | # Note: dynamic mode requires an additional RPC call, which slows down the buying process. 
46 | priority_fees: 47 | enable_dynamic: false # Use latest transactions to estimate required fee (getRecentPrioritizationFees) 48 | enable_fixed: true # Use fixed amount below 49 | fixed_amount: 1_000_000 # Base fee in microlamports 50 | extra_percentage: 0.0 # Percentage increase on priority fee regardless of the calculation method (0.1 = 10%) 51 | hard_cap: 1_000_000 # Maximum allowable fee in microlamports to prevent excessive spending 52 | 53 | # Compute unit limits for transaction processing 54 | # Operation-specific defaults are used if not specified: buy=100K, sell=60K 55 | compute_units: 56 | # Override default CU limits for this platform 57 | # buy: 100_000 # Buy operations (ATA creation + trading) 58 | # sell: 60_000 # Sell operations (just trading) 59 | 60 | # Account data size optimization (reduces CU cost and improves tx priority) 61 | # Reduces CU cost from 16k to ~128 CU by limiting loaded account data. 62 | # Default is 64MB (16k CU). Setting to 512KB significantly reduces overhead. 63 | # Note: Savings don't show in "consumed CU" but improve tx priority/cost. 64 | # Note (Nov 23, 2025): with data size set to 512KB, transactions fail - increasing to 12.5MB resolves the issue.
65 | # Reference: https://www.anza.xyz/blog/cu-optimization-with-setloadedaccountsdatasizelimit 66 | account_data_size: 12_500_000 67 | 68 | # Filters for token selection 69 | filters: 70 | match_string: null # Only process tokens with this string in name/symbol 71 | bro_address: null # Only trade tokens created by this user address 72 | listener_type: "geyser" # Method for detecting new tokens: "logs", "blocks", or "geyser" 73 | max_token_age: 0.001 # Maximum token age in seconds for processing 74 | marry_mode: false # Only buy tokens, skip selling 75 | yolo_mode: false # Continuously trade tokens 76 | 77 | # Retry and timeout settings 78 | retries: 79 | max_attempts: 1 # Number of attempts for transaction submission 80 | wait_after_creation: 15 # Seconds to wait after token creation (only if EXTREME FAST is disabled) 81 | wait_after_buy: 5 # Holding period after buy transaction 82 | wait_before_new_token: 15 # Pause between token trades 83 | 84 | # Token and account management 85 | cleanup: 86 | # Cleanup mode determines when to manage token accounts. Options: 87 | # "disabled": no cleanup will occur. 88 | # "on_fail": only clean up if a buy transaction fails. 89 | # "after_sell": clean up after selling. 90 | # "post_session": clean up all empty accounts after a trading session ends. 91 | mode: "post_session" 92 | force_close_with_burn: false # Force burning remaining tokens before closing account 93 | with_priority_fee: false # Use priority fees for cleanup transactions 94 | 95 | # Node provider configuration (not implemented) 96 | node: 97 | max_rps: 25 # Maximum requests per second 98 | -------------------------------------------------------------------------------- /bots/bot-sniper-3-blocks.yaml: -------------------------------------------------------------------------------- 1 | # This file defines comprehensive parameters and settings for the trading bot. 2 | # Carefully review and adjust values to match your trading strategy and risk tolerance. 
3 | 4 | # Bot identification and connection settings 5 | name: "bot-sniper-3" 6 | env_file: ".env" 7 | rpc_endpoint: "${SOLANA_NODE_RPC_ENDPOINT}" 8 | wss_endpoint: "${SOLANA_NODE_WSS_ENDPOINT}" 9 | private_key: "${SOLANA_PRIVATE_KEY}" 10 | 11 | enabled: false # You can turn off the bot w/o removing its config 12 | separate_process: true 13 | 14 | # Options: "pump_fun" (default), "lets_bonk" 15 | platform: "pump_fun" 16 | 17 | # Geyser configuration (fastest method for getting updates) 18 | geyser: 19 | endpoint: "${GEYSER_ENDPOINT}" 20 | api_token: "${GEYSER_API_TOKEN}" 21 | auth_type: "basic" # or "x-token" 22 | 23 | # Trading parameters 24 | # Control trade execution: amount of SOL per trade and acceptable price deviation 25 | trade: 26 | buy_amount: 0.0001 # Amount of SOL to spend when buying (in SOL) 27 | buy_slippage: 0.3 # Maximum acceptable price deviation (0.3 = 30%) 28 | sell_slippage: 0.3 29 | 30 | # Exit strategy configuration 31 | exit_strategy: "time_based" # Options: "time_based", "tp_sl", "manual" 32 | #take_profit_percentage: 0.1 # Take profit at 10% gain (0.1 = 10%) 33 | #stop_loss_percentage: 0.1 # Stop loss at 10% loss (0.1 = 10%) 34 | max_hold_time: 15 # Maximum hold time in seconds for TP/SL strategy, for time_based - see wait_after_buy 35 | #price_check_interval: 2 # Check price every 2 seconds 36 | 37 | # EXTREME FAST mode configuration 38 | # When enabled, skips waiting for the bonding curve to stabilize and RPC price check. 39 | # The bot buys the specified number of tokens directly, making the process faster but less precise. 40 | extreme_fast_mode: true 41 | extreme_fast_token_amount: 20 # Amount of tokens to buy 42 | 43 | # Priority fee configuration 44 | # Manage transaction speed and cost on the Solana network. 45 | # Note: dynamic mode requires an additional RPC call, which slows down the buying process. 
46 | priority_fees: 47 | enable_dynamic: false # Use latest transactions to estimate required fee (getRecentPrioritizationFees) 48 | enable_fixed: true # Use fixed amount below 49 | fixed_amount: 200_000 # Base fee in microlamports 50 | extra_percentage: 0.0 # Percentage increase on priority fee regardless of the calculation method (0.1 = 10%) 51 | hard_cap: 200_000 # Maximum allowable fee in microlamports to prevent excessive spending 52 | 53 | # Compute unit limits for transaction processing 54 | # Operation-specific defaults are used if not specified: buy=100K, sell=60K 55 | compute_units: 56 | # Override default CU limits for this platform 57 | # buy: 100_000 # Buy operations (ATA creation + trading) 58 | # sell: 60_000 # Sell operations (just trading) 59 | 60 | # Account data size optimization (reduces CU cost and improves tx priority) 61 | # Reduces CU cost from 16k to ~128 CU by limiting loaded account data. 62 | # Default is 64MB (16k CU). Setting to 512KB significantly reduces overhead. 63 | # Note: Savings don't show in "consumed CU" but improve tx priority/cost. 64 | # Note (Nov 23, 2025): with data size set to 512KB, transactions fail - increasing to 12.5MB resolves the issue.
65 | # Reference: https://www.anza.xyz/blog/cu-optimization-with-setloadedaccountsdatasizelimit 66 | account_data_size: 12_500_000 67 | 68 | # Filters for token selection 69 | filters: 70 | match_string: null # Only process tokens with this string in name/symbol 71 | bro_address: null # Only trade tokens created by this user address 72 | listener_type: "blocks" # Method for detecting new tokens: "logs", "blocks", or "geyser" 73 | max_token_age: 0.001 # Maximum token age in seconds for processing 74 | marry_mode: false # Only buy tokens, skip selling 75 | yolo_mode: false # Continuously trade tokens 76 | 77 | # Retry and timeout settings 78 | retries: 79 | max_attempts: 1 # Number of attempts for transaction submission 80 | wait_after_creation: 15 # Seconds to wait after token creation (only if EXTREME FAST is disabled) 81 | wait_after_buy: 15 # Holding period after buy transaction 82 | wait_before_new_token: 15 # Pause between token trades 83 | 84 | # Token and account management 85 | cleanup: 86 | # Cleanup mode determines when to manage token accounts. Options: 87 | # "disabled": no cleanup will occur. 88 | # "on_fail": only clean up if a buy transaction fails. 89 | # "after_sell": clean up after selling. 90 | # "post_session": clean up all empty accounts after a trading session ends. 91 | mode: "post_session" 92 | force_close_with_burn: false # Force burning remaining tokens before closing account 93 | with_priority_fee: false # Use priority fees for cleanup transactions 94 | 95 | # Node provider configuration (not implemented) 96 | node: 97 | max_rps: 25 # Maximum requests per second 98 | -------------------------------------------------------------------------------- /bots/bot-sniper-4-pp.yaml: -------------------------------------------------------------------------------- 1 | # This file defines comprehensive parameters and settings for the trading bot. 2 | # Carefully review and adjust values to match your trading strategy and risk tolerance. 
3 | 4 | # Bot identification and connection settings 5 | name: "bot-sniper-pumpportal" 6 | env_file: ".env" 7 | rpc_endpoint: "${SOLANA_NODE_RPC_ENDPOINT}" 8 | wss_endpoint: "${SOLANA_NODE_WSS_ENDPOINT}" 9 | private_key: "${SOLANA_PRIVATE_KEY}" 10 | 11 | enabled: false # You can turn off the bot w/o removing its config 12 | separate_process: true 13 | 14 | # Options: "pump_fun" (default), "lets_bonk" 15 | platform: "lets_bonk" 16 | 17 | # PumpPortal configuration (optional - uses default URL if not specified) 18 | pumpportal: 19 | url: "wss://pumpportal.fun/api/data" # Default PumpPortal WebSocket URL 20 | 21 | # Trading parameters 22 | # Control trade execution: amount of SOL per trade and acceptable price deviation 23 | trade: 24 | buy_amount: 0.0001 # Amount of SOL to spend when buying (in SOL) 25 | buy_slippage: 0.3 # Maximum acceptable price deviation (0.3 = 30%) 26 | sell_slippage: 0.3 27 | 28 | # Exit strategy configuration 29 | exit_strategy: "time_based" # Options: "time_based", "tp_sl", "manual" 30 | take_profit_percentage: 0.1 # Take profit at 10% gain (0.1 = 10%) 31 | stop_loss_percentage: 0.1 # Stop loss at 10% loss (0.1 = 10%) 32 | max_hold_time: 15 # Maximum hold time in seconds for TP/SL strategy, for time_based - see wait_after_buy 33 | price_check_interval: 2 # Check price every 2 seconds 34 | 35 | # EXTREME FAST mode configuration 36 | # When enabled, skips waiting for the bonding curve to stabilize and RPC price check. 37 | # The bot buys the specified number of tokens directly, making the process faster but less precise. 38 | extreme_fast_mode: true 39 | extreme_fast_token_amount: 20 # Amount of tokens to buy 40 | 41 | # Priority fee configuration 42 | # Manage transaction speed and cost on the Solana network. 43 | # Note: dynamic mode requires an additional RPC call, which slows down the buying process. 
44 | priority_fees: 45 | enable_dynamic: false # Use latest transactions to estimate required fee (getRecentPrioritizationFees) 46 | enable_fixed: true # Use fixed amount below 47 | fixed_amount: 200_000 # Base fee in microlamports 48 | extra_percentage: 0.0 # Percentage increase on priority fee regardless of the calculation method (0.1 = 10%) 49 | hard_cap: 200_000 # Maximum allowable fee in microlamports to prevent excessive spending 50 | 51 | # Compute unit limits for transaction processing 52 | # Operation-specific defaults are used if not specified: buy=100K, sell=60K 53 | compute_units: 54 | # Override default CU limits for this platform 55 | # buy: 100_000 # Buy operations (ATA creation + trading) 56 | # sell: 60_000 # Sell operations (just trading) 57 | 58 | # Account data size optimization (reduces CU cost and improves tx priority) 59 | # Reduces CU cost from 16k to ~128 CU by limiting loaded account data. 60 | # Default is 64MB (16k CU). Setting to 512KB significantly reduces overhead. 61 | # Note: Savings don't show in "consumed CU" but improve tx priority/cost. 62 | # Note (Nov 23, 2025): with data size set to 512KB, transactions fail - increasing to 12.5MB resolves the issue. 
63 | # Reference: https://www.anza.xyz/blog/cu-optimization-with-setloadedaccountsdatasizelimit 64 | account_data_size: 12_500_000 65 | 66 | # Filters for token selection 67 | filters: 68 | match_string: null # Only process tokens with this string in name/symbol 69 | bro_address: null # Only trade tokens created by this user address 70 | listener_type: "pumpportal" # Method for detecting new tokens: "logs", "blocks", "geyser", or "pumpportal" 71 | max_token_age: 0.001 # Maximum token age in seconds for processing 72 | marry_mode: false # Only buy tokens, skip selling 73 | yolo_mode: false # Continuously trade tokens 74 | 75 | # Retry and timeout settings 76 | retries: 77 | max_attempts: 1 # Number of attempts for transaction submission 78 | wait_after_creation: 15 # Seconds to wait after token creation (only if EXTREME FAST is disabled) 79 | wait_after_buy: 15 # Holding period after buy transaction 80 | wait_before_new_token: 15 # Pause between token trades 81 | 82 | # Token and account management 83 | cleanup: 84 | # Cleanup mode determines when to manage token accounts. Options: 85 | # "disabled": no cleanup will occur. 86 | # "on_fail": only clean up if a buy transaction fails. 87 | # "after_sell": clean up after selling. 88 | # "post_session": clean up all empty accounts after a trading session ends. 89 | mode: "post_session" 90 | force_close_with_burn: false # Force burning remaining tokens before closing account 91 | with_priority_fee: false # Use priority fees for cleanup transactions 92 | 93 | # Node provider configuration (not implemented) 94 | node: 95 | max_rps: 25 # Maximum requests per second 96 | -------------------------------------------------------------------------------- /src/platforms/pumpfun/pumpportal_processor.py: -------------------------------------------------------------------------------- 1 | """ 2 | PumpFun-specific PumpPortal event processor. 
3 | File: src/platforms/pumpfun/pumpportal_processor.py 4 | """ 5 | 6 | from solders.pubkey import Pubkey 7 | 8 | from core.pubkeys import SystemAddresses 9 | from interfaces.core import Platform, TokenInfo 10 | from platforms.pumpfun.address_provider import PumpFunAddressProvider 11 | from utils.logger import get_logger 12 | 13 | logger = get_logger(__name__) 14 | 15 | 16 | class PumpFunPumpPortalProcessor: 17 | """PumpPortal processor for pump.fun tokens.""" 18 | 19 | def __init__(self): 20 | """Initialize the processor with address provider.""" 21 | self.address_provider = PumpFunAddressProvider() 22 | 23 | @property 24 | def platform(self) -> Platform: 25 | """Get the platform this processor handles.""" 26 | return Platform.PUMP_FUN 27 | 28 | @property 29 | def supported_pool_names(self) -> list[str]: 30 | """Get the pool names this processor supports from PumpPortal.""" 31 | return ["pump"] # PumpPortal pool name for pump.fun 32 | 33 | def can_process(self, token_data: dict) -> bool: 34 | """Check if this processor can handle the given token data. 35 | 36 | Args: 37 | token_data: Token data from PumpPortal 38 | 39 | Returns: 40 | True if this processor can handle the token data 41 | """ 42 | pool = token_data.get("pool", "").lower() 43 | return pool in self.supported_pool_names 44 | 45 | def process_token_data(self, token_data: dict) -> TokenInfo | None: 46 | """Process pump.fun token data from PumpPortal. 
47 | 48 | Args: 49 | token_data: Token data from PumpPortal WebSocket 50 | 51 | Returns: 52 | TokenInfo if token creation found, None otherwise 53 | """ 54 | try: 55 | # Extract required fields 56 | name = token_data.get("name", "") 57 | symbol = token_data.get("symbol", "") 58 | mint_str = token_data.get("mint") 59 | bonding_curve_str = token_data.get("bondingCurveKey") 60 | creator_str = token_data.get("traderPublicKey") # Maps to user field 61 | uri = token_data.get("uri", "") 62 | 63 | # Additional fields available from PumpPortal but not currently used: 64 | # - initialBuy: Initial buy amount in tokens 65 | # - solAmount: SOL amount spent on initial buy 66 | # - vSolInBondingCurve: Virtual SOL in bonding curve 67 | # - vTokensInBondingCurve: Virtual tokens in bonding curve 68 | # - marketCapSol: Market cap in SOL 69 | # - signature: Transaction signature 70 | 71 | if not all([name, symbol, mint_str, bonding_curve_str, creator_str]): 72 | logger.warning("Missing required fields in PumpPortal token data") 73 | return None 74 | 75 | # Convert string addresses to Pubkey objects 76 | mint = Pubkey.from_string(mint_str) 77 | bonding_curve = Pubkey.from_string(bonding_curve_str) 78 | user = Pubkey.from_string(creator_str) 79 | 80 | # For PumpPortal, we assume the creator is the same as the user 81 | # since PumpPortal doesn't distinguish between them 82 | creator = user 83 | 84 | # Derive additional addresses using platform provider 85 | # PumpPortal doesn't distinguish between Token and Token2022. 86 | # Default to TOKEN_2022_PROGRAM as per pump.fun's migration to create_v2. 87 | # Technical limitation: Cannot distinguish from pre-parsed data, but risk is low 88 | # since pump.fun now defaults to Token2022 for all new tokens. 
89 | token_program_id = SystemAddresses.TOKEN_2022_PROGRAM 90 | 91 | associated_bonding_curve = ( 92 | self.address_provider.derive_associated_bonding_curve( 93 | mint, bonding_curve, token_program_id 94 | ) 95 | ) 96 | creator_vault = self.address_provider.derive_creator_vault(creator) 97 | 98 | return TokenInfo( 99 | name=name, 100 | symbol=symbol, 101 | uri=uri, 102 | mint=mint, 103 | platform=Platform.PUMP_FUN, 104 | bonding_curve=bonding_curve, 105 | associated_bonding_curve=associated_bonding_curve, 106 | user=user, 107 | creator=creator, 108 | creator_vault=creator_vault, 109 | token_program_id=token_program_id, 110 | ) 111 | 112 | except Exception: 113 | logger.exception("Failed to process PumpPortal token data") 114 | return None 115 | -------------------------------------------------------------------------------- /AGENTS.md: -------------------------------------------------------------------------------- 1 | # AGENTS Guidelines for This Repository 2 | 3 | This repository contains a Solana trading bot for pump.fun and letsbonk.fun platforms. When working on the project interactively with an agent (e.g. the Codex CLI) please follow the guidelines below for safe development and testing. 4 | 5 | ## 1. Use Learning Examples for Testing 6 | 7 | * **Always test with learning examples first** in `learning-examples/` before modifying the main bot. 8 | * **Do _not_ run the main bot with real funds** during agent development sessions. 9 | * **Test all changes** using manual buy/sell scripts with minimal amounts before production use. 10 | * **Use testnet** or paper trading when available to validate logic. 11 | 12 | ## 2. Keep Dependencies in Sync 13 | 14 | If you add or update dependencies: 15 | 16 | 1. Use `uv add ` to add new dependencies. 17 | 2. The `uv.lock` file will be automatically updated. 18 | 3. Restart any running bots after dependency changes. 19 | 4. Verify compatibility with Python 3.9+ as specified in the project. 20 | 21 | ## 3. 
Coding Conventions 22 | 23 | * Follow Ruff linting rules defined in `pyproject.toml`. 24 | * Use Google-style docstrings for functions and classes. 25 | * Include type hints for all public functions. 26 | * Use the centralized logger: `from src.utils.logger import get_logger`. 27 | * Keep line length to 88 characters (auto-formatted). 28 | * Use double quotes for strings. 29 | 30 | ## 4. Code Quality Checks 31 | 32 | Before completing any task, run these quality checks: 33 | 34 | | Command | Purpose | 35 | | ----------------------- | ------------------------------------------ | 36 | | `ruff format` | Format code to project standards | 37 | | `ruff check` | Run linting checks | 38 | | `ruff check --fix` | Auto-fix linting issues where possible | 39 | 40 | ## 5. Testing Workflow 41 | 42 | Test changes progressively: 43 | 44 | 1. **Unit testing**: Use individual learning examples 45 | ```bash 46 | uv run learning-examples/fetch_price.py 47 | ``` 48 | 49 | 2. **Integration testing**: Test specific listeners 50 | ```bash 51 | uv run learning-examples/listen-new-tokens/listen_logsubscribe.py 52 | ``` 53 | 54 | 3. **Configuration testing**: Validate YAML configs before running 55 | ```bash 56 | # Check syntax and required fields manually 57 | ``` 58 | 59 | 4. **Dry run**: Use minimal amounts and conservative settings first 60 | 61 | ## 6. Environment Configuration 62 | 63 | Never commit sensitive data: 64 | 65 | * Keep private keys in `.env` file (git-ignored). 66 | * Use separate `.env` files for development and production. 67 | * Required environment variables: 68 | ```env 69 | SOLANA_RPC_WEBSOCKET=wss://... 70 | SOLANA_RPC_HTTP=https://... 71 | PRIVATE_KEY=your_private_key_here 72 | ``` 73 | 74 | ## 7. Bot Configuration Best Practices 75 | 76 | * Edit YAML files in `bots/` directory for bot instances. 
77 | * Start with conservative settings: 78 | - Low `buy_amount` 79 | - High `min_sol_balance` 80 | - Strict filters 81 | * Test one bot instance at a time during development. 82 | * Monitor logs in `logs/` directory for debugging. 83 | 84 | ## 8. Platform-Specific Development 85 | 86 | When adding features: 87 | 88 | * Check platform compatibility (`pump_fun` vs `lets_bonk`). 89 | * Test with both platforms if changes affect core logic. 90 | * Update platform-specific implementations in `src/platforms/`. 91 | * Verify IDL files match the on-chain programs. 92 | 93 | ## 9. Safety Reminders 94 | 95 | * **Never expose private keys** in code, logs, or commits. 96 | * **Test with minimal amounts** first. 97 | * **Verify transactions** on Solana explorer before scaling up. 98 | * **Monitor rate limits** of your RPC provider. 99 | * **Keep logs** for audit and debugging purposes. 100 | 101 | ## 10. Useful Commands Recap 102 | 103 | | Command | Purpose | 104 | | -------------------------------------------------- | --------------------------------- | 105 | | `uv sync` | Install/update dependencies | 106 | | `source .venv/bin/activate` | Activate virtual environment | 107 | | `uv pip install -e .` | Install bot as editable package | 108 | | `pump_bot` | Run the main bot | 109 | | `uv run learning-examples/manual_buy.py` | Test manual buy | 110 | | `uv run learning-examples/manual_sell.py` | Test manual sell | 111 | 112 | --- 113 | 114 | Following these practices ensures safe development, prevents accidental trades, and maintains code quality. Always prioritize testing and security when working with trading bots. -------------------------------------------------------------------------------- /src/platforms/letsbonk/pumpportal_processor.py: -------------------------------------------------------------------------------- 1 | """ 2 | LetsBonk-specific PumpPortal event processor. 
3 | File: src/platforms/letsbonk/pumpportal_processor.py 4 | """ 5 | 6 | from solders.pubkey import Pubkey 7 | 8 | from interfaces.core import Platform, TokenInfo 9 | from platforms.letsbonk.address_provider import LetsBonkAddressProvider 10 | from utils.logger import get_logger 11 | 12 | logger = get_logger(__name__) 13 | 14 | 15 | class LetsBonkPumpPortalProcessor: 16 | """PumpPortal processor for LetsBonk tokens.""" 17 | 18 | def __init__(self): 19 | """Initialize the processor with address provider.""" 20 | self.address_provider = LetsBonkAddressProvider() 21 | 22 | @property 23 | def platform(self) -> Platform: 24 | """Get the platform this processor handles.""" 25 | return Platform.LETS_BONK 26 | 27 | @property 28 | def supported_pool_names(self) -> list[str]: 29 | """Get the pool names this processor supports from PumpPortal.""" 30 | return ["bonk"] # PumpPortal pool name for LetsBonk/bonk pools 31 | 32 | def can_process(self, token_data: dict) -> bool: 33 | """Check if this processor can handle the given token data. 34 | 35 | Args: 36 | token_data: Token data from PumpPortal 37 | 38 | Returns: 39 | True if this processor can handle the token data 40 | """ 41 | pool = token_data.get("pool", "").lower() 42 | return pool in self.supported_pool_names 43 | 44 | def process_token_data(self, token_data: dict) -> TokenInfo | None: 45 | """Process LetsBonk token data from PumpPortal. 
46 | 47 | Args: 48 | token_data: Token data from PumpPortal WebSocket 49 | 50 | Returns: 51 | TokenInfo if token creation found, None otherwise 52 | """ 53 | try: 54 | # Extract required fields for LetsBonk 55 | name = token_data.get("name", "") 56 | symbol = token_data.get("symbol", "") 57 | mint_str = token_data.get("mint") 58 | creator_str = token_data.get("traderPublicKey") 59 | uri = token_data.get("uri", "") 60 | 61 | # Note: LetsBonk tokens from PumpPortal might have different field mappings 62 | # This would need to be adjusted based on actual PumpPortal data for LetsBonk tokens 63 | 64 | if not all([name, symbol, mint_str, creator_str]): 65 | logger.warning( 66 | "Missing required fields in PumpPortal LetsBonk token data" 67 | ) 68 | return None 69 | 70 | # Convert string addresses to Pubkey objects 71 | mint = Pubkey.from_string(mint_str) 72 | user = Pubkey.from_string(creator_str) 73 | creator = user 74 | 75 | # Derive LetsBonk-specific addresses 76 | pool_state = self.address_provider.derive_pool_address(mint) 77 | 78 | # For LetsBonk, vault addresses might need to be derived differently 79 | # or provided in the PumpPortal data. 
For now, we'll derive them 80 | # using the standard pattern, but this might need adjustment 81 | additional_accounts = self.address_provider.get_additional_accounts( 82 | # Create a minimal TokenInfo to get additional accounts 83 | TokenInfo( 84 | name=name, 85 | symbol=symbol, 86 | uri=uri, 87 | mint=mint, 88 | platform=Platform.LETS_BONK, 89 | pool_state=pool_state, 90 | user=user, 91 | creator=creator, 92 | base_vault=None, # Will be filled from additional_accounts 93 | quote_vault=None, # Will be filled from additional_accounts 94 | ) 95 | ) 96 | 97 | # Extract vault addresses if available 98 | base_vault = additional_accounts.get("base_vault") 99 | quote_vault = additional_accounts.get("quote_vault") 100 | 101 | # If vaults aren't available from additional_accounts, 102 | # we might need to derive them or leave them None 103 | # and let the trading logic handle the derivation 104 | 105 | return TokenInfo( 106 | name=name, 107 | symbol=symbol, 108 | uri=uri, 109 | mint=mint, 110 | platform=Platform.LETS_BONK, 111 | pool_state=pool_state, 112 | base_vault=base_vault, 113 | quote_vault=quote_vault, 114 | user=user, 115 | creator=creator, 116 | ) 117 | 118 | except Exception: 119 | logger.exception("Failed to process PumpPortal LetsBonk token data") 120 | return None 121 | -------------------------------------------------------------------------------- /learning-examples/listen-migrations/listen_blocksubscribe_old_raydium.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import json 3 | import os 4 | 5 | import websockets 6 | from dotenv import load_dotenv 7 | from solders.pubkey import Pubkey 8 | 9 | load_dotenv() 10 | 11 | WSS_ENDPOINT = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 12 | PUMP_MIGRATOR_ID = Pubkey.from_string("39azUYFWPz3VHgKCf3VChUwbpURdCHRxjWVowf5jUJjg") 13 | 14 | 15 | def process_initialize2_transaction(data): 16 | """Process and decode an initialize2 transaction""" 17 | try: 18 | 
signature = data["transaction"]["signatures"][0] 19 | account_keys = data["transaction"]["message"]["accountKeys"] 20 | 21 | # Check raydium_amm_idl.json for the account keys 22 | # The token address is typically the 19th account (index 18) 23 | # The liquidity pool address is typically the 3rd account (index 2) 24 | if len(account_keys) > 18: 25 | token_address = account_keys[18] 26 | liquidity_address = account_keys[2] 27 | 28 | print(f"\nSignature: {signature}") 29 | print(f"Token Address: {token_address}") 30 | print(f"Liquidity Address: {liquidity_address}") 31 | print("=" * 50) 32 | else: 33 | print(f"\nError: Not enough account keys (found {len(account_keys)})") 34 | 35 | except Exception as e: 36 | print(f"\nError: {e!s}") 37 | 38 | 39 | async def listen_for_events(): 40 | while True: 41 | try: 42 | async with websockets.connect(WSS_ENDPOINT) as websocket: 43 | subscription_message = json.dumps( 44 | { 45 | "jsonrpc": "2.0", 46 | "id": 1, 47 | "method": "blockSubscribe", 48 | "params": [ 49 | {"mentionsAccountOrProgram": str(PUMP_MIGRATOR_ID)}, 50 | { 51 | "commitment": "confirmed", 52 | "encoding": "json", 53 | "showRewards": False, 54 | "transactionDetails": "full", 55 | "maxSupportedTransactionVersion": 0, 56 | }, 57 | ], 58 | } 59 | ) 60 | 61 | await websocket.send(subscription_message) 62 | response = await websocket.recv() 63 | print(f"Subscription response: {response}") 64 | print("\nListening for Raydium pool initialization events...") 65 | 66 | while True: 67 | try: 68 | response = await asyncio.wait_for(websocket.recv(), timeout=30) 69 | data = json.loads(response) 70 | 71 | if "method" in data and data["method"] == "blockNotification": 72 | if "params" in data and "result" in data["params"]: 73 | block_data = data["params"]["result"] 74 | if ( 75 | "value" in block_data 76 | and "block" in block_data["value"] 77 | ): 78 | block = block_data["value"]["block"] 79 | if "transactions" in block: 80 | for tx in block["transactions"]: 81 | logs = 
tx.get("meta", {}).get( 82 | "logMessages", [] 83 | ) 84 | 85 | # Check for initialize2 instruction 86 | for log in logs: 87 | if ( 88 | "Program log: initialize2: InitializeInstruction2" 89 | in log 90 | ): 91 | print( 92 | "Found initialize2 instruction!" 93 | ) 94 | process_initialize2_transaction(tx) 95 | break 96 | 97 | except TimeoutError: 98 | print("\nChecking connection...") 99 | print("Connection alive") 100 | continue 101 | 102 | except Exception as e: 103 | print(f"\nConnection error: {e!s}") 104 | print("Retrying in 5 seconds...") 105 | await asyncio.sleep(5) 106 | 107 | 108 | if __name__ == "__main__": 109 | asyncio.run(listen_for_events()) 110 | -------------------------------------------------------------------------------- /.github/workflows/spam-detection.yml: -------------------------------------------------------------------------------- 1 | name: Suspicious Comment Detection 2 | 3 | on: 4 | issue_comment: 5 | types: [created] 6 | pull_request_review_comment: 7 | types: [created] 8 | 9 | jobs: 10 | check_comment: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Check for suspicious patterns 14 | uses: actions/github-script@v6 15 | with: 16 | github-token: ${{ secrets.GITHUB_TOKEN }} 17 | script: | 18 | try { 19 | const comment = context.payload.comment; 20 | const body = comment.body.toLowerCase(); 21 | const author = comment.user.login; 22 | 23 | // Suspicious patterns 24 | const suspiciousPatterns = [ 25 | 'support team', 26 | 'customer service', 27 | 'telegram', 28 | 'whatsapp', 29 | 'contact us', 30 | 'click here', 31 | 'support group', 32 | 't.me/', 33 | 'wa.me/', 34 | 'support chat', 35 | 'live chat', 36 | 'support ticket', 37 | 'ticket id', 38 | 'live support', 39 | 'support line', 40 | 'support agent', 41 | 'support network', 42 | 'dedicated support', 43 | 'personalized assistance', 44 | 'opened for you', 45 | 'kindly talk to', 46 | 'we apologize', 47 | 'live chat with an agent', 48 | 'chat button', 49 | 'dapp portal', 50 | 
'decentralized dapp', 51 | 'access the portal', 52 | 'report your request', 53 | 'start a conversation', 54 | 'click the chat', 55 | 'for assistance', 56 | 'reach out to', 57 | 'through the chat', 58 | 'portal', 59 | 'help center', 60 | 'ticket', 61 | 'this will be review', 62 | 'bringing this to our notice', 63 | 'initiate a chat', 64 | 'regards', 65 | 'hello @', 66 | 'thanks for bringing', 67 | ]; 68 | 69 | // Add pattern weight scoring 70 | const patternWeights = { 71 | 'ticket id': 2, 72 | 'support team': 2, 73 | 'live support': 2, 74 | 'help center': 2, 75 | // Regular patterns have weight of 1 76 | }; 77 | 78 | // Calculate spam score 79 | let spamScore = 0; 80 | const foundPatterns = suspiciousPatterns.filter(pattern => { 81 | if (body.includes(pattern)) { 82 | spamScore += patternWeights[pattern] || 1; 83 | return true; 84 | } 85 | return false; 86 | }); 87 | 88 | // Check for external links (excluding common legitimate domains) 89 | const hasExternalLinks = body.includes('http') || body.includes('www'); 90 | const hasGithubLinks = body.includes('github.com'); 91 | const suspiciousLinks = hasExternalLinks && !hasGithubLinks; 92 | 93 | // Trigger on either multiple patterns or high spam score 94 | if (foundPatterns.length > 2 || spamScore >= 3) { 95 | try { 96 | // Create a warning comment 97 | await github.rest.issues.createComment({ 98 | owner: context.repo.owner, 99 | repo: context.repo.repo, 100 | issue_number: context.payload.issue ? context.payload.issue.number : context.payload.pull_request.number, 101 | body: warningMessage 102 | }); 103 | } catch (e) { 104 | console.log('Failed to create comment:', e); 105 | } 106 | 107 | try { 108 | // Add 'potential-scam' label 109 | await github.rest.issues.addLabels({ 110 | owner: context.repo.owner, 111 | repo: context.repo.repo, 112 | issue_number: context.payload.issue ? 
context.payload.issue.number : context.payload.pull_request.number, 113 | labels: ['potential-scam'] 114 | }); 115 | } catch (e) { 116 | console.log('Failed to add label:', e); 117 | } 118 | } 119 | } catch (e) { 120 | console.log('Workflow error:', e); 121 | // Still mark as failure but with more context 122 | core.setFailed(`Workflow failed: ${e.message}`); 123 | } 124 | 125 | -------------------------------------------------------------------------------- /learning-examples/decode_from_getTransaction.py: -------------------------------------------------------------------------------- 1 | import json 2 | import struct 3 | import sys 4 | 5 | import base58 6 | 7 | tx_file_path = "" 8 | 9 | if len(sys.argv) != 2: 10 | tx_file_path = "learning-examples/raw_buy_tx_from_getTransaction.json" 11 | print(f"No path provided, using the path: {tx_file_path}") 12 | else: 13 | tx_file_path = sys.argv[1] 14 | 15 | # Load the IDL 16 | with open("idl/pump_fun_idl.json") as f: 17 | idl = json.load(f) 18 | 19 | # Load the transaction log 20 | with open(tx_file_path) as f: 21 | tx_log = json.load(f) 22 | 23 | # Extract the transaction data 24 | tx_data = tx_log["result"]["transaction"] 25 | 26 | print(json.dumps(tx_data, indent=2)) 27 | 28 | 29 | def decode_create_instruction(data): 30 | """Decode legacy Create instruction (Metaplex tokens).""" 31 | # The Create instruction has 3 string arguments: name, symbol, uri 32 | offset = 8 # Skip the 8-byte discriminator 33 | results = [] 34 | for _ in range(3): 35 | length = struct.unpack_from(" str: 55 | pass 56 | 57 | # platforms/pumpfun/address_provider.py - Concrete implementation 58 | class PumpFunAddressProvider(AddressProvider): 59 | def get_program_address(self) -> str: 60 | return "6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P" 61 | ``` 62 | 63 | ### Universal Components 64 | Create platform-agnostic wrappers that delegate to platform-specific implementations: 65 | 66 | ```python 67 | class UniversalTrader: 68 | def __init__(self, 
platform: Platform, **kwargs): 69 | self.platform = platform 70 | self.platform_trader = self._create_platform_trader() 71 | 72 | def _create_platform_trader(self): 73 | # Factory method to create platform-specific trader 74 | pass 75 | ``` 76 | 77 | ### Configuration Management 78 | - Use YAML files for bot configurations in `bots/` directory 79 | - Support environment variable interpolation with `${VARIABLE}` syntax 80 | - Validate configurations before starting bots 81 | - Separate environment-specific settings in `.env` files 82 | 83 | ## Module Dependencies 84 | 85 | ### Import Rules 86 | - Core modules should not import from trading or monitoring 87 | - Platform-specific modules should only import from their own package and core/interfaces 88 | - Avoid circular imports between packages 89 | - Use dependency injection for cross-package dependencies 90 | 91 | ### Dependency Layers (from low to high level) 92 | 1. **utils/** - Utilities and helpers (no business logic dependencies) 93 | 2. **interfaces/** - Abstract base classes and protocols 94 | 3. **core/** - Blockchain and infrastructure (depends on utils, interfaces) 95 | 4. **platforms/** - Platform implementations (depends on core, interfaces) 96 | 5. **trading/** - Trading logic (depends on core, platforms, interfaces) 97 | 6. **monitoring/** - Event listening (depends on core, platforms, interfaces) 98 | 7. 
**bot_runner.py** - Main orchestrator (depends on all layers) 99 | 100 | ## Async Architecture 101 | 102 | ### Event Loop Management 103 | - Use uvloop for better performance 104 | - Set event loop policy at application startup 105 | - Use asyncio.create_task() for concurrent operations 106 | - Implement proper cleanup on shutdown 107 | 108 | ### Connection Management 109 | - Use connection pooling for HTTP clients 110 | - Implement reconnection logic for WebSocket connections 111 | - Cache expensive resources (blockhash, account info) 112 | - Use async context managers for resource cleanup 113 | 114 | ## Testing Strategy 115 | 116 | ### Test Organization 117 | - Use `learning-examples/` for integration testing and validation 118 | - Test platform-specific components independently 119 | - Mock external dependencies (RPC calls, WebSocket connections) 120 | - Validate configurations with actual bot startup 121 | 122 | ### Test Data 123 | - Use test networks for development 124 | - Never test with real funds or production keys 125 | - Create fixtures for common test scenarios 126 | - Document test account requirements 127 | 128 | ## Performance Considerations 129 | 130 | ### Caching Strategy 131 | - Cache recent blockhash in background task 132 | - Cache account information where appropriate 133 | - Use local caching for IDL data 134 | - Implement TTL for cached data 135 | 136 | ### Resource Management 137 | - Limit concurrent operations based on RPC provider limits 138 | - Implement backoff strategies for failed requests 139 | - Use separate processes for production bot instances 140 | - Monitor memory usage in long-running processes -------------------------------------------------------------------------------- /.kiro/steering/architecture.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | inclusion: always 3 | --- 4 | 5 | # Project Architecture Rules 6 | 7 | ## Directory Structure 8 | 9 | ### Package Organization 10 | 
Maintain clear separation of concerns: 11 | 12 | ``` 13 | src/ 14 | ├── __init__.py 15 | ├── bot_runner.py # Main entry point 16 | ├── config_loader.py # Configuration management 17 | ├── core/ # Core blockchain functionality 18 | │ ├── client.py # Solana RPC client abstraction 19 | │ ├── wallet.py # Wallet operations 20 | │ └── priority_fee/ # Fee management 21 | ├── platforms/ # Platform-specific implementations 22 | │ ├── pumpfun/ # pump.fun specific code 23 | │ └── letsbonk/ # letsbonk.fun specific code 24 | ├── trading/ # Trading logic 25 | │ ├── base.py # Base trading classes 26 | │ ├── universal_trader.py # Platform-agnostic trader 27 | │ └── position.py # Position management 28 | ├── monitoring/ # Event listening and monitoring 29 | │ ├── base_listener.py # Base listener interface 30 | │ └── universal_*_listener.py # Specific listeners 31 | ├── interfaces/ # Abstract base classes 32 | └── utils/ # Utilities and helpers 33 | ├── logger.py # Logging utilities 34 | └── idl_manager.py # IDL management 35 | ``` 36 | 37 | ### File Naming Conventions 38 | - Use snake_case for all Python files and directories 39 | - Prefix abstract base classes with "Base" or put in `interfaces/` 40 | - Use "Universal" prefix for platform-agnostic implementations 41 | - Group related functionality in subdirectories 42 | 43 | ## Design Patterns 44 | 45 | ### Platform Abstraction 46 | Implement platform-specific functionality using the factory pattern: 47 | 48 | ```python 49 | # interfaces/core.py - Define abstract interfaces 50 | from abc import ABC, abstractmethod 51 | 52 | class AddressProvider(ABC): 53 | @abstractmethod 54 | def get_program_address(self) -> str: 55 | pass 56 | 57 | # platforms/pumpfun/address_provider.py - Concrete implementation 58 | class PumpFunAddressProvider(AddressProvider): 59 | def get_program_address(self) -> str: 60 | return "6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P" 61 | ``` 62 | 63 | ### Universal Components 64 | Create platform-agnostic wrappers 
that delegate to platform-specific implementations: 65 | 66 | ```python 67 | class UniversalTrader: 68 | def __init__(self, platform: Platform, **kwargs): 69 | self.platform = platform 70 | self.platform_trader = self._create_platform_trader() 71 | 72 | def _create_platform_trader(self): 73 | # Factory method to create platform-specific trader 74 | pass 75 | ``` 76 | 77 | ### Configuration Management 78 | - Use YAML files for bot configurations in `bots/` directory 79 | - Support environment variable interpolation with `${VARIABLE}` syntax 80 | - Validate configurations before starting bots 81 | - Separate environment-specific settings in `.env` files 82 | 83 | ## Module Dependencies 84 | 85 | ### Import Rules 86 | - Core modules should not import from trading or monitoring 87 | - Platform-specific modules should only import from their own package and core/interfaces 88 | - Avoid circular imports between packages 89 | - Use dependency injection for cross-package dependencies 90 | 91 | ### Dependency Layers (from low to high level) 92 | 1. **utils/** - Utilities and helpers (no business logic dependencies) 93 | 2. **interfaces/** - Abstract base classes and protocols 94 | 3. **core/** - Blockchain and infrastructure (depends on utils, interfaces) 95 | 4. **platforms/** - Platform implementations (depends on core, interfaces) 96 | 5. **trading/** - Trading logic (depends on core, platforms, interfaces) 97 | 6. **monitoring/** - Event listening (depends on core, platforms, interfaces) 98 | 7. 
**bot_runner.py** - Main orchestrator (depends on all layers) 99 | 100 | ## Async Architecture 101 | 102 | ### Event Loop Management 103 | - Use uvloop for better performance 104 | - Set event loop policy at application startup 105 | - Use asyncio.create_task() for concurrent operations 106 | - Implement proper cleanup on shutdown 107 | 108 | ### Connection Management 109 | - Use connection pooling for HTTP clients 110 | - Implement reconnection logic for WebSocket connections 111 | - Cache expensive resources (blockhash, account info) 112 | - Use async context managers for resource cleanup 113 | 114 | ## Testing Strategy 115 | 116 | ### Test Organization 117 | - Use `learning-examples/` for integration testing and validation 118 | - Test platform-specific components independently 119 | - Mock external dependencies (RPC calls, WebSocket connections) 120 | - Validate configurations with actual bot startup 121 | 122 | ### Test Data 123 | - Use test networks for development 124 | - Never test with real funds or production keys 125 | - Create fixtures for common test scenarios 126 | - Document test account requirements 127 | 128 | ## Performance Considerations 129 | 130 | ### Caching Strategy 131 | - Cache recent blockhash in background task 132 | - Cache account information where appropriate 133 | - Use local caching for IDL data 134 | - Implement TTL for cached data 135 | 136 | ### Resource Management 137 | - Limit concurrent operations based on RPC provider limits 138 | - Implement backoff strategies for failed requests 139 | - Use separate processes for production bot instances 140 | - Monitor memory usage in long-running processes -------------------------------------------------------------------------------- /.windsurf/rules/architecture.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | trigger: always_on 3 | --- 4 | 5 | # Project Architecture Rules 6 | 7 | ## Directory Structure 8 | 9 | ### Package Organization 10 
| Maintain clear separation of concerns: 11 | 12 | ``` 13 | src/ 14 | ├── __init__.py 15 | ├── bot_runner.py # Main entry point 16 | ├── config_loader.py # Configuration management 17 | ├── core/ # Core blockchain functionality 18 | │ ├── client.py # Solana RPC client abstraction 19 | │ ├── wallet.py # Wallet operations 20 | │ └── priority_fee/ # Fee management 21 | ├── platforms/ # Platform-specific implementations 22 | │ ├── pumpfun/ # pump.fun specific code 23 | │ └── letsbonk/ # letsbonk.fun specific code 24 | ├── trading/ # Trading logic 25 | │ ├── base.py # Base trading classes 26 | │ ├── universal_trader.py # Platform-agnostic trader 27 | │ └── position.py # Position management 28 | ├── monitoring/ # Event listening and monitoring 29 | │ ├── base_listener.py # Base listener interface 30 | │ └── universal_*_listener.py # Specific listeners 31 | ├── interfaces/ # Abstract base classes 32 | └── utils/ # Utilities and helpers 33 | ├── logger.py # Logging utilities 34 | └── idl_manager.py # IDL management 35 | ``` 36 | 37 | ### File Naming Conventions 38 | - Use snake_case for all Python files and directories 39 | - Prefix abstract base classes with "Base" or put in `interfaces/` 40 | - Use "Universal" prefix for platform-agnostic implementations 41 | - Group related functionality in subdirectories 42 | 43 | ## Design Patterns 44 | 45 | ### Platform Abstraction 46 | Implement platform-specific functionality using the factory pattern: 47 | 48 | ```python 49 | # interfaces/core.py - Define abstract interfaces 50 | from abc import ABC, abstractmethod 51 | 52 | class AddressProvider(ABC): 53 | @abstractmethod 54 | def get_program_address(self) -> str: 55 | pass 56 | 57 | # platforms/pumpfun/address_provider.py - Concrete implementation 58 | class PumpFunAddressProvider(AddressProvider): 59 | def get_program_address(self) -> str: 60 | return "6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P" 61 | ``` 62 | 63 | ### Universal Components 64 | Create platform-agnostic wrappers 
that delegate to platform-specific implementations: 65 | 66 | ```python 67 | class UniversalTrader: 68 | def __init__(self, platform: Platform, **kwargs): 69 | self.platform = platform 70 | self.platform_trader = self._create_platform_trader() 71 | 72 | def _create_platform_trader(self): 73 | # Factory method to create platform-specific trader 74 | pass 75 | ``` 76 | 77 | ### Configuration Management 78 | - Use YAML files for bot configurations in `bots/` directory 79 | - Support environment variable interpolation with `${VARIABLE}` syntax 80 | - Validate configurations before starting bots 81 | - Separate environment-specific settings in `.env` files 82 | 83 | ## Module Dependencies 84 | 85 | ### Import Rules 86 | - Core modules should not import from trading or monitoring 87 | - Platform-specific modules should only import from their own package and core/interfaces 88 | - Avoid circular imports between packages 89 | - Use dependency injection for cross-package dependencies 90 | 91 | ### Dependency Layers (from low to high level) 92 | 1. **utils/** - Utilities and helpers (no business logic dependencies) 93 | 2. **interfaces/** - Abstract base classes and protocols 94 | 3. **core/** - Blockchain and infrastructure (depends on utils, interfaces) 95 | 4. **platforms/** - Platform implementations (depends on core, interfaces) 96 | 5. **trading/** - Trading logic (depends on core, platforms, interfaces) 97 | 6. **monitoring/** - Event listening (depends on core, platforms, interfaces) 98 | 7. 
**bot_runner.py** - Main orchestrator (depends on all layers) 99 | 100 | ## Async Architecture 101 | 102 | ### Event Loop Management 103 | - Use uvloop for better performance 104 | - Set event loop policy at application startup 105 | - Use asyncio.create_task() for concurrent operations 106 | - Implement proper cleanup on shutdown 107 | 108 | ### Connection Management 109 | - Use connection pooling for HTTP clients 110 | - Implement reconnection logic for WebSocket connections 111 | - Cache expensive resources (blockhash, account info) 112 | - Use async context managers for resource cleanup 113 | 114 | ## Testing Strategy 115 | 116 | ### Test Organization 117 | - Use `learning-examples/` for integration testing and validation 118 | - Test platform-specific components independently 119 | - Mock external dependencies (RPC calls, WebSocket connections) 120 | - Validate configurations with actual bot startup 121 | 122 | ### Test Data 123 | - Use test networks for development 124 | - Never test with real funds or production keys 125 | - Create fixtures for common test scenarios 126 | - Document test account requirements 127 | 128 | ## Performance Considerations 129 | 130 | ### Caching Strategy 131 | - Cache recent blockhash in background task 132 | - Cache account information where appropriate 133 | - Use local caching for IDL data 134 | - Implement TTL for cached data 135 | 136 | ### Resource Management 137 | - Limit concurrent operations based on RPC provider limits 138 | - Implement backoff strategies for failed requests 139 | - Use separate processes for production bot instances 140 | - Monitor memory usage in long-running processes -------------------------------------------------------------------------------- /learning-examples/decode_from_blockSubscribe.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import hashlib 3 | import json 4 | import struct 5 | import sys 6 | 7 | from solders.transaction import 
Transaction, VersionedTransaction 8 | 9 | 10 | def load_idl(file_path): 11 | with open(file_path) as f: 12 | return json.load(f) 13 | 14 | 15 | def load_transaction(file_path): 16 | with open(file_path) as f: 17 | data = json.load(f) 18 | return data 19 | 20 | 21 | def decode_instruction(ix_data, ix_def): 22 | args = {} 23 | offset = 8 # Skip 8-byte discriminator 24 | 25 | for arg in ix_def["args"]: 26 | if arg["type"] == "u64": 27 | value = struct.unpack_from(" "Position": 56 | """Create a position from a successful buy transaction. 57 | 58 | Args: 59 | mint: Token mint address 60 | symbol: Token symbol 61 | entry_price: Price at which position was entered 62 | quantity: Quantity of tokens purchased 63 | take_profit_percentage: Take profit percentage (0.5 = 50% profit) 64 | stop_loss_percentage: Stop loss percentage (0.2 = 20% loss) 65 | max_hold_time: Maximum hold time in seconds 66 | 67 | Returns: 68 | Position instance 69 | """ 70 | take_profit_price = None 71 | if take_profit_percentage is not None: 72 | take_profit_price = entry_price * (1 + take_profit_percentage) 73 | 74 | stop_loss_price = None 75 | if stop_loss_percentage is not None: 76 | stop_loss_price = entry_price * (1 - stop_loss_percentage) 77 | 78 | return cls( 79 | mint=mint, 80 | symbol=symbol, 81 | entry_price=entry_price, 82 | quantity=quantity, 83 | entry_time=datetime.utcnow(), 84 | take_profit_price=take_profit_price, 85 | stop_loss_price=stop_loss_price, 86 | max_hold_time=max_hold_time, 87 | ) 88 | 89 | def should_exit(self, current_price: float) -> tuple[bool, ExitReason | None]: 90 | """Check if position should be exited based on current conditions. 
91 | 92 | Args: 93 | current_price: Current token price 94 | 95 | Returns: 96 | Tuple of (should_exit, exit_reason) 97 | """ 98 | if not self.is_active: 99 | return False, None 100 | 101 | # Check take profit 102 | if self.take_profit_price and current_price >= self.take_profit_price: 103 | return True, ExitReason.TAKE_PROFIT 104 | 105 | # Check stop loss 106 | if self.stop_loss_price and current_price <= self.stop_loss_price: 107 | return True, ExitReason.STOP_LOSS 108 | 109 | # Check max hold time 110 | if self.max_hold_time: 111 | elapsed_time = (datetime.utcnow() - self.entry_time).total_seconds() 112 | if elapsed_time >= self.max_hold_time: 113 | return True, ExitReason.MAX_HOLD_TIME 114 | 115 | return False, None 116 | 117 | def close_position(self, exit_price: float, exit_reason: ExitReason) -> None: 118 | """Close the position with exit details. 119 | 120 | Args: 121 | exit_price: Price at which position was exited 122 | exit_reason: Reason for exit 123 | """ 124 | self.is_active = False 125 | self.exit_price = exit_price 126 | self.exit_reason = exit_reason 127 | self.exit_time = datetime.utcnow() 128 | 129 | def get_pnl(self, current_price: float | None = None) -> dict: 130 | """Calculate profit/loss for the position. 
131 | 132 | Args: 133 | current_price: Current price (uses exit_price if position is closed) 134 | 135 | Returns: 136 | Dictionary with PnL information 137 | """ 138 | if self.is_active and current_price is None: 139 | raise ValueError("current_price required for active position") 140 | 141 | price_to_use = self.exit_price if not self.is_active else current_price 142 | if price_to_use is None: 143 | raise ValueError("No price available for PnL calculation") 144 | 145 | price_change = price_to_use - self.entry_price 146 | price_change_pct = (price_change / self.entry_price) * 100 147 | unrealized_pnl = price_change * self.quantity 148 | 149 | return { 150 | "entry_price": self.entry_price, 151 | "current_price": price_to_use, 152 | "price_change": price_change, 153 | "price_change_pct": price_change_pct, 154 | "unrealized_pnl_sol": unrealized_pnl, 155 | "quantity": self.quantity, 156 | } 157 | 158 | def __str__(self) -> str: 159 | """String representation of position.""" 160 | if self.is_active: 161 | status = "ACTIVE" 162 | elif self.exit_reason: 163 | status = f"CLOSED ({self.exit_reason.value})" 164 | else: 165 | status = "CLOSED (UNKNOWN)" 166 | return f"Position({self.symbol}: {self.quantity:.6f} @ {self.entry_price:.8f} SOL - {status})" 167 | -------------------------------------------------------------------------------- /learning-examples/bonding-curve-progress/poll_bonding_curve_progress.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module for tracking the progress of a bonding curve for a Pump.fun token. 3 | It continuously polls the bonding curve state and prints updates at regular intervals. 
4 | """ 5 | 6 | import asyncio 7 | import os 8 | import struct 9 | from typing import Final 10 | 11 | from dotenv import load_dotenv 12 | from solana.rpc.async_api import AsyncClient 13 | from solders.pubkey import Pubkey 14 | 15 | load_dotenv() 16 | 17 | # Constants 18 | RPC_URL: Final[str] = os.getenv("SOLANA_NODE_RPC_ENDPOINT") 19 | TOKEN_MINT: Final[str] = ( 20 | "5ZHx2GGGj87xpidVJpBqadMUutqBirhL2TqUR9T9taKc" # Replace with actual token mint address 21 | ) 22 | PUMP_PROGRAM_ID: Final[Pubkey] = Pubkey.from_string( 23 | "6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P" 24 | ) 25 | LAMPORTS_PER_SOL: Final[int] = 1_000_000_000 26 | TOKEN_DECIMALS: Final[int] = 6 27 | EXPECTED_DISCRIMINATOR: Final[bytes] = struct.pack( 28 | " Pubkey: 34 | """ 35 | Derive the bonding curve PDA address from a mint address. 36 | 37 | Args: 38 | mint: The token mint address 39 | program_id: The program ID for the bonding curve 40 | 41 | Returns: 42 | The bonding curve address 43 | """ 44 | return Pubkey.find_program_address([b"bonding-curve", bytes(mint)], program_id)[0] 45 | 46 | 47 | async def get_account_data(client: AsyncClient, pubkey: Pubkey) -> bytes: 48 | """ 49 | Fetch raw account data for a given public key. 50 | 51 | Args: 52 | client: AsyncClient connection to Solana RPC 53 | pubkey: The public key of the account to fetch 54 | 55 | Returns: 56 | The raw account data as bytes 57 | 58 | Raises: 59 | ValueError: If the account is not found or has no data 60 | """ 61 | resp = await client.get_account_info(pubkey, encoding="base64") 62 | if not resp.value or not resp.value.data: 63 | raise ValueError(f"Account {pubkey} not found or has no data") 64 | 65 | return resp.value.data 66 | 67 | 68 | def parse_curve_state(data: bytes) -> dict: 69 | """ 70 | Decode bonding curve account data into a readable format. 
71 | 72 | Args: 73 | data: The raw bonding curve account data 74 | 75 | Returns: 76 | A dictionary containing parsed bonding curve fields 77 | 78 | Raises: 79 | ValueError: If the account discriminator is invalid 80 | """ 81 | if data[:8] != EXPECTED_DISCRIMINATOR: 82 | raise ValueError("Invalid discriminator for bonding curve") 83 | 84 | # Parse common fields (present in all versions) 85 | fields = struct.unpack_from("= 73: # Has creator field 98 | creator_bytes = data[49:81] # 8 (discriminator) + 41 (base fields) = 49 99 | result["creator"] = Pubkey.from_bytes(creator_bytes) 100 | 101 | # Parse is_mayhem_mode if present 102 | if data_length >= 74: # Has mayhem mode field 103 | result["is_mayhem_mode"] = bool(data[81]) 104 | else: 105 | result["is_mayhem_mode"] = False 106 | 107 | return result 108 | 109 | 110 | def print_curve_status(state: dict) -> None: 111 | """ 112 | Print the current status of the bonding curve in a readable format. 113 | 114 | Args: 115 | state: The parsed bonding curve state dictionary 116 | """ 117 | progress = 0 118 | if state["complete"]: 119 | progress = 100.0 120 | else: 121 | # Pump.fun constants (already converted to human-readable format) 122 | TOTAL_SUPPLY = 1_000_000_000 # 1B tokens 123 | RESERVED_TOKENS = 206_900_000 # 206.9M tokens reserved for migration 124 | 125 | initial_real_token_reserves = TOTAL_SUPPLY - RESERVED_TOKENS # 793.1M tokens 126 | 127 | if initial_real_token_reserves > 0: 128 | left_tokens = state["real_token_reserves"] 129 | progress = 100 - (left_tokens * 100) / initial_real_token_reserves 130 | 131 | print("=" * 30) 132 | print(f"Complete: {'✅' if state['complete'] else '❌'}") 133 | print(f"Progress: {progress:.2f}%") 134 | print(f"Token reserves: {state['real_token_reserves']:.4f}") 135 | print(f"SOL reserves: {state['real_sol_reserves']:.4f}") 136 | print("=" * 30, "\n") 137 | 138 | 139 | async def track_curve() -> None: 140 | """ 141 | Continuously track and display the state of a bonding curve. 
142 |     """ 143 |     if not RPC_URL or not TOKEN_MINT: 144 |         print("❌ Set SOLANA_NODE_RPC_ENDPOINT and TOKEN_MINT in .env") 145 |         return 146 | 147 |     mint_pubkey: Pubkey = Pubkey.from_string(TOKEN_MINT) 148 |     curve_pubkey: Pubkey = get_bonding_curve_address(mint_pubkey, PUMP_PROGRAM_ID) 149 | 150 |     print("Tracking bonding curve for:", mint_pubkey) 151 |     print("Curve address:", curve_pubkey, "\n") 152 | 153 |     async with AsyncClient(RPC_URL) as client: 154 |         while True: 155 |             try: 156 |                 data = await get_account_data(client, curve_pubkey) 157 |                 state = parse_curve_state(data) 158 |                 print_curve_status(state) 159 |             except Exception as e: 160 |                 print(f"⚠️ Error: {e}") 161 | 162 |             await asyncio.sleep(POLL_INTERVAL) 163 | 164 | 165 | if __name__ == "__main__": 166 |     asyncio.run(track_curve()) 167 | -------------------------------------------------------------------------------- /learning-examples/bonding-curve-progress/get_graduating_tokens.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module for querying and analyzing soon-to-graduate tokens in the Pump.fun program. 3 | It includes functionality to fetch bonding curves based on token reserves and 4 | find associated SPL token accounts. 5 | 6 | Note: getProgramAccounts may be slow as it is a pretty heavy method for RPC. 
7 | """ 8 | 9 | import asyncio 10 | import os 11 | import struct 12 | from typing import Final 13 | 14 | from dotenv import load_dotenv 15 | from solana.rpc.async_api import AsyncClient 16 | from solana.rpc.types import MemcmpOpts, TokenAccountOpts 17 | from solders.pubkey import Pubkey 18 | 19 | load_dotenv() 20 | 21 | # Constants 22 | RPC_ENDPOINT: Final[str] = os.environ.get("SOLANA_NODE_RPC_ENDPOINT") 23 | PUMP_PROGRAM_ID: Final[Pubkey] = Pubkey.from_string( 24 | "6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P" 25 | ) 26 | TOKEN_PROGRAM_ID: Final[Pubkey] = Pubkey.from_string( 27 | "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" 28 | ) 29 | 30 | # The 8-byte discriminator for bonding curve accounts in Pump.fun 31 | BONDING_CURVE_DISCRIMINATOR_BYTES: Final[bytes] = bytes.fromhex("17b7f83760d8ac60") 32 | 33 | 34 | async def get_bonding_curves_by_reserves(client: AsyncClient | None = None) -> list: 35 | """ 36 | Fetch bonding curve accounts with real token reserves below a threshold. 37 | 38 | Args: 39 | client: Optional AsyncClient instance. If None, a new one will be created. 
40 | 41 | Returns: 42 | List of bonding curve accounts matching the criteria 43 | """ 44 | # Define the reserve threshold (100 trillion in token base units) 45 | threshold: int = 100_000_000_000_000 46 | threshold_bytes: bytes = threshold.to_bytes(8, "little") 47 | msb_prefix: bytes = threshold_bytes[6:] # Most significant bytes for pre-filtering 48 | 49 | should_close_client: bool = client is None 50 | try: 51 | if should_close_client: 52 | client = AsyncClient(RPC_ENDPOINT, commitment="processed", timeout=180) 53 | await client.is_connected() 54 | 55 | # Define on-chain filters for getProgramAccounts 56 | filters = [ 57 | MemcmpOpts( 58 | offset=0, bytes=BONDING_CURVE_DISCRIMINATOR_BYTES 59 | ), # Match bonding curve accounts 60 | MemcmpOpts( 61 | offset=30, bytes=msb_prefix 62 | ), # Pre-filter by real token reserves MSB 63 | MemcmpOpts(offset=48, bytes=b"\x00"), # Ensure complete flag is False 64 | ] 65 | 66 | # Query accounts matching filters 67 | response = await client.get_program_accounts( 68 | PUMP_PROGRAM_ID, encoding="base64", filters=filters 69 | ) 70 | 71 | result = [] 72 | for acc in response.value: 73 | raw = acc.account.data 74 | 75 | # Extract real_token_reserves (u64 = 8 bytes, little-endian) 76 | offset: int = 24 # real_token_reserves field offset 77 | real_token_reserves: int = struct.unpack(" 0: 117 | return response.value[0].account 118 | else: 119 | print(f"No token accounts found for {bonding_curve_address}") 120 | return None 121 | except Exception as e: 122 | print(f"Error finding associated token account: {e}") 123 | return None 124 | finally: 125 | if should_close_client and client: 126 | await client.close() 127 | 128 | 129 | def get_mint_address(data: bytes) -> str: 130 | """ 131 | Extract the mint address from SPL token account data. 
132 | 133 | Args: 134 | data: The token account data as bytes 135 | 136 | Returns: 137 | The mint address as a base58-encoded string 138 | """ 139 | return str(Pubkey(data[:32])) 140 | 141 | 142 | async def main() -> None: 143 | """Main entry point for querying and processing bonding curves.""" 144 | async with AsyncClient(RPC_ENDPOINT, commitment="processed", timeout=120) as client: 145 | await client.is_connected() 146 | 147 | bonding_curves = await get_bonding_curves_by_reserves(client) 148 | print(f"Total matches: {len(bonding_curves)}") 149 | print("=" * 50) 150 | 151 | for bonding_curve in bonding_curves: 152 | # Find the SPL token account owned by the bonding curve 153 | associated_token_account = await find_associated_bonding_curve( 154 | str(bonding_curve.pubkey), client 155 | ) 156 | 157 | if associated_token_account: 158 | mint_address = get_mint_address(associated_token_account.data) 159 | print(f"Bonding curve: {bonding_curve.pubkey}") 160 | print(f"Mint address: {mint_address}") 161 | print("=" * 50) 162 | 163 | # For demonstration, only process the first curve 164 | break 165 | 166 | 167 | if __name__ == "__main__": 168 | asyncio.run(main()) 169 | -------------------------------------------------------------------------------- /src/monitoring/listener_factory.py: -------------------------------------------------------------------------------- 1 | """ 2 | Factory for creating platform-aware token listeners. 
3 | """ 4 | 5 | from interfaces.core import Platform 6 | from monitoring.base_listener import BaseTokenListener 7 | from utils.logger import get_logger 8 | 9 | logger = get_logger(__name__) 10 | 11 | 12 | class ListenerFactory: 13 | """Factory for creating appropriate token listeners based on configuration.""" 14 | 15 | @staticmethod 16 | def create_listener( 17 | listener_type: str, 18 | wss_endpoint: str | None = None, 19 | geyser_endpoint: str | None = None, 20 | geyser_api_token: str | None = None, 21 | geyser_auth_type: str = "x-token", 22 | pumpportal_url: str = "wss://pumpportal.fun/api/data", 23 | platforms: list[Platform] | None = None, 24 | ) -> BaseTokenListener: 25 | """Create a token listener based on the specified type. 26 | 27 | Args: 28 | listener_type: Type of listener ('logs', 'blocks', 'geyser', or 'pumpportal') 29 | wss_endpoint: WebSocket endpoint URL (for logs/blocks listeners) 30 | geyser_endpoint: Geyser gRPC endpoint URL (for geyser listener) 31 | geyser_api_token: Geyser API token (for geyser listener) 32 | geyser_auth_type: Geyser authentication type 33 | pumpportal_url: PumpPortal WebSocket URL (for pumpportal listener) 34 | platforms: List of platforms to monitor (if None, monitor all) 35 | 36 | Returns: 37 | Configured token listener 38 | 39 | Raises: 40 | ValueError: If listener type is invalid or required parameters are missing 41 | """ 42 | listener_type = listener_type.lower() 43 | 44 | if listener_type == "geyser": 45 | if not geyser_endpoint or not geyser_api_token: 46 | raise ValueError( 47 | "Geyser endpoint and API token are required for geyser listener" 48 | ) 49 | 50 | from monitoring.universal_geyser_listener import UniversalGeyserListener 51 | 52 | listener = UniversalGeyserListener( 53 | geyser_endpoint=geyser_endpoint, 54 | geyser_api_token=geyser_api_token, 55 | geyser_auth_type=geyser_auth_type, 56 | platforms=platforms, 57 | ) 58 | logger.info("Created Universal Geyser listener for token monitoring") 59 | return 
listener 60 | 61 | elif listener_type == "logs": 62 | if not wss_endpoint: 63 | raise ValueError("WebSocket endpoint is required for logs listener") 64 | 65 | from monitoring.universal_logs_listener import UniversalLogsListener 66 | 67 | listener = UniversalLogsListener( 68 | wss_endpoint=wss_endpoint, 69 | platforms=platforms, 70 | ) 71 | logger.info("Created Universal Logs listener for token monitoring") 72 | return listener 73 | 74 | elif listener_type == "blocks": 75 | if not wss_endpoint: 76 | raise ValueError("WebSocket endpoint is required for blocks listener") 77 | 78 | from monitoring.universal_block_listener import UniversalBlockListener 79 | 80 | listener = UniversalBlockListener( 81 | wss_endpoint=wss_endpoint, 82 | platforms=platforms, 83 | ) 84 | logger.info("Created Universal Block listener for token monitoring") 85 | return listener 86 | 87 | elif listener_type == "pumpportal": 88 | # Import the new universal PumpPortal listener 89 | from monitoring.universal_pumpportal_listener import ( 90 | UniversalPumpPortalListener, 91 | ) 92 | 93 | # Validate that requested platforms support PumpPortal 94 | supported_pumpportal_platforms = [Platform.PUMP_FUN, Platform.LETS_BONK] 95 | 96 | if platforms: 97 | unsupported = [ 98 | p for p in platforms if p not in supported_pumpportal_platforms 99 | ] 100 | if unsupported: 101 | logger.warning( 102 | f"Platforms {[p.value for p in unsupported]} do not support PumpPortal" 103 | ) 104 | 105 | # Filter to only supported platforms 106 | filtered_platforms = [ 107 | p for p in platforms if p in supported_pumpportal_platforms 108 | ] 109 | if not filtered_platforms: 110 | raise ValueError( 111 | "No supported platforms specified for PumpPortal listener" 112 | ) 113 | platforms = filtered_platforms 114 | 115 | listener = UniversalPumpPortalListener( 116 | pumpportal_url=pumpportal_url, 117 | platforms=platforms, 118 | ) 119 | logger.info( 120 | f"Created Universal PumpPortal listener for platforms: {[p.value for p in 
(platforms or supported_pumpportal_platforms)]}" 121 | ) 122 | return listener 123 | 124 | else: 125 | raise ValueError( 126 | f"Invalid listener type '{listener_type}'. " 127 | f"Must be one of: 'logs', 'blocks', 'geyser', 'pumpportal'" 128 | ) 129 | 130 | @staticmethod 131 | def get_supported_listener_types() -> list[str]: 132 | """Get list of supported listener types. 133 | 134 | Returns: 135 | List of supported listener type strings 136 | """ 137 | return ["logs", "blocks", "geyser", "pumpportal"] 138 | 139 | @staticmethod 140 | def get_platform_compatible_listeners(platform: Platform) -> list[str]: 141 | """Get list of listener types compatible with a specific platform. 142 | 143 | Args: 144 | platform: Platform to check compatibility for 145 | 146 | Returns: 147 | List of compatible listener types 148 | """ 149 | if platform == Platform.PUMP_FUN: 150 | return ["logs", "blocks", "geyser", "pumpportal"] 151 | elif platform == Platform.LETS_BONK: 152 | return ["blocks", "geyser", "pumpportal"] # Added pumpportal support 153 | else: 154 | return ["blocks", "geyser"] # Default universal listeners 155 | 156 | @staticmethod 157 | def get_pumpportal_supported_platforms() -> list[Platform]: 158 | """Get list of platforms that support PumpPortal listener. 
159 | 160 | Returns: 161 | List of platforms with PumpPortal support 162 | """ 163 | return [Platform.PUMP_FUN, Platform.LETS_BONK] 164 | -------------------------------------------------------------------------------- /src/geyser/proto/geyser.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | import public "solana-storage.proto"; 4 | 5 | option go_package = "github.com/rpcpool/yellowstone-grpc/examples/golang/proto"; 6 | 7 | package geyser; 8 | 9 | service Geyser { 10 | rpc Subscribe(stream SubscribeRequest) returns (stream SubscribeUpdate) {} 11 | rpc Ping(PingRequest) returns (PongResponse) {} 12 | rpc GetLatestBlockhash(GetLatestBlockhashRequest) returns (GetLatestBlockhashResponse) {} 13 | rpc GetBlockHeight(GetBlockHeightRequest) returns (GetBlockHeightResponse) {} 14 | rpc GetSlot(GetSlotRequest) returns (GetSlotResponse) {} 15 | rpc IsBlockhashValid(IsBlockhashValidRequest) returns (IsBlockhashValidResponse) {} 16 | rpc GetVersion(GetVersionRequest) returns (GetVersionResponse) {} 17 | } 18 | 19 | enum CommitmentLevel { 20 | PROCESSED = 0; 21 | CONFIRMED = 1; 22 | FINALIZED = 2; 23 | FIRST_SHRED_RECEIVED = 3; 24 | COMPLETED = 4; 25 | CREATED_BANK = 5; 26 | DEAD = 6; 27 | } 28 | 29 | message SubscribeRequest { 30 | map accounts = 1; 31 | map slots = 2; 32 | map transactions = 3; 33 | map transactions_status = 10; 34 | map blocks = 4; 35 | map blocks_meta = 5; 36 | map entry = 8; 37 | optional CommitmentLevel commitment = 6; 38 | repeated SubscribeRequestAccountsDataSlice accounts_data_slice = 7; 39 | optional SubscribeRequestPing ping = 9; 40 | } 41 | 42 | message SubscribeRequestFilterAccounts { 43 | repeated string account = 2; 44 | repeated string owner = 3; 45 | repeated SubscribeRequestFilterAccountsFilter filters = 4; 46 | optional bool nonempty_txn_signature = 5; 47 | } 48 | 49 | message SubscribeRequestFilterAccountsFilter { 50 | oneof filter { 51 | 
SubscribeRequestFilterAccountsFilterMemcmp memcmp = 1; 52 | uint64 datasize = 2; 53 | bool token_account_state = 3; 54 | SubscribeRequestFilterAccountsFilterLamports lamports = 4; 55 | } 56 | } 57 | 58 | message SubscribeRequestFilterAccountsFilterMemcmp { 59 | uint64 offset = 1; 60 | oneof data { 61 | bytes bytes = 2; 62 | string base58 = 3; 63 | string base64 = 4; 64 | } 65 | } 66 | 67 | message SubscribeRequestFilterAccountsFilterLamports { 68 | oneof cmp { 69 | uint64 eq = 1; 70 | uint64 ne = 2; 71 | uint64 lt = 3; 72 | uint64 gt = 4; 73 | } 74 | } 75 | 76 | message SubscribeRequestFilterSlots { 77 | optional bool filter_by_commitment = 1; 78 | } 79 | 80 | message SubscribeRequestFilterTransactions { 81 | optional bool vote = 1; 82 | optional bool failed = 2; 83 | optional string signature = 5; 84 | repeated string account_include = 3; 85 | repeated string account_exclude = 4; 86 | repeated string account_required = 6; 87 | } 88 | 89 | message SubscribeRequestFilterBlocks { 90 | repeated string account_include = 1; 91 | optional bool include_transactions = 2; 92 | optional bool include_accounts = 3; 93 | optional bool include_entries = 4; 94 | } 95 | 96 | message SubscribeRequestFilterBlocksMeta {} 97 | 98 | message SubscribeRequestFilterEntry {} 99 | 100 | message SubscribeRequestAccountsDataSlice { 101 | uint64 offset = 1; 102 | uint64 length = 2; 103 | } 104 | 105 | message SubscribeRequestPing { 106 | int32 id = 1; 107 | } 108 | 109 | message SubscribeUpdate { 110 | repeated string filters = 1; 111 | oneof update_oneof { 112 | SubscribeUpdateAccount account = 2; 113 | SubscribeUpdateSlot slot = 3; 114 | SubscribeUpdateTransaction transaction = 4; 115 | SubscribeUpdateTransactionStatus transaction_status = 10; 116 | SubscribeUpdateBlock block = 5; 117 | SubscribeUpdatePing ping = 6; 118 | SubscribeUpdatePong pong = 9; 119 | SubscribeUpdateBlockMeta block_meta = 7; 120 | SubscribeUpdateEntry entry = 8; 121 | } 122 | } 123 | 124 | message 
SubscribeUpdateAccount { 125 | SubscribeUpdateAccountInfo account = 1; 126 | uint64 slot = 2; 127 | bool is_startup = 3; 128 | } 129 | 130 | message SubscribeUpdateAccountInfo { 131 | bytes pubkey = 1; 132 | uint64 lamports = 2; 133 | bytes owner = 3; 134 | bool executable = 4; 135 | uint64 rent_epoch = 5; 136 | bytes data = 6; 137 | uint64 write_version = 7; 138 | optional bytes txn_signature = 8; 139 | } 140 | 141 | message SubscribeUpdateSlot { 142 | uint64 slot = 1; 143 | optional uint64 parent = 2; 144 | CommitmentLevel status = 3; 145 | optional string dead_error = 4; 146 | } 147 | 148 | message SubscribeUpdateTransaction { 149 | SubscribeUpdateTransactionInfo transaction = 1; 150 | uint64 slot = 2; 151 | } 152 | 153 | message SubscribeUpdateTransactionInfo { 154 | bytes signature = 1; 155 | bool is_vote = 2; 156 | solana.storage.ConfirmedBlock.Transaction transaction = 3; 157 | solana.storage.ConfirmedBlock.TransactionStatusMeta meta = 4; 158 | uint64 index = 5; 159 | } 160 | 161 | message SubscribeUpdateTransactionStatus { 162 | uint64 slot = 1; 163 | bytes signature = 2; 164 | bool is_vote = 3; 165 | uint64 index = 4; 166 | solana.storage.ConfirmedBlock.TransactionError err = 5; 167 | } 168 | 169 | message SubscribeUpdateBlock { 170 | uint64 slot = 1; 171 | string blockhash = 2; 172 | solana.storage.ConfirmedBlock.Rewards rewards = 3; 173 | solana.storage.ConfirmedBlock.UnixTimestamp block_time = 4; 174 | solana.storage.ConfirmedBlock.BlockHeight block_height = 5; 175 | uint64 parent_slot = 7; 176 | string parent_blockhash = 8; 177 | uint64 executed_transaction_count = 9; 178 | repeated SubscribeUpdateTransactionInfo transactions = 6; 179 | uint64 updated_account_count = 10; 180 | repeated SubscribeUpdateAccountInfo accounts = 11; 181 | uint64 entries_count = 12; 182 | repeated SubscribeUpdateEntry entries = 13; 183 | } 184 | 185 | message SubscribeUpdateBlockMeta { 186 | uint64 slot = 1; 187 | string blockhash = 2; 188 | 
solana.storage.ConfirmedBlock.Rewards rewards = 3; 189 | solana.storage.ConfirmedBlock.UnixTimestamp block_time = 4; 190 | solana.storage.ConfirmedBlock.BlockHeight block_height = 5; 191 | uint64 parent_slot = 6; 192 | string parent_blockhash = 7; 193 | uint64 executed_transaction_count = 8; 194 | uint64 entries_count = 9; 195 | } 196 | 197 | message SubscribeUpdateEntry { 198 | uint64 slot = 1; 199 | uint64 index = 2; 200 | uint64 num_hashes = 3; 201 | bytes hash = 4; 202 | uint64 executed_transaction_count = 5; 203 | uint64 starting_transaction_index = 6; // added in v1.18, for solana 1.17 value is always 0 204 | } 205 | 206 | message SubscribeUpdatePing {} 207 | 208 | message SubscribeUpdatePong { 209 | int32 id = 1; 210 | } 211 | 212 | // non-streaming methods 213 | 214 | message PingRequest { 215 | int32 count = 1; 216 | } 217 | 218 | message PongResponse { 219 | int32 count = 1; 220 | } 221 | 222 | message GetLatestBlockhashRequest { 223 | optional CommitmentLevel commitment = 1; 224 | } 225 | 226 | message GetLatestBlockhashResponse { 227 | uint64 slot = 1; 228 | string blockhash = 2; 229 | uint64 last_valid_block_height = 3; 230 | } 231 | 232 | message GetBlockHeightRequest { 233 | optional CommitmentLevel commitment = 1; 234 | } 235 | 236 | message GetBlockHeightResponse { 237 | uint64 block_height = 1; 238 | } 239 | 240 | message GetSlotRequest { 241 | optional CommitmentLevel commitment = 1; 242 | } 243 | 244 | message GetSlotResponse { 245 | uint64 slot = 1; 246 | } 247 | 248 | message GetVersionRequest {} 249 | 250 | message GetVersionResponse { 251 | string version = 1; 252 | } 253 | 254 | message IsBlockhashValidRequest { 255 | string blockhash = 1; 256 | optional CommitmentLevel commitment = 2; 257 | } 258 | 259 | message IsBlockhashValidResponse { 260 | uint64 slot = 1; 261 | bool valid = 2; 262 | } -------------------------------------------------------------------------------- /learning-examples/listen-new-tokens/proto/geyser.proto: 
syntax = "proto3";

import public "solana-storage.proto";

option go_package = "github.com/rpcpool/yellowstone-grpc/examples/golang/proto";

package geyser;

// Yellowstone Geyser API: one bidirectional subscription stream plus a few
// unary convenience RPCs.
service Geyser {
  rpc Subscribe(stream SubscribeRequest) returns (stream SubscribeUpdate) {}
  rpc Ping(PingRequest) returns (PongResponse) {}
  rpc GetLatestBlockhash(GetLatestBlockhashRequest) returns (GetLatestBlockhashResponse) {}
  rpc GetBlockHeight(GetBlockHeightRequest) returns (GetBlockHeightResponse) {}
  rpc GetSlot(GetSlotRequest) returns (GetSlotResponse) {}
  rpc IsBlockhashValid(IsBlockhashValidRequest) returns (IsBlockhashValidResponse) {}
  rpc GetVersion(GetVersionRequest) returns (GetVersionResponse) {}
}

enum CommitmentLevel {
  PROCESSED = 0;
  CONFIRMED = 1;
  FINALIZED = 2;
  FIRST_SHRED_RECEIVED = 3;
  COMPLETED = 4;
  CREATED_BANK = 5;
  DEAD = 6;
}

// Top-level subscription request. Each map key is a client-chosen filter name
// that is echoed back in SubscribeUpdate.filters.
// NOTE(review): the map value types below were lost in the captured text
// (bare `map accounts = 1;` etc., which is not valid proto); they were
// reconstructed from the corresponding SubscribeRequestFilter* message names.
// Confirm against the upstream yellowstone-grpc geyser.proto.
message SubscribeRequest {
  map<string, SubscribeRequestFilterAccounts> accounts = 1;
  map<string, SubscribeRequestFilterSlots> slots = 2;
  map<string, SubscribeRequestFilterTransactions> transactions = 3;
  map<string, SubscribeRequestFilterTransactions> transactions_status = 10;
  map<string, SubscribeRequestFilterBlocks> blocks = 4;
  map<string, SubscribeRequestFilterBlocksMeta> blocks_meta = 5;
  map<string, SubscribeRequestFilterEntry> entry = 8;
  optional CommitmentLevel commitment = 6;
  repeated SubscribeRequestAccountsDataSlice accounts_data_slice = 7;
  optional SubscribeRequestPing ping = 9;
}

message SubscribeRequestFilterAccounts {
  repeated string account = 2;
  repeated string owner = 3;
  repeated SubscribeRequestFilterAccountsFilter filters = 4;
  optional bool nonempty_txn_signature = 5;
}

message SubscribeRequestFilterAccountsFilter {
  oneof filter {
    SubscribeRequestFilterAccountsFilterMemcmp memcmp = 1;
    uint64 datasize = 2;
    bool token_account_state = 3;
    SubscribeRequestFilterAccountsFilterLamports lamports = 4;
  }
}

message SubscribeRequestFilterAccountsFilterMemcmp {
  uint64 offset = 1;
  oneof data {
    bytes bytes = 2;
    string base58 = 3;
    string base64 = 4;
  }
}

message SubscribeRequestFilterAccountsFilterLamports {
  oneof cmp {
    uint64 eq = 1;
    uint64 ne = 2;
    uint64 lt = 3;
    uint64 gt = 4;
  }
}

message SubscribeRequestFilterSlots {
  optional bool filter_by_commitment = 1;
}

message SubscribeRequestFilterTransactions {
  optional bool vote = 1;
  optional bool failed = 2;
  optional string signature = 5;
  repeated string account_include = 3;
  repeated string account_exclude = 4;
  repeated string account_required = 6;
}

message SubscribeRequestFilterBlocks {
  repeated string account_include = 1;
  optional bool include_transactions = 2;
  optional bool include_accounts = 3;
  optional bool include_entries = 4;
}

message SubscribeRequestFilterBlocksMeta {}

message SubscribeRequestFilterEntry {}

message SubscribeRequestAccountsDataSlice {
  uint64 offset = 1;
  uint64 length = 2;
}

message SubscribeRequestPing {
  int32 id = 1;
}

// One streamed update; exactly one variant of update_oneof is set.
message SubscribeUpdate {
  repeated string filters = 1;
  oneof update_oneof {
    SubscribeUpdateAccount account = 2;
    SubscribeUpdateSlot slot = 3;
    SubscribeUpdateTransaction transaction = 4;
    SubscribeUpdateTransactionStatus transaction_status = 10;
    SubscribeUpdateBlock block = 5;
    SubscribeUpdatePing ping = 6;
    SubscribeUpdatePong pong = 9;
    SubscribeUpdateBlockMeta block_meta = 7;
    SubscribeUpdateEntry entry = 8;
  }
}

message SubscribeUpdateAccount {
  SubscribeUpdateAccountInfo account = 1;
  uint64 slot = 2;
  bool is_startup = 3;
}

message SubscribeUpdateAccountInfo {
  bytes pubkey = 1;
  uint64 lamports = 2;
  bytes owner = 3;
  bool executable = 4;
  uint64 rent_epoch = 5;
  bytes data = 6;
  uint64 write_version = 7;
  optional bytes txn_signature = 8;
}

message SubscribeUpdateSlot {
  uint64 slot = 1;
  optional uint64 parent = 2;
  CommitmentLevel status = 3;
  optional string dead_error = 4;
}

message SubscribeUpdateTransaction {
  SubscribeUpdateTransactionInfo transaction = 1;
  uint64 slot = 2;
}

message SubscribeUpdateTransactionInfo {
  bytes signature = 1;
  bool is_vote = 2;
  solana.storage.ConfirmedBlock.Transaction transaction = 3;
  solana.storage.ConfirmedBlock.TransactionStatusMeta meta = 4;
  uint64 index = 5;
}

message SubscribeUpdateTransactionStatus {
  uint64 slot = 1;
  bytes signature = 2;
  bool is_vote = 3;
  uint64 index = 4;
  solana.storage.ConfirmedBlock.TransactionError err = 5;
}

message SubscribeUpdateBlock {
  uint64 slot = 1;
  string blockhash = 2;
  solana.storage.ConfirmedBlock.Rewards rewards = 3;
  solana.storage.ConfirmedBlock.UnixTimestamp block_time = 4;
  solana.storage.ConfirmedBlock.BlockHeight block_height = 5;
  uint64 parent_slot = 7;
  string parent_blockhash = 8;
  uint64 executed_transaction_count = 9;
  repeated SubscribeUpdateTransactionInfo transactions = 6;
  uint64 updated_account_count = 10;
  repeated SubscribeUpdateAccountInfo accounts = 11;
  uint64 entries_count = 12;
  repeated SubscribeUpdateEntry entries = 13;
}

message SubscribeUpdateBlockMeta {
  uint64 slot = 1;
  string blockhash = 2;
  solana.storage.ConfirmedBlock.Rewards rewards = 3;
  solana.storage.ConfirmedBlock.UnixTimestamp block_time = 4;
  solana.storage.ConfirmedBlock.BlockHeight block_height = 5;
  uint64 parent_slot = 6;
  string parent_blockhash = 7;
  uint64 executed_transaction_count = 8;
  uint64 entries_count = 9;
}
| message SubscribeUpdateEntry { 198 | uint64 slot = 1; 199 | uint64 index = 2; 200 | uint64 num_hashes = 3; 201 | bytes hash = 4; 202 | uint64 executed_transaction_count = 5; 203 | uint64 starting_transaction_index = 6; // added in v1.18, for solana 1.17 value is always 0 204 | } 205 | 206 | message SubscribeUpdatePing {} 207 | 208 | message SubscribeUpdatePong { 209 | int32 id = 1; 210 | } 211 | 212 | // non-streaming methods 213 | 214 | message PingRequest { 215 | int32 count = 1; 216 | } 217 | 218 | message PongResponse { 219 | int32 count = 1; 220 | } 221 | 222 | message GetLatestBlockhashRequest { 223 | optional CommitmentLevel commitment = 1; 224 | } 225 | 226 | message GetLatestBlockhashResponse { 227 | uint64 slot = 1; 228 | string blockhash = 2; 229 | uint64 last_valid_block_height = 3; 230 | } 231 | 232 | message GetBlockHeightRequest { 233 | optional CommitmentLevel commitment = 1; 234 | } 235 | 236 | message GetBlockHeightResponse { 237 | uint64 block_height = 1; 238 | } 239 | 240 | message GetSlotRequest { 241 | optional CommitmentLevel commitment = 1; 242 | } 243 | 244 | message GetSlotResponse { 245 | uint64 slot = 1; 246 | } 247 | 248 | message GetVersionRequest {} 249 | 250 | message GetVersionResponse { 251 | string version = 1; 252 | } 253 | 254 | message IsBlockhashValidRequest { 255 | string blockhash = 1; 256 | optional CommitmentLevel commitment = 2; 257 | } 258 | 259 | message IsBlockhashValidResponse { 260 | uint64 slot = 1; 261 | bool valid = 2; 262 | } -------------------------------------------------------------------------------- /learning-examples/bonding-curve-progress/get_bonding_curve_status.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module for checking the status of a token's bonding curve on the Solana network using 3 | the Pump.fun program. It allows querying the bonding curve state and completion status. 
4 | """ 5 | 6 | import argparse 7 | import asyncio 8 | import os 9 | import struct 10 | from typing import Final 11 | 12 | from construct import Bytes, Flag, Int64ul, Struct 13 | from dotenv import load_dotenv 14 | from solana.rpc.async_api import AsyncClient 15 | from solders.pubkey import Pubkey 16 | 17 | load_dotenv() 18 | 19 | RPC_ENDPOINT = os.environ.get("SOLANA_NODE_RPC_ENDPOINT") 20 | 21 | # Change to token you want to query 22 | TOKEN_MINT = "..." 23 | 24 | # Constants 25 | PUMP_PROGRAM_ID: Final[Pubkey] = Pubkey.from_string( 26 | "6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P" 27 | ) 28 | EXPECTED_DISCRIMINATOR: Final[bytes] = struct.pack(" None: 69 | """Parse bonding curve data.""" 70 | if data[:8] != EXPECTED_DISCRIMINATOR: 71 | raise ValueError("Invalid curve state discriminator") 72 | 73 | total_length = len(data) 74 | 75 | if total_length == 81: # V2: Creator only 76 | parsed = self._STRUCT_V2.parse(data[8:]) 77 | self.__dict__.update(parsed) 78 | # Convert raw bytes to Pubkey for creator field 79 | self.creator = Pubkey.from_bytes(self.creator) 80 | self.is_mayhem_mode = False 81 | 82 | elif total_length >= 82: # V3: Creator + mayhem mode 83 | parsed = self._STRUCT_V3.parse(data[8:]) 84 | self.__dict__.update(parsed) 85 | # Convert raw bytes to Pubkey for creator field 86 | self.creator = Pubkey.from_bytes(self.creator) 87 | 88 | else: 89 | raise ValueError(f"Unexpected bonding curve size: {total_length} bytes") 90 | 91 | 92 | def get_bonding_curve_address(mint: Pubkey, program_id: Pubkey) -> tuple[Pubkey, int]: 93 | """ 94 | Derives the associated bonding curve address for a given mint. 
95 | 96 | Args: 97 | mint: The token mint address 98 | program_id: The program ID for the bonding curve 99 | 100 | Returns: 101 | Tuple of (bonding curve address, bump seed) 102 | """ 103 | return Pubkey.find_program_address([b"bonding-curve", bytes(mint)], program_id) 104 | 105 | 106 | async def get_bonding_curve_state( 107 | conn: AsyncClient, curve_address: Pubkey 108 | ) -> BondingCurveState: 109 | """ 110 | Fetches and validates the state of a bonding curve account. 111 | 112 | Args: 113 | conn: AsyncClient connection to Solana RPC 114 | curve_address: Address of the bonding curve account 115 | 116 | Returns: 117 | BondingCurveState object containing parsed account data 118 | 119 | Raises: 120 | ValueError: If account data is invalid or missing 121 | """ 122 | response = await conn.get_account_info(curve_address, encoding="base64") 123 | if not response.value or not response.value.data: 124 | raise ValueError("Invalid curve state: No data") 125 | 126 | data = response.value.data 127 | if data[:8] != EXPECTED_DISCRIMINATOR: 128 | raise ValueError("Invalid curve state discriminator") 129 | 130 | return BondingCurveState(data) 131 | 132 | 133 | async def check_token_status(mint_address: str) -> None: 134 | """ 135 | Checks and prints the status of a token and its bonding curve. 
136 | 137 | Args: 138 | mint_address: The token mint address as a string 139 | """ 140 | try: 141 | mint = Pubkey.from_string(mint_address) 142 | bonding_curve_address, bump = get_bonding_curve_address(mint, PUMP_PROGRAM_ID) 143 | 144 | print("\nToken status:") 145 | print("-" * 50) 146 | print(f"Token mint: {mint}") 147 | print(f"Bonding curve: {bonding_curve_address}") 148 | if bump is not None: 149 | print(f"Bump seed: {bump}") 150 | print("-" * 50) 151 | 152 | # Check completion status 153 | async with AsyncClient(RPC_ENDPOINT) as client: 154 | try: 155 | curve_state = await get_bonding_curve_state( 156 | client, bonding_curve_address 157 | ) 158 | 159 | print("\nBonding curve status:") 160 | print("-" * 50) 161 | print(f"Creator: {curve_state.creator}") 162 | print( 163 | f"Mayhem Mode: {'✅ Enabled' if curve_state.is_mayhem_mode else '❌ Disabled'}" 164 | ) 165 | print( 166 | f"Completed: {'✅ Migrated' if curve_state.complete else '❌ Bonding curve'}" 167 | ) 168 | 169 | print("\nBonding curve reserves:") 170 | print(f"Virtual Token: {curve_state.virtual_token_reserves:,}") 171 | print( 172 | f"Virtual SOL: {curve_state.virtual_sol_reserves:,} lamports" 173 | ) 174 | print(f"Real Token: {curve_state.real_token_reserves:,}") 175 | print( 176 | f"Real SOL: {curve_state.real_sol_reserves:,} lamports" 177 | ) 178 | print(f"Total Supply: {curve_state.token_total_supply:,}") 179 | 180 | if curve_state.complete: 181 | print( 182 | "\nNote: This bonding curve has completed and liquidity has been migrated to PumpSwap." 
183 | ) 184 | print("-" * 50) 185 | 186 | except ValueError as e: 187 | print(f"\nError accessing bonding curve: {e}") 188 | 189 | except ValueError as e: 190 | print(f"\nError: Invalid address format - {e}") 191 | except Exception as e: 192 | print(f"\nUnexpected error: {e}") 193 | 194 | 195 | def main() -> None: 196 | """Main entry point for the token status checker.""" 197 | parser = argparse.ArgumentParser(description="Check token bonding curve status") 198 | parser.add_argument( 199 | "mint_address", nargs="?", help="The token mint address", default=TOKEN_MINT 200 | ) 201 | args = parser.parse_args() 202 | 203 | asyncio.run(check_token_status(args.mint_address)) 204 | 205 | 206 | if __name__ == "__main__": 207 | main() 208 | -------------------------------------------------------------------------------- /.cursor/rules/trading-bot.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | alwaysApply: true 3 | --- 4 | 5 | # Trading Bot Specific Rules 6 | 7 | ## Bot Configuration Standards 8 | 9 | ### YAML Configuration Structure 10 | Maintain consistent structure across all bot configuration files: 11 | 12 | ```yaml 13 | # Bot identification 14 | name: "bot-sniper-1" 15 | platform: "pump_fun" # or "lets_bonk" 16 | enabled: true # Allow disabling without removing config 17 | separate_process: true # Run in separate process for isolation 18 | 19 | # Environment and connection 20 | env_file: ".env" 21 | rpc_endpoint: "${SOLANA_NODE_RPC_ENDPOINT}" 22 | wss_endpoint: "${SOLANA_NODE_WSS_ENDPOINT}" 23 | private_key: "${SOLANA_PRIVATE_KEY}" 24 | 25 | # Platform-specific configurations 26 | geyser: # For faster data streams 27 | endpoint: "${GEYSER_ENDPOINT}" 28 | api_token: "${GEYSER_API_TOKEN}" 29 | auth_type: "x-token" 30 | 31 | # Trading parameters 32 | trade: 33 | buy_amount: 0.0001 # SOL amount 34 | buy_slippage: 0.3 # 30% 35 | sell_slippage: 0.3 36 | exit_strategy: "time_based" # "tp_sl", "manual" 37 | extreme_fast_mode: 
true # Skip validations for speed 38 | ``` 39 | 40 | ### Environment Variable Usage 41 | - Use `${VARIABLE_NAME}` syntax for environment interpolation 42 | - Never hardcode sensitive values in YAML files 43 | - Validate all required environment variables on startup 44 | - Provide clear error messages for missing variables 45 | 46 | ## Trading Logic Rules 47 | 48 | ### Transaction Handling 49 | - Always use priority fees for competitive transaction inclusion 50 | - Implement retry mechanisms with exponential backoff 51 | - Cache recent blockhash to avoid repeated RPC calls 52 | - Use compute unit limits to prevent transaction failures 53 | 54 | ```python 55 | # Good transaction building pattern 56 | instructions = [ 57 | set_compute_unit_limit(300_000), 58 | set_compute_unit_price(priority_fee), 59 | # ... trading instructions 60 | ] 61 | ``` 62 | 63 | ### Risk Management 64 | - Implement position size limits 65 | - Use slippage protection on all trades 66 | - Set maximum hold times to prevent stuck positions 67 | - Validate token data before trading 68 | 69 | ```python 70 | # Risk validation example 71 | if token_age > self.max_token_age: 72 | logger.warning(f"Token {mint} too old ({token_age}s), skipping") 73 | return False 74 | 75 | if buy_amount > self.max_position_size: 76 | logger.error(f"Buy amount {buy_amount} exceeds max position size") 77 | return False 78 | ``` 79 | 80 | ### Exit Strategies 81 | Implement multiple exit strategy types: 82 | 83 | 1. **Time-based**: Hold for fixed duration 84 | 2. **Take Profit/Stop Loss**: Price-based exits 85 | 3. 
**Manual**: No automatic selling 86 | 87 | ```python 88 | class ExitStrategy(Enum): 89 | TIME_BASED = "time_based" 90 | TP_SL = "tp_sl" 91 | MANUAL = "manual" 92 | ``` 93 | 94 | ## Platform Integration Rules 95 | 96 | ### Multi-Platform Support 97 | - Use platform enum for type safety 98 | - Implement platform-specific address providers 99 | - Abstract platform differences in universal components 100 | - Validate platform-listener combinations 101 | 102 | ```python 103 | # Platform validation 104 | if not validate_platform_listener_combination(platform, listener_type): 105 | supported = get_supported_listeners_for_platform(platform) 106 | raise ConfigurationError( 107 | f"Listener '{listener_type}' not supported for {platform.value}. " 108 | f"Supported: {supported}" 109 | ) 110 | ``` 111 | 112 | ### Listener Types 113 | Support multiple data source types: 114 | - **geyser**: Fastest, requires special endpoint 115 | - **logs**: WebSocket log subscription 116 | - **blocks**: Block subscription (not all providers support) 117 | - **pumpportal**: Third-party aggregator 118 | 119 | ## Performance Optimization 120 | 121 | ### Speed vs Accuracy Tradeoffs 122 | - **Extreme Fast Mode**: Skip validations and price checks for speed 123 | - **Normal Mode**: Full validation and price checks 124 | - **Marry Mode**: Only buy, never sell (accumulation strategy) 125 | - **YOLO Mode**: Continuous trading without cooldowns 126 | 127 | ### Caching Strategy 128 | ```python 129 | # Cache expensive operations 130 | self._cached_blockhash: Hash | None = None 131 | self._blockhash_lock = asyncio.Lock() 132 | 133 | # Background blockhash updater 134 | async def start_blockhash_updater(self, interval: float = 5.0): 135 | while True: 136 | try: 137 | blockhash = await self.get_latest_blockhash() 138 | async with self._blockhash_lock: 139 | self._cached_blockhash = blockhash 140 | except Exception as e: 141 | logger.exception(f"Failed to update blockhash: {e}") 142 | await 
asyncio.sleep(interval) 143 | ``` 144 | 145 | ## Monitoring and Logging 146 | 147 | ### Log File Management 148 | - Create timestamped log files per bot instance 149 | - Format: `{bot_name}_{timestamp}.log` 150 | - Store in `logs/` directory 151 | - Implement log rotation for long-running bots 152 | 153 | ### Trading Event Logging 154 | Log all significant events with context: 155 | 156 | ```python 157 | # Good logging examples 158 | logger.info(f"New token detected: {mint} by {creator}") 159 | logger.info(f"Buy transaction submitted: {signature}") 160 | logger.warning(f"Transaction failed, attempt {attempt}/{max_attempts}") 161 | logger.error(f"Platform {platform.value} not supported") 162 | ``` 163 | 164 | ### Performance Metrics 165 | Track key performance indicators: 166 | - Token detection latency 167 | - Transaction confirmation time 168 | - Success/failure rates 169 | - Slippage and fill rates 170 | 171 | ## Security and Safety Rules 172 | 173 | ### Private Key Management 174 | - Store private keys only in environment variables 175 | - Never log or expose private keys 176 | - Use separate wallets for testing vs production 177 | - Implement wallet balance checks before trading 178 | 179 | ### Input Validation 180 | ```python 181 | # Validate all external inputs 182 | def validate_mint_address(mint_str: str) -> bool: 183 | try: 184 | mint = Pubkey.from_string(mint_str) 185 | return len(str(mint)) == 44 # Valid Solana address length 186 | except Exception: 187 | return False 188 | ``` 189 | 190 | ### Error Recovery 191 | - Implement graceful shutdown on critical errors 192 | - Provide cleanup mechanisms for stuck positions 193 | - Support manual intervention modes 194 | - Log all errors with sufficient context for debugging 195 | 196 | ## Testing and Validation 197 | 198 | ### Learning Examples Usage 199 | Use learning examples for: 200 | - Testing new features before integration 201 | - Validating platform-specific functionality 202 | - Performance 
benchmarking 203 | - Educational purposes for new developers 204 | 205 | ```bash 206 | # Test platform connectivity 207 | uv run learning-examples/fetch_price.py 208 | 209 | # Validate bonding curve calculations 210 | uv run learning-examples/compute_associated_bonding_curve.py 211 | 212 | # Compare listener performance 213 | uv run learning-examples/listen-new-tokens/compare_listeners.py 214 | ``` 215 | 216 | ### Configuration Testing 217 | - Validate YAML syntax and required fields 218 | - Test environment variable interpolation 219 | - Verify platform-listener compatibility 220 | - Check wallet connectivity and balance 221 | 222 | ## Deployment Guidelines 223 | 224 | ### Production Checklist 225 | 1. Test configuration with learning examples 226 | 2. Verify environment variables are set 227 | 3. Check wallet has sufficient SOL for gas fees 228 | 4. Enable separate processes for isolation 229 | 5. Monitor logs for successful startup 230 | 6. Implement monitoring and alerting 231 | 232 | ### Multi-Bot Management 233 | - Use descriptive bot names in configurations 234 | - Separate log files per bot instance 235 | - Monitor resource usage across all bots 236 | - Implement centralized configuration management 237 | 238 | ```python 239 | # Bot process management 240 | if cfg.get("separate_process", False): 241 | p = multiprocessing.Process( 242 | target=run_bot_process, 243 | args=(str(file),), 244 | name=f"bot-{bot_name}" 245 | ) 246 | p.start() 247 | processes.append(p) 248 | ``` -------------------------------------------------------------------------------- /.kiro/steering/trading-bot.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | inclusion: always 3 | --- 4 | 5 | # Trading Bot Specific Rules 6 | 7 | ## Bot Configuration Standards 8 | 9 | ### YAML Configuration Structure 10 | Maintain consistent structure across all bot configuration files: 11 | 12 | ```yaml 13 | # Bot identification 14 | name: "bot-sniper-1" 15 | 
platform: "pump_fun" # or "lets_bonk" 16 | enabled: true # Allow disabling without removing config 17 | separate_process: true # Run in separate process for isolation 18 | 19 | # Environment and connection 20 | env_file: ".env" 21 | rpc_endpoint: "${SOLANA_NODE_RPC_ENDPOINT}" 22 | wss_endpoint: "${SOLANA_NODE_WSS_ENDPOINT}" 23 | private_key: "${SOLANA_PRIVATE_KEY}" 24 | 25 | # Platform-specific configurations 26 | geyser: # For faster data streams 27 | endpoint: "${GEYSER_ENDPOINT}" 28 | api_token: "${GEYSER_API_TOKEN}" 29 | auth_type: "x-token" 30 | 31 | # Trading parameters 32 | trade: 33 | buy_amount: 0.0001 # SOL amount 34 | buy_slippage: 0.3 # 30% 35 | sell_slippage: 0.3 36 | exit_strategy: "time_based" # "tp_sl", "manual" 37 | extreme_fast_mode: true # Skip validations for speed 38 | ``` 39 | 40 | ### Environment Variable Usage 41 | - Use `${VARIABLE_NAME}` syntax for environment interpolation 42 | - Never hardcode sensitive values in YAML files 43 | - Validate all required environment variables on startup 44 | - Provide clear error messages for missing variables 45 | 46 | ## Trading Logic Rules 47 | 48 | ### Transaction Handling 49 | - Always use priority fees for competitive transaction inclusion 50 | - Implement retry mechanisms with exponential backoff 51 | - Cache recent blockhash to avoid repeated RPC calls 52 | - Use compute unit limits to prevent transaction failures 53 | 54 | ```python 55 | # Good transaction building pattern 56 | instructions = [ 57 | set_compute_unit_limit(300_000), 58 | set_compute_unit_price(priority_fee), 59 | # ... 
trading instructions 60 | ] 61 | ``` 62 | 63 | ### Risk Management 64 | - Implement position size limits 65 | - Use slippage protection on all trades 66 | - Set maximum hold times to prevent stuck positions 67 | - Validate token data before trading 68 | 69 | ```python 70 | # Risk validation example 71 | if token_age > self.max_token_age: 72 | logger.warning(f"Token {mint} too old ({token_age}s), skipping") 73 | return False 74 | 75 | if buy_amount > self.max_position_size: 76 | logger.error(f"Buy amount {buy_amount} exceeds max position size") 77 | return False 78 | ``` 79 | 80 | ### Exit Strategies 81 | Implement multiple exit strategy types: 82 | 83 | 1. **Time-based**: Hold for fixed duration 84 | 2. **Take Profit/Stop Loss**: Price-based exits 85 | 3. **Manual**: No automatic selling 86 | 87 | ```python 88 | class ExitStrategy(Enum): 89 | TIME_BASED = "time_based" 90 | TP_SL = "tp_sl" 91 | MANUAL = "manual" 92 | ``` 93 | 94 | ## Platform Integration Rules 95 | 96 | ### Multi-Platform Support 97 | - Use platform enum for type safety 98 | - Implement platform-specific address providers 99 | - Abstract platform differences in universal components 100 | - Validate platform-listener combinations 101 | 102 | ```python 103 | # Platform validation 104 | if not validate_platform_listener_combination(platform, listener_type): 105 | supported = get_supported_listeners_for_platform(platform) 106 | raise ConfigurationError( 107 | f"Listener '{listener_type}' not supported for {platform.value}. 
" 108 | f"Supported: {supported}" 109 | ) 110 | ``` 111 | 112 | ### Listener Types 113 | Support multiple data source types: 114 | - **geyser**: Fastest, requires special endpoint 115 | - **logs**: WebSocket log subscription 116 | - **blocks**: Block subscription (not all providers support) 117 | - **pumpportal**: Third-party aggregator 118 | 119 | ## Performance Optimization 120 | 121 | ### Speed vs Accuracy Tradeoffs 122 | - **Extreme Fast Mode**: Skip validations and price checks for speed 123 | - **Normal Mode**: Full validation and price checks 124 | - **Marry Mode**: Only buy, never sell (accumulation strategy) 125 | - **YOLO Mode**: Continuous trading without cooldowns 126 | 127 | ### Caching Strategy 128 | ```python 129 | # Cache expensive operations 130 | self._cached_blockhash: Hash | None = None 131 | self._blockhash_lock = asyncio.Lock() 132 | 133 | # Background blockhash updater 134 | async def start_blockhash_updater(self, interval: float = 5.0): 135 | while True: 136 | try: 137 | blockhash = await self.get_latest_blockhash() 138 | async with self._blockhash_lock: 139 | self._cached_blockhash = blockhash 140 | except Exception as e: 141 | logger.exception(f"Failed to update blockhash: {e}") 142 | await asyncio.sleep(interval) 143 | ``` 144 | 145 | ## Monitoring and Logging 146 | 147 | ### Log File Management 148 | - Create timestamped log files per bot instance 149 | - Format: `{bot_name}_{timestamp}.log` 150 | - Store in `logs/` directory 151 | - Implement log rotation for long-running bots 152 | 153 | ### Trading Event Logging 154 | Log all significant events with context: 155 | 156 | ```python 157 | # Good logging examples 158 | logger.info(f"New token detected: {mint} by {creator}") 159 | logger.info(f"Buy transaction submitted: {signature}") 160 | logger.warning(f"Transaction failed, attempt {attempt}/{max_attempts}") 161 | logger.error(f"Platform {platform.value} not supported") 162 | ``` 163 | 164 | ### Performance Metrics 165 | Track key 
performance indicators: 166 | - Token detection latency 167 | - Transaction confirmation time 168 | - Success/failure rates 169 | - Slippage and fill rates 170 | 171 | ## Security and Safety Rules 172 | 173 | ### Private Key Management 174 | - Store private keys only in environment variables 175 | - Never log or expose private keys 176 | - Use separate wallets for testing vs production 177 | - Implement wallet balance checks before trading 178 | 179 | ### Input Validation 180 | ```python 181 | # Validate all external inputs 182 | def validate_mint_address(mint_str: str) -> bool: 183 | try: 184 | mint = Pubkey.from_string(mint_str) 185 | return len(str(mint)) == 44 # Valid Solana address length 186 | except Exception: 187 | return False 188 | ``` 189 | 190 | ### Error Recovery 191 | - Implement graceful shutdown on critical errors 192 | - Provide cleanup mechanisms for stuck positions 193 | - Support manual intervention modes 194 | - Log all errors with sufficient context for debugging 195 | 196 | ## Testing and Validation 197 | 198 | ### Learning Examples Usage 199 | Use learning examples for: 200 | - Testing new features before integration 201 | - Validating platform-specific functionality 202 | - Performance benchmarking 203 | - Educational purposes for new developers 204 | 205 | ```bash 206 | # Test platform connectivity 207 | uv run learning-examples/fetch_price.py 208 | 209 | # Validate bonding curve calculations 210 | uv run learning-examples/compute_associated_bonding_curve.py 211 | 212 | # Compare listener performance 213 | uv run learning-examples/listen-new-tokens/compare_listeners.py 214 | ``` 215 | 216 | ### Configuration Testing 217 | - Validate YAML syntax and required fields 218 | - Test environment variable interpolation 219 | - Verify platform-listener compatibility 220 | - Check wallet connectivity and balance 221 | 222 | ## Deployment Guidelines 223 | 224 | ### Production Checklist 225 | 1. Test configuration with learning examples 226 | 2. 
Verify environment variables are set 227 | 3. Check wallet has sufficient SOL for gas fees 228 | 4. Enable separate processes for isolation 229 | 5. Monitor logs for successful startup 230 | 6. Implement monitoring and alerting 231 | 232 | ### Multi-Bot Management 233 | - Use descriptive bot names in configurations 234 | - Separate log files per bot instance 235 | - Monitor resource usage across all bots 236 | - Implement centralized configuration management 237 | 238 | ```python 239 | # Bot process management 240 | if cfg.get("separate_process", False): 241 | p = multiprocessing.Process( 242 | target=run_bot_process, 243 | args=(str(file),), 244 | name=f"bot-{bot_name}" 245 | ) 246 | p.start() 247 | processes.append(p) 248 | ``` --------------------------------------------------------------------------------