├── .env.example ├── .github └── workflows │ ├── auto-assign.yml │ └── spam-detection.yml ├── .gitignore ├── LICENSE ├── MAINTAINERS.md ├── README.md ├── bots ├── bot-sniper-1-geyser.yaml ├── bot-sniper-2-logs.yaml └── bot-sniper-3-blocks.yaml ├── idl ├── pump_fun_idl.json ├── pump_swap_idl.json └── raydium_amm_idl.json ├── learning-examples ├── blockSubscribe-transactions │ └── raw_create_tx_from_blockSubscribe.json ├── blockSubscribe_extract_transactions.py ├── bonding-curve-progress │ ├── get_bonding_curve_status.py │ ├── get_graduating_tokens.py │ └── poll_bonding_curve_progress.py ├── calculate_discriminator.py ├── cleanup_accounts.py ├── compute_associated_bonding_curve.py ├── decode_from_blockSubscribe.py ├── decode_from_getAccountInfo.py ├── decode_from_getTransaction.py ├── decoded_buy_tx_from_getTransaction.json ├── decoded_create_tx_from_getTransaction.json ├── fetch_price.py ├── listen-migrations │ ├── compare_migration_listeners.py │ ├── listen_blocksubscribe_old_raydium.py │ ├── listen_logsubscribe.py │ └── listen_programsubscribe.py ├── listen-new-tokens │ ├── compare_listeners.py │ ├── generated │ │ ├── __init__.py │ │ ├── geyser_pb2.py │ │ ├── geyser_pb2.pyi │ │ ├── geyser_pb2_grpc.py │ │ ├── solana_storage_pb2.py │ │ ├── solana_storage_pb2.pyi │ │ └── solana_storage_pb2_grpc.py │ ├── listen_blocksubscribe.py │ ├── listen_geyser.py │ ├── listen_logsubscribe+abc.py │ ├── listen_logsubscribe.py │ ├── listen_pumpportal.py │ └── proto │ │ ├── geyser.proto │ │ └── solana-storage.proto ├── manual_buy.py ├── manual_sell.py ├── pumpswap │ ├── get_pumpswap_pools.py │ ├── manual_buy_pumpswap.py │ └── manual_sell_pumpswap.py ├── raw_bondingCurve_from_getAccountInfo.json ├── raw_buy_tx_from_getTransaction.json └── raw_create_tx_from_getTransaction.json ├── pyproject.toml ├── src ├── __init__.py ├── bot_runner.py ├── cleanup │ ├── __init__.py │ ├── manager.py │ └── modes.py ├── config_loader.py ├── core │ ├── __init__.py │ ├── client.py │ ├── curve.py │ ├── 
priority_fee │ │ ├── __init__.py │ │ ├── dynamic_fee.py │ │ ├── fixed_fee.py │ │ └── manager.py │ ├── pubkeys.py │ └── wallet.py ├── geyser │ ├── generated │ │ ├── geyser_pb2.py │ │ ├── geyser_pb2.pyi │ │ ├── geyser_pb2_grpc.py │ │ ├── solana_storage_pb2.py │ │ ├── solana_storage_pb2.pyi │ │ └── solana_storage_pb2_grpc.py │ └── proto │ │ ├── geyser.proto │ │ └── solana-storage.proto ├── monitoring │ ├── __init__.py │ ├── base_listener.py │ ├── block_event_processor.py │ ├── block_listener.py │ ├── geyser_event_processor.py │ ├── geyser_listener.py │ ├── logs_event_processor.py │ └── logs_listener.py ├── trading │ ├── __init__.py │ ├── base.py │ ├── buyer.py │ ├── seller.py │ └── trader.py └── utils │ ├── __init__.py │ └── logger.py ├── tests ├── compare_listeners.py ├── test_block_listener.py ├── test_geyser_listener.py └── test_logs_listener.py └── trades └── trades.log /.env.example: -------------------------------------------------------------------------------- 1 | SOLANA_NODE_RPC_ENDPOINT=... 2 | SOLANA_NODE_WSS_ENDPOINT=... 
3 | 4 | GEYSER_ENDPOINT= 5 | GEYSER_API_TOKEN= 6 | GEYSER_AUTH_TYPE=x-token or basic 7 | 8 | SOLANA_PRIVATE_KEY= -------------------------------------------------------------------------------- /.github/workflows/auto-assign.yml: -------------------------------------------------------------------------------- 1 | name: Auto Assign Issues 2 | 3 | permissions: 4 | issues: write 5 | 6 | on: 7 | issues: 8 | types: [opened] 9 | 10 | jobs: 11 | assign_issue: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Assign issue to maintainer 15 | uses: actions/github-script@v6 16 | with: 17 | github-token: ${{ secrets.GITHUB_TOKEN }} 18 | script: | 19 | await github.rest.issues.addAssignees({ 20 | owner: context.repo.owner, 21 | repo: context.repo.repo, 22 | issue_number: context.issue.number, 23 | assignees: ['akegaviar'] 24 | }); -------------------------------------------------------------------------------- /.github/workflows/spam-detection.yml: -------------------------------------------------------------------------------- 1 | name: Suspicious Comment Detection 2 | 3 | on: 4 | issue_comment: 5 | types: [created] 6 | pull_request_review_comment: 7 | types: [created] 8 | 9 | jobs: 10 | check_comment: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Check for suspicious patterns 14 | uses: actions/github-script@v6 15 | with: 16 | github-token: ${{ secrets.GITHUB_TOKEN }} 17 | script: | 18 | try { 19 | const comment = context.payload.comment; 20 | const body = comment.body.toLowerCase(); 21 | const author = comment.user.login; 22 | 23 | // Suspicious patterns 24 | const suspiciousPatterns = [ 25 | 'support team', 26 | 'customer service', 27 | 'telegram', 28 | 'whatsapp', 29 | 'contact us', 30 | 'click here', 31 | 'support group', 32 | 't.me/', 33 | 'wa.me/', 34 | 'support chat', 35 | 'live chat', 36 | 'support ticket', 37 | 'ticket id', 38 | 'live support', 39 | 'support line', 40 | 'support agent', 41 | 'support network', 42 | 'dedicated support', 43 | 'personalized 
assistance', 44 | 'opened for you', 45 | 'kindly talk to', 46 | 'we apologize', 47 | 'live chat with an agent', 48 | 'chat button', 49 | 'dapp portal', 50 | 'decentralized dapp', 51 | 'access the portal', 52 | 'report your request', 53 | 'start a conversation', 54 | 'click the chat', 55 | 'for assistance', 56 | 'reach out to', 57 | 'through the chat', 58 | 'portal', 59 | 'help center', 60 | 'ticket', 61 | 'this will be review', 62 | 'bringing this to our notice', 63 | 'initiate a chat', 64 | 'regards', 65 | 'hello @', 66 | 'thanks for bringing', 67 | ]; 68 | 69 | // Add pattern weight scoring 70 | const patternWeights = { 71 | 'ticket id': 2, 72 | 'support team': 2, 73 | 'live support': 2, 74 | 'help center': 2, 75 | // Regular patterns have weight of 1 76 | }; 77 | 78 | // Calculate spam score 79 | let spamScore = 0; 80 | const foundPatterns = suspiciousPatterns.filter(pattern => { 81 | if (body.includes(pattern)) { 82 | spamScore += patternWeights[pattern] || 1; 83 | return true; 84 | } 85 | return false; 86 | }); 87 | 88 | // Check for external links (excluding common legitimate domains) 89 | const hasExternalLinks = body.includes('http') || body.includes('www'); 90 | const hasGithubLinks = body.includes('github.com'); 91 | const suspiciousLinks = hasExternalLinks && !hasGithubLinks; 92 | 93 | // Trigger on either multiple patterns or high spam score 94 | if (foundPatterns.length > 2 || spamScore >= 3) { 95 | try { 96 | // Create a warning comment 97 | await github.rest.issues.createComment({ 98 | owner: context.repo.owner, 99 | repo: context.repo.repo, 100 | issue_number: context.payload.issue ? 
context.payload.issue.number : context.payload.pull_request.number, 101 | body: warningMessage 102 | }); 103 | } catch (e) { 104 | console.log('Failed to create comment:', e); 105 | } 106 | 107 | try { 108 | // Add 'potential-scam' label 109 | await github.rest.issues.addLabels({ 110 | owner: context.repo.owner, 111 | repo: context.repo.repo, 112 | issue_number: context.payload.issue ? context.payload.issue.number : context.payload.pull_request.number, 113 | labels: ['potential-scam'] 114 | }); 115 | } catch (e) { 116 | console.log('Failed to add label:', e); 117 | } 118 | } 119 | } catch (e) { 120 | console.log('Workflow error:', e); 121 | // Still mark as failure but with more context 122 | core.setFailed(`Workflow failed: ${e.message}`); 123 | } 124 | 125 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | trades/* 2 | 3 | .vscode 4 | .pylintrc 5 | .ruff_cache 6 | 7 | # Byte-compiled / optimized / DLL files 8 | __pycache__/ 9 | *.py[cod] 10 | *$py.class 11 | 12 | # C extensions 13 | *.so 14 | 15 | # Distribution / packaging 16 | .Python 17 | build/ 18 | develop-eggs/ 19 | dist/ 20 | downloads/ 21 | eggs/ 22 | .eggs/ 23 | lib/ 24 | lib64/ 25 | parts/ 26 | sdist/ 27 | var/ 28 | wheels/ 29 | share/python-wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | MANIFEST 34 | 35 | # PyInstaller 36 | # Usually these files are written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .nox/ 49 | .coverage 50 | .coverage.* 51 | .cache 52 | nosetests.xml 53 | coverage.xml 54 | *.cover 55 | *.py,cover 56 | .hypothesis/ 57 | .pytest_cache/ 58 | cover/ 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | .pybuilder/ 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # IPython 88 | profile_default/ 89 | ipython_config.py 90 | 91 | # pyenv 92 | # For a library or package, you might want to ignore these files since the code is 93 | # intended to run in multiple environments; otherwise, check them in: 94 | # .python-version 95 | 96 | # pipenv 97 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 100 | # install all needed dependencies. 101 | #Pipfile.lock 102 | 103 | # poetry 104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 105 | # This is especially recommended for binary packages to ensure reproducibility, and is more 106 | # commonly ignored for libraries. 107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 108 | #poetry.lock 109 | 110 | # pdm 111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
112 | #pdm.lock 113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 114 | # in version control. 115 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 116 | .pdm.toml 117 | .pdm-python 118 | .pdm-build/ 119 | 120 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 121 | __pypackages__/ 122 | 123 | # Celery stuff 124 | celerybeat-schedule 125 | celerybeat.pid 126 | 127 | # SageMath parsed files 128 | *.sage.py 129 | 130 | # Environments 131 | .env 132 | .venv 133 | env/ 134 | venv/ 135 | ENV/ 136 | env.bak/ 137 | venv.bak/ 138 | 139 | # Spyder project settings 140 | .spyderproject 141 | .spyproject 142 | 143 | # Rope project settings 144 | .ropeproject 145 | 146 | # mkdocs documentation 147 | /site 148 | 149 | # mypy 150 | .mypy_cache/ 151 | .dmypy.json 152 | dmypy.json 153 | 154 | # Pyre type checker 155 | .pyre/ 156 | 157 | # pytype static type analyzer 158 | .pytype/ 159 | 160 | # Cython debug symbols 161 | cython_debug/ 162 | 163 | # PyCharm 164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 165 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 166 | # and can be added to the global gitignore or merged into this file. For a more nuclear 167 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
168 | #.idea/ 169 | trades/trades.log 170 | -------------------------------------------------------------------------------- /MAINTAINERS.md: -------------------------------------------------------------------------------- 1 | The maintainers are: 2 | * [@akegaviar](https://github.com/akegaviar) (primary contact, issue manager) 3 | * [@smypmsa](https://github.com/smypmsa) 4 | -------------------------------------------------------------------------------- /bots/bot-sniper-1-geyser.yaml: -------------------------------------------------------------------------------- 1 | # This file defines comprehensive parameters and settings for the trading bot. 2 | # Carefully review and adjust values to match your trading strategy and risk tolerance. 3 | 4 | # Bot identification and connection settings 5 | name: "bot-sniper-1" 6 | env_file: ".env" 7 | rpc_endpoint: "${SOLANA_NODE_RPC_ENDPOINT}" 8 | wss_endpoint: "${SOLANA_NODE_WSS_ENDPOINT}" 9 | private_key: "${SOLANA_PRIVATE_KEY}" 10 | 11 | enabled: false # You can turn off the bot w/o removing its config 12 | separate_process: true 13 | 14 | # Geyser configuration (fastest method for getting updates) 15 | geyser: 16 | endpoint: "${GEYSER_ENDPOINT}" 17 | api_token: "${GEYSER_API_TOKEN}" 18 | auth_type: "x-token" # or "basic" 19 | 20 | # Trading parameters 21 | # Control trade execution: amount of SOL per trade and acceptable price deviation 22 | trade: 23 | buy_amount: 0.0001 # Amount of SOL to spend when buying (in SOL) 24 | buy_slippage: 0.3 # Maximum acceptable price deviation (0.3 = 30%) 25 | sell_slippage: 0.3 26 | 27 | # EXTREME FAST mode configuration 28 | # When enabled, skips waiting for the bonding curve to stabilize and RPC price check. 29 | # The bot buys the specified number of tokens directly, making the process faster but less precise. 
30 | extreme_fast_mode: true 31 | extreme_fast_token_amount: 20 # Amount of tokens to buy 32 | 33 | # Priority fee configuration 34 | # Manage transaction speed and cost on the Solana network. 35 | # Note: dynamic mode requires an additional RPC call, which slows down the buying process. 36 | priority_fees: 37 | enable_dynamic: false # Use latest transactions to estimate required fee (getRecentPrioritizationFees) 38 | enable_fixed: true # Use fixed amount below 39 | fixed_amount: 1_000_000 # Base fee in microlamports 40 | extra_percentage: 0.0 # Percentage increase on riority fee regardless of the calculation method (0.1 = 10%) 41 | hard_cap: 1_000_000 # Maximum allowable fee in microlamports to prevent excessive spending 42 | 43 | # Filters for token selection 44 | filters: 45 | match_string: null # Only process tokens with this string in name/symbol 46 | bro_address: null # Only trade tokens created by this user address 47 | listener_type: "geyser" # Method for detecting new tokens: "logs", "blocks", or "geyser" 48 | max_token_age: 0.001 # Maximum token age in seconds for processing 49 | marry_mode: false # Only buy tokens, skip selling 50 | yolo_mode: false # Continuously trade tokens 51 | 52 | # Retry and timeout settings 53 | retries: 54 | max_attempts: 1 # Number of attempts for transaction submission 55 | wait_after_creation: 15 # Seconds to wait after token creation (only if EXTREME FAST is disabled) 56 | wait_after_buy: 15 # Holding period after buy transaction 57 | wait_before_new_token: 15 # Pause between token trades 58 | 59 | # Token and account management 60 | cleanup: 61 | # Cleanup mode determines when to manage token accounts. Options: 62 | # "disabled": no cleanup will occur. 63 | # "on_fail": only clean up if a buy transaction fails. 64 | # "after_sell": clean up after selling. 65 | # "post_session": clean up all empty accounts after a trading session ends. 
66 | mode: "post_session" 67 | force_close_with_burn: false # Force burning remaining tokens before closing account 68 | with_priority_fee: false # Use priority fees for cleanup transactions 69 | 70 | # Node provider configuration (not implemented) 71 | node: 72 | max_rps: 25 # Maximum requests per second 73 | -------------------------------------------------------------------------------- /bots/bot-sniper-2-logs.yaml: -------------------------------------------------------------------------------- 1 | # This file defines comprehensive parameters and settings for the trading bot. 2 | # Carefully review and adjust values to match your trading strategy and risk tolerance. 3 | 4 | # Bot identification and connection settings 5 | name: "bot-sniper-2" 6 | env_file: ".env" 7 | rpc_endpoint: "${SOLANA_NODE_RPC_ENDPOINT}" 8 | wss_endpoint: "${SOLANA_NODE_WSS_ENDPOINT}" 9 | private_key: "${SOLANA_PRIVATE_KEY}" 10 | 11 | enabled: false # You can turn off the bot w/o removing its config 12 | separate_process: true 13 | 14 | # Geyser configuration (fastest method for getting updates) 15 | geyser: 16 | endpoint: "${GEYSER_ENDPOINT}" 17 | api_token: "${GEYSER_API_TOKEN}" 18 | auth_type: "basic" # or "x-token" 19 | 20 | # Trading parameters 21 | # Control trade execution: amount of SOL per trade and acceptable price deviation 22 | trade: 23 | buy_amount: 0.0001 # Amount of SOL to spend when buying (in SOL) 24 | buy_slippage: 0.3 # Maximum acceptable price deviation (0.3 = 30%) 25 | sell_slippage: 0.3 26 | 27 | # EXTREME FAST mode configuration 28 | # When enabled, skips waiting for the bonding curve to stabilize and RPC price check. 29 | # The bot buys the specified number of tokens directly, making the process faster but less precise. 30 | extreme_fast_mode: true 31 | extreme_fast_token_amount: 20 # Amount of tokens to buy 32 | 33 | # Priority fee configuration 34 | # Manage transaction speed and cost on the Solana network. 
35 | # Note: dynamic mode requires an additional RPC call, which slows down the buying process. 36 | priority_fees: 37 | enable_dynamic: false # Use latest transactions to estimate required fee (getRecentPrioritizationFees) 38 | enable_fixed: true # Use fixed amount below 39 | fixed_amount: 200_000 # Base fee in microlamports 40 | extra_percentage: 0.0 # Percentage increase on riority fee regardless of the calculation method (0.1 = 10%) 41 | hard_cap: 200_000 # Maximum allowable fee in microlamports to prevent excessive spending 42 | 43 | # Filters for token selection 44 | filters: 45 | match_string: null # Only process tokens with this string in name/symbol 46 | bro_address: null # Only trade tokens created by this user address 47 | listener_type: "logs" # Method for detecting new tokens: "logs", "blocks", or "geyser" 48 | max_token_age: 0.001 # Maximum token age in seconds for processing 49 | marry_mode: false # Only buy tokens, skip selling 50 | yolo_mode: false # Continuously trade tokens 51 | 52 | # Retry and timeout settings 53 | retries: 54 | max_attempts: 1 # Number of attempts for transaction submission 55 | wait_after_creation: 15 # Seconds to wait after token creation (only if EXTREME FAST is disabled) 56 | wait_after_buy: 15 # Holding period after buy transaction 57 | wait_before_new_token: 15 # Pause between token trades 58 | 59 | # Token and account management 60 | cleanup: 61 | # Cleanup mode determines when to manage token accounts. Options: 62 | # "disabled": no cleanup will occur. 63 | # "on_fail": only clean up if a buy transaction fails. 64 | # "after_sell": clean up after selling. 65 | # "post_session": clean up all empty accounts after a trading session ends. 
66 | mode: "post_session" 67 | force_close_with_burn: false # Force burning remaining tokens before closing account 68 | with_priority_fee: false # Use priority fees for cleanup transactions 69 | 70 | # Node provider configuration (not implemented) 71 | node: 72 | max_rps: 25 # Maximum requests per second 73 | -------------------------------------------------------------------------------- /bots/bot-sniper-3-blocks.yaml: -------------------------------------------------------------------------------- 1 | # This file defines comprehensive parameters and settings for the trading bot. 2 | # Carefully review and adjust values to match your trading strategy and risk tolerance. 3 | 4 | # Bot identification and connection settings 5 | name: "bot-sniper-2" 6 | env_file: ".env" 7 | rpc_endpoint: "${SOLANA_NODE_RPC_ENDPOINT}" 8 | wss_endpoint: "${SOLANA_NODE_WSS_ENDPOINT}" 9 | private_key: "${SOLANA_PRIVATE_KEY}" 10 | 11 | enabled: true # You can turn off the bot w/o removing its config 12 | separate_process: true 13 | 14 | # Geyser configuration (fastest method for getting updates) 15 | geyser: 16 | endpoint: "${GEYSER_ENDPOINT}" 17 | api_token: "${GEYSER_API_TOKEN}" 18 | auth_type: "basic" # or "x-token" 19 | 20 | # Trading parameters 21 | # Control trade execution: amount of SOL per trade and acceptable price deviation 22 | trade: 23 | buy_amount: 0.0001 # Amount of SOL to spend when buying (in SOL) 24 | buy_slippage: 0.3 # Maximum acceptable price deviation (0.3 = 30%) 25 | sell_slippage: 0.3 26 | 27 | # EXTREME FAST mode configuration 28 | # When enabled, skips waiting for the bonding curve to stabilize and RPC price check. 29 | # The bot buys the specified number of tokens directly, making the process faster but less precise. 30 | extreme_fast_mode: true 31 | extreme_fast_token_amount: 20 # Amount of tokens to buy 32 | 33 | # Priority fee configuration 34 | # Manage transaction speed and cost on the Solana network. 
35 | # Note: dynamic mode requires an additional RPC call, which slows down the buying process. 36 | priority_fees: 37 | enable_dynamic: false # Use latest transactions to estimate required fee (getRecentPrioritizationFees) 38 | enable_fixed: true # Use fixed amount below 39 | fixed_amount: 200_000 # Base fee in microlamports 40 | extra_percentage: 0.0 # Percentage increase on riority fee regardless of the calculation method (0.1 = 10%) 41 | hard_cap: 200_000 # Maximum allowable fee in microlamports to prevent excessive spending 42 | 43 | # Filters for token selection 44 | filters: 45 | match_string: null # Only process tokens with this string in name/symbol 46 | bro_address: null # Only trade tokens created by this user address 47 | listener_type: "blocks" # Method for detecting new tokens: "logs", "blocks", or "geyser" 48 | max_token_age: 0.001 # Maximum token age in seconds for processing 49 | marry_mode: false # Only buy tokens, skip selling 50 | yolo_mode: false # Continuously trade tokens 51 | 52 | # Retry and timeout settings 53 | retries: 54 | max_attempts: 1 # Number of attempts for transaction submission 55 | wait_after_creation: 15 # Seconds to wait after token creation (only if EXTREME FAST is disabled) 56 | wait_after_buy: 15 # Holding period after buy transaction 57 | wait_before_new_token: 15 # Pause between token trades 58 | 59 | # Token and account management 60 | cleanup: 61 | # Cleanup mode determines when to manage token accounts. Options: 62 | # "disabled": no cleanup will occur. 63 | # "on_fail": only clean up if a buy transaction fails. 64 | # "after_sell": clean up after selling. 65 | # "post_session": clean up all empty accounts after a trading session ends. 
66 | mode: "post_session" 67 | force_close_with_burn: false # Force burning remaining tokens before closing account 68 | with_priority_fee: false # Use priority fees for cleanup transactions 69 | 70 | # Node provider configuration (not implemented) 71 | node: 72 | max_rps: 25 # Maximum requests per second 73 | -------------------------------------------------------------------------------- /learning-examples/blockSubscribe_extract_transactions.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import hashlib 3 | import json 4 | import os 5 | 6 | import websockets 7 | from solders.pubkey import Pubkey 8 | 9 | PUMP_PROGRAM = Pubkey.from_string("6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P") 10 | WSS_ENDPOINT = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 11 | 12 | 13 | async def save_transaction(tx_data, tx_signature): 14 | os.makedirs("blockSubscribe-transactions", exist_ok=True) 15 | hashed_signature = hashlib.sha256(tx_signature.encode()).hexdigest() 16 | file_path = os.path.join("blockSubscribe-transactions", f"{hashed_signature}.json") 17 | with open(file_path, "w") as f: 18 | json.dump(tx_data, f, indent=2) 19 | print(f"Saved transaction: {hashed_signature[:8]}...") 20 | 21 | 22 | async def listen_for_transactions(): 23 | async with websockets.connect(WSS_ENDPOINT) as websocket: 24 | subscription_message = json.dumps( 25 | { 26 | "jsonrpc": "2.0", 27 | "id": 1, 28 | "method": "blockSubscribe", 29 | "params": [ 30 | {"mentionsAccountOrProgram": str(PUMP_PROGRAM)}, 31 | { 32 | "commitment": "confirmed", 33 | "encoding": "base64", 34 | "showRewards": False, 35 | "transactionDetails": "full", 36 | "maxSupportedTransactionVersion": 0, 37 | }, 38 | ], 39 | }, 40 | ) 41 | await websocket.send(subscription_message) 42 | print(f"Subscribed to blocks mentioning program: {PUMP_PROGRAM}") 43 | 44 | while True: 45 | try: 46 | response = await websocket.recv() 47 | data = json.loads(response) 48 | 49 | if "method" in data 
and data["method"] == "blockNotification": 50 | if "params" in data and "result" in data["params"]: 51 | block_data = data["params"]["result"] 52 | if "value" in block_data and "block" in block_data["value"]: 53 | block = block_data["value"]["block"] 54 | if "transactions" in block: 55 | transactions = block["transactions"] 56 | for tx in transactions: 57 | if isinstance(tx, dict) and "transaction" in tx: 58 | if ( 59 | isinstance(tx["transaction"], list) 60 | and len(tx["transaction"]) > 0 61 | ): 62 | tx_signature = tx["transaction"][0] 63 | elif ( 64 | isinstance(tx["transaction"], dict) 65 | and "signatures" in tx["transaction"] 66 | ): 67 | tx_signature = tx["transaction"][ 68 | "signatures" 69 | ][0] 70 | else: 71 | continue 72 | await save_transaction(tx, tx_signature) 73 | elif "result" in data: 74 | print("Subscription confirmed") 75 | except Exception as e: 76 | print(f"An error occurred: {e!s}") 77 | 78 | 79 | if __name__ == "__main__": 80 | asyncio.run(listen_for_transactions()) 81 | -------------------------------------------------------------------------------- /learning-examples/bonding-curve-progress/get_bonding_curve_status.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module for checking the status of a token's bonding curve on the Solana network using 3 | the Pump.fun program. It allows querying the bonding curve state and completion status. 4 | 5 | Note: creator fee upgrade introduced updates in bonding curve structure. 
6 | https://github.com/pump-fun/pump-public-docs/blob/main/docs/PUMP_CREATOR_FEE_README.md 7 | """ 8 | 9 | import asyncio 10 | import os 11 | import struct 12 | from typing import Final 13 | 14 | from construct import Bytes, Flag, Int64ul, Struct 15 | from dotenv import load_dotenv 16 | from solana.rpc.async_api import AsyncClient 17 | from solders.pubkey import Pubkey 18 | 19 | load_dotenv() 20 | 21 | RPC_ENDPOINT = os.environ.get("SOLANA_NODE_RPC_ENDPOINT") 22 | 23 | # Change to token you want to query 24 | TOKEN_MINT = "xWrzYY4c1LnbSkLrd2LDUg9vw7YtVyJhGmw7MABpump" 25 | 26 | # Constants 27 | PUMP_PROGRAM_ID: Final[Pubkey] = Pubkey.from_string("6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P") 28 | EXPECTED_DISCRIMINATOR: Final[bytes] = struct.pack(" None: 65 | """Parse bonding curve data.""" 66 | if data[:8] != EXPECTED_DISCRIMINATOR: 67 | raise ValueError("Invalid curve state discriminator") 68 | 69 | if len(data) < 150: 70 | parsed = self._STRUCT_1.parse(data[8:]) 71 | self.__dict__.update(parsed) 72 | 73 | else: 74 | parsed = self._STRUCT_1.parse(data[8:]) 75 | self.__dict__.update(parsed) 76 | # Convert raw bytes to Pubkey for creator field 77 | if hasattr(self, 'creator') and isinstance(self.creator, bytes): 78 | self.creator = Pubkey.from_bytes(self.creator) 79 | 80 | 81 | def get_associated_bonding_curve_address( 82 | mint: Pubkey, program_id: Pubkey 83 | ) -> tuple[Pubkey, int]: 84 | """ 85 | Derives the associated bonding curve address for a given mint. 86 | 87 | Args: 88 | mint: The token mint address 89 | program_id: The program ID for the bonding curve 90 | 91 | Returns: 92 | Tuple of (bonding curve address, bump seed) 93 | """ 94 | return Pubkey.find_program_address([b"bonding-curve", bytes(mint)], program_id) 95 | 96 | 97 | async def get_bonding_curve_state( 98 | conn: AsyncClient, curve_address: Pubkey 99 | ) -> BondingCurveState: 100 | """ 101 | Fetches and validates the state of a bonding curve account. 
102 | 103 | Args: 104 | conn: AsyncClient connection to Solana RPC 105 | curve_address: Address of the bonding curve account 106 | 107 | Returns: 108 | BondingCurveState object containing parsed account data 109 | 110 | Raises: 111 | ValueError: If account data is invalid or missing 112 | """ 113 | response = await conn.get_account_info(curve_address, encoding="base64") 114 | if not response.value or not response.value.data: 115 | raise ValueError("Invalid curve state: No data") 116 | 117 | data = response.value.data 118 | if data[:8] != EXPECTED_DISCRIMINATOR: 119 | raise ValueError("Invalid curve state discriminator") 120 | 121 | return BondingCurveState(data) 122 | 123 | 124 | async def check_token_status(mint_address: str) -> None: 125 | """ 126 | Checks and prints the status of a token and its bonding curve. 127 | 128 | Args: 129 | mint_address: The token mint address as a string 130 | """ 131 | try: 132 | mint = Pubkey.from_string(mint_address) 133 | 134 | # Get the associated bonding curve address 135 | bonding_curve_address, bump = get_associated_bonding_curve_address( 136 | mint, PUMP_PROGRAM_ID 137 | ) 138 | 139 | print("\nToken status:") 140 | print("-" * 50) 141 | print(f"Token mint: {mint}") 142 | print(f"Associated bonding curve: {bonding_curve_address}") 143 | print(f"Bump seed: {bump}") 144 | print("-" * 50) 145 | 146 | # Check completion status 147 | async with AsyncClient(RPC_ENDPOINT) as client: 148 | try: 149 | curve_state = await get_bonding_curve_state( 150 | client, bonding_curve_address 151 | ) 152 | 153 | print("\nBonding curve status:") 154 | print("-" * 50) 155 | print( 156 | f"Completion status: {'Completed' if curve_state.complete else 'Not completed'}" 157 | ) 158 | if curve_state.complete: 159 | print( 160 | "\nNote: This bonding curve has completed and liquidity has been migrated to PumpSwap." 
161 | ) 162 | print("-" * 50) 163 | 164 | except ValueError as e: 165 | print(f"\nError accessing bonding curve: {e}") 166 | 167 | except ValueError as e: 168 | print(f"\nError: Invalid address format - {e}") 169 | except Exception as e: 170 | print(f"\nUnexpected error: {e}") 171 | 172 | 173 | def main() -> None: 174 | """Main entry point for the token status checker.""" 175 | #parser = argparse.ArgumentParser(description="Check token bonding curve status") 176 | #parser.add_argument("mint_address", help="The token mint address" 177 | #args = parser.parse_args() 178 | 179 | asyncio.run(check_token_status(TOKEN_MINT)) 180 | 181 | 182 | if __name__ == "__main__": 183 | main() 184 | -------------------------------------------------------------------------------- /learning-examples/bonding-curve-progress/get_graduating_tokens.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module for querying and analyzing soon-to-gradute tokens in the Pump.fun program. 3 | It includes functionality to fetch bonding curves based on token reserves and 4 | find associated SPL token accounts. 5 | 6 | Note: getProgramAccounts may be slow as it is a pretty heavy method for RPC. 
7 | """ 8 | 9 | import asyncio 10 | import os 11 | import struct 12 | from typing import Final 13 | 14 | from dotenv import load_dotenv 15 | from solana.rpc.async_api import AsyncClient 16 | from solana.rpc.types import MemcmpOpts, TokenAccountOpts 17 | from solders.pubkey import Pubkey 18 | 19 | load_dotenv() 20 | 21 | # Constants 22 | RPC_ENDPOINT: Final[str] = os.environ.get("SOLANA_NODE_RPC_ENDPOINT") 23 | PUMP_PROGRAM_ID: Final[Pubkey] = Pubkey.from_string("6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P") 24 | TOKEN_PROGRAM_ID: Final[Pubkey] = Pubkey.from_string("TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA") 25 | 26 | # The 8-byte discriminator for bonding curve accounts in Pump.fun 27 | BONDING_CURVE_DISCRIMINATOR_BYTES: Final[bytes] = bytes.fromhex("17b7f83760d8ac60") 28 | 29 | 30 | async def get_bonding_curves_by_reserves(client: AsyncClient | None = None) -> list: 31 | """ 32 | Fetch bonding curve accounts with real token reserves below a threshold. 33 | 34 | Args: 35 | client: Optional AsyncClient instance. If None, a new one will be created. 
36 | 37 | Returns: 38 | List of bonding curve accounts matching the criteria 39 | """ 40 | # Define the reserve threshold (100 trillion in token base units) 41 | threshold: int = 100_000_000_000_000 42 | threshold_bytes: bytes = threshold.to_bytes(8, "little") 43 | msb_prefix: bytes = threshold_bytes[6:] # Most significant bytes for pre-filtering 44 | 45 | should_close_client: bool = client is None 46 | try: 47 | if should_close_client: 48 | client = AsyncClient(RPC_ENDPOINT, commitment="processed", timeout=180) 49 | await client.is_connected() 50 | 51 | # Define on-chain filters for getProgramAccounts 52 | filters = [ 53 | MemcmpOpts(offset=0, bytes=BONDING_CURVE_DISCRIMINATOR_BYTES), # Match bonding curve accounts 54 | MemcmpOpts(offset=30, bytes=msb_prefix), # Pre-filter by real token reserves MSB 55 | MemcmpOpts(offset=48, bytes=b"\x00"), # Ensure complete flag is False 56 | ] 57 | 58 | # Query accounts matching filters 59 | response = await client.get_program_accounts( 60 | PUMP_PROGRAM_ID, 61 | encoding="base64", 62 | filters=filters 63 | ) 64 | 65 | result = [] 66 | for acc in response.value: 67 | raw = acc.account.data 68 | 69 | # Extract real_token_reserves (u64 = 8 bytes, little-endian) 70 | offset: int = 24 # real_token_reserves field offset 71 | real_token_reserves: int = struct.unpack(" 0: 111 | return response.value[0].account 112 | else: 113 | print(f"No token accounts found for {bonding_curve_address}") 114 | return None 115 | except Exception as e: 116 | print(f"Error finding associated token account: {e}") 117 | return None 118 | finally: 119 | if should_close_client and client: 120 | await client.close() 121 | 122 | 123 | def get_mint_address(data: bytes) -> str: 124 | """ 125 | Extract the mint address from SPL token account data. 
def get_mint_address(data: bytes) -> str:
    """Read the mint pubkey out of raw SPL token-account data.

    The mint occupies the first 32 bytes of the SPL token account layout.

    Args:
        data: The token account data as bytes.

    Returns:
        The mint address as a base58-encoded string.
    """
    mint_bytes = data[:32]
    return str(Pubkey(mint_bytes))
4 | """ 5 | 6 | import asyncio 7 | import os 8 | import struct 9 | from typing import Final 10 | 11 | from dotenv import load_dotenv 12 | from solana.rpc.async_api import AsyncClient 13 | from solders.pubkey import Pubkey 14 | 15 | load_dotenv() 16 | 17 | # Constants 18 | RPC_URL: Final[str] = os.getenv("SOLANA_NODE_RPC_ENDPOINT") 19 | TOKEN_MINT: Final[str] = "xWrzYY4c1LnbSkLrd2LDUg9vw7YtVyJhGmw7MABpump" 20 | PUMP_PROGRAM_ID: Final[Pubkey] = Pubkey.from_string("6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P") 21 | LAMPORTS_PER_SOL: Final[int] = 1_000_000_000 22 | TOKEN_DECIMALS: Final[int] = 6 23 | EXPECTED_DISCRIMINATOR: Final[bytes] = struct.pack(" Pubkey: 28 | """ 29 | Derive the bonding curve PDA address from a mint address. 30 | 31 | Args: 32 | mint: The token mint address 33 | program_id: The program ID for the bonding curve 34 | 35 | Returns: 36 | The bonding curve address 37 | """ 38 | return Pubkey.find_program_address([b"bonding-curve", bytes(mint)], program_id)[0] 39 | 40 | 41 | async def get_account_data(client: AsyncClient, pubkey: Pubkey) -> bytes: 42 | """ 43 | Fetch raw account data for a given public key. 44 | 45 | Args: 46 | client: AsyncClient connection to Solana RPC 47 | pubkey: The public key of the account to fetch 48 | 49 | Returns: 50 | The raw account data as bytes 51 | 52 | Raises: 53 | ValueError: If the account is not found or has no data 54 | """ 55 | resp = await client.get_account_info(pubkey, encoding="base64") 56 | if not resp.value or not resp.value.data: 57 | raise ValueError(f"Account {pubkey} not found or has no data") 58 | 59 | return resp.value.data 60 | 61 | 62 | def parse_curve_state(data: bytes) -> dict: 63 | """ 64 | Decode bonding curve account data into a readable format. 
def print_curve_status(state: dict) -> None:
    """Pretty-print a parsed bonding curve state to stdout.

    Progress is 100% once the curve is complete; otherwise it is derived
    from how many of the initially tradable tokens remain in reserves.

    Args:
        state: Parsed bonding curve fields (complete flag plus reserves).
    """
    if state["complete"]:
        progress = 100.0
    else:
        # Pump.fun supply constants, in whole tokens (not base units).
        total_supply = 1_000_000_000       # 1B tokens
        reserved_tokens = 206_900_000      # reserved for migration
        tradable_supply = total_supply - reserved_tokens  # 793.1M tokens

        progress = 0
        if tradable_supply > 0:
            remaining = state["real_token_reserves"]
            progress = 100 - (remaining * 100) / tradable_supply

    separator = "=" * 30
    status_icon = "✅" if state["complete"] else "❌"
    print(separator)
    print(f"Complete: {status_icon}")
    print(f"Progress: {progress:.2f}%")
    print(f"Token reserves: {state['real_token_reserves']:.4f}")
    print(f"SOL reserves: {state['real_sol_reserves']:.4f}")
    print(separator, "\n")
async def track_curve() -> None:
    """
    Continuously track and display the state of a bonding curve.

    Reads RPC_URL and TOKEN_MINT from module constants (loaded from .env),
    derives the bonding curve PDA for the mint, then polls the account
    every POLL_INTERVAL seconds forever, printing each parsed state.
    Individual polling errors are printed and do not stop the loop.
    """
    if not RPC_URL or not TOKEN_MINT:
        print("❌ Set SOLANA_NODE_RPC_ENDPOINT and TOKEN_MINT in .env")
        return

    mint_pubkey: Pubkey = Pubkey.from_string(TOKEN_MINT)
    curve_pubkey: Pubkey = get_associated_bonding_curve_address(mint_pubkey, PUMP_PROGRAM_ID)

    print("Tracking bonding curve for:", mint_pubkey)
    print("Curve address:", curve_pubkey, "\n")

    async with AsyncClient(RPC_URL) as client:
        # Poll indefinitely; the caller stops this with Ctrl+C / task cancel.
        while True:
            try:
                data = await get_account_data(client, curve_pubkey)
                state = parse_curve_state(data)
                print_curve_status(state)
            except Exception as e:
                # Best-effort loop: report the failure and keep polling.
                print(f"⚠️ Error: {e}")

            await asyncio.sleep(POLL_INTERVAL)
async def main() -> None:
    """Close (and clean up) the wallet's ATA for the configured mint.

    Builds the RPC client and wallet, derives the associated token account
    for MINT_ADDRESS, and delegates to close_account_if_exists. The RPC
    client is always closed, even on failure.
    """
    client = None
    try:
        client = SolanaClient(RPC_ENDPOINT)
        wallet = Wallet(PRIVATE_KEY)

        # Get user's ATA for the token
        ata = wallet.get_associated_token_address(MINT_ADDRESS)
        await close_account_if_exists(client, wallet, ata, MINT_ADDRESS)

    except Exception as e:
        logger.error(f"Unexpected error: {e}")
    finally:
        # Guard: client stays None if SolanaClient() itself raised, in which
        # case there is nothing to close (previously this raised NameError).
        if client is not None:
            await client.close()
def find_associated_bonding_curve(mint: Pubkey, bonding_curve: Pubkey) -> Pubkey:
    """
    Derive the bonding curve's associated token account (ATA).

    Uses the standard ATA derivation: a PDA of
    [owner, token program, mint] under the associated token program.
    """
    seeds = [
        bytes(bonding_curve),
        bytes(SYSTEM_TOKEN_PROGRAM),
        bytes(mint),
    ]
    address, _bump = Pubkey.find_program_address(
        seeds, SYSTEM_ASSOCIATED_TOKEN_ACCOUNT_PROGRAM
    )
    return address
def calculate_bonding_curve_price(curve_state: BondingCurveState) -> float:
    """Compute the spot token price in SOL from virtual reserves.

    Price = (virtual SOL reserves, in SOL) / (virtual token reserves, in
    whole tokens), using the module-level LAMPORTS_PER_SOL and
    TOKEN_DECIMALS conversion constants.

    Args:
        curve_state: Decoded bonding curve account state.

    Returns:
        Token price denominated in SOL.

    Raises:
        ValueError: If either virtual reserve is non-positive.
    """
    token_reserves = curve_state.virtual_token_reserves
    sol_reserves = curve_state.virtual_sol_reserves
    if token_reserves <= 0 or sol_reserves <= 0:
        raise ValueError("Invalid reserve state")

    sol_amount = sol_reserves / LAMPORTS_PER_SOL
    token_amount = token_reserves / 10**TOKEN_DECIMALS
    return sol_amount / token_amount
{bonding_curve_state.virtual_sol_reserves}") 84 | print(f" Real Token Reserves: {bonding_curve_state.real_token_reserves}") 85 | print(f" Real SOL Reserves: {bonding_curve_state.real_sol_reserves}") 86 | print(f" Token Total Supply: {bonding_curve_state.token_total_supply}") 87 | print(f" Complete: {bonding_curve_state.complete}") 88 | print(f"\nToken Price: {token_price_sol:.10f} SOL") 89 | -------------------------------------------------------------------------------- /learning-examples/decode_from_getTransaction.py: -------------------------------------------------------------------------------- 1 | import json 2 | import struct 3 | import sys 4 | 5 | import base58 6 | 7 | tx_file_path = "" 8 | 9 | if len(sys.argv) != 2: 10 | tx_file_path = "learning-examples/raw_buy_tx_from_getTransaction.json" 11 | print(f"No path provided, using the path: {tx_file_path}") 12 | else: 13 | tx_file_path = sys.argv[1] 14 | 15 | # Load the IDL 16 | with open("idl/pump_fun_idl.json") as f: 17 | idl = json.load(f) 18 | 19 | # Load the transaction log 20 | with open(tx_file_path) as f: 21 | tx_log = json.load(f) 22 | 23 | # Extract the transaction data 24 | tx_data = tx_log["result"]["transaction"] 25 | 26 | print(json.dumps(tx_data, indent=2)) 27 | 28 | 29 | def decode_create_instruction(data): 30 | # The Create instruction has 3 string arguments: name, symbol, uri 31 | offset = 8 # Skip the 8-byte discriminator 32 | results = [] 33 | for _ in range(3): 34 | length = struct.unpack_from(" None: 34 | parsed = self._STRUCT.parse(data[8:]) 35 | self.__dict__.update(parsed) 36 | 37 | 38 | async def get_bonding_curve_state( 39 | conn: AsyncClient, curve_address: Pubkey 40 | ) -> BondingCurveState: 41 | response = await conn.get_account_info(curve_address, encoding="base64") 42 | if not response.value or not response.value.data: 43 | raise ValueError("Invalid curve state: No data") 44 | 45 | data = response.value.data 46 | if data[:8] != EXPECTED_DISCRIMINATOR: 47 | raise 
def process_initialize2_transaction(data):
    """Extract and print key addresses from an initialize2 transaction.

    Looks up accounts by their conventional positions in the Raydium
    initialize2 account list (see raydium_amm_idl.json): token address at
    index 18, liquidity pool address at index 2.

    Args:
        data: Decoded transaction JSON from the block notification.
    """
    try:
        tx = data["transaction"]
        signature = tx["signatures"][0]
        account_keys = tx["message"]["accountKeys"]

        if len(account_keys) <= 18:
            print(f"\nError: Not enough account keys (found {len(account_keys)})")
            return

        print(f"\nSignature: {signature}")
        print(f"Token Address: {account_keys[18]}")
        print(f"Liquidity Address: {account_keys[2]}")
        print("=" * 50)

    except Exception as e:
        print(f"\nError: {e!s}")
initialize2 instruction!" 97 | ) 98 | process_initialize2_transaction(tx) 99 | break 100 | 101 | except TimeoutError: 102 | print("\nChecking connection...") 103 | print("Connection alive") 104 | continue 105 | 106 | except Exception as e: 107 | print(f"\nConnection error: {e!s}") 108 | print("Retrying in 5 seconds...") 109 | await asyncio.sleep(5) 110 | 111 | 112 | if __name__ == "__main__": 113 | asyncio.run(listen_for_events()) 114 | -------------------------------------------------------------------------------- /learning-examples/listen-migrations/listen_logsubscribe.py: -------------------------------------------------------------------------------- 1 | """ 2 | Listens for 'Migrate' instructions from a Solana migration program via WebSocket. 3 | Parses and logs transaction details (e.g., mint, liquidity, token accounts) for successful migrations. 4 | 5 | Note: skips transactions with truncated logs (no Program data in the logs -> no parsed data). 6 | To cover those cases, please use an additional RPC call (get transaction data) or additional listener not based on logs. 
7 | """ 8 | 9 | import asyncio 10 | import base64 11 | import json 12 | import os 13 | import struct 14 | 15 | import base58 16 | import websockets 17 | from dotenv import load_dotenv 18 | from solders.pubkey import Pubkey 19 | 20 | load_dotenv() 21 | 22 | WSS_ENDPOINT = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 23 | MIGRATION_PROGRAM_ID = Pubkey.from_string("39azUYFWPz3VHgKCf3VChUwbpURdCHRxjWVowf5jUJjg") 24 | 25 | 26 | def parse_migrate_instruction(data): 27 | if len(data) < 8: 28 | print(f"[ERROR] Data length too short: {len(data)} bytes") 29 | return None 30 | 31 | offset = 8 32 | parsed_data = {} 33 | 34 | fields = [ 35 | ("timestamp", "i64"), 36 | ("index", "u16"), 37 | ("creator", "publicKey"), 38 | ("baseMint", "publicKey"), 39 | ("quoteMint", "publicKey"), 40 | ("baseMintDecimals", "u8"), 41 | ("quoteMintDecimals", "u8"), 42 | ("baseAmountIn", "u64"), 43 | ("quoteAmountIn", "u64"), 44 | ("poolBaseAmount", "u64"), 45 | ("poolQuoteAmount", "u64"), 46 | ("minimumLiquidity", "u64"), 47 | ("initialLiquidity", "u64"), 48 | ("lpTokenAmountOut", "u64"), 49 | ("poolBump", "u8"), 50 | ("pool", "publicKey"), 51 | ("lpMint", "publicKey"), 52 | ("userBaseTokenAccount", "publicKey"), 53 | ("userQuoteTokenAccount", "publicKey"), 54 | ] 55 | 56 | try: 57 | for field_name, field_type in fields: 58 | if field_type == "publicKey": 59 | value = data[offset:offset + 32] 60 | parsed_data[field_name] = base58.b58encode(value).decode("utf-8") 61 | offset += 32 62 | elif field_type in {"u64", "i64"}: 63 | value = struct.unpack("={GRPC_GENERATED_VERSION}.' 22 | + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' 23 | + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' 
def load_idl(file_path: str) -> dict:
    """Load an Anchor IDL definition from a JSON file.

    Args:
        file_path: Path to the IDL JSON file.

    Returns:
        The parsed IDL as a dictionary.
    """
    # JSON is UTF-8 by spec (RFC 8259); don't rely on the locale encoding.
    with open(file_path, encoding="utf-8") as f:
        return json.load(f)
31 | """ 32 | derived_address, _ = Pubkey.find_program_address( 33 | [ 34 | bytes(bonding_curve), 35 | bytes(TOKEN_PROGRAM_ID), 36 | bytes(mint), 37 | ], 38 | ASSOCIATED_TOKEN_PROGRAM_ID, 39 | ) 40 | return derived_address 41 | 42 | 43 | def parse_create_instruction(data): 44 | if len(data) < 8: 45 | return None 46 | offset = 8 47 | parsed_data = {} 48 | 49 | # Parse fields based on CreateEvent structure 50 | fields = [ 51 | ("name", "string"), 52 | ("symbol", "string"), 53 | ("uri", "string"), 54 | ("mint", "publicKey"), 55 | ("bondingCurve", "publicKey"), 56 | ("user", "publicKey"), 57 | ("creator", "publicKey"), 58 | ] 59 | 60 | try: 61 | for field_name, field_type in fields: 62 | if field_type == "string": 63 | length = struct.unpack("=2.1.1", 10 | "borsh-construct>=0.1.0", 11 | "construct>=2.10.67", 12 | "construct-typing>=0.5.2", 13 | "solana==0.36.6", 14 | "solders>=0.26.0", 15 | "websockets>=15.0", 16 | "python-dotenv>=1.0.1", 17 | "aiohttp>=3.11.13", 18 | "grpcio>=1.71.0", 19 | "grpcio-tools>=1.71.0", 20 | "protobuf>=5.29.4", 21 | "pyyaml>=6.0.2", 22 | "uvloop>=0.21.0", 23 | ] 24 | 25 | [project.optional-dependencies] 26 | dev = [ 27 | "ruff>=0.10.0" 28 | ] 29 | 30 | [project.scripts] 31 | pump_bot = "bot_runner:main" 32 | 33 | [tool.ruff] 34 | exclude = [ 35 | ".bzr", 36 | ".direnv", 37 | ".eggs", 38 | ".git", 39 | ".git-rewrite", 40 | ".hg", 41 | ".ipynb_checkpoints", 42 | ".mypy_cache", 43 | ".nox", 44 | ".pants.d", 45 | ".pyenv", 46 | ".pytest_cache", 47 | ".pytype", 48 | ".ruff_cache", 49 | ".svn", 50 | ".tox", 51 | ".venv", 52 | ".vscode", 53 | "__pypackages__", 54 | "_build", 55 | "buck-out", 56 | "build", 57 | "dist", 58 | "node_modules", 59 | "site-packages", 60 | "venv", 61 | ] 62 | 63 | line-length = 88 64 | indent-width = 4 65 | target-version = "py311" 66 | 67 | [tool.ruff.lint] 68 | select = [ 69 | "E", "F", "I", "UP", "N", "B", "A", "C4", "T10", "ARG", "PTH", 70 | "ANN", # type annotations 71 | "S", # security best practices 72 | "BLE", # 
def setup_logging(bot_name: str):
    """
    Route this bot instance's logs to a timestamped file under logs/.

    Creates the logs directory if needed and names the file
    "<bot_name>_<YYYYmmdd_HHMMSS>.log".

    Args:
        bot_name: Name of the bot for the log file
    """
    logs_directory = Path("logs")
    logs_directory.mkdir(exist_ok=True)

    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    log_path = logs_directory / f"{bot_name}_{stamp}.log"
    setup_file_logging(str(log_path))
34 | 35 | Args: 36 | config_path: Path to the YAML configuration file 37 | """ 38 | cfg = load_bot_config(config_path) 39 | setup_logging(cfg["name"]) 40 | print_config_summary(cfg) 41 | 42 | trader = PumpTrader( 43 | # Connection settings 44 | rpc_endpoint=cfg["rpc_endpoint"], 45 | wss_endpoint=cfg["wss_endpoint"], 46 | private_key=cfg["private_key"], 47 | 48 | # Trade parameters 49 | buy_amount=cfg["trade"]["buy_amount"], 50 | buy_slippage=cfg["trade"]["buy_slippage"], 51 | sell_slippage=cfg["trade"]["sell_slippage"], 52 | 53 | # Extreme fast mode settings 54 | extreme_fast_mode=cfg["trade"].get("extreme_fast_mode", False), 55 | extreme_fast_token_amount=cfg["trade"].get("extreme_fast_token_amount", 30), 56 | 57 | # Listener configuration 58 | listener_type=cfg["filters"]["listener_type"], 59 | 60 | # Geyser configuration (if applicable) 61 | geyser_endpoint=cfg.get("geyser", {}).get("endpoint"), 62 | geyser_api_token=cfg.get("geyser", {}).get("api_token"), 63 | geyser_auth_type=cfg.get("geyser", {}).get("auth_type"), 64 | 65 | # Priority fee configuration 66 | enable_dynamic_priority_fee=cfg.get("priority_fees", {}).get("enable_dynamic", False), 67 | enable_fixed_priority_fee=cfg.get("priority_fees", {}).get("enable_fixed", True), 68 | fixed_priority_fee=cfg.get("priority_fees", {}).get("fixed_amount", 500000), 69 | extra_priority_fee=cfg.get("priority_fees", {}).get("extra_percentage", 0.0), 70 | hard_cap_prior_fee=cfg.get("priority_fees", {}).get("hard_cap", 500000), 71 | 72 | # Retry and timeout settings 73 | max_retries=cfg.get("retries", {}).get("max_attempts", 10), 74 | wait_time_after_creation=cfg.get("retries", {}).get("wait_after_creation", 15), 75 | wait_time_after_buy=cfg.get("retries", {}).get("wait_after_buy", 15), 76 | wait_time_before_new_token=cfg.get("retries", {}).get("wait_before_new_token", 15), 77 | max_token_age=cfg.get("timing", {}).get("max_token_age", 0.001), 78 | token_wait_timeout=cfg.get("timing", {}).get("token_wait_timeout", 30), 79 
def run_all_bots():
    """
    Run all bots defined in YAML files in the 'bots' directory.

    Only runs bots that have enabled=True (or where enabled is not
    specified). A bot with separate_process=True is launched in its own
    child process; all other bots run sequentially in the main process.
    Waits for every child process to finish before returning.
    """
    bot_dir = Path("bots")
    if not bot_dir.exists():
        logging.error(f"Bot directory '{bot_dir}' not found")
        return

    bot_files = list(bot_dir.glob("*.yaml"))
    if not bot_files:
        logging.error(f"No bot configuration files found in '{bot_dir}'")
        return

    logging.info(f"Found {len(bot_files)} bot configuration files")

    processes = []
    skipped_bots = 0
    started_bots = 0  # count only bots actually launched, not failures

    for file in bot_files:
        try:
            cfg = load_bot_config(str(file))
            bot_name = cfg.get("name", file.stem)

            # Skip bots with enabled=False
            if not cfg.get("enabled", True):
                logging.info(f"Skipping disabled bot '{bot_name}'")
                skipped_bots += 1
                continue

            if cfg.get("separate_process", False):
                logging.info(f"Starting bot '{bot_name}' in separate process")
                # Bind the path now; a bare closure would capture the loop var.
                p = multiprocessing.Process(
                    target=lambda path=str(file): asyncio.run(start_bot(path)),
                    name=f"bot-{bot_name}"
                )
                p.start()
                processes.append(p)
            else:
                logging.info(f"Starting bot '{bot_name}' in main process")
                # Blocks until this bot's event loop exits.
                asyncio.run(start_bot(str(file)))
            started_bots += 1
        except Exception as e:
            logging.exception(f"Failed to start bot from {file}: {e}")

    # Fix: previously reported len(bot_files) - skipped_bots, which counted
    # bots that failed to start as "started".
    logging.info(f"Started {started_bots} bots, skipped {skipped_bots} disabled bots")

    for p in processes:
        p.join()
        logging.info(f"Process {p.name} completed")
self.use_priority_fee = use_priority_fee 34 | self.close_with_force_burn = force_burn 35 | 36 | async def cleanup_ata(self, mint: Pubkey) -> None: 37 | """ 38 | Attempt to burn any remaining tokens and close the ATA. 39 | Skips if account doesn't exist or is already empty/closed. 40 | """ 41 | ata = self.wallet.get_associated_token_address(mint) 42 | solana_client = await self.client.get_client() 43 | 44 | priority_fee = ( 45 | await self.priority_fee_manager.calculate_priority_fee([ata]) 46 | if self.use_priority_fee 47 | else None 48 | ) 49 | 50 | logger.info("Waiting for 15 seconds for RPC node to synchronize...") 51 | await asyncio.sleep(15) 52 | 53 | try: 54 | info = await solana_client.get_account_info(ata, encoding="base64") 55 | if not info.value: 56 | logger.info(f"ATA {ata} does not exist or already closed.") 57 | return 58 | 59 | balance = await self.client.get_token_account_balance(ata) 60 | instructions = [] 61 | 62 | if balance > 0 and self.close_with_force_burn: 63 | logger.info(f"Burning {balance} tokens from ATA {ata} (mint: {mint})...") 64 | burn_ix = burn( 65 | BurnParams( 66 | account=ata, 67 | mint=mint, 68 | owner=self.wallet.pubkey, 69 | amount=balance, 70 | program_id=SystemAddresses.TOKEN_PROGRAM, 71 | ) 72 | ) 73 | instructions.append(burn_ix) 74 | 75 | elif balance > 0: 76 | logger.info( 77 | f"Skipping ATA {ata} with non-zero balance ({balance} tokens) " 78 | f"because CLEANUP_FORCE_CLOSE_WITH_BURN is disabled." 
79 | ) 80 | return 81 | 82 | # Include close account instruction 83 | logger.info(f"Closing ATA: {ata}") 84 | close_ix = close_account( 85 | CloseAccountParams( 86 | account=ata, 87 | dest=self.wallet.pubkey, 88 | owner=self.wallet.pubkey, 89 | program_id=SystemAddresses.TOKEN_PROGRAM, 90 | ) 91 | ) 92 | instructions.append(close_ix) 93 | 94 | # Send both burn and close instructions in the same transaction 95 | if instructions: 96 | tx_sig = await self.client.build_and_send_transaction( 97 | instructions, 98 | self.wallet.keypair, 99 | skip_preflight=True, 100 | priority_fee=priority_fee, 101 | ) 102 | await self.client.confirm_transaction(tx_sig) 103 | logger.info(f"Closed successfully: {ata}") 104 | 105 | except Exception as e: 106 | logger.warning(f"Cleanup failed for ATA {ata}: {e!s}") 107 | -------------------------------------------------------------------------------- /src/cleanup/modes.py: -------------------------------------------------------------------------------- 1 | from cleanup.manager import AccountCleanupManager 2 | from utils.logger import get_logger 3 | 4 | logger = get_logger(__name__) 5 | 6 | 7 | def should_cleanup_after_failure(cleanup_mode) -> bool: 8 | return cleanup_mode == "on_fail" 9 | 10 | 11 | def should_cleanup_after_sell(cleanup_mode) -> bool: 12 | return cleanup_mode == "after_sell" 13 | 14 | 15 | def should_cleanup_post_session(cleanup_mode) -> bool: 16 | return cleanup_mode == "post_session" 17 | 18 | 19 | async def handle_cleanup_after_failure( 20 | client, wallet, mint, priority_fee_manager, cleanup_mode, cleanup_with_prior_fee, force_burn 21 | ): 22 | if should_cleanup_after_failure(cleanup_mode): 23 | logger.info("[Cleanup] Triggered by failed buy transaction.") 24 | manager = AccountCleanupManager(client, wallet, priority_fee_manager, cleanup_with_prior_fee, force_burn) 25 | await manager.cleanup_ata(mint) 26 | 27 | async def handle_cleanup_after_sell( 28 | client, wallet, mint, priority_fee_manager, cleanup_mode, 
cleanup_with_prior_fee, force_burn 29 | ): 30 | if should_cleanup_after_sell(cleanup_mode): 31 | logger.info("[Cleanup] Triggered after token sell.") 32 | manager = AccountCleanupManager(client, wallet, priority_fee_manager, cleanup_with_prior_fee, force_burn) 33 | await manager.cleanup_ata(mint) 34 | 35 | async def handle_cleanup_post_session( 36 | client, wallet, mints, priority_fee_manager, cleanup_mode, cleanup_with_prior_fee, force_burn 37 | ): 38 | if should_cleanup_post_session(cleanup_mode): 39 | logger.info("[Cleanup] Triggered post trading session.") 40 | manager = AccountCleanupManager(client, wallet, priority_fee_manager, cleanup_with_prior_fee, force_burn) 41 | for mint in mints: 42 | await manager.cleanup_ata(mint) 43 | -------------------------------------------------------------------------------- /src/config_loader.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Any 3 | 4 | import yaml 5 | from dotenv import load_dotenv 6 | 7 | REQUIRED_FIELDS = [ 8 | "name", "rpc_endpoint", "wss_endpoint", "private_key", 9 | "trade.buy_amount", "trade.buy_slippage", "trade.sell_slippage", 10 | "filters.listener_type", "filters.max_token_age" 11 | ] 12 | 13 | CONFIG_VALIDATION_RULES = [ 14 | # (path, type, min_value, max_value, error_message) 15 | ("trade.buy_amount", (int, float), 0, float('inf'), "trade.buy_amount must be a positive number"), 16 | ("trade.buy_slippage", float, 0, 1, "trade.buy_slippage must be between 0 and 1"), 17 | ("trade.sell_slippage", float, 0, 1, "trade.sell_slippage must be between 0 and 1"), 18 | ("priority_fees.fixed_amount", int, 0, float('inf'), "priority_fees.fixed_amount must be a non-negative integer"), 19 | ("priority_fees.extra_percentage", float, 0, 1, "priority_fees.extra_percentage must be between 0 and 1"), 20 | ("priority_fees.hard_cap", int, 0, float('inf'), "priority_fees.hard_cap must be a non-negative integer"), 21 | ("retries.max_attempts", int, 
0, 100, "retries.max_attempts must be between 0 and 100"), 22 | ("filters.max_token_age", (int, float), 0, float('inf'), "filters.max_token_age must be a non-negative number") 23 | ] 24 | 25 | # Valid values for enum-like fields 26 | VALID_VALUES = { 27 | "filters.listener_type": ["logs", "blocks", "geyser"], 28 | "cleanup.mode": ["disabled", "on_fail", "after_sell", "post_session"] 29 | } 30 | 31 | 32 | def load_bot_config(path: str) -> dict: 33 | """ 34 | Load and validate a bot configuration from a YAML file. 35 | 36 | Args: 37 | path: Path to the YAML configuration file (relative or absolute) 38 | 39 | Returns: 40 | Validated configuration dictionary 41 | 42 | Raises: 43 | FileNotFoundError: If the configuration file doesn't exist 44 | ValueError: If the configuration is invalid 45 | """ 46 | with open(path) as f: 47 | config = yaml.safe_load(f) 48 | 49 | env_file = config.get("env_file") 50 | if env_file: 51 | env_path = os.path.join(os.path.dirname(path), env_file) 52 | if os.path.exists(env_path): 53 | load_dotenv(env_path, override=True) 54 | else: 55 | # If not found relative to config, try relative to current working directory 56 | load_dotenv(env_file, override=True) 57 | 58 | resolve_env_vars(config) 59 | validate_config(config) 60 | 61 | return config 62 | 63 | def resolve_env_vars(config: dict) -> None: 64 | """ 65 | Recursively resolve environment variables in the configuration. 
66 | 67 | Args: 68 | config: Configuration dictionary to process 69 | """ 70 | def resolve_env(value): 71 | if isinstance(value, str) and value.startswith("${") and value.endswith("}"): 72 | env_var = value[2:-1] 73 | env_value = os.getenv(env_var) 74 | if env_value is None: 75 | raise ValueError(f"Environment variable '{env_var}' not found") 76 | return env_value 77 | return value 78 | 79 | def resolve_all(d): 80 | for k, v in d.items(): 81 | if isinstance(v, dict): 82 | resolve_all(v) 83 | else: 84 | d[k] = resolve_env(v) 85 | 86 | resolve_all(config) 87 | 88 | def get_nested_value(config: dict, path: str) -> Any: 89 | """ 90 | Get a nested value from the configuration using dot notation. 91 | 92 | Args: 93 | config: Configuration dictionary 94 | path: Path to the value using dot notation (e.g., "trade.buy_amount") 95 | 96 | Returns: 97 | The value at the specified path 98 | 99 | Raises: 100 | ValueError: If the path doesn't exist in the configuration 101 | """ 102 | keys = path.split(".") 103 | value = config 104 | for key in keys: 105 | if not isinstance(value, dict) or key not in value: 106 | raise ValueError(f"Missing required config key: {path}") 107 | value = value[key] 108 | return value 109 | 110 | def validate_config(config: dict) -> None: 111 | """ 112 | Validate the configuration against defined rules. 
113 | 114 | Args: 115 | config: Configuration dictionary to validate 116 | 117 | Raises: 118 | ValueError: If the configuration is invalid 119 | """ 120 | for field in REQUIRED_FIELDS: 121 | get_nested_value(config, field) 122 | 123 | for path, expected_type, min_val, max_val, error_msg in CONFIG_VALIDATION_RULES: 124 | try: 125 | value = get_nested_value(config, path) 126 | 127 | if not isinstance(value, expected_type): 128 | raise ValueError(f"Type error: {error_msg}") 129 | 130 | if isinstance(value, (int, float)) and not (min_val <= value <= max_val): 131 | raise ValueError(f"Range error: {error_msg}") 132 | 133 | except ValueError as e: 134 | # Re-raise if it's our own error 135 | if str(e).startswith(("Type error:", "Range error:")): 136 | raise 137 | # Otherwise, the field might be missing 138 | continue 139 | 140 | # Validate enum-like fields 141 | for path, valid_values in VALID_VALUES.items(): 142 | try: 143 | value = get_nested_value(config, path) 144 | if value not in valid_values: 145 | raise ValueError(f"{path} must be one of {valid_values}") 146 | except ValueError: 147 | # Skip if the field is missing 148 | continue 149 | 150 | # Cannot enable both dynamic and fixed priority fees 151 | try: 152 | dynamic = get_nested_value(config, "priority_fees.enable_dynamic") 153 | fixed = get_nested_value(config, "priority_fees.enable_fixed") 154 | if dynamic and fixed: 155 | raise ValueError("Cannot enable both dynamic and fixed priority fees simultaneously") 156 | except ValueError: 157 | # Skip if one of the fields is missing 158 | pass 159 | 160 | def print_config_summary(config: dict) -> None: 161 | """ 162 | Print a summary of the loaded configuration. 
163 | 164 | Args: 165 | config: Configuration dictionary 166 | """ 167 | print(f"Bot name: {config.get('name', 'unnamed')}") 168 | print(f"Listener type: {config.get('filters', {}).get('listener_type', 'not configured')}") 169 | 170 | trade = config.get('trade', {}) 171 | print("Trade settings:") 172 | print(f" - Buy amount: {trade.get('buy_amount', 'not configured')} SOL") 173 | print(f" - Buy slippage: {trade.get('buy_slippage', 'not configured') * 100}%") 174 | print(f" - Extreme fast mode: {'enabled' if trade.get('extreme_fast_mode') else 'disabled'}") 175 | 176 | fees = config.get('priority_fees', {}) 177 | print("Priority fees:") 178 | if fees.get('enable_dynamic'): 179 | print(" - Dynamic fees enabled") 180 | elif fees.get('enable_fixed'): 181 | print(f" - Fixed fee: {fees.get('fixed_amount', 'not configured')} microlamports") 182 | 183 | print("Configuration loaded successfully!") 184 | 185 | 186 | if __name__ == "__main__": 187 | config = load_bot_config("bots/bot-sniper.yaml") 188 | print_config_summary(config) -------------------------------------------------------------------------------- /src/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chainstacklabs/pump-fun-bot/2338c7a5dd78787a8d98ba78445ad017da204921/src/core/__init__.py -------------------------------------------------------------------------------- /src/core/curve.py: -------------------------------------------------------------------------------- 1 | """ 2 | Bonding curve operations for pump.fun tokens. 
3 | """ 4 | 5 | import struct 6 | from typing import Final 7 | 8 | from construct import Bytes, Flag, Int64ul, Struct 9 | from solders.pubkey import Pubkey 10 | 11 | from core.client import SolanaClient 12 | from core.pubkeys import LAMPORTS_PER_SOL, TOKEN_DECIMALS 13 | from utils.logger import get_logger 14 | 15 | logger = get_logger(__name__) 16 | 17 | # Discriminator for the bonding curve account 18 | EXPECTED_DISCRIMINATOR: Final[bytes] = struct.pack(" None: 35 | """Parse bonding curve data. 36 | 37 | Args: 38 | data: Raw account data 39 | 40 | Raises: 41 | ValueError: If data cannot be parsed 42 | """ 43 | if data[:8] != EXPECTED_DISCRIMINATOR: 44 | raise ValueError("Invalid curve state discriminator") 45 | 46 | parsed = self._STRUCT.parse(data[8:]) 47 | self.__dict__.update(parsed) 48 | 49 | # Convert raw bytes to Pubkey for creator field 50 | if hasattr(self, 'creator') and isinstance(self.creator, bytes): 51 | self.creator = Pubkey.from_bytes(self.creator) 52 | 53 | def calculate_price(self) -> float: 54 | """Calculate token price in SOL. 55 | 56 | Returns: 57 | Token price in SOL 58 | 59 | Raises: 60 | ValueError: If reserve state is invalid 61 | """ 62 | if self.virtual_token_reserves <= 0 or self.virtual_sol_reserves <= 0: 63 | raise ValueError("Invalid reserve state") 64 | 65 | return (self.virtual_sol_reserves / LAMPORTS_PER_SOL) / ( 66 | self.virtual_token_reserves / 10**TOKEN_DECIMALS 67 | ) 68 | 69 | @property 70 | def token_reserves(self) -> float: 71 | """Get token reserves in decimal form.""" 72 | return self.virtual_token_reserves / 10**TOKEN_DECIMALS 73 | 74 | @property 75 | def sol_reserves(self) -> float: 76 | """Get SOL reserves in decimal form.""" 77 | return self.virtual_sol_reserves / LAMPORTS_PER_SOL 78 | 79 | 80 | class BondingCurveManager: 81 | """Manager for bonding curve operations.""" 82 | 83 | def __init__(self, client: SolanaClient): 84 | """Initialize with Solana client. 
85 | 86 | Args: 87 | client: Solana client for RPC calls 88 | """ 89 | self.client = client 90 | 91 | async def get_curve_state(self, curve_address: Pubkey) -> BondingCurveState: 92 | """Get the state of a bonding curve. 93 | 94 | Args: 95 | curve_address: Address of the bonding curve account 96 | 97 | Returns: 98 | Bonding curve state 99 | 100 | Raises: 101 | ValueError: If curve data is invalid 102 | """ 103 | try: 104 | account = await self.client.get_account_info(curve_address) 105 | if not account.data: 106 | raise ValueError(f"No data in bonding curve account {curve_address}") 107 | 108 | return BondingCurveState(account.data) 109 | 110 | except Exception as e: 111 | logger.error(f"Failed to get curve state: {e!s}") 112 | raise ValueError(f"Invalid curve state: {e!s}") 113 | 114 | async def calculate_price(self, curve_address: Pubkey) -> float: 115 | """Calculate the current price of a token. 116 | 117 | Args: 118 | curve_address: Address of the bonding curve account 119 | 120 | Returns: 121 | Token price in SOL 122 | """ 123 | curve_state = await self.get_curve_state(curve_address) 124 | return curve_state.calculate_price() 125 | 126 | async def calculate_expected_tokens( 127 | self, curve_address: Pubkey, sol_amount: float 128 | ) -> float: 129 | """Calculate the expected token amount for a given SOL input. 
130 | 131 | Args: 132 | curve_address: Address of the bonding curve account 133 | sol_amount: Amount of SOL to spend 134 | 135 | Returns: 136 | Expected token amount 137 | """ 138 | curve_state = await self.get_curve_state(curve_address) 139 | price = curve_state.calculate_price() 140 | return sol_amount / price 141 | -------------------------------------------------------------------------------- /src/core/priority_fee/__init__.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class PriorityFeePlugin(ABC): 5 | """Base class for priority fee calculation plugins.""" 6 | 7 | @abstractmethod 8 | async def get_priority_fee(self) -> int | None: 9 | """ 10 | Calculate the priority fee. 11 | 12 | Returns: 13 | Optional[int]: Priority fee in lamports, or None if no fee should be applied. 14 | """ 15 | pass 16 | -------------------------------------------------------------------------------- /src/core/priority_fee/dynamic_fee.py: -------------------------------------------------------------------------------- 1 | import statistics 2 | 3 | from solders.pubkey import Pubkey 4 | 5 | from core.client import SolanaClient 6 | from core.priority_fee import PriorityFeePlugin 7 | from utils.logger import get_logger 8 | 9 | logger = get_logger(__name__) 10 | 11 | 12 | class DynamicPriorityFee(PriorityFeePlugin): 13 | """Dynamic priority fee plugin using getRecentPrioritizationFees.""" 14 | 15 | def __init__(self, client: SolanaClient): 16 | """ 17 | Initialize the dynamic fee plugin. 18 | 19 | Args: 20 | client: Solana RPC client for network requests. 21 | """ 22 | self.client = client 23 | 24 | async def get_priority_fee( 25 | self, accounts: list[Pubkey] | None = None 26 | ) -> int | None: 27 | """ 28 | Fetch the recent priority fee using getRecentPrioritizationFees. 29 | 30 | Args: 31 | accounts: List of accounts to consider for the fee calculation. 
32 | If None, the fee is calculated without specific account constraints. 33 | 34 | Returns: 35 | Optional[int]: Median priority fee in microlamports, or None if the request fails. 36 | """ 37 | try: 38 | body = { 39 | "jsonrpc": "2.0", 40 | "id": 1, 41 | "method": "getRecentPrioritizationFees", 42 | "params": [[str(account) for account in accounts]] if accounts else [], 43 | } 44 | 45 | response = await self.client.post_rpc(body) 46 | if not response or "result" not in response: 47 | logger.error( 48 | "Failed to fetch recent prioritization fees: invalid response" 49 | ) 50 | return None 51 | 52 | fees = [fee["prioritizationFee"] for fee in response["result"]] 53 | if not fees: 54 | logger.warning("No prioritization fees found in the response") 55 | return None 56 | 57 | # Get the 70th percentile of fees for faster processing 58 | # It means you're paying a fee that's higher than 70% of other transactions 59 | # Higher percentile = faster transactions but more expensive 60 | # Lower percentile = cheaper but slower transactions 61 | prior_fee = int(statistics.quantiles(fees, n=10)[-3]) # 70th percentile 62 | 63 | return prior_fee 64 | 65 | except Exception as e: 66 | logger.error( 67 | f"Failed to fetch recent priority fee: {str(e)}", exc_info=True 68 | ) 69 | return None 70 | -------------------------------------------------------------------------------- /src/core/priority_fee/fixed_fee.py: -------------------------------------------------------------------------------- 1 | from . import PriorityFeePlugin 2 | 3 | 4 | class FixedPriorityFee(PriorityFeePlugin): 5 | """Fixed priority fee plugin.""" 6 | 7 | def __init__(self, fixed_fee: int): 8 | """ 9 | Initialize the fixed fee plugin. 10 | 11 | Args: 12 | fixed_fee: Fixed priority fee in microlamports. 13 | """ 14 | self.fixed_fee = fixed_fee 15 | 16 | async def get_priority_fee(self) -> int | None: 17 | """ 18 | Return the fixed priority fee. 
19 | 20 | Returns: 21 | Optional[int]: Fixed priority fee in microlamports, or None if fixed_fee is 0. 22 | """ 23 | if self.fixed_fee == 0: 24 | return None 25 | return self.fixed_fee 26 | -------------------------------------------------------------------------------- /src/core/priority_fee/manager.py: -------------------------------------------------------------------------------- 1 | from solders.pubkey import Pubkey 2 | 3 | from core.client import SolanaClient 4 | from core.priority_fee.dynamic_fee import DynamicPriorityFee 5 | from core.priority_fee.fixed_fee import FixedPriorityFee 6 | from utils.logger import get_logger 7 | 8 | logger = get_logger(__name__) 9 | 10 | 11 | class PriorityFeeManager: 12 | """Manager for priority fee calculation and validation.""" 13 | 14 | def __init__( 15 | self, 16 | client: SolanaClient, 17 | enable_dynamic_fee: bool, 18 | enable_fixed_fee: bool, 19 | fixed_fee: int, 20 | extra_fee: float, 21 | hard_cap: int, 22 | ): 23 | """ 24 | Initialize the priority fee manager. 25 | 26 | Args: 27 | client: Solana RPC client for dynamic fee calculation. 28 | enable_dynamic_fee: Whether to enable dynamic fee calculation. 29 | enable_fixed_fee: Whether to enable fixed fee. 30 | fixed_fee: Fixed priority fee in microlamports. 31 | extra_fee: Percentage increase to apply to the base fee. 32 | hard_cap: Maximum allowed priority fee in microlamports. 33 | """ 34 | self.client = client 35 | self.enable_dynamic_fee = enable_dynamic_fee 36 | self.enable_fixed_fee = enable_fixed_fee 37 | self.fixed_fee = fixed_fee 38 | self.extra_fee = extra_fee 39 | self.hard_cap = hard_cap 40 | 41 | # Initialize plugins 42 | self.dynamic_fee_plugin = DynamicPriorityFee(client) 43 | self.fixed_fee_plugin = FixedPriorityFee(fixed_fee) 44 | 45 | async def calculate_priority_fee( 46 | self, accounts: list[Pubkey] | None = None 47 | ) -> int | None: 48 | """ 49 | Calculate the priority fee based on the configuration. 
50 | 51 | Args: 52 | accounts: List of accounts to consider for dynamic fee calculation. 53 | If None, the fee is calculated without specific account constraints. 54 | 55 | Returns: 56 | Optional[int]: Calculated priority fee in microlamports, or None if no fee should be applied. 57 | """ 58 | base_fee = await self._get_base_fee(accounts) 59 | if base_fee is None: 60 | return None 61 | 62 | # Apply extra fee (percentage increase) 63 | final_fee = int(base_fee * (1 + self.extra_fee)) 64 | 65 | # Enforce hard cap 66 | if final_fee > self.hard_cap: 67 | logger.warning( 68 | f"Calculated priority fee {final_fee} exceeds hard cap {self.hard_cap}. Applying hard cap." 69 | ) 70 | final_fee = self.hard_cap 71 | 72 | return final_fee 73 | 74 | async def _get_base_fee(self, accounts: list[Pubkey] | None = None) -> int | None: 75 | """ 76 | Determine the base fee based on the configuration. 77 | 78 | Returns: 79 | Optional[int]: Base fee in microlamports, or None if no fee should be applied. 80 | """ 81 | # Prefer dynamic fee if both are enabled 82 | if self.enable_dynamic_fee: 83 | dynamic_fee = await self.dynamic_fee_plugin.get_priority_fee(accounts) 84 | if dynamic_fee is not None: 85 | return dynamic_fee 86 | 87 | # Fall back to fixed fee if enabled 88 | if self.enable_fixed_fee: 89 | return await self.fixed_fee_plugin.get_priority_fee() 90 | 91 | # No priority fee if both are disabled 92 | return None 93 | -------------------------------------------------------------------------------- /src/core/pubkeys.py: -------------------------------------------------------------------------------- 1 | """ 2 | System and program addresses for Solana and pump.fun interactions. 
3 | """ 4 | 5 | from dataclasses import dataclass 6 | from typing import Final 7 | 8 | from solders.pubkey import Pubkey 9 | 10 | LAMPORTS_PER_SOL: Final[int] = 1_000_000_000 11 | TOKEN_DECIMALS: Final[int] = 6 12 | 13 | 14 | @dataclass 15 | class SystemAddresses: 16 | """System-level Solana addresses.""" 17 | 18 | PROGRAM: Final[Pubkey] = Pubkey.from_string("11111111111111111111111111111111") 19 | TOKEN_PROGRAM: Final[Pubkey] = Pubkey.from_string( 20 | "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" 21 | ) 22 | ASSOCIATED_TOKEN_PROGRAM: Final[Pubkey] = Pubkey.from_string( 23 | "ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL" 24 | ) 25 | RENT: Final[Pubkey] = Pubkey.from_string( 26 | "SysvarRent111111111111111111111111111111111" 27 | ) 28 | SOL: Final[Pubkey] = Pubkey.from_string( 29 | "So11111111111111111111111111111111111111112" 30 | ) 31 | 32 | 33 | @dataclass 34 | class PumpAddresses: 35 | """Pump.fun program addresses.""" 36 | 37 | PROGRAM: Final[Pubkey] = Pubkey.from_string( 38 | "6EF8rrecthR5Dkzon8Nwu78hRvfCKubJ14M5uBEwF6P" 39 | ) 40 | GLOBAL: Final[Pubkey] = Pubkey.from_string( 41 | "4wTV1YmiEkRvAtNtsSGPtUrqRYQMe5SKy2uB4Jjaxnjf" 42 | ) 43 | EVENT_AUTHORITY: Final[Pubkey] = Pubkey.from_string( 44 | "Ce6TQqeHC9p8KetsN6JsjHK7UTZk7nasjjnr7XxXp9F1" 45 | ) 46 | FEE: Final[Pubkey] = Pubkey.from_string( 47 | "CebN5WGQ4jvEPvsVU4EoHEpgzq1VV7AbicfhtW4xC9iM" 48 | ) 49 | LIQUIDITY_MIGRATOR: Final[Pubkey] = Pubkey.from_string( 50 | "39azUYFWPz3VHgKCf3VChUwbpURdCHRxjWVowf5jUJjg" 51 | ) 52 | -------------------------------------------------------------------------------- /src/core/wallet.py: -------------------------------------------------------------------------------- 1 | """ 2 | Wallet management for Solana transactions. 
3 | """ 4 | 5 | import base58 6 | from solders.keypair import Keypair 7 | from solders.pubkey import Pubkey 8 | from spl.token.instructions import get_associated_token_address 9 | 10 | 11 | class Wallet: 12 | """Manages a Solana wallet for trading operations.""" 13 | 14 | def __init__(self, private_key: str): 15 | """Initialize wallet from private key. 16 | 17 | Args: 18 | private_key: Base58 encoded private key 19 | """ 20 | self._private_key = private_key 21 | self._keypair = self._load_keypair(private_key) 22 | 23 | @property 24 | def pubkey(self) -> Pubkey: 25 | """Get the public key of the wallet.""" 26 | return self._keypair.pubkey() 27 | 28 | @property 29 | def keypair(self) -> Keypair: 30 | """Get the keypair for signing transactions.""" 31 | return self._keypair 32 | 33 | def get_associated_token_address(self, mint: Pubkey) -> Pubkey: 34 | """Get the associated token account address for a mint. 35 | 36 | Args: 37 | mint: Token mint address 38 | 39 | Returns: 40 | Associated token account address 41 | """ 42 | return get_associated_token_address(self.pubkey, mint) 43 | 44 | @staticmethod 45 | def _load_keypair(private_key: str) -> Keypair: 46 | """Load keypair from private key. 47 | 48 | Args: 49 | private_key: Base58 encoded private key 50 | 51 | Returns: 52 | Solana keypair 53 | """ 54 | private_key_bytes = base58.b58decode(private_key) 55 | return Keypair.from_bytes(private_key_bytes) 56 | -------------------------------------------------------------------------------- /src/geyser/generated/solana_storage_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | import grpc 4 | import warnings 5 | 6 | 7 | GRPC_GENERATED_VERSION = '1.71.0' 8 | GRPC_VERSION = grpc.__version__ 9 | _version_not_supported = False 10 | 11 | try: 12 | from grpc._utilities import first_version_is_lower 13 | _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) 14 | except ImportError: 15 | _version_not_supported = True 16 | 17 | if _version_not_supported: 18 | raise RuntimeError( 19 | f'The grpc package installed is at version {GRPC_VERSION},' 20 | + f' but the generated code in solana_storage_pb2_grpc.py depends on' 21 | + f' grpcio>={GRPC_GENERATED_VERSION}.' 22 | + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' 23 | + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' 24 | ) 25 | -------------------------------------------------------------------------------- /src/geyser/proto/solana-storage.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package solana.storage.ConfirmedBlock; 4 | 5 | option go_package = "github.com/rpcpool/yellowstone-grpc/examples/golang/proto"; 6 | 7 | message ConfirmedBlock { 8 | string previous_blockhash = 1; 9 | string blockhash = 2; 10 | uint64 parent_slot = 3; 11 | repeated ConfirmedTransaction transactions = 4; 12 | repeated Reward rewards = 5; 13 | UnixTimestamp block_time = 6; 14 | BlockHeight block_height = 7; 15 | NumPartitions num_partitions = 8; 16 | } 17 | 18 | message ConfirmedTransaction { 19 | Transaction transaction = 1; 20 | TransactionStatusMeta meta = 2; 21 | } 22 | 23 | message Transaction { 24 | repeated bytes signatures = 1; 25 | Message message = 2; 26 | } 27 | 28 | message Message { 29 | MessageHeader header = 1; 30 | repeated bytes account_keys = 2; 31 | bytes recent_blockhash = 3; 32 | repeated CompiledInstruction instructions = 4; 33 | bool versioned = 5; 34 | 
repeated MessageAddressTableLookup address_table_lookups = 6; 35 | } 36 | 37 | message MessageHeader { 38 | uint32 num_required_signatures = 1; 39 | uint32 num_readonly_signed_accounts = 2; 40 | uint32 num_readonly_unsigned_accounts = 3; 41 | } 42 | 43 | message MessageAddressTableLookup { 44 | bytes account_key = 1; 45 | bytes writable_indexes = 2; 46 | bytes readonly_indexes = 3; 47 | } 48 | 49 | message TransactionStatusMeta { 50 | TransactionError err = 1; 51 | uint64 fee = 2; 52 | repeated uint64 pre_balances = 3; 53 | repeated uint64 post_balances = 4; 54 | repeated InnerInstructions inner_instructions = 5; 55 | bool inner_instructions_none = 10; 56 | repeated string log_messages = 6; 57 | bool log_messages_none = 11; 58 | repeated TokenBalance pre_token_balances = 7; 59 | repeated TokenBalance post_token_balances = 8; 60 | repeated Reward rewards = 9; 61 | repeated bytes loaded_writable_addresses = 12; 62 | repeated bytes loaded_readonly_addresses = 13; 63 | ReturnData return_data = 14; 64 | bool return_data_none = 15; 65 | 66 | // Sum of compute units consumed by all instructions. 67 | // Available since Solana v1.10.35 / v1.11.6. 68 | // Set to `None` for txs executed on earlier versions. 69 | optional uint64 compute_units_consumed = 16; 70 | } 71 | 72 | message TransactionError { 73 | bytes err = 1; 74 | } 75 | 76 | message InnerInstructions { 77 | uint32 index = 1; 78 | repeated InnerInstruction instructions = 2; 79 | } 80 | 81 | message InnerInstruction { 82 | uint32 program_id_index = 1; 83 | bytes accounts = 2; 84 | bytes data = 3; 85 | 86 | // Invocation stack height of an inner instruction. 87 | // Available since Solana v1.14.6 88 | // Set to `None` for txs executed on earlier versions. 
89 | optional uint32 stack_height = 4; 90 | } 91 | 92 | message CompiledInstruction { 93 | uint32 program_id_index = 1; 94 | bytes accounts = 2; 95 | bytes data = 3; 96 | } 97 | 98 | message TokenBalance { 99 | uint32 account_index = 1; 100 | string mint = 2; 101 | UiTokenAmount ui_token_amount = 3; 102 | string owner = 4; 103 | string program_id = 5; 104 | } 105 | 106 | message UiTokenAmount { 107 | double ui_amount = 1; 108 | uint32 decimals = 2; 109 | string amount = 3; 110 | string ui_amount_string = 4; 111 | } 112 | 113 | message ReturnData { 114 | bytes program_id = 1; 115 | bytes data = 2; 116 | } 117 | 118 | enum RewardType { 119 | Unspecified = 0; 120 | Fee = 1; 121 | Rent = 2; 122 | Staking = 3; 123 | Voting = 4; 124 | } 125 | 126 | message Reward { 127 | string pubkey = 1; 128 | int64 lamports = 2; 129 | uint64 post_balance = 3; 130 | RewardType reward_type = 4; 131 | string commission = 5; 132 | } 133 | 134 | message Rewards { 135 | repeated Reward rewards = 1; 136 | NumPartitions num_partitions = 2; 137 | } 138 | 139 | message UnixTimestamp { 140 | int64 timestamp = 1; 141 | } 142 | 143 | message BlockHeight { 144 | uint64 block_height = 1; 145 | } 146 | 147 | message NumPartitions { 148 | uint64 num_partitions = 1; 149 | } -------------------------------------------------------------------------------- /src/monitoring/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chainstacklabs/pump-fun-bot/2338c7a5dd78787a8d98ba78445ad017da204921/src/monitoring/__init__.py -------------------------------------------------------------------------------- /src/monitoring/base_listener.py: -------------------------------------------------------------------------------- 1 | """ 2 | Base class for WebSocket token listeners. 
3 | """ 4 | 5 | from abc import ABC, abstractmethod 6 | from collections.abc import Awaitable, Callable 7 | 8 | from trading.base import TokenInfo 9 | 10 | 11 | class BaseTokenListener(ABC): 12 | """Base abstract class for token listeners.""" 13 | 14 | @abstractmethod 15 | async def listen_for_tokens( 16 | self, 17 | token_callback: Callable[[TokenInfo], Awaitable[None]], 18 | match_string: str | None = None, 19 | creator_address: str | None = None, 20 | ) -> None: 21 | """ 22 | Listen for new token creations. 23 | 24 | Args: 25 | token_callback: Callback function for new tokens 26 | match_string: Optional string to match in token name/symbol 27 | creator_address: Optional creator address to filter by 28 | """ 29 | pass 30 | -------------------------------------------------------------------------------- /src/monitoring/block_event_processor.py: -------------------------------------------------------------------------------- 1 | """ 2 | Event processing for pump.fun tokens. 3 | """ 4 | 5 | import base64 6 | import json 7 | import struct 8 | from typing import Any 9 | 10 | import base58 11 | from solders.pubkey import Pubkey 12 | from solders.transaction import VersionedTransaction 13 | 14 | from core.pubkeys import PumpAddresses 15 | from trading.base import TokenInfo 16 | from utils.logger import get_logger 17 | 18 | logger = get_logger(__name__) 19 | 20 | 21 | class PumpEventProcessor: 22 | """Processes events from pump.fun program.""" 23 | 24 | # Discriminator for create instruction 25 | CREATE_DISCRIMINATOR = 8576854823835016728 26 | 27 | def __init__(self, pump_program: Pubkey): 28 | """Initialize event processor. 29 | 30 | Args: 31 | pump_program: Pump.fun program address 32 | """ 33 | self.pump_program = pump_program 34 | self._idl = self._load_idl() 35 | 36 | def _load_idl(self) -> dict[str, Any]: 37 | """Load IDL from file. 
38 | 39 | Returns: 40 | IDL as dictionary 41 | """ 42 | try: 43 | with open("idl/pump_fun_idl.json") as f: 44 | return json.load(f) 45 | except Exception as e: 46 | logger.error(f"Failed to load IDL: {e!s}") 47 | # Create a minimal IDL with just what we need 48 | return { 49 | "instructions": [ 50 | { 51 | "name": "create", 52 | "args": [ 53 | {"name": "name", "type": "string"}, 54 | {"name": "symbol", "type": "string"}, 55 | {"name": "uri", "type": "string"}, 56 | ], 57 | } 58 | ] 59 | } 60 | 61 | def process_transaction(self, tx_data: str) -> TokenInfo | None: 62 | """Process a transaction and extract token info. 63 | 64 | Args: 65 | tx_data: Base64 encoded transaction data 66 | 67 | Returns: 68 | TokenInfo if a token creation is found, None otherwise 69 | """ 70 | try: 71 | tx_data_decoded = base64.b64decode(tx_data) 72 | transaction = VersionedTransaction.from_bytes(tx_data_decoded) 73 | 74 | for ix in transaction.message.instructions: 75 | # Check if instruction is from pump.fun program 76 | program_id_index = ix.program_id_index 77 | if program_id_index >= len(transaction.message.account_keys): 78 | continue 79 | 80 | program_id = transaction.message.account_keys[program_id_index] 81 | 82 | if str(program_id) != str(self.pump_program): 83 | continue 84 | 85 | ix_data = bytes(ix.data) 86 | 87 | # Check if it's a create instruction 88 | if len(ix_data) < 8: 89 | continue 90 | 91 | discriminator = struct.unpack(" dict[str, Any]: 141 | """Decode create instruction data. 142 | 143 | Args: 144 | ix_data: Instruction data bytes 145 | ix_def: Instruction definition from IDL 146 | accounts: List of account pubkeys 147 | 148 | Returns: 149 | Decoded instruction arguments 150 | """ 151 | args = {} 152 | offset = 8 # Skip 8-byte discriminator 153 | 154 | for arg in ix_def["args"]: 155 | if arg["type"] == "string": 156 | length = struct.unpack_from(" Pubkey: 177 | """ 178 | Find the creator vault for a creator. 
179 | 180 | Args: 181 | creator: Creator address 182 | 183 | Returns: 184 | Creator vault address 185 | """ 186 | derived_address, _ = Pubkey.find_program_address( 187 | [ 188 | b"creator-vault", 189 | bytes(creator) 190 | ], 191 | PumpAddresses.PROGRAM, 192 | ) 193 | return derived_address 194 | -------------------------------------------------------------------------------- /src/monitoring/block_listener.py: -------------------------------------------------------------------------------- 1 | """ 2 | WebSocket monitoring for pump.fun tokens. 3 | """ 4 | 5 | import asyncio 6 | import json 7 | from collections.abc import Awaitable, Callable 8 | 9 | import websockets 10 | from solders.pubkey import Pubkey 11 | 12 | from monitoring.base_listener import BaseTokenListener 13 | from monitoring.block_event_processor import PumpEventProcessor 14 | from trading.base import TokenInfo 15 | from utils.logger import get_logger 16 | 17 | logger = get_logger(__name__) 18 | 19 | 20 | class BlockListener(BaseTokenListener): 21 | """WebSocket listener for pump.fun token creation events using blockSubscribe.""" 22 | 23 | def __init__(self, wss_endpoint: str, pump_program: Pubkey): 24 | """Initialize token listener. 25 | 26 | Args: 27 | wss_endpoint: WebSocket endpoint URL 28 | pump_program: Pump.fun program address 29 | """ 30 | self.wss_endpoint = wss_endpoint 31 | self.pump_program = pump_program 32 | self.event_processor = PumpEventProcessor(pump_program) 33 | self.ping_interval = 20 # seconds 34 | 35 | async def listen_for_tokens( 36 | self, 37 | token_callback: Callable[[TokenInfo], Awaitable[None]], 38 | match_string: str | None = None, 39 | creator_address: str | None = None, 40 | ) -> None: 41 | """Listen for new token creations. 
42 | 43 | Args: 44 | token_callback: Callback function for new tokens 45 | match_string: Optional string to match in token name/symbol 46 | creator_address: Optional creator address to filter by 47 | """ 48 | while True: 49 | try: 50 | async with websockets.connect(self.wss_endpoint) as websocket: 51 | await self._subscribe_to_program(websocket) 52 | ping_task = asyncio.create_task(self._ping_loop(websocket)) 53 | 54 | try: 55 | while True: 56 | token_info = await self._wait_for_token_creation(websocket) 57 | if not token_info: 58 | continue 59 | 60 | logger.info( 61 | f"New token detected: {token_info.name} ({token_info.symbol})" 62 | ) 63 | 64 | if match_string and not ( 65 | match_string.lower() in token_info.name.lower() 66 | or match_string.lower() in token_info.symbol.lower() 67 | ): 68 | logger.info( 69 | f"Token does not match filter '{match_string}'. Skipping..." 70 | ) 71 | continue 72 | 73 | if ( 74 | creator_address 75 | and str(token_info.user) != creator_address 76 | ): 77 | logger.info( 78 | f"Token not created by {creator_address}. Skipping..." 79 | ) 80 | continue 81 | 82 | await token_callback(token_info) 83 | 84 | except websockets.exceptions.ConnectionClosed: 85 | logger.warning("WebSocket connection closed. Reconnecting...") 86 | ping_task.cancel() 87 | 88 | except Exception as e: 89 | logger.error(f"WebSocket connection error: {e!s}") 90 | logger.info("Reconnecting in 5 seconds...") 91 | await asyncio.sleep(5) 92 | 93 | async def _subscribe_to_program(self, websocket) -> None: 94 | """Subscribe to blocks mentioning the pump.fun program. 
95 | 96 | Args: 97 | websocket: Active WebSocket connection 98 | """ 99 | subscription_message = json.dumps( 100 | { 101 | "jsonrpc": "2.0", 102 | "id": 1, 103 | "method": "blockSubscribe", 104 | "params": [ 105 | {"mentionsAccountOrProgram": str(self.pump_program)}, 106 | { 107 | "commitment": "confirmed", 108 | "encoding": "base64", # base64 is faster than other encoding options 109 | "showRewards": False, 110 | "transactionDetails": "full", 111 | "maxSupportedTransactionVersion": 0, 112 | }, 113 | ], 114 | } 115 | ) 116 | 117 | await websocket.send(subscription_message) 118 | logger.info(f"Subscribed to blocks mentioning program: {self.pump_program}") 119 | 120 | async def _ping_loop(self, websocket) -> None: 121 | """Keep connection alive with pings. 122 | 123 | Args: 124 | websocket: Active WebSocket connection 125 | """ 126 | try: 127 | while True: 128 | await asyncio.sleep(self.ping_interval) 129 | try: 130 | pong_waiter = await websocket.ping() 131 | await asyncio.wait_for(pong_waiter, timeout=10) 132 | except TimeoutError: 133 | logger.warning("Ping timeout - server not responding") 134 | # Force reconnection 135 | await websocket.close() 136 | return 137 | except asyncio.CancelledError: 138 | pass 139 | except Exception as e: 140 | logger.error(f"Ping error: {e!s}") 141 | 142 | async def _wait_for_token_creation(self, websocket) -> TokenInfo | None: 143 | """Wait for token creation event. 
144 | 145 | Args: 146 | websocket: Active WebSocket connection 147 | 148 | Returns: 149 | TokenInfo if a token creation is found, None otherwise 150 | """ 151 | try: 152 | response = await asyncio.wait_for(websocket.recv(), timeout=30) 153 | data = json.loads(response) 154 | 155 | if "method" not in data or data["method"] != "blockNotification": 156 | return None 157 | 158 | if "params" not in data or "result" not in data["params"]: 159 | return None 160 | 161 | block_data = data["params"]["result"] 162 | if "value" not in block_data or "block" not in block_data["value"]: 163 | return None 164 | 165 | block = block_data["value"]["block"] 166 | if "transactions" not in block: 167 | return None 168 | 169 | for tx in block["transactions"]: 170 | if not isinstance(tx, dict) or "transaction" not in tx: 171 | continue 172 | 173 | token_info = self.event_processor.process_transaction( 174 | tx["transaction"][0] 175 | ) 176 | if token_info: 177 | return token_info 178 | 179 | except TimeoutError: 180 | logger.debug("No data received for 30 seconds") 181 | except websockets.exceptions.ConnectionClosed: 182 | logger.warning("WebSocket connection closed") 183 | raise 184 | except Exception as e: 185 | logger.error(f"Error processing WebSocket message: {e!s}") 186 | 187 | return None 188 | -------------------------------------------------------------------------------- /src/monitoring/geyser_event_processor.py: -------------------------------------------------------------------------------- 1 | """ 2 | Event processing for pump.fun tokens using Geyser data. 
3 | """ 4 | 5 | import struct 6 | from typing import Final 7 | 8 | import base58 9 | from solders.pubkey import Pubkey 10 | 11 | from core.pubkeys import PumpAddresses 12 | from trading.base import TokenInfo 13 | from utils.logger import get_logger 14 | 15 | logger = get_logger(__name__) 16 | 17 | 18 | class GeyserEventProcessor: 19 | """Processes token creation events from Geyser stream.""" 20 | 21 | CREATE_DISCRIMINATOR: Final[bytes] = struct.pack(" TokenInfo | None: 32 | """Process transaction data and extract token creation info. 33 | 34 | Args: 35 | instruction_data: Raw instruction data 36 | accounts: List of account indices 37 | keys: List of account public keys 38 | 39 | Returns: 40 | TokenInfo if token creation found, None otherwise 41 | """ 42 | if not instruction_data.startswith(self.CREATE_DISCRIMINATOR): 43 | return None 44 | 45 | try: 46 | # Skip past the 8-byte discriminator 47 | offset = 8 48 | 49 | # Helper to read strings (prefixed with length) 50 | def read_string(): 51 | nonlocal offset 52 | # Get string length (4-byte uint) 53 | length = struct.unpack_from("= len(accounts): 69 | return None 70 | account_index = accounts[index] 71 | if account_index >= len(keys): 72 | return None 73 | return Pubkey.from_bytes(keys[account_index]) 74 | 75 | name = read_string() 76 | symbol = read_string() 77 | uri = read_string() 78 | creator = read_pubkey() 79 | 80 | mint = get_account_key(0) 81 | bonding_curve = get_account_key(2) 82 | associated_bonding_curve = get_account_key(3) 83 | user = get_account_key(7) 84 | 85 | creator_vault = self._find_creator_vault(creator) 86 | 87 | if not all([mint, bonding_curve, associated_bonding_curve, user]): 88 | logger.warning("Missing required account keys in token creation") 89 | return None 90 | 91 | return TokenInfo( 92 | name=name, 93 | symbol=symbol, 94 | uri=uri, 95 | mint=mint, 96 | bonding_curve=bonding_curve, 97 | associated_bonding_curve=associated_bonding_curve, 98 | user=user, 99 | creator=creator, 100 | 
creator_vault=creator_vault, 101 | ) 102 | 103 | except Exception as e: 104 | logger.error(f"Failed to process transaction data: {e}") 105 | return None 106 | 107 | def _find_creator_vault(self, creator: Pubkey) -> Pubkey: 108 | """ 109 | Find the creator vault for a creator. 110 | 111 | Args: 112 | creator: Creator address 113 | 114 | Returns: 115 | Creator vault address 116 | """ 117 | derived_address, _ = Pubkey.find_program_address( 118 | [ 119 | b"creator-vault", 120 | bytes(creator) 121 | ], 122 | PumpAddresses.PROGRAM, 123 | ) 124 | return derived_address -------------------------------------------------------------------------------- /src/monitoring/geyser_listener.py: -------------------------------------------------------------------------------- 1 | """ 2 | Geyser monitoring for pump.fun tokens. 3 | """ 4 | 5 | import asyncio 6 | from collections.abc import Awaitable, Callable 7 | 8 | import grpc 9 | from solders.pubkey import Pubkey 10 | 11 | from geyser.generated import geyser_pb2, geyser_pb2_grpc 12 | from monitoring.base_listener import BaseTokenListener 13 | from monitoring.geyser_event_processor import GeyserEventProcessor 14 | from trading.base import TokenInfo 15 | from utils.logger import get_logger 16 | 17 | logger = get_logger(__name__) 18 | 19 | 20 | class GeyserListener(BaseTokenListener): 21 | """Geyser listener for pump.fun token creation events.""" 22 | 23 | def __init__(self, geyser_endpoint: str, geyser_api_token: str, geyser_auth_type: str, pump_program: Pubkey): 24 | """Initialize token listener. 
25 | 26 | Args: 27 | geyser_endpoint: Geyser gRPC endpoint URL 28 | geyser_api_token: API token for authentication 29 | geyser_auth_type: authentication type ('x-token' or 'basic') 30 | pump_program: Pump.fun program address 31 | """ 32 | self.geyser_endpoint = geyser_endpoint 33 | self.geyser_api_token = geyser_api_token 34 | valid_auth_types = {"x-token", "basic"} 35 | self.auth_type: str = (geyser_auth_type or "x-token").lower() 36 | if self.auth_type not in valid_auth_types: 37 | raise ValueError( 38 | f"Unsupported auth_type={self.auth_type!r}. " 39 | f"Expected one of {valid_auth_types}" 40 | ) 41 | self.pump_program = pump_program 42 | self.event_processor = GeyserEventProcessor(pump_program) 43 | 44 | async def _create_geyser_connection(self): 45 | """Establish a secure connection to the Geyser endpoint.""" 46 | if self.auth_type == "x-token": 47 | auth = grpc.metadata_call_credentials( 48 | lambda _, callback: callback((("x-token", self.geyser_api_token),), None) 49 | ) 50 | else: # Default to basic auth 51 | auth = grpc.metadata_call_credentials( 52 | lambda _, callback: callback((("authorization", f"Basic {self.geyser_api_token}"),), None) 53 | ) 54 | creds = grpc.composite_channel_credentials( 55 | grpc.ssl_channel_credentials(), auth 56 | ) 57 | channel = grpc.aio.secure_channel(self.geyser_endpoint, creds) 58 | return geyser_pb2_grpc.GeyserStub(channel), channel 59 | 60 | def _create_subscription_request(self): 61 | """Create a subscription request for Pump.fun transactions.""" 62 | request = geyser_pb2.SubscribeRequest() 63 | request.transactions["pump_filter"].account_include.append(str(self.pump_program)) 64 | request.transactions["pump_filter"].failed = False 65 | request.commitment = geyser_pb2.CommitmentLevel.PROCESSED 66 | return request 67 | 68 | async def listen_for_tokens( 69 | self, 70 | token_callback: Callable[[TokenInfo], Awaitable[None]], 71 | match_string: str | None = None, 72 | creator_address: str | None = None, 73 | ) -> None: 74 | 
"""Listen for new token creations using Geyser subscription. 75 | 76 | Args: 77 | token_callback: Callback function for new tokens 78 | match_string: Optional string to match in token name/symbol 79 | creator_address: Optional creator address to filter by 80 | """ 81 | while True: 82 | try: 83 | stub, channel = await self._create_geyser_connection() 84 | request = self._create_subscription_request() 85 | 86 | logger.info(f"Connected to Geyser endpoint: {self.geyser_endpoint}") 87 | logger.info(f"Monitoring for transactions involving program: {self.pump_program}") 88 | 89 | try: 90 | async for update in stub.Subscribe(iter([request])): 91 | token_info = await self._process_update(update) 92 | if not token_info: 93 | continue 94 | 95 | logger.info( 96 | f"New token detected: {token_info.name} ({token_info.symbol})" 97 | ) 98 | 99 | if match_string and not ( 100 | match_string.lower() in token_info.name.lower() 101 | or match_string.lower() in token_info.symbol.lower() 102 | ): 103 | logger.info( 104 | f"Token does not match filter '{match_string}'. Skipping..." 105 | ) 106 | continue 107 | 108 | if ( 109 | creator_address 110 | and str(token_info.user) != creator_address 111 | ): 112 | logger.info( 113 | f"Token not created by {creator_address}. Skipping..." 114 | ) 115 | continue 116 | 117 | await token_callback(token_info) 118 | 119 | except grpc.aio.AioRpcError as e: 120 | logger.error(f"gRPC error: {e.details()}") 121 | await asyncio.sleep(5) 122 | 123 | finally: 124 | await channel.close() 125 | 126 | except Exception as e: 127 | logger.error(f"Geyser connection error: {e}") 128 | logger.info("Reconnecting in 10 seconds...") 129 | await asyncio.sleep(10) 130 | 131 | async def _process_update(self, update) -> TokenInfo | None: 132 | """Process a Geyser update and extract token creation info. 
133 | 134 | Args: 135 | update: Geyser update from the subscription 136 | 137 | Returns: 138 | TokenInfo if a token creation is found, None otherwise 139 | """ 140 | try: 141 | if not update.HasField("transaction"): 142 | return None 143 | 144 | tx = update.transaction.transaction.transaction 145 | msg = getattr(tx, "message", None) 146 | if msg is None: 147 | return None 148 | 149 | for ix in msg.instructions: 150 | # Skip non-Pump.fun program instructions 151 | program_idx = ix.program_id_index 152 | if program_idx >= len(msg.account_keys): 153 | continue 154 | 155 | program_id = msg.account_keys[program_idx] 156 | if bytes(program_id) != bytes(self.pump_program): 157 | continue 158 | 159 | # Process instruction data 160 | token_info = self.event_processor.process_transaction_data( 161 | ix.data, ix.accounts, msg.account_keys 162 | ) 163 | if token_info: 164 | return token_info 165 | 166 | return None 167 | 168 | except Exception as e: 169 | logger.error(f"Error processing Geyser update: {e}") 170 | return None 171 | -------------------------------------------------------------------------------- /src/monitoring/logs_event_processor.py: -------------------------------------------------------------------------------- 1 | """ 2 | Event processing for pump.fun tokens using logsSubscribe data. 3 | """ 4 | 5 | import base64 6 | import struct 7 | from typing import Final 8 | 9 | import base58 10 | from solders.pubkey import Pubkey 11 | 12 | from core.pubkeys import PumpAddresses, SystemAddresses 13 | from trading.base import TokenInfo 14 | from utils.logger import get_logger 15 | 16 | logger = get_logger(__name__) 17 | 18 | 19 | class LogsEventProcessor: 20 | """Processes events from pump.fun program logs.""" 21 | 22 | # Discriminator for create instruction to avoid non-create transactions 23 | CREATE_DISCRIMINATOR: Final[int] = 8530921459188068891 24 | 25 | def __init__(self, pump_program: Pubkey): 26 | """Initialize event processor. 
27 | 28 | Args: 29 | pump_program: Pump.fun program address 30 | """ 31 | self.pump_program = pump_program 32 | 33 | def process_program_logs(self, logs: list[str], signature: str) -> TokenInfo | None: 34 | """Process program logs and extract token info. 35 | 36 | Args: 37 | logs: List of log strings from the notification 38 | signature: Transaction signature 39 | 40 | Returns: 41 | TokenInfo if a token creation is found, None otherwise 42 | """ 43 | # Check if this is a token creation 44 | if not any("Program log: Instruction: Create" in log for log in logs): 45 | return None 46 | 47 | # Skip swaps as the first condition may pass them 48 | if any("Program log: Instruction: CreateTokenAccount" in log for log in logs): 49 | return None 50 | 51 | # Find and process program data 52 | for log in logs: 53 | if "Program data:" in log: 54 | try: 55 | encoded_data = log.split(": ")[1] 56 | decoded_data = base64.b64decode(encoded_data) 57 | parsed_data = self._parse_create_instruction(decoded_data) 58 | 59 | if parsed_data and "name" in parsed_data: 60 | mint = Pubkey.from_string(parsed_data["mint"]) 61 | bonding_curve = Pubkey.from_string(parsed_data["bondingCurve"]) 62 | associated_curve = self._find_associated_bonding_curve( 63 | mint, bonding_curve 64 | ) 65 | creator = Pubkey.from_string(parsed_data["creator"]) 66 | creator_vault = self._find_creator_vault(creator) 67 | 68 | return TokenInfo( 69 | name=parsed_data["name"], 70 | symbol=parsed_data["symbol"], 71 | uri=parsed_data["uri"], 72 | mint=mint, 73 | bonding_curve=bonding_curve, 74 | associated_bonding_curve=associated_curve, 75 | user=Pubkey.from_string(parsed_data["user"]), 76 | creator=creator, 77 | creator_vault=creator_vault, 78 | ) 79 | except Exception as e: 80 | logger.error(f"Failed to process log data: {e}") 81 | 82 | return None 83 | 84 | def _parse_create_instruction(self, data: bytes) -> dict | None: 85 | """Parse the create instruction data. 
86 | 87 | Args: 88 | data: Raw instruction data 89 | 90 | Returns: 91 | Dictionary of parsed data or None if parsing fails 92 | """ 93 | if len(data) < 8: 94 | return None 95 | 96 | # Check for the correct instruction discriminator 97 | discriminator = struct.unpack(" Pubkey: 137 | """ 138 | Find the associated bonding curve for a given mint and bonding curve. 139 | This uses the standard ATA derivation. 140 | 141 | Args: 142 | mint: Token mint address 143 | bonding_curve: Bonding curve address 144 | 145 | Returns: 146 | Associated bonding curve address 147 | """ 148 | derived_address, _ = Pubkey.find_program_address( 149 | [ 150 | bytes(bonding_curve), 151 | bytes(SystemAddresses.TOKEN_PROGRAM), 152 | bytes(mint), 153 | ], 154 | SystemAddresses.ASSOCIATED_TOKEN_PROGRAM, 155 | ) 156 | return derived_address 157 | 158 | def _find_creator_vault(self, creator: Pubkey) -> Pubkey: 159 | """ 160 | Find the creator vault for a creator. 161 | 162 | Args: 163 | creator: Creator address 164 | 165 | Returns: 166 | Creator vault address 167 | """ 168 | derived_address, _ = Pubkey.find_program_address( 169 | [ 170 | b"creator-vault", 171 | bytes(creator) 172 | ], 173 | PumpAddresses.PROGRAM, 174 | ) 175 | return derived_address 176 | -------------------------------------------------------------------------------- /src/monitoring/logs_listener.py: -------------------------------------------------------------------------------- 1 | """ 2 | WebSocket monitoring for pump.fun tokens using logsSubscribe. 
3 | """ 4 | 5 | import asyncio 6 | import json 7 | from collections.abc import Awaitable, Callable 8 | 9 | import websockets 10 | from solders.pubkey import Pubkey 11 | 12 | from monitoring.base_listener import BaseTokenListener 13 | from monitoring.logs_event_processor import LogsEventProcessor 14 | from trading.base import TokenInfo 15 | from utils.logger import get_logger 16 | 17 | logger = get_logger(__name__) 18 | 19 | 20 | class LogsListener(BaseTokenListener): 21 | """WebSocket listener for pump.fun token creation events using logsSubscribe.""" 22 | 23 | def __init__(self, wss_endpoint: str, pump_program: Pubkey): 24 | """Initialize token listener. 25 | 26 | Args: 27 | wss_endpoint: WebSocket endpoint URL 28 | pump_program: Pump.fun program address 29 | """ 30 | self.wss_endpoint = wss_endpoint 31 | self.pump_program = pump_program 32 | self.event_processor = LogsEventProcessor(pump_program) 33 | self.ping_interval = 20 # seconds 34 | 35 | async def listen_for_tokens( 36 | self, 37 | token_callback: Callable[[TokenInfo], Awaitable[None]], 38 | match_string: str | None = None, 39 | creator_address: str | None = None, 40 | ) -> None: 41 | """Listen for new token creations using logsSubscribe. 
42 | 43 | Args: 44 | token_callback: Callback function for new tokens 45 | match_string: Optional string to match in token name/symbol 46 | creator_address: Optional creator address to filter by 47 | """ 48 | while True: 49 | try: 50 | async with websockets.connect(self.wss_endpoint) as websocket: 51 | await self._subscribe_to_logs(websocket) 52 | ping_task = asyncio.create_task(self._ping_loop(websocket)) 53 | 54 | try: 55 | while True: 56 | token_info = await self._wait_for_token_creation(websocket) 57 | if not token_info: 58 | continue 59 | 60 | logger.info( 61 | f"New token detected: {token_info.name} ({token_info.symbol})" 62 | ) 63 | 64 | if match_string and not ( 65 | match_string.lower() in token_info.name.lower() 66 | or match_string.lower() in token_info.symbol.lower() 67 | ): 68 | logger.info( 69 | f"Token does not match filter '{match_string}'. Skipping..." 70 | ) 71 | continue 72 | 73 | if ( 74 | creator_address 75 | and str(token_info.user) != creator_address 76 | ): 77 | logger.info( 78 | f"Token not created by {creator_address}. Skipping..." 79 | ) 80 | continue 81 | 82 | await token_callback(token_info) 83 | 84 | except websockets.exceptions.ConnectionClosed: 85 | logger.warning("WebSocket connection closed. Reconnecting...") 86 | ping_task.cancel() 87 | 88 | except Exception as e: 89 | logger.error(f"WebSocket connection error: {str(e)}") 90 | logger.info("Reconnecting in 5 seconds...") 91 | await asyncio.sleep(5) 92 | 93 | async def _subscribe_to_logs(self, websocket) -> None: 94 | """Subscribe to logs mentioning the pump.fun program. 
95 | 96 | Args: 97 | websocket: Active WebSocket connection 98 | """ 99 | subscription_message = json.dumps( 100 | { 101 | "jsonrpc": "2.0", 102 | "id": 1, 103 | "method": "logsSubscribe", 104 | "params": [ 105 | {"mentions": [str(self.pump_program)]}, 106 | {"commitment": "processed"}, 107 | ], 108 | } 109 | ) 110 | 111 | await websocket.send(subscription_message) 112 | logger.info(f"Subscribed to logs mentioning program: {self.pump_program}") 113 | 114 | # Wait for subscription confirmation 115 | response = await websocket.recv() 116 | response_data = json.loads(response) 117 | if "result" in response_data: 118 | logger.info(f"Subscription confirmed with ID: {response_data['result']}") 119 | else: 120 | logger.warning(f"Unexpected subscription response: {response}") 121 | 122 | async def _ping_loop(self, websocket) -> None: 123 | """Keep connection alive with pings. 124 | 125 | Args: 126 | websocket: Active WebSocket connection 127 | """ 128 | try: 129 | while True: 130 | await asyncio.sleep(self.ping_interval) 131 | try: 132 | pong_waiter = await websocket.ping() 133 | await asyncio.wait_for(pong_waiter, timeout=10) 134 | except asyncio.TimeoutError: 135 | logger.warning("Ping timeout - server not responding") 136 | # Force reconnection 137 | await websocket.close() 138 | return 139 | except asyncio.CancelledError: 140 | pass 141 | except Exception as e: 142 | logger.error(f"Ping error: {str(e)}") 143 | 144 | async def _wait_for_token_creation(self, websocket) -> TokenInfo | None: 145 | try: 146 | response = await asyncio.wait_for(websocket.recv(), timeout=30) 147 | data = json.loads(response) 148 | 149 | if "method" not in data or data["method"] != "logsNotification": 150 | return None 151 | 152 | log_data = data["params"]["result"]["value"] 153 | logs = log_data.get("logs", []) 154 | signature = log_data.get("signature", "unknown") 155 | 156 | # Use the processor to extract token info 157 | return self.event_processor.process_program_logs(logs, signature) 158 
| 159 | except asyncio.TimeoutError: 160 | logger.debug("No data received for 30 seconds") 161 | except websockets.exceptions.ConnectionClosed: 162 | logger.warning("WebSocket connection closed") 163 | raise 164 | except Exception as e: 165 | logger.error(f"Error processing WebSocket message: {str(e)}") 166 | 167 | return None 168 | -------------------------------------------------------------------------------- /src/trading/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chainstacklabs/pump-fun-bot/2338c7a5dd78787a8d98ba78445ad017da204921/src/trading/__init__.py -------------------------------------------------------------------------------- /src/trading/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Base interfaces for trading operations. 3 | """ 4 | 5 | from abc import ABC, abstractmethod 6 | from dataclasses import dataclass 7 | from typing import Any 8 | 9 | from solders.pubkey import Pubkey 10 | 11 | from core.pubkeys import PumpAddresses 12 | 13 | 14 | @dataclass 15 | class TokenInfo: 16 | """Token information.""" 17 | 18 | name: str 19 | symbol: str 20 | uri: str 21 | mint: Pubkey 22 | bonding_curve: Pubkey 23 | associated_bonding_curve: Pubkey 24 | user: Pubkey 25 | creator: Pubkey 26 | creator_vault: Pubkey 27 | 28 | @classmethod 29 | def from_dict(cls, data: dict[str, Any]) -> "TokenInfo": 30 | """Create TokenInfo from dictionary. 
31 | 32 | Args: 33 | data: Dictionary with token data 34 | 35 | Returns: 36 | TokenInfo instance 37 | """ 38 | return cls( 39 | name=data["name"], 40 | symbol=data["symbol"], 41 | uri=data["uri"], 42 | mint=Pubkey.from_string(data["mint"]), 43 | bonding_curve=Pubkey.from_string(data["bondingCurve"]), 44 | associated_bonding_curve=Pubkey.from_string(data["associatedBondingCurve"]), 45 | user=Pubkey.from_string(data["user"]), 46 | creator=Pubkey.from_string(data["creator"]), 47 | creator_vault=Pubkey.from_string(data["creator_vault"]), 48 | ) 49 | 50 | def to_dict(self) -> dict[str, str]: 51 | """Convert to dictionary. 52 | 53 | Returns: 54 | Dictionary representation 55 | """ 56 | return { 57 | "name": self.name, 58 | "symbol": self.symbol, 59 | "uri": self.uri, 60 | "mint": str(self.mint), 61 | "bondingCurve": str(self.bonding_curve), 62 | "associatedBondingCurve": str(self.associated_bonding_curve), 63 | "user": str(self.user), 64 | "creator": str(self.creator), 65 | "creatorVault": str(self.creator_vault), 66 | } 67 | 68 | 69 | @dataclass 70 | class TradeResult: 71 | """Result of a trading operation.""" 72 | 73 | success: bool 74 | tx_signature: str | None = None 75 | error_message: str | None = None 76 | amount: float | None = None 77 | price: float | None = None 78 | 79 | 80 | class Trader(ABC): 81 | """Base interface for trading operations.""" 82 | 83 | @abstractmethod 84 | async def execute(self, *args, **kwargs) -> TradeResult: 85 | """Execute trading operation. 86 | 87 | Returns: 88 | TradeResult with operation outcome 89 | """ 90 | pass 91 | 92 | def _get_relevant_accounts(self, token_info: TokenInfo) -> list[Pubkey]: 93 | """ 94 | Get the list of accounts relevant for calculating the priority fee. 95 | 96 | Args: 97 | token_info: Token information for the buy/sell operation. 98 | 99 | Returns: 100 | list[Pubkey]: List of relevant accounts. 
101 | """ 102 | return [ 103 | token_info.mint, # Token mint address 104 | token_info.bonding_curve, # Bonding curve address 105 | PumpAddresses.PROGRAM, # Pump.fun program address 106 | PumpAddresses.FEE, # Pump.fun fee account 107 | ] 108 | -------------------------------------------------------------------------------- /src/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chainstacklabs/pump-fun-bot/2338c7a5dd78787a8d98ba78445ad017da204921/src/utils/__init__.py -------------------------------------------------------------------------------- /src/utils/logger.py: -------------------------------------------------------------------------------- 1 | """ 2 | Logging utilities for the pump.fun trading bot. 3 | """ 4 | 5 | import logging 6 | 7 | # Global dict to store loggers 8 | _loggers: dict[str, logging.Logger] = {} 9 | 10 | 11 | def get_logger(name: str, level: int = logging.INFO) -> logging.Logger: 12 | """Get or create a logger with the given name. 13 | 14 | Args: 15 | name: Logger name, typically __name__ 16 | level: Logging level 17 | 18 | Returns: 19 | Configured logger 20 | """ 21 | global _loggers 22 | 23 | if name in _loggers: 24 | return _loggers[name] 25 | 26 | logger = logging.getLogger(name) 27 | logger.setLevel(level) 28 | 29 | _loggers[name] = logger 30 | return logger 31 | 32 | 33 | def setup_file_logging( 34 | filename: str = "pump_trading.log", level: int = logging.INFO 35 | ) -> None: 36 | """Set up file logging for all loggers. 
37 | 38 | Args: 39 | filename: Log file path 40 | level: Logging level for file handler 41 | """ 42 | root_logger = logging.getLogger() 43 | 44 | # Check if file handler with same filename already exists 45 | for handler in root_logger.handlers: 46 | if isinstance(handler, logging.FileHandler) and handler.baseFilename == filename: 47 | return # File handler already added 48 | 49 | formatter = logging.Formatter( 50 | "%(asctime)s - %(name)s - %(levelname)s - %(message)s", 51 | datefmt="%Y-%m-%d %H:%M:%S", 52 | ) 53 | 54 | file_handler = logging.FileHandler(filename) 55 | file_handler.setLevel(level) 56 | file_handler.setFormatter(formatter) 57 | 58 | root_logger.addHandler(file_handler) 59 | -------------------------------------------------------------------------------- /tests/test_block_listener.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test script for BlockListener 3 | Tests websocket monitoring for new pump.fun tokens using blockSubscribe 4 | """ 5 | 6 | import asyncio 7 | import logging 8 | import os 9 | import sys 10 | from pathlib import Path 11 | 12 | from dotenv import load_dotenv 13 | 14 | sys.path.append(str(Path(__file__).parent.parent / "src")) 15 | 16 | from core.pubkeys import PumpAddresses 17 | from monitoring.block_listener import BlockListener 18 | from trading.base import TokenInfo 19 | 20 | load_dotenv() 21 | 22 | logging.basicConfig( 23 | level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" 24 | ) 25 | logger = logging.getLogger("block-listener-test") 26 | 27 | 28 | class TestTokenCallback: 29 | def __init__(self): 30 | self.detected_tokens = [] 31 | 32 | async def on_token_created(self, token_info: TokenInfo) -> None: 33 | """Process detected token""" 34 | logger.info(f"New token detected: {token_info.name} ({token_info.symbol})") 35 | logger.info(f"Mint: {token_info.mint}") 36 | self.detected_tokens.append(token_info) 37 | print(f"\n{'=' * 50}") 38 | print(f"NEW TOKEN: 
{token_info.name}") 39 | print(f"Symbol: {token_info.symbol}") 40 | print(f"Mint: {token_info.mint}") 41 | print(f"URI: {token_info.uri}") 42 | print(f"User: {token_info.user}") 43 | print(f"Creator: {token_info.creator}") 44 | print(f"Bonding Curve: {token_info.bonding_curve}") 45 | print(f"Associated Bonding Curve: {token_info.associated_bonding_curve}") 46 | print(f"{'=' * 50}\n") 47 | 48 | 49 | async def test_block_listener( 50 | match_string: str | None = None, 51 | creator_address: str | None = None, 52 | test_duration: int = 60, 53 | ): 54 | """Test the block listener functionality""" 55 | wss_endpoint = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 56 | if not wss_endpoint: 57 | logger.error("SOLANA_NODE_WSS_ENDPOINT environment variable is not set") 58 | return [] 59 | 60 | logger.info(f"Connecting to WebSocket: {wss_endpoint}") 61 | listener = BlockListener(wss_endpoint, PumpAddresses.PROGRAM) 62 | callback = TestTokenCallback() 63 | 64 | if match_string: 65 | logger.info(f"Filtering tokens matching: {match_string}") 66 | if creator_address: 67 | logger.info(f"Filtering tokens by creator: {creator_address}") 68 | 69 | listen_task = asyncio.create_task( 70 | listener.listen_for_tokens( 71 | callback.on_token_created, 72 | match_string=match_string, 73 | creator_address=creator_address, 74 | ) 75 | ) 76 | 77 | logger.info(f"Listening for {test_duration} seconds...") 78 | try: 79 | await asyncio.sleep(test_duration) 80 | except KeyboardInterrupt: 81 | logger.info("Test interrupted by user") 82 | finally: 83 | listen_task.cancel() 84 | try: 85 | await listen_task 86 | except asyncio.CancelledError: 87 | pass 88 | 89 | logger.info(f"Detected {len(callback.detected_tokens)} tokens") 90 | for token in callback.detected_tokens: 91 | logger.info(f" - {token.name} ({token.symbol}): {token.mint}") 92 | 93 | return callback.detected_tokens 94 | 95 | 96 | if __name__ == "__main__": 97 | match_string = None # Update if you want to filter tokens by name/symbol 98 | 
# NOTE(review): test_geyser_listener.py and test_logs_listener.py below duplicate
# TestTokenCallback and the create_task/sleep/cancel scaffolding from this file almost
# verbatim; a shared helper module would remove the triplication.
creator_address = None # Update if you want to filter tokens by creator address 99 | test_duration = 30 100 | 101 | logger.info("Starting block listener test (using blockSubscribe)") 102 | asyncio.run(test_block_listener(match_string, creator_address, test_duration)) 103 | -------------------------------------------------------------------------------- /tests/test_geyser_listener.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test script for GeyserListener 3 | Tests gRPC monitoring for new pump.fun tokens using Geyser 4 | """ 5 | 6 | import asyncio 7 | import logging 8 | import os 9 | import sys 10 | from pathlib import Path 11 | 12 | from dotenv import load_dotenv 13 | 14 | sys.path.append(str(Path(__file__).parent.parent / "src")) 15 | 16 | from core.pubkeys import PumpAddresses 17 | from monitoring.geyser_listener import GeyserListener 18 | from trading.base import TokenInfo 19 | 20 | load_dotenv() 21 | 22 | logging.basicConfig( 23 | level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" 24 | ) 25 | logger = logging.getLogger("geyser-listener-test") 26 | 27 | 28 | class TestTokenCallback: 29 | def __init__(self): 30 | self.detected_tokens = [] 31 | 32 | async def on_token_created(self, token_info: TokenInfo) -> None: 33 | """Process detected token""" 34 | logger.info(f"New token detected: {token_info.name} ({token_info.symbol})") 35 | logger.info(f"Mint: {token_info.mint}") 36 | self.detected_tokens.append(token_info) 37 | print(f"\n{'=' * 50}") 38 | print(f"NEW TOKEN: {token_info.name}") 39 | print(f"Symbol: {token_info.symbol}") 40 | print(f"Mint: {token_info.mint}") 41 | print(f"URI: {token_info.uri}") 42 | print(f"User: {token_info.user}") 43 | print(f"Creator: {token_info.creator}") 44 | print(f"Bonding Curve: {token_info.bonding_curve}") 45 | print(f"Associated Bonding Curve: {token_info.associated_bonding_curve}") 46 | print(f"{'=' * 50}\n") 47 | 48 | 49 | async def test_geyser_listener( 50 
| match_string: str | None = None, 51 | creator_address: str | None = None, 52 | test_duration: int = 60, 53 | ): 54 | """Test the Geyser listener functionality""" 55 | geyser_endpoint = os.environ.get("GEYSER_ENDPOINT") 56 | geyser_api_token = os.environ.get("GEYSER_API_TOKEN") 57 | geyser_auth_type = os.environ.get("GEYSER_AUTH_TYPE", "x-token") 58 | 59 | if not geyser_endpoint: 60 | logger.error("GEYSER_ENDPOINT environment variable is not set") 61 | return [] 62 | 63 | if not geyser_api_token: 64 | logger.error("GEYSER_API_TOKEN environment variable is not set") 65 | return [] 66 | 67 | logger.info(f"Connecting to Geyser API: {geyser_endpoint}") 68 | listener = GeyserListener(geyser_endpoint, geyser_api_token, geyser_auth_type, PumpAddresses.PROGRAM) 69 | callback = TestTokenCallback() 70 | 71 | if match_string: 72 | logger.info(f"Filtering tokens matching: {match_string}") 73 | if creator_address: 74 | logger.info(f"Filtering tokens by creator: {creator_address}") 75 | 76 | listen_task = asyncio.create_task( 77 | listener.listen_for_tokens( 78 | callback.on_token_created, 79 | match_string=match_string, 80 | creator_address=creator_address, 81 | ) 82 | ) 83 | 84 | logger.info(f"Listening for {test_duration} seconds...") 85 | try: 86 | await asyncio.sleep(test_duration) 87 | except KeyboardInterrupt: 88 | logger.info("Test interrupted by user") 89 | finally: 90 | listen_task.cancel() 91 | try: 92 | await listen_task 93 | except asyncio.CancelledError: 94 | pass 95 | 96 | logger.info(f"Detected {len(callback.detected_tokens)} tokens") 97 | for token in callback.detected_tokens: 98 | logger.info(f" - {token.name} ({token.symbol}): {token.mint}") 99 | 100 | return callback.detected_tokens 101 | 102 | 103 | if __name__ == "__main__": 104 | match_string = None # Update if you want to filter tokens by name/symbol 105 | creator_address = None # Update if you want to filter tokens by creator address 106 | test_duration = 30 107 | 108 | logger.info("Starting Geyser 
listener test (using Geyser API)") 109 | asyncio.run(test_geyser_listener(match_string, creator_address, test_duration)) 110 | -------------------------------------------------------------------------------- /tests/test_logs_listener.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test script for LogsListener 3 | Tests websocket monitoring for new pump.fun tokens using logsSubscribe 4 | """ 5 | 6 | import asyncio 7 | import logging 8 | import os 9 | import sys 10 | from pathlib import Path 11 | 12 | from dotenv import load_dotenv 13 | 14 | sys.path.append(str(Path(__file__).parent.parent / "src")) 15 | 16 | from core.pubkeys import PumpAddresses 17 | from monitoring.logs_listener import LogsListener 18 | from trading.base import TokenInfo 19 | 20 | load_dotenv() 21 | 22 | logging.basicConfig( 23 | level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" 24 | ) 25 | logger = logging.getLogger("logs-listener-test") 26 | 27 | 28 | class TestTokenCallback: 29 | def __init__(self): 30 | self.detected_tokens = [] 31 | 32 | async def on_token_created(self, token_info: TokenInfo) -> None: 33 | """Process detected token""" 34 | logger.info(f"New token detected: {token_info.name} ({token_info.symbol})") 35 | logger.info(f"Mint: {token_info.mint}") 36 | self.detected_tokens.append(token_info) 37 | print(f"\n{'=' * 50}") 38 | print(f"NEW TOKEN: {token_info.name}") 39 | print(f"Symbol: {token_info.symbol}") 40 | print(f"Mint: {token_info.mint}") 41 | print(f"URI: {token_info.uri}") 42 | print(f"User: {token_info.user}") 43 | print(f"Creator: {token_info.creator}") 44 | print(f"Bonding Curve: {token_info.bonding_curve}") 45 | print(f"Associated Bonding Curve: {token_info.associated_bonding_curve}") 46 | print(f"{'=' * 50}\n") 47 | 48 | 49 | async def test_logs_listener( 50 | match_string: str | None = None, 51 | creator_address: str | None = None, 52 | test_duration: int = 60, 53 | ): 54 | """Test the logs listener 
functionality""" 55 | wss_endpoint = os.environ.get("SOLANA_NODE_WSS_ENDPOINT") 56 | if not wss_endpoint: 57 | logger.error("SOLANA_NODE_WSS_ENDPOINT environment variable is not set") 58 | return [] 59 | 60 | logger.info(f"Connecting to WebSocket: {wss_endpoint}") 61 | listener = LogsListener(wss_endpoint, PumpAddresses.PROGRAM) 62 | callback = TestTokenCallback() 63 | 64 | if match_string: 65 | logger.info(f"Filtering tokens matching: {match_string}") 66 | if creator_address: 67 | logger.info(f"Filtering tokens by creator: {creator_address}") 68 | 69 | listen_task = asyncio.create_task( 70 | listener.listen_for_tokens( 71 | callback.on_token_created, 72 | match_string=match_string, 73 | creator_address=creator_address, 74 | ) 75 | ) 76 | 77 | logger.info(f"Listening for {test_duration} seconds...") 78 | try: 79 | await asyncio.sleep(test_duration) 80 | except KeyboardInterrupt: 81 | logger.info("Test interrupted by user") 82 | finally: 83 | listen_task.cancel() 84 | try: 85 | await listen_task 86 | except asyncio.CancelledError: 87 | pass 88 | 89 | logger.info(f"Detected {len(callback.detected_tokens)} tokens") 90 | for token in callback.detected_tokens: 91 | logger.info(f" - {token.name} ({token.symbol}): {token.mint}") 92 | 93 | return callback.detected_tokens 94 | 95 | 96 | if __name__ == "__main__": 97 | match_string = None # Update if you want to filter tokens by name/symbol 98 | creator_address = None # Update if you want to filter tokens by creator address 99 | test_duration = 30 100 | 101 | logger.info("Starting logs listener test (using logsSubscribe)") 102 | asyncio.run(test_logs_listener(match_string, creator_address, test_duration)) 103 | -------------------------------------------------------------------------------- /trades/trades.log: -------------------------------------------------------------------------------- 1 | {"timestamp": "2025-04-24T20:30:13.087092", "action": "buy", "token_address": "DWMUmRQUZPCBA1gwdDxTJuz6JHnQkREiWMyQpsKWGp9v", 
"symbol": "U8", "price": 5e-06, "amount": 20, "tx_hash": "3JvdfCep45PUB6rCcH4dB2NuwvFP8n67SCUxqJMt4MuN5ekHYc6J27aCUfwNUK3hh5rSyKNYAWXya5vQAT2qQivB"} 2 | {"timestamp": "2025-04-24T20:30:32.759177", "action": "sell", "token_address": "DWMUmRQUZPCBA1gwdDxTJuz6JHnQkREiWMyQpsKWGp9v", "symbol": "U8", "price": 3.805530050663904e-08, "amount": 20.0, "tx_hash": "5cveLfU7XhPNCPMCZfTXyugJpmAQNmi7zr81PSqs8DsP1T2swYFjJwaB5hNSf3kFPfRzgzd7QZBVaZLd5MqsJevB"} 3 | --------------------------------------------------------------------------------