├── .gitignore ├── src ├── lib.rs ├── cache.rs ├── config.rs ├── main.rs ├── circuit_breaker.rs ├── polymarket.rs ├── position_tracker.rs ├── kalshi.rs ├── discovery.rs ├── execution.rs └── polymarket_clob.rs ├── Cargo.toml ├── scripts └── build_sports_cache.py └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | .env 3 | .env.keys 4 | kalshi_private_key.txt 5 | claude.md 6 | .DS_Store 7 | positions.json 8 | .discovery_cache.json 9 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | // src/lib.rs 2 | 3 | pub mod cache; 4 | pub mod circuit_breaker; 5 | pub mod config; 6 | pub mod discovery; 7 | pub mod execution; 8 | pub mod kalshi; 9 | pub mod polymarket; 10 | pub mod polymarket_clob; 11 | pub mod position_tracker; 12 | pub mod types; -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "arb-bot" 3 | version = "2.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | anyhow = "1.0" 8 | base64 = "0.22" 9 | chrono = "0.4" 10 | dotenvy = "0.15" 11 | ethers = { version = "2.0", features = ["legacy"] } 12 | futures-util = "0.3" 13 | hmac = "0.12" 14 | rand = "0.8" 15 | reqwest = { version = "0.11", features = ["json", "blocking"] } 16 | rsa = { version = "0.9", features = ["sha2"] } 17 | pkcs1 = { version = "0.7", features = ["pem"] } 18 | serde = { version = "1.0", features = ["derive", "rc"] } 19 | serde_json = "1.0" 20 | sha2 = "0.10" 21 | tokio = { version = "1.0", features = ["full"] } 22 | tokio-tungstenite = { version = "0.21", features = ["native-tls"] } 23 | tracing = "0.1" 24 | tracing-subscriber = { version = "0.3", features = ["env-filter"] } 25 | rustc-hash = "2.0" 26 | tiny-keccak = { version = "2.0", features = ["keccak"] } 27 | governor = "0.6" 28 | nonzero_ext = "0.3" 29 | arrayvec = "0.7" 30 | wide = "0.7" 31 | 32 | [dev-dependencies] 33 | criterion = { version = "0.5", features = ["html_reports"] } 34 | hex = "0.4" 35 | 36 | [profile.release] 37 | opt-level = 3 38 | lto = true 39 | codegen-units = 1 -------------------------------------------------------------------------------- /scripts/build_sports_cache.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Sports neg_risk cache warmer (async version). 
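Writes .clob_market_cache.json: a flat JSON object mapping each CLOB token id to its event's negRisk flag, e.g. {"1234567890": true, "9876543210": false} (illustrative ids). The bot loads this file at startup via SharedAsyncClient::load_cache (see src/main.rs).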
4 | """ 5 | 6 | import asyncio 7 | import json 8 | from pathlib import Path 9 | import aiohttp 10 | 11 | GAMMA_API = "https://gamma-api.polymarket.com" 12 | GAMES_TAG = "100639" 13 | CACHE_FILE = ".clob_market_cache.json" 14 | CONCURRENT_REQUESTS = 5 # tune based on rate limits 15 | 16 | 17 | async def fetch_page(session: aiohttp.ClientSession, offset: int) -> list: 18 | async with session.get( 19 | f"{GAMMA_API}/events", 20 | params={"tag_id": GAMES_TAG, "active": "true", "closed": "false", "limit": 100, "offset": offset}, 21 | timeout=aiohttp.ClientTimeout(total=30) 22 | ) as resp: 23 | return await resp.json() 24 | 25 | 26 | async def main(): 27 | neg_risk_map = {} 28 | 29 | async with aiohttp.ClientSession() as session: 30 | # First request to estimate total pages 31 | first_page = await fetch_page(session, 0) 32 | if not first_page: 33 | return 34 | 35 | # Process first page 36 | all_events = first_page 37 | 38 | # Fetch remaining pages concurrently 39 | if len(first_page) == 100: 40 | # Optimistically fetch next several pages in parallel 41 | offsets = list(range(100, 2000, 100)) # adjust max based on typical event count 42 | 43 | sem = asyncio.Semaphore(CONCURRENT_REQUESTS) 44 | 45 | async def bounded_fetch(offset): 46 | async with sem: 47 | return await fetch_page(session, offset) 48 | 49 | tasks = [bounded_fetch(o) for o in offsets] 50 | results = await asyncio.gather(*tasks, return_exceptions=True) 51 | 52 | for result in results: 53 | if isinstance(result, list) and result: 54 | all_events.extend(result) 55 | elif isinstance(result, list) and not result: 56 | break # empty page, we're done 57 | 58 | # Process all events 59 | for event in all_events: 60 | neg_risk = event.get("negRisk", False) 61 | for market in event.get("markets", []): 62 | token_ids = json.loads(market.get("clobTokenIds", "[]")) 63 | for tid in token_ids: 64 | if tid: 65 | neg_risk_map[tid] = neg_risk 66 | 67 | Path(CACHE_FILE).write_text(json.dumps(neg_risk_map)) 68 | 69 | print(f"Cached {len(neg_risk_map)} tokens") 70 | 71 | 72 | if __name__ == "__main__": 73 | asyncio.run(main()) -------------------------------------------------------------------------------- /src/cache.rs: -------------------------------------------------------------------------------- 1 | // src/cache.rs 2 | // Team code mapping cache - maps Polymarket codes to Kalshi codes 3 | 4 | use anyhow::Result; 5 | use serde::{Deserialize, Serialize}; 6 | use std::collections::HashMap; 7 | use std::path::Path; 8 | 9 | const CACHE_FILE: &str = "kalshi_team_cache.json"; 10 | 11 | /// Team code cache - bidirectional mapping between Poly and Kalshi team codes 12 | #[derive(Debug, Clone, Serialize, Deserialize, Default)] 13 | pub struct TeamCache { 14 | /// Forward: "league:poly_code" -> "kalshi_code" 15 | #[serde(serialize_with = "serialize_boxed_map", deserialize_with = "deserialize_boxed_map")] 16 | forward: HashMap, Box>, 17 | /// Reverse: "league:kalshi_code" -> "poly_code" 18 | #[serde(skip)] 19 | reverse: HashMap, Box>, 20 | } 21 | 22 | fn serialize_boxed_map(map: &HashMap, Box>, serializer: S) -> Result 23 | where 24 | S: serde::Serializer, 25 | { 26 | use serde::ser::SerializeMap; 27 | let mut ser_map = serializer.serialize_map(Some(map.len()))?; 28 | for (k, v) in map { 29 | ser_map.serialize_entry(k.as_ref(), v.as_ref())?; 30 | } 31 | ser_map.end() 32 | } 33 | 34 | fn deserialize_boxed_map<'de, D>(deserializer: D) -> Result, Box>, D::Error> 35 | where 36 | D: serde::Deserializer<'de>, 37 | { 38 | let string_map: HashMap = 
38 | let string_map: HashMap<String, String> = HashMap::deserialize(deserializer)?; 39 | Ok(string_map 40 | .into_iter() 41 | .map(|(k, v)| (k.into_boxed_str(), v.into_boxed_str())) 42 | .collect()) 43 | } 44 | 45 | #[allow(dead_code)] 46 | impl TeamCache { 47 | /// Load cache from JSON file 48 | pub fn load() -> Self { 49 | Self::load_from(CACHE_FILE) 50 | } 51 | 52 | /// Load from specific path 53 | pub fn load_from<P: AsRef<Path>>(path: P) -> Self { 54 | let mut cache = match std::fs::read_to_string(path.as_ref()) { 55 | Ok(contents) => { 56 | serde_json::from_str(&contents).unwrap_or_else(|e| { 57 | tracing::warn!("Failed to parse team cache: {}", e); 58 | Self::default() 59 | }) 60 | } 61 | Err(_) => { 62 | tracing::info!("No team cache found at {:?}, starting empty", path.as_ref()); 63 | Self::default() 64 | } 65 | }; 66 | cache.rebuild_reverse(); 67 | cache 68 | } 69 | 70 | /// Save cache to JSON file 71 | pub fn save(&self) -> Result<()> { 72 | self.save_to(CACHE_FILE) 73 | } 74 | 75 | /// Save to specific path 76 | pub fn save_to<P: AsRef<Path>>(&self, path: P) -> Result<()> { 77 | let json = serde_json::to_string_pretty(&self)?; 78 | std::fs::write(path, json)?; 79 | Ok(()) 80 | } 81 | 82 | /// Get Kalshi code for a Polymarket team code 83 | /// e.g., ("epl", "che") -> "cfc" 84 | pub fn poly_to_kalshi(&self, league: &str, poly_code: &str) -> Option<String> { 85 | let mut key_buf = String::with_capacity(league.len() + 1 + poly_code.len()); 86 | key_buf.push_str(&league.to_ascii_lowercase()); 87 | key_buf.push(':'); 88 | key_buf.push_str(&poly_code.to_ascii_lowercase()); 89 | self.forward.get(key_buf.as_str()).map(|s| s.to_string()) 90 | } 91 | 92 | /// Get Polymarket code for a Kalshi team code (reverse lookup) 93 | /// e.g., ("epl", "cfc") -> "che" 94 | pub fn kalshi_to_poly(&self, league: &str, kalshi_code: &str) -> Option<String> { 95 | let mut key_buf = String::with_capacity(league.len() + 1 + kalshi_code.len()); 96 | key_buf.push_str(&league.to_ascii_lowercase()); 97 | key_buf.push(':'); 98 | key_buf.push_str(&kalshi_code.to_ascii_lowercase()); 99 | 100 | self.reverse 101 | .get(key_buf.as_str()) 102 | .map(|s| s.to_string()) 103 | .or_else(|| Some(kalshi_code.to_ascii_lowercase())) 104 | } 105 | 106 | /// Add or update a mapping 107 | pub fn insert(&mut self, league: &str, poly_code: &str, kalshi_code: &str) { 108 | let league_lower = league.to_ascii_lowercase(); 109 | let poly_lower = poly_code.to_ascii_lowercase(); 110 | let kalshi_lower = kalshi_code.to_ascii_lowercase(); 111 | 112 | let forward_key: Box<str> = format!("{}:{}", league_lower, poly_lower).into(); 113 | let reverse_key: Box<str> = format!("{}:{}", league_lower, kalshi_lower).into(); 114 | 115 | self.forward.insert(forward_key, kalshi_lower.into()); 116 | self.reverse.insert(reverse_key, poly_lower.into()); 117 | } 118 | 119 | /// Number of mappings 120 | pub fn len(&self) -> usize { 121 | self.forward.len() 122 | } 123 | 124 | pub fn is_empty(&self) -> bool { 125 | self.forward.is_empty() 126 | } 127 | 128 | /// Rebuild reverse lookup map from forward mappings 129 | fn rebuild_reverse(&mut self) { 130 | self.reverse.clear(); 131 | self.reverse.reserve(self.forward.len()); 132 | for (key, kalshi_code) in &self.forward { 133 | if let Some((league, poly)) = key.split_once(':') { 134 | let reverse_key: Box<str> = format!("{}:{}", league, kalshi_code).into(); 135 | self.reverse.insert(reverse_key, poly.into()); 136 | } 137 | } 138 | } 139 | } 140 | 141 | #[cfg(test)] 142 | mod tests { 143 | use super::*; 144 | 145 | #[test] 146 | fn test_cache_lookup() { 147 | let mut cache = TeamCache::default();
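        // The reverse lookup deliberately falls back to echoing the (lowercased)
        // code when no mapping exists -- illustrative check on an empty cache:
        assert_eq!(cache.kalshi_to_poly("epl", "AVL"), Some("avl".to_string()));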
cache.insert("epl", "che", "cfc"); 149 | cache.insert("epl", "mun", "mun"); 150 | 151 | assert_eq!(cache.poly_to_kalshi("epl", "che"), Some("cfc".to_string())); 152 | assert_eq!(cache.poly_to_kalshi("epl", "CHE"), Some("cfc".to_string())); 153 | assert_eq!(cache.kalshi_to_poly("epl", "cfc"), Some("che".to_string())); 154 | } 155 | } 156 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | // src/config.rs 2 | // Configuration constants and league mappings 3 | 4 | /// Kalshi WebSocket URL 5 | pub const KALSHI_WS_URL: &str = "wss://api.elections.kalshi.com/trade-api/ws/v2"; 6 | 7 | /// Kalshi REST API base URL 8 | pub const KALSHI_API_BASE: &str = "https://api.elections.kalshi.com/trade-api/v2"; 9 | 10 | /// Polymarket WebSocket URL 11 | pub const POLYMARKET_WS_URL: &str = "wss://ws-subscriptions-clob.polymarket.com/ws/market"; 12 | 13 | /// Gamma API base URL (Polymarket market data) 14 | pub const GAMMA_API_BASE: &str = "https://gamma-api.polymarket.com"; 15 | 16 | /// Arb threshold: alert when total cost < this (e.g., 0.995 = 0.5% profit) 17 | pub const ARB_THRESHOLD: f64 = 0.995; 18 | 19 | /// Polymarket ping interval (seconds) - keep connection alive 20 | pub const POLY_PING_INTERVAL_SECS: u64 = 30; 21 | 22 | /// Kalshi API rate limit delay (milliseconds between requests) 23 | /// Kalshi limit: 20 req/sec = 50ms minimum. We use 60ms for safety margin. 24 | pub const KALSHI_API_DELAY_MS: u64 = 60; 25 | 26 | /// WebSocket reconnect delay (seconds) 27 | pub const WS_RECONNECT_DELAY_SECS: u64 = 5; 28 | 29 | /// Which leagues to monitor (empty slice = all) 30 | pub const ENABLED_LEAGUES: &[&str] = &[]; 31 | 32 | /// Price logging enabled (set PRICE_LOGGING=1 to enable) 33 | #[allow(dead_code)] 34 | pub fn price_logging_enabled() -> bool { 35 | static CACHED: std::sync::OnceLock = std::sync::OnceLock::new(); 36 | *CACHED.get_or_init(|| { 37 | std::env::var("PRICE_LOGGING") 38 | .map(|v| v == "1" || v.to_lowercase() == "true") 39 | .unwrap_or(false) 40 | }) 41 | } 42 | 43 | /// League configuration for market discovery 44 | #[derive(Debug, Clone)] 45 | pub struct LeagueConfig { 46 | pub league_code: &'static str, 47 | pub poly_prefix: &'static str, 48 | pub kalshi_series_game: &'static str, 49 | pub kalshi_series_spread: Option<&'static str>, 50 | pub kalshi_series_total: Option<&'static str>, 51 | pub kalshi_series_btts: Option<&'static str>, 52 | } 53 | 54 | /// Get all supported leagues with their configurations 55 | pub fn get_league_configs() -> Vec { 56 | vec![ 57 | // Major European leagues (full market types) 58 | LeagueConfig { 59 | league_code: "epl", 60 | poly_prefix: "epl", 61 | kalshi_series_game: "KXEPLGAME", 62 | kalshi_series_spread: Some("KXEPLSPREAD"), 63 | kalshi_series_total: Some("KXEPLTOTAL"), 64 | kalshi_series_btts: Some("KXEPLBTTS"), 65 | }, 66 | LeagueConfig { 67 | league_code: "bundesliga", 68 | poly_prefix: "bun", 69 | kalshi_series_game: "KXBUNDESLIGAGAME", 70 | kalshi_series_spread: Some("KXBUNDESLIGASPREAD"), 71 | kalshi_series_total: Some("KXBUNDESLIGATOTAL"), 72 | kalshi_series_btts: Some("KXBUNDESLIGABTTS"), 73 | }, 74 | LeagueConfig { 75 | league_code: "laliga", 76 | poly_prefix: "lal", 77 | kalshi_series_game: "KXLALIGAGAME", 78 | kalshi_series_spread: Some("KXLALIGASPREAD"), 79 | kalshi_series_total: Some("KXLALIGATOTAL"), 80 | kalshi_series_btts: Some("KXLALIGABTTS"), 81 | }, 82 | LeagueConfig { 83 | league_code: "seriea", 84 | 
poly_prefix: "sea", 85 | kalshi_series_game: "KXSERIEAGAME", 86 | kalshi_series_spread: Some("KXSERIEASPREAD"), 87 | kalshi_series_total: Some("KXSERIEATOTAL"), 88 | kalshi_series_btts: Some("KXSERIEABTTS"), 89 | }, 90 | LeagueConfig { 91 | league_code: "ligue1", 92 | poly_prefix: "fl1", 93 | kalshi_series_game: "KXLIGUE1GAME", 94 | kalshi_series_spread: Some("KXLIGUE1SPREAD"), 95 | kalshi_series_total: Some("KXLIGUE1TOTAL"), 96 | kalshi_series_btts: Some("KXLIGUE1BTTS"), 97 | }, 98 | LeagueConfig { 99 | league_code: "ucl", 100 | poly_prefix: "ucl", 101 | kalshi_series_game: "KXUCLGAME", 102 | kalshi_series_spread: Some("KXUCLSPREAD"), 103 | kalshi_series_total: Some("KXUCLTOTAL"), 104 | kalshi_series_btts: Some("KXUCLBTTS"), 105 | }, 106 | // Secondary European leagues (moneyline only) 107 | LeagueConfig { 108 | league_code: "uel", 109 | poly_prefix: "uel", 110 | kalshi_series_game: "KXUELGAME", 111 | kalshi_series_spread: None, 112 | kalshi_series_total: None, 113 | kalshi_series_btts: None, 114 | }, 115 | LeagueConfig { 116 | league_code: "eflc", 117 | poly_prefix: "elc", 118 | kalshi_series_game: "KXEFLCHAMPIONSHIPGAME", 119 | kalshi_series_spread: None, 120 | kalshi_series_total: None, 121 | kalshi_series_btts: None, 122 | }, 123 | // US Sports 124 | LeagueConfig { 125 | league_code: "nba", 126 | poly_prefix: "nba", 127 | kalshi_series_game: "KXNBAGAME", 128 | kalshi_series_spread: Some("KXNBASPREAD"), 129 | kalshi_series_total: Some("KXNBATOTAL"), 130 | kalshi_series_btts: None, 131 | }, 132 | LeagueConfig { 133 | league_code: "nfl", 134 | poly_prefix: "nfl", 135 | kalshi_series_game: "KXNFLGAME", 136 | kalshi_series_spread: Some("KXNFLSPREAD"), 137 | kalshi_series_total: Some("KXNFLTOTAL"), 138 | kalshi_series_btts: None, 139 | }, 140 | LeagueConfig { 141 | league_code: "nhl", 142 | poly_prefix: "nhl", 143 | kalshi_series_game: "KXNHLGAME", 144 | kalshi_series_spread: Some("KXNHLSPREAD"), 145 | kalshi_series_total: Some("KXNHLTOTAL"), 146 | kalshi_series_btts: None, 147 | }, 148 | LeagueConfig { 149 | league_code: "mlb", 150 | poly_prefix: "mlb", 151 | kalshi_series_game: "KXMLBGAME", 152 | kalshi_series_spread: Some("KXMLBSPREAD"), 153 | kalshi_series_total: Some("KXMLBTOTAL"), 154 | kalshi_series_btts: None, 155 | }, 156 | LeagueConfig { 157 | league_code: "mls", 158 | poly_prefix: "mls", 159 | kalshi_series_game: "KXMLSGAME", 160 | kalshi_series_spread: None, 161 | kalshi_series_total: None, 162 | kalshi_series_btts: None, 163 | }, 164 | LeagueConfig { 165 | league_code: "ncaaf", 166 | poly_prefix: "cfb", 167 | kalshi_series_game: "KXNCAAFGAME", 168 | kalshi_series_spread: Some("KXNCAAFSPREAD"), 169 | kalshi_series_total: Some("KXNCAAFTOTAL"), 170 | kalshi_series_btts: None, 171 | }, 172 | ] 173 | } 174 | 175 | /// Get config for a specific league 176 | pub fn get_league_config(league: &str) -> Option { 177 | get_league_configs() 178 | .into_iter() 179 | .find(|c| c.league_code == league || c.poly_prefix == league) 180 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Polymarket-Kalshi Arbitrage Bot 2 | 3 | A arbitrage system for cross-platform prediction market trading between Kalshi and Polymarket. 4 | 5 | ## Quick Start 6 | 7 | ### 1. 
Install Dependencies 8 | 9 | ```bash 10 | # Rust 1.75+ 11 | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh 12 | 13 | # Build 14 | cd e_poly_kalshi_arb 15 | cargo build --release 16 | ``` 17 | 18 | ### 2. Set Up Credentials 19 | 20 | Create a `.env` file: 21 | 22 | ```bash 23 | # === KALSHI CREDENTIALS === 24 | KALSHI_API_KEY_ID=your_kalshi_api_key_id 25 | KALSHI_PRIVATE_KEY_PATH=/path/to/kalshi_private_key.pem 26 | 27 | # === POLYMARKET CREDENTIALS === 28 | POLY_PRIVATE_KEY=0xYOUR_WALLET_PRIVATE_KEY 29 | POLY_FUNDER=0xYOUR_WALLET_ADDRESS 30 | 31 | # === BOT CONFIGURATION === 32 | DRY_RUN=1 33 | RUST_LOG=info 34 | ``` 35 | 36 | ### 3. Run 37 | 38 | ```bash 39 | # Dry run (paper trading) 40 | dotenvx run -- cargo run --release 41 | 42 | # Live execution 43 | DRY_RUN=0 dotenvx run -- cargo run --release 44 | ``` 45 | 46 | --- 47 | 48 | ## Environment Variables 49 | 50 | ### Required 51 | 52 | | Variable | Description | 53 | | ------------------------- | ----------------------------------------------------------- | 54 | | `KALSHI_API_KEY_ID` | Your Kalshi API key ID | 55 | | `KALSHI_PRIVATE_KEY_PATH` | Path to RSA private key (PEM format) for Kalshi API signing | 56 | | `POLY_PRIVATE_KEY` | Ethereum private key (with 0x prefix) for Polymarket wallet | 57 | | `POLY_FUNDER` | Your Polymarket wallet address (with 0x prefix) | 58 | 59 | ### Bot Configuration 60 | 61 | | Variable | Default | Description | 62 | | ----------------- | ------- | ----------------------------------------------------- | 63 | | `DRY_RUN` | `1` | `1` = paper trading (no orders), `0` = live execution | 64 | | `RUST_LOG` | `info` | Log level: `error`, `warn`, `info`, `debug`, `trace` | 65 | | `FORCE_DISCOVERY` | `0` | `1` = re-fetch market mappings (ignore cache) | 66 | | `PRICE_LOGGING` | `0` | `1` = verbose price update logging | 67 | 68 | ### Test Mode 69 | 70 | | Variable | Default | Description | 71 | | --------------- | -------------------- | ---------------------------------------------------------------------------------------------- | 72 | | `TEST_ARB` | `0` | `1` = inject synthetic arb opportunity for testing | 73 | | `TEST_ARB_TYPE` | `poly_yes_kalshi_no` | Arb type: `poly_yes_kalshi_no`, `kalshi_yes_poly_no`, `poly_only`, `kalshi_only` | 74 | 75 | ### Circuit Breaker 76 | 77 | | Variable | Default | Description | 78 | | ---------------------------- | ------- | ------------------------------------------- | 79 | | `CB_ENABLED` | `true` | Enable/disable circuit breaker | 80 | | `CB_MAX_POSITION_PER_MARKET` | `50000` | Max contracts per market | 81 | | `CB_MAX_TOTAL_POSITION` | `100000` | Max total contracts across all markets | 82 | | `CB_MAX_DAILY_LOSS` | `500` | Max daily loss in dollars before halt | 83 | | `CB_MAX_CONSECUTIVE_ERRORS` | `5` | Consecutive errors before halt | 84 | | `CB_COOLDOWN_SECS` | `300` | Cooldown period after circuit breaker trips | 85 | 86 | --- 87 | 88 | ## Obtaining Credentials 89 | 90 | ### Kalshi 91 | 92 | 1. Log in to [Kalshi](https://kalshi.com) 93 | 2. Go to **Settings → API Keys** 94 | 3. Create a new API key with trading permissions 95 | 4. Download the private key (PEM file) 96 | 5. Note the API Key ID 97 | 98 | ### Polymarket 99 | 100 | 1. Create or import an Ethereum wallet (MetaMask, etc.) 101 | 2. Export the private key (include `0x` prefix) 102 | 3. Fund your wallet on Polygon network with USDC 103 | 4. 
The wallet address is your `POLY_FUNDER` 104 | 105 | --- 106 | 107 | ## Usage Examples 108 | 109 | ### Paper Trading (Development) 110 | 111 | ```bash 112 | # Full logging, dry run 113 | RUST_LOG=debug DRY_RUN=1 dotenvx run -- cargo run --release 114 | ``` 115 | 116 | ### Test Arbitrage Execution 117 | 118 | ```bash 119 | # Inject synthetic arb to test execution path 120 | TEST_ARB=1 DRY_RUN=0 dotenvx run -- cargo run --release 121 | ``` 122 | 123 | ### Production 124 | 125 | ```bash 126 | # Live trading with circuit breaker 127 | DRY_RUN=0 CB_MAX_DAILY_LOSS=10000 dotenvx run -- cargo run --release 128 | ``` 129 | 130 | ### Force Market Re-Discovery 131 | 132 | ```bash 133 | # Clear cache and re-fetch all market mappings 134 | FORCE_DISCOVERY=1 dotenvx run -- cargo run --release 135 | ``` 136 | 137 | --- 138 | 139 | ## How It Works 140 | 141 | ### Arbitrage Mechanics 142 | 143 | In prediction markets, YES + NO = $1.00 guaranteed. 144 | 145 | **Arbitrage exists when:** 146 | 147 | ``` 148 | Best YES ask (platform A) + Best NO ask (platform B) < $1.00 149 | ``` 150 | 151 | **Example:** 152 | 153 | ``` 154 | Kalshi YES ask: 42¢ 155 | Poly NO ask: 56¢ 156 | Total cost: 98¢ 157 | Guaranteed: 100¢ 158 | Profit: 2¢ per contract 159 | ``` 160 | 161 | ### Four Arbitrage Types 162 | 163 | | Type | Buy | Sell | 164 | | -------------------- | ------------------- | ------------- | 165 | | `poly_yes_kalshi_no` | Polymarket YES | Kalshi NO | 166 | | `kalshi_yes_poly_no` | Kalshi YES | Polymarket NO | 167 | | `poly_same_market` | Polymarket YES + NO | (rare) | 168 | | `kalshi_same_market` | Kalshi YES + NO | (rare) | 169 | 170 | ### Fee Handling 171 | 172 | - **Kalshi**: `ceil(0.07 × contracts × price × (1-price))` - factored into arb detection 173 | - **Polymarket**: Zero trading fees 174 | 175 | --- 176 | 177 | ## Architecture 178 | 179 | ``` 180 | src/ 181 | ├── main.rs # Entry point, WebSocket orchestration 182 | ├── types.rs # MarketArbState 183 | ├── execution.rs # Concurrent leg execution, in-flight deduplication 184 | ├── position_tracker.rs # Channel-based fill recording, P&L tracking 185 | ├── circuit_breaker.rs # Risk limits, error tracking, auto-halt 186 | ├── discovery.rs # Kalshi↔Polymarket market matching 187 | ├── cache.rs # Team code mappings (EPL, NBA, etc.) 188 | ├── kalshi.rs # Kalshi REST/WS client 189 | ├── polymarket.rs # Polymarket WS client 190 | ├── polymarket_clob.rs # Polymarket CLOB order execution 191 | └── config.rs # League configs, thresholds 192 | ``` 193 | 194 | --- 195 | 196 | ## Development 197 | 198 | ### Run Tests 199 | 200 | ```bash 201 | cargo test 202 | ``` 203 | 204 | ### Enable Profiling 205 | 206 | ```bash 207 | cargo build --release --features profiling 208 | ``` 209 | 210 | ### Benchmarks 211 | 212 | ```bash 213 | cargo bench 214 | ``` 215 | 216 | --- 217 | 218 | ## Project Status 219 | 220 | - [x] Kalshi REST/WebSocket client 221 | - [x] Polymarket REST/WebSocket client 222 | - [x] Lock-free orderbook cache 223 | - [x] SIMD arb detection 224 | - [x] Concurrent order execution 225 | - [x] Position & P&L tracking 226 | - [x] Circuit breaker 227 | - [x] Market discovery & caching 228 | - [ ] Risk limit configuration UI 229 | - [ ] Multi-account support 230 | 231 | # poly-kalshi-arb 232 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | //! Polymarket-Kalshi Arbitrage Bot v2.0 2 | //! 3 | //! 
Strategy: BUY YES on Platform A + BUY NO on Platform B 4 | //! Arb exists when: YES_ask + NO_ask < $1.00 5 | 6 | mod cache; 7 | mod circuit_breaker; 8 | mod config; 9 | mod discovery; 10 | mod execution; 11 | mod kalshi; 12 | mod polymarket; 13 | mod polymarket_clob; 14 | mod position_tracker; 15 | mod types; 16 | 17 | use anyhow::{Context, Result}; 18 | use std::sync::Arc; 19 | use tokio::sync::RwLock; 20 | use tracing::{error, info, warn}; 21 | 22 | use cache::TeamCache; 23 | use circuit_breaker::{CircuitBreaker, CircuitBreakerConfig}; 24 | use config::{ARB_THRESHOLD, ENABLED_LEAGUES, WS_RECONNECT_DELAY_SECS}; 25 | use discovery::DiscoveryClient; 26 | use execution::{ExecutionEngine, create_execution_channel, run_execution_loop}; 27 | use kalshi::{KalshiConfig, KalshiApiClient}; 28 | use polymarket_clob::{PolymarketAsyncClient, PreparedCreds, SharedAsyncClient}; 29 | use position_tracker::{PositionTracker, create_position_channel, position_writer_loop}; 30 | use types::{GlobalState, PriceCents}; 31 | 32 | /// Polymarket CLOB API host 33 | const POLY_CLOB_HOST: &str = "https://clob.polymarket.com"; 34 | /// Polygon chain ID 35 | const POLYGON_CHAIN_ID: u64 = 137; 36 | 37 | #[tokio::main] 38 | async fn main() -> Result<()> { 39 | // Initialize logging 40 | tracing_subscriber::fmt() 41 | .with_env_filter( 42 | tracing_subscriber::EnvFilter::from_default_env() 43 | .add_directive("arb_bot=info".parse().unwrap()), 44 | ) 45 | .init(); 46 | 47 | info!("🎯 Arb Bot v2.0"); 48 | info!(" Threshold: <{:.1}¢ for {:.1}% profit", 49 | ARB_THRESHOLD * 100.0, (1.0 - ARB_THRESHOLD) * 100.0); 50 | info!(" Leagues: {:?}", ENABLED_LEAGUES); 51 | 52 | // Check for dry run mode 53 | let dry_run = std::env::var("DRY_RUN").map(|v| v == "1" || v == "true").unwrap_or(true); 54 | if dry_run { 55 | info!(" Mode: DRY RUN (set DRY_RUN=0 to execute)"); 56 | } else { 57 | warn!(" Mode: LIVE EXECUTION"); 58 | } 59 | 60 | // Load Kalshi credentials 61 | let kalshi_config = KalshiConfig::from_env()?; 62 | info!("[KALSHI] API key loaded"); 63 | 64 | // Load Polymarket credentials 65 | dotenvy::dotenv().ok(); 66 | let poly_private_key = std::env::var("POLY_PRIVATE_KEY") 67 | .context("POLY_PRIVATE_KEY not set")?; 68 | let poly_funder = std::env::var("POLY_FUNDER") 69 | .context("POLY_FUNDER not set (your wallet address)")?; 70 | 71 | // Create async Polymarket client and derive API credentials 72 | info!("[POLYMARKET] Creating async client and deriving API credentials..."); 73 | let poly_async_client = PolymarketAsyncClient::new( 74 | POLY_CLOB_HOST, 75 | POLYGON_CHAIN_ID, 76 | &poly_private_key, 77 | &poly_funder, 78 | )?; 79 | let api_creds = poly_async_client.derive_api_key(0).await?; 80 | let prepared_creds = PreparedCreds::from_api_creds(&api_creds)?; 81 | let poly_async = Arc::new(SharedAsyncClient::new(poly_async_client, prepared_creds, POLYGON_CHAIN_ID)); 82 | 83 | // Load neg_risk cache from Python script output 84 | match poly_async.load_cache(".clob_market_cache.json") { 85 | Ok(count) => info!("[POLYMARKET] Loaded {} neg_risk entries from cache", count), 86 | Err(e) => warn!("[POLYMARKET] Could not load neg_risk cache: {}", e), 87 | } 88 | 89 | info!("[POLYMARKET] Client ready for {}", &poly_funder[..10]); 90 | 91 | // Load team cache 92 | let team_cache = TeamCache::load(); 93 | info!("📂 Loaded {} team mappings", team_cache.len()); 94 | 95 | // Create Kalshi API client 96 | let kalshi_api = Arc::new(KalshiApiClient::new(kalshi_config)); 97 | 98 | // Run discovery (with caching support) 99 | let force_discovery = 
std::env::var("FORCE_DISCOVERY") 100 | .map(|v| v == "1" || v == "true") 101 | .unwrap_or(false); 102 | 103 | info!("🔍 Discovering markets{}...", 104 | if force_discovery { " (forced refresh)" } else { "" }); 105 | 106 | let discovery = DiscoveryClient::new( 107 | KalshiApiClient::new(KalshiConfig::from_env()?), 108 | team_cache 109 | ); 110 | 111 | let result = if force_discovery { 112 | discovery.discover_all_force(ENABLED_LEAGUES).await 113 | } else { 114 | discovery.discover_all(ENABLED_LEAGUES).await 115 | }; 116 | 117 | info!("📊 Discovery complete:"); 118 | info!(" - Market pairs found: {}", result.pairs.len()); 119 | 120 | if !result.errors.is_empty() { 121 | for err in &result.errors { 122 | warn!(" ⚠️ {}", err); 123 | } 124 | } 125 | 126 | if result.pairs.is_empty() { 127 | error!("No market pairs found!"); 128 | return Ok(()); 129 | } 130 | 131 | // Print discovered pairs 132 | info!("📋 Matched markets:"); 133 | for pair in &result.pairs { 134 | info!(" ✅ {} | {} | K:{}", 135 | pair.description, 136 | pair.market_type, 137 | pair.kalshi_market_ticker); 138 | } 139 | 140 | // Build global state 141 | let state = Arc::new({ 142 | let mut s = GlobalState::new(); 143 | for pair in result.pairs { 144 | s.add_pair(pair); 145 | } 146 | info!("📡 State: Tracking {} markets", s.market_count()); 147 | s 148 | }); 149 | 150 | // Create execution infrastructure 151 | let (exec_tx, exec_rx) = create_execution_channel(); 152 | let circuit_breaker = Arc::new(CircuitBreaker::new(CircuitBreakerConfig::from_env())); 153 | 154 | let position_tracker = Arc::new(RwLock::new(PositionTracker::new())); 155 | let (position_channel, position_rx) = create_position_channel(); 156 | 157 | tokio::spawn(position_writer_loop(position_rx, position_tracker)); 158 | 159 | let threshold_cents: PriceCents = ((ARB_THRESHOLD * 100.0).round() as u16).max(1); 160 | info!(" Threshold: {} cents", threshold_cents); 161 | 162 | let engine = Arc::new(ExecutionEngine::new( 163 | kalshi_api.clone(), 164 | poly_async, 165 | state.clone(), 166 | circuit_breaker.clone(), 167 | position_channel, 168 | dry_run, 169 | )); 170 | 171 | let exec_handle = tokio::spawn(run_execution_loop(exec_rx, engine)); 172 | 173 | // === TEST MODE: Inject fake arb after delay === 174 | // TEST_ARB=1 to enable, TEST_ARB_TYPE=poly_yes_kalshi_no|kalshi_yes_poly_no|poly_only|kalshi_only 175 | let test_arb = std::env::var("TEST_ARB").map(|v| v == "1" || v == "true").unwrap_or(false); 176 | if test_arb { 177 | let test_state = state.clone(); 178 | let test_exec_tx = exec_tx.clone(); 179 | let test_dry_run = dry_run; 180 | 181 | // Parse arb type from environment (default: poly_yes_kalshi_no) 182 | let arb_type_str = std::env::var("TEST_ARB_TYPE").unwrap_or_else(|_| "poly_yes_kalshi_no".to_string()); 183 | 184 | tokio::spawn(async move { 185 | use types::{FastExecutionRequest, ArbType}; 186 | 187 | // Wait for WebSockets to connect and populate some prices 188 | info!("[TEST] Will inject fake arb in 10 seconds..."); 189 | tokio::time::sleep(tokio::time::Duration::from_secs(10)).await; 190 | 191 | // Parse arb type 192 | let arb_type = match arb_type_str.to_lowercase().as_str() { 193 | "poly_yes_kalshi_no" | "pykn" | "0" => ArbType::PolyYesKalshiNo, 194 | "kalshi_yes_poly_no" | "kypn" | "1" => ArbType::KalshiYesPolyNo, 195 | "poly_only" | "poly" | "2" => ArbType::PolyOnly, 196 | "kalshi_only" | "kalshi" | "3" => ArbType::KalshiOnly, 197 | _ => { 198 | warn!("[TEST] Unknown TEST_ARB_TYPE='{}', defaulting to PolyYesKalshiNo", arb_type_str); 199 | warn!("[TEST] 
Valid values: poly_yes_kalshi_no, kalshi_yes_poly_no, poly_only, kalshi_only"); 200 | ArbType::PolyYesKalshiNo 201 | } 202 | }; 203 | 204 | // Set prices based on arb type for realistic test scenarios 205 | let (yes_price, no_price, description) = match arb_type { 206 | ArbType::PolyYesKalshiNo => (40, 50, "P_yes=40¢ + K_no=50¢ + fee≈2¢ = 92¢ → 8¢ profit"), 207 | ArbType::KalshiYesPolyNo => (40, 50, "K_yes=40¢ + P_no=50¢ + fee≈2¢ = 92¢ → 8¢ profit"), 208 | ArbType::PolyOnly => (48, 50, "P_yes=48¢ + P_no=50¢ + fee=0¢ = 98¢ → 2¢ profit (NO FEES!)"), 209 | ArbType::KalshiOnly => (44, 44, "K_yes=44¢ + K_no=44¢ + fee≈4¢ = 92¢ → 8¢ profit (DOUBLE FEES)"), 210 | }; 211 | 212 | // Find first market with valid state 213 | let market_count = test_state.market_count(); 214 | for market_id in 0..market_count { 215 | if let Some(market) = test_state.get_by_id(market_id as u16) { 216 | if let Some(pair) = &market.pair { 217 | // SIZE: 1000 cents = 10 contracts (Poly $1 min requires ~3 contracts at 40¢) 218 | let fake_req = FastExecutionRequest { 219 | market_id: market_id as u16, 220 | yes_price, 221 | no_price, 222 | yes_size: 1000, // 1000¢ = 10 contracts 223 | no_size: 1000, // 1000¢ = 10 contracts 224 | arb_type, 225 | detected_ns: 0, 226 | }; 227 | 228 | warn!("[TEST] 🧪 Injecting FAKE {:?} arb for: {}", arb_type, pair.description); 229 | warn!("[TEST] {}", description); 230 | warn!("[TEST] SIZE CAPPED TO 10 CONTRACTS for safety!"); 231 | warn!("[TEST] Execution mode: DRY_RUN={}", test_dry_run); 232 | 233 | if let Err(e) = test_exec_tx.send(fake_req).await { 234 | error!("[TEST] Failed to send fake arb: {}", e); 235 | } 236 | break; 237 | } 238 | } 239 | } 240 | }); 241 | } 242 | 243 | // Start Kalshi WebSocket (config parsed once, reused on reconnects) 244 | let kalshi_state = state.clone(); 245 | let kalshi_exec_tx = exec_tx.clone(); 246 | let kalshi_threshold = threshold_cents; 247 | let kalshi_ws_config = KalshiConfig::from_env()?; 248 | let kalshi_handle = tokio::spawn(async move { 249 | loop { 250 | if let Err(e) = kalshi::run_ws(&kalshi_ws_config, kalshi_state.clone(), kalshi_exec_tx.clone(), kalshi_threshold).await { 251 | error!("[KALSHI] Disconnected: {} - reconnecting...", e); 252 | } 253 | tokio::time::sleep(tokio::time::Duration::from_secs(WS_RECONNECT_DELAY_SECS)).await; 254 | } 255 | }); 256 | 257 | // Start Polymarket WebSocket 258 | let poly_state = state.clone(); 259 | let poly_exec_tx = exec_tx.clone(); 260 | let poly_threshold = threshold_cents; 261 | let poly_handle = tokio::spawn(async move { 262 | loop { 263 | if let Err(e) = polymarket::run_ws(poly_state.clone(), poly_exec_tx.clone(), poly_threshold).await { 264 | error!("[POLYMARKET] Disconnected: {} - reconnecting...", e); 265 | } 266 | tokio::time::sleep(tokio::time::Duration::from_secs(WS_RECONNECT_DELAY_SECS)).await; 267 | } 268 | }); 269 | 270 | // Heartbeat task with arb diagnostics 271 | let heartbeat_state = state.clone(); 272 | let heartbeat_threshold = threshold_cents; 273 | let heartbeat_handle = tokio::spawn(async move { 274 | use crate::types::kalshi_fee_cents; 275 | let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(60)); 276 | loop { 277 | interval.tick().await; 278 | let market_count = heartbeat_state.market_count(); 279 | let mut with_kalshi = 0; 280 | let mut with_poly = 0; 281 | let mut with_both = 0; 282 | // (cost, market_id, p_yes, k_no, k_yes, p_no, fee, is_poly_yes_kalshi_no) 283 | let mut best_arb: Option<(u16, u16, u16, u16, u16, u16, u16, bool)> = None; 284 | 285 | for market 
in heartbeat_state.markets.iter().take(market_count) { 286 | let (k_yes, k_no, _, _) = market.kalshi.load(); 287 | let (p_yes, p_no, _, _) = market.poly.load(); 288 | let has_k = k_yes > 0 && k_no > 0; 289 | let has_p = p_yes > 0 && p_no > 0; 290 | if k_yes > 0 || k_no > 0 { with_kalshi += 1; } 291 | if p_yes > 0 || p_no > 0 { with_poly += 1; } 292 | if has_k && has_p { 293 | with_both += 1; 294 | 295 | let fee1 = kalshi_fee_cents(k_no); 296 | let cost1 = p_yes + k_no + fee1; 297 | 298 | let fee2 = kalshi_fee_cents(k_yes); 299 | let cost2 = k_yes + fee2 + p_no; 300 | 301 | let (best_cost, best_fee, is_poly_yes) = if cost1 <= cost2 { 302 | (cost1, fee1, true) 303 | } else { 304 | (cost2, fee2, false) 305 | }; 306 | 307 | if best_arb.is_none() || best_cost < best_arb.as_ref().unwrap().0 { 308 | best_arb = Some((best_cost, market.market_id, p_yes, k_no, k_yes, p_no, best_fee, is_poly_yes)); 309 | } 310 | } 311 | } 312 | 313 | info!("💓 Heartbeat | Markets: {} total, {} w/Kalshi, {} w/Poly, {} w/Both | threshold={}¢", 314 | market_count, with_kalshi, with_poly, with_both, heartbeat_threshold); 315 | 316 | if let Some((cost, market_id, p_yes, k_no, k_yes, p_no, fee, is_poly_yes)) = best_arb { 317 | let gap = cost as i16 - heartbeat_threshold as i16; 318 | let desc = heartbeat_state.get_by_id(market_id) 319 | .and_then(|m| m.pair.as_ref()) 320 | .map(|p| &*p.description) 321 | .unwrap_or("Unknown"); 322 | let leg_breakdown = if is_poly_yes { 323 | format!("P_yes({}¢) + K_no({}¢) + K_fee({}¢) = {}¢", p_yes, k_no, fee, cost) 324 | } else { 325 | format!("K_yes({}¢) + P_no({}¢) + K_fee({}¢) = {}¢", k_yes, p_no, fee, cost) 326 | }; 327 | if gap <= 10 { 328 | info!(" 📊 Best: {} | {} | gap={:+}¢ | [P_yes={}¢ K_no={}¢ K_yes={}¢ P_no={}¢]", 329 | desc, leg_breakdown, gap, p_yes, k_no, k_yes, p_no); 330 | } else { 331 | info!(" 📊 Best: {} | {} | gap={:+}¢ - efficient", 332 | desc, leg_breakdown, gap); 333 | } 334 | } else if with_both == 0 { 335 | warn!(" ⚠️ No markets with BOTH Kalshi and Poly prices - check WebSocket connections"); 336 | } 337 | } 338 | }); 339 | 340 | // Run forever 341 | let _ = tokio::join!(kalshi_handle, poly_handle, heartbeat_handle, exec_handle); 342 | 343 | Ok(()) 344 | } 345 | -------------------------------------------------------------------------------- /src/circuit_breaker.rs: -------------------------------------------------------------------------------- 1 | // src/circuit_breaker.rs 2 | // Safety circuit breakers - halt trading on various conditions 3 | 4 | use std::sync::atomic::{AtomicBool, AtomicI64, Ordering}; 5 | use std::time::{Duration, Instant}; 6 | use tokio::sync::RwLock; 7 | use tracing::{error, warn, info}; 8 | 9 | /// Circuit breaker configuration from environment 10 | #[derive(Debug, Clone)] 11 | pub struct CircuitBreakerConfig { 12 | /// Maximum position size per market (in contracts) 13 | pub max_position_per_market: i64, 14 | 15 | /// Maximum total position across all markets (in contracts) 16 | pub max_total_position: i64, 17 | 18 | /// Maximum daily loss (in dollars) before halting 19 | pub max_daily_loss: f64, 20 | 21 | /// Maximum number of consecutive errors before halting 22 | pub max_consecutive_errors: u32, 23 | 24 | /// Cooldown period after a trip (seconds) 25 | pub cooldown_secs: u64, 26 | 27 | /// Whether circuit breakers are enabled 28 | pub enabled: bool, 29 | } 30 | 31 | impl CircuitBreakerConfig { 32 | pub fn from_env() -> Self { 33 | Self { 34 | max_position_per_market: std::env::var("CB_MAX_POSITION_PER_MARKET") 35 | .ok() 36 | 
.and_then(|v| v.parse().ok()) 37 | .unwrap_or(50000), 38 | 39 | max_total_position: std::env::var("CB_MAX_TOTAL_POSITION") 40 | .ok() 41 | .and_then(|v| v.parse().ok()) 42 | .unwrap_or(100000), 43 | 44 | max_daily_loss: std::env::var("CB_MAX_DAILY_LOSS") 45 | .ok() 46 | .and_then(|v| v.parse().ok()) 47 | .unwrap_or(500.0), 48 | 49 | max_consecutive_errors: std::env::var("CB_MAX_CONSECUTIVE_ERRORS") 50 | .ok() 51 | .and_then(|v| v.parse().ok()) 52 | .unwrap_or(5), 53 | 54 | cooldown_secs: std::env::var("CB_COOLDOWN_SECS") 55 | .ok() 56 | .and_then(|v| v.parse().ok()) 57 | .unwrap_or(300), // 5 minutes default 58 | 59 | enabled: std::env::var("CB_ENABLED") 60 | .map(|v| v == "1" || v == "true") 61 | .unwrap_or(true), // Enabled by default for safety 62 | } 63 | } 64 | } 65 | 66 | /// Reason why circuit breaker was tripped 67 | #[derive(Debug, Clone, PartialEq)] 68 | pub enum TripReason { 69 | MaxPositionPerMarket { market: String, position: i64, limit: i64 }, 70 | MaxTotalPosition { position: i64, limit: i64 }, 71 | MaxDailyLoss { loss: f64, limit: f64 }, 72 | ConsecutiveErrors { count: u32, limit: u32 }, 73 | ManualHalt, 74 | } 75 | 76 | impl std::fmt::Display for TripReason { 77 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 78 | match self { 79 | TripReason::MaxPositionPerMarket { market, position, limit } => { 80 | write!(f, "Max position per market: {} has {} contracts (limit: {})", market, position, limit) 81 | } 82 | TripReason::MaxTotalPosition { position, limit } => { 83 | write!(f, "Max total position: {} contracts (limit: {})", position, limit) 84 | } 85 | TripReason::MaxDailyLoss { loss, limit } => { 86 | write!(f, "Max daily loss: ${:.2} (limit: ${:.2})", loss, limit) 87 | } 88 | TripReason::ConsecutiveErrors { count, limit } => { 89 | write!(f, "Consecutive errors: {} (limit: {})", count, limit) 90 | } 91 | TripReason::ManualHalt => { 92 | write!(f, "Manual halt triggered") 93 | } 94 | } 95 | } 96 | } 97 | 98 | /// Position tracking for a single market 99 | #[derive(Debug, Default)] 100 | pub struct MarketPosition { 101 | pub kalshi_yes: i64, 102 | pub kalshi_no: i64, 103 | pub poly_yes: i64, 104 | pub poly_no: i64, 105 | } 106 | 107 | #[allow(dead_code)] 108 | impl MarketPosition { 109 | pub fn net_position(&self) -> i64 { 110 | // Net exposure: positive = long YES, negative = long NO 111 | (self.kalshi_yes + self.poly_yes) - (self.kalshi_no + self.poly_no) 112 | } 113 | 114 | pub fn total_contracts(&self) -> i64 { 115 | self.kalshi_yes + self.kalshi_no + self.poly_yes + self.poly_no 116 | } 117 | } 118 | 119 | /// Circuit breaker state 120 | pub struct CircuitBreaker { 121 | config: CircuitBreakerConfig, 122 | 123 | /// Whether trading is currently halted 124 | halted: AtomicBool, 125 | 126 | /// When the circuit breaker was tripped 127 | tripped_at: RwLock<Option<Instant>>, 128 | 129 | /// Reason for trip 130 | trip_reason: RwLock<Option<TripReason>>, 131 | 132 | /// Consecutive error count 133 | consecutive_errors: AtomicI64, 134 | 135 | /// Daily P&L tracking (in cents) 136 | daily_pnl_cents: AtomicI64, 137 | 138 | /// Positions per market 139 | positions: RwLock<std::collections::HashMap<String, MarketPosition>>, 140 | } 141 | 142 | impl CircuitBreaker { 143 | pub fn new(config: CircuitBreakerConfig) -> Self { 144 | info!("[CB] Circuit breaker initialized:"); 145 | info!("[CB] Enabled: {}", config.enabled); 146 | info!("[CB] Max position per market: {} contracts", config.max_position_per_market); 147 | info!("[CB] Max total position: {} contracts", config.max_total_position); 148 | info!("[CB] Max daily loss: ${:.2}", 
config.max_daily_loss); 149 | info!("[CB] Max consecutive errors: {}", config.max_consecutive_errors); 150 | info!("[CB] Cooldown: {}s", config.cooldown_secs); 151 | 152 | Self { 153 | config, 154 | halted: AtomicBool::new(false), 155 | tripped_at: RwLock::new(None), 156 | trip_reason: RwLock::new(None), 157 | consecutive_errors: AtomicI64::new(0), 158 | daily_pnl_cents: AtomicI64::new(0), 159 | positions: RwLock::new(std::collections::HashMap::new()), 160 | } 161 | } 162 | 163 | /// Check if trading is allowed 164 | #[allow(dead_code)] 165 | pub fn is_trading_allowed(&self) -> bool { 166 | if !self.config.enabled { 167 | return true; 168 | } 169 | !self.halted.load(Ordering::SeqCst) 170 | } 171 | 172 | /// Check if we can execute a trade for a specific market 173 | pub async fn can_execute(&self, market_id: &str, contracts: i64) -> Result<(), TripReason> { 174 | if !self.config.enabled { 175 | return Ok(()); 176 | } 177 | 178 | if self.halted.load(Ordering::SeqCst) { 179 | let reason = self.trip_reason.read().await; 180 | return Err(reason.clone().unwrap_or(TripReason::ManualHalt)); 181 | } 182 | 183 | // Check position limits 184 | let positions = self.positions.read().await; 185 | 186 | // Per-market limit 187 | if let Some(pos) = positions.get(market_id) { 188 | let new_position = pos.total_contracts() + contracts; 189 | if new_position > self.config.max_position_per_market { 190 | return Err(TripReason::MaxPositionPerMarket { 191 | market: market_id.to_string(), 192 | position: new_position, 193 | limit: self.config.max_position_per_market, 194 | }); 195 | } 196 | } 197 | 198 | // Total position limit 199 | let total: i64 = positions.values().map(|p| p.total_contracts()).sum(); 200 | if total + contracts > self.config.max_total_position { 201 | return Err(TripReason::MaxTotalPosition { 202 | position: total + contracts, 203 | limit: self.config.max_total_position, 204 | }); 205 | } 206 | 207 | // Daily loss limit 208 | let daily_loss = -self.daily_pnl_cents.load(Ordering::SeqCst) as f64 / 100.0; 209 | if daily_loss > self.config.max_daily_loss { 210 | return Err(TripReason::MaxDailyLoss { 211 | loss: daily_loss, 212 | limit: self.config.max_daily_loss, 213 | }); 214 | } 215 | 216 | Ok(()) 217 | } 218 | 219 | /// Record a successful execution 220 | pub async fn record_success(&self, market_id: &str, kalshi_contracts: i64, poly_contracts: i64, pnl: f64) { 221 | // Reset consecutive errors 222 | self.consecutive_errors.store(0, Ordering::SeqCst); 223 | 224 | // Update P&L 225 | let pnl_cents = (pnl * 100.0) as i64; 226 | self.daily_pnl_cents.fetch_add(pnl_cents, Ordering::SeqCst); 227 | 228 | // Update positions 229 | let mut positions = self.positions.write().await; 230 | let pos = positions.entry(market_id.to_string()).or_default(); 231 | pos.kalshi_yes += kalshi_contracts; 232 | pos.poly_no += poly_contracts; 233 | } 234 | 235 | /// Record an error 236 | pub async fn record_error(&self) { 237 | let errors = self.consecutive_errors.fetch_add(1, Ordering::SeqCst) + 1; 238 | 239 | if errors >= self.config.max_consecutive_errors as i64 { 240 | self.trip(TripReason::ConsecutiveErrors { 241 | count: errors as u32, 242 | limit: self.config.max_consecutive_errors, 243 | }).await; 244 | } 245 | } 246 | 247 | /// Record P&L update (for tracking without execution) 248 | #[allow(dead_code)] 249 | pub fn record_pnl(&self, pnl: f64) { 250 | let pnl_cents = (pnl * 100.0) as i64; 251 | self.daily_pnl_cents.fetch_add(pnl_cents, Ordering::SeqCst); 252 | } 253 | 254 | /// Trip the circuit breaker 255 | 
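    /// Tripping is sticky: once halted, `can_execute` keeps returning the stored
    /// `TripReason` until `reset()` is called (or `check_cooldown()` observes that
    /// `cooldown_secs` has elapsed). Minimal operator-side sketch (hypothetical caller):
    /// ```ignore
    /// breaker.trip(TripReason::ManualHalt).await; // stop new executions
    /// assert!(!breaker.is_trading_allowed());
    /// breaker.reset().await;                      // resume trading
    /// ```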
pub async fn trip(&self, reason: TripReason) { 256 | if !self.config.enabled { 257 | return; 258 | } 259 | 260 | error!("🚨 CIRCUIT BREAKER TRIPPED: {}", reason); 261 | 262 | self.halted.store(true, Ordering::SeqCst); 263 | *self.tripped_at.write().await = Some(Instant::now()); 264 | *self.trip_reason.write().await = Some(reason); 265 | } 266 | 267 | /// Manually halt trading 268 | #[allow(dead_code)] 269 | pub async fn halt(&self) { 270 | warn!("[CB] Manual halt triggered"); 271 | self.trip(TripReason::ManualHalt).await; 272 | } 273 | 274 | /// Reset the circuit breaker (after cooldown or manual reset) 275 | #[allow(dead_code)] 276 | pub async fn reset(&self) { 277 | info!("[CB] Circuit breaker reset"); 278 | self.halted.store(false, Ordering::SeqCst); 279 | *self.tripped_at.write().await = None; 280 | *self.trip_reason.write().await = None; 281 | self.consecutive_errors.store(0, Ordering::SeqCst); 282 | } 283 | 284 | /// Reset daily P&L (call at midnight) 285 | #[allow(dead_code)] 286 | pub fn reset_daily_pnl(&self) { 287 | info!("[CB] Daily P&L reset"); 288 | self.daily_pnl_cents.store(0, Ordering::SeqCst); 289 | } 290 | 291 | /// Check if cooldown has elapsed and auto-reset if so 292 | #[allow(dead_code)] 293 | pub async fn check_cooldown(&self) -> bool { 294 | if !self.halted.load(Ordering::SeqCst) { 295 | return true; 296 | } 297 | 298 | let tripped_at = self.tripped_at.read().await; 299 | if let Some(tripped) = *tripped_at { 300 | if tripped.elapsed() > Duration::from_secs(self.config.cooldown_secs) { 301 | drop(tripped_at); // Release read lock before reset 302 | self.reset().await; 303 | return true; 304 | } 305 | } 306 | 307 | false 308 | } 309 | 310 | /// Get current status 311 | #[allow(dead_code)] 312 | pub async fn status(&self) -> CircuitBreakerStatus { 313 | let positions = self.positions.read().await; 314 | let total_position: i64 = positions.values().map(|p| p.total_contracts()).sum(); 315 | 316 | CircuitBreakerStatus { 317 | enabled: self.config.enabled, 318 | halted: self.halted.load(Ordering::SeqCst), 319 | trip_reason: self.trip_reason.read().await.clone(), 320 | consecutive_errors: self.consecutive_errors.load(Ordering::SeqCst) as u32, 321 | daily_pnl: self.daily_pnl_cents.load(Ordering::SeqCst) as f64 / 100.0, 322 | total_position, 323 | market_count: positions.len(), 324 | } 325 | } 326 | } 327 | 328 | #[derive(Debug, Clone)] 329 | #[allow(dead_code)] 330 | pub struct CircuitBreakerStatus { 331 | pub enabled: bool, 332 | pub halted: bool, 333 | pub trip_reason: Option<TripReason>, 334 | pub consecutive_errors: u32, 335 | pub daily_pnl: f64, 336 | pub total_position: i64, 337 | pub market_count: usize, 338 | } 339 | 340 | impl std::fmt::Display for CircuitBreakerStatus { 341 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 342 | if !self.enabled { 343 | return write!(f, "Circuit Breaker: DISABLED"); 344 | } 345 | 346 | if self.halted { 347 | write!(f, "Circuit Breaker: 🛑 HALTED")?; 348 | if let Some(reason) = &self.trip_reason { 349 | write!(f, " ({})", reason)?; 350 | } 351 | } else { 352 | write!(f, "Circuit Breaker: ✅ OK")?; 353 | } 354 | 355 | write!(f, " | P&L: ${:.2} | Pos: {} contracts across {} markets | Errors: {}", 356 | self.daily_pnl, self.total_position, self.market_count, self.consecutive_errors) 357 | } 358 | } 359 | 360 | #[cfg(test)] 361 | mod tests { 362 | use super::*; 363 |
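    // Additional check (not in the original suite): the daily-loss cap should
    // reject new executions once realized P&L is negative enough.
    #[tokio::test]
    async fn test_daily_loss_limit() {
        let config = CircuitBreakerConfig {
            max_position_per_market: 100,
            max_total_position: 500,
            max_daily_loss: 100.0,
            max_consecutive_errors: 3,
            cooldown_secs: 60,
            enabled: true,
        };
        let cb = CircuitBreaker::new(config);

        // Book a losing fill: -$150 realized P&L exceeds the $100 daily cap.
        cb.record_success("market1", 1, 1, -150.0).await;

        assert!(matches!(
            cb.can_execute("market1", 1).await,
            Err(TripReason::MaxDailyLoss { .. })
        ));
    }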
364 | #[tokio::test] 365 | async fn test_circuit_breaker_position_limit() { 366 | let config = CircuitBreakerConfig { 367 | max_position_per_market: 10, 368 | max_total_position: 50, 369 | max_daily_loss: 100.0, 370 | max_consecutive_errors: 3, 371 | cooldown_secs: 60, 372 | enabled: true, 373 | }; 374 | 375 | let cb = CircuitBreaker::new(config); 376 | 377 | // Should allow initial trade 378 | assert!(cb.can_execute("market1", 5).await.is_ok()); 379 | 380 | // Record the trade 381 | cb.record_success("market1", 5, 5, 0.0).await; 382 | 383 | // Should reject trade exceeding per-market limit 384 | let result = cb.can_execute("market1", 10).await; 385 | assert!(matches!(result, Err(TripReason::MaxPositionPerMarket { .. }))); 386 | } 387 | 388 | #[tokio::test] 389 | async fn test_consecutive_errors() { 390 | let config = CircuitBreakerConfig { 391 | max_position_per_market: 100, 392 | max_total_position: 500, 393 | max_daily_loss: 100.0, 394 | max_consecutive_errors: 3, 395 | cooldown_secs: 60, 396 | enabled: true, 397 | }; 398 | 399 | let cb = CircuitBreaker::new(config); 400 | 401 | // Record errors 402 | cb.record_error().await; 403 | cb.record_error().await; 404 | assert!(cb.is_trading_allowed()); 405 | 406 | // Third error should trip 407 | cb.record_error().await; 408 | assert!(!cb.is_trading_allowed()); 409 | } 410 | } -------------------------------------------------------------------------------- /src/polymarket.rs: -------------------------------------------------------------------------------- 1 | // src/polymarket.rs 2 | // Polymarket WebSocket client with ping keepalive 3 | 4 | use anyhow::{Context, Result}; 5 | use futures_util::{SinkExt, StreamExt}; 6 | use serde::{Deserialize, Serialize}; 7 | use std::sync::Arc; 8 | use std::time::Duration; 9 | use tokio::sync::mpsc; 10 | use tokio::time::{interval, Instant}; 11 | use tokio_tungstenite::{connect_async, tungstenite::Message}; 12 | use tracing::{error, info, warn}; 13 | 14 | use crate::config::{POLYMARKET_WS_URL, POLY_PING_INTERVAL_SECS, GAMMA_API_BASE}; 15 | use crate::execution::NanoClock; 16 | use crate::types::{ 17 | GlobalState, FastExecutionRequest, ArbType, PriceCents, SizeCents, 18 | parse_price, fxhash_str, 19 | }; 20 | 21 | // === WebSocket Message Types === 22 | 23 | #[derive(Deserialize, Debug)] 24 | pub struct BookSnapshot { 25 | pub asset_id: String, 26 | #[allow(dead_code)] 27 | pub bids: Vec<PriceLevel>, 28 | pub asks: Vec<PriceLevel>, 29 | } 30 | 31 | #[derive(Deserialize, Debug)] 32 | pub struct PriceLevel { 33 | pub price: String, 34 | pub size: String, 35 | } 36 | 37 | #[derive(Deserialize, Debug)] 38 | pub struct PriceChangeEvent { 39 | pub event_type: Option<String>, 40 | #[serde(default)] 41 | pub price_changes: Option<Vec<PriceChangeItem>>, 42 | } 43 | 44 | #[derive(Deserialize, Debug)] 45 | pub struct PriceChangeItem { 46 | pub asset_id: String, 47 | pub price: Option<String>, 48 | pub side: Option<String>, 49 | } 50 | 51 | #[derive(Serialize)] 52 | struct SubscribeCmd { 53 | assets_ids: Vec<String>, 54 | #[serde(rename = "type")] 55 | sub_type: &'static str, 56 | } 57 | 58 | // === Gamma API Client === 59 | 60 | pub struct GammaClient { 61 | http: reqwest::Client, 62 | } 63 | 64 | impl GammaClient { 65 | pub fn new() -> Self { 66 | Self { 67 | http: reqwest::Client::builder() 68 | .timeout(Duration::from_secs(10)) 69 | .build() 70 | .expect("Failed to build HTTP client"), 71 | } 72 | } 73 |
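    // Typical usage sketch (hypothetical caller; slug format matches increment_date_in_slug below):
    //   let gamma = GammaClient::new();
    //   if let Some((yes_token, no_token)) = gamma.lookup_market("epl-che-avl-2025-12-08").await? {
    //       // subscribe to both CLOB tokens
    //   }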
74 | /// Look up Polymarket market by slug, return (yes_token, no_token) 75 | /// Tries both the exact date and next day (timezone handling) 76 | pub async fn lookup_market(&self, slug: &str) -> Result<Option<(String, String)>> { 77 | // Try exact slug first 78 | if let Some(tokens) = self.try_lookup_slug(slug).await? { 79 | return Ok(Some(tokens)); 80 | } 81 | 82 | // Try with next day (Polymarket may use local time) 83 | if let Some(next_day_slug) = increment_date_in_slug(slug) { 84 | if let Some(tokens) = self.try_lookup_slug(&next_day_slug).await? { 85 | info!(" 📅 Found with next-day slug: {}", next_day_slug); 86 | return Ok(Some(tokens)); 87 | } 88 | } 89 | 90 | Ok(None) 91 | } 92 | 93 | async fn try_lookup_slug(&self, slug: &str) -> Result<Option<(String, String)>> { 94 | let url = format!("{}/markets?slug={}", GAMMA_API_BASE, slug); 95 | 96 | let resp = self.http.get(&url).send().await?; 97 | 98 | if !resp.status().is_success() { 99 | return Ok(None); 100 | } 101 | 102 | let markets: Vec<GammaMarket> = resp.json().await?; 103 | 104 | if markets.is_empty() { 105 | return Ok(None); 106 | } 107 | 108 | let market = &markets[0]; 109 | 110 | // Check if active and not closed 111 | if market.closed == Some(true) || market.active == Some(false) { 112 | return Ok(None); 113 | } 114 | 115 | // Parse clobTokenIds JSON array 116 | let token_ids: Vec<String> = market.clob_token_ids 117 | .as_ref() 118 | .and_then(|s| serde_json::from_str(s).ok()) 119 | .unwrap_or_default(); 120 | 121 | if token_ids.len() >= 2 { 122 | Ok(Some((token_ids[0].clone(), token_ids[1].clone()))) 123 | } else { 124 | Ok(None) 125 | } 126 | } 127 | } 128 | 129 | #[derive(Debug, Deserialize)] 130 | struct GammaMarket { 131 | #[serde(rename = "clobTokenIds")] 132 | clob_token_ids: Option<String>, 133 | active: Option<bool>, 134 | closed: Option<bool>, 135 | } 136 | 137 | /// Increment the date in a Polymarket slug by 1 day 138 | /// e.g., "epl-che-avl-2025-12-08" -> "epl-che-avl-2025-12-09" 139 | fn increment_date_in_slug(slug: &str) -> Option<String> { 140 | let parts: Vec<&str> = slug.split('-').collect(); 141 | if parts.len() < 6 { 142 | return None; 143 | } 144 | 145 | let year: i32 = parts[3].parse().ok()?; 146 | let month: u32 = parts[4].parse().ok()?; 147 | let day: u32 = parts[5].parse().ok()?; 148 | 149 | // Compute next day 150 | let days_in_month = match month { 151 | 1 | 3 | 5 | 7 | 8 | 10 | 12 => 31, 152 | 4 | 6 | 9 | 11 => 30, 153 | 2 => if year % 4 == 0 && (year % 100 != 0 || year % 400 == 0) { 29 } else { 28 }, 154 | _ => 31, 155 | }; 156 | 157 | let (new_year, new_month, new_day) = if day >= days_in_month { 158 | if month == 12 { (year + 1, 1, 1) } else { (year, month + 1, 1) } 159 | } else { 160 | (year, month, day + 1) 161 | }; 162 | 163 | // Rebuild slug with owned strings 164 | let prefix = parts[..3].join("-"); 165 | let suffix = if parts.len() > 6 { format!("-{}", parts[6..].join("-")) } else { String::new() }; 166 | 167 | Some(format!("{}-{}-{:02}-{:02}{}", prefix, new_year, new_month, new_day, suffix)) 168 | } 169 | 170 | // ============================================================================= 171 | // WebSocket Runner 172 | // ============================================================================= 173 | 174 | /// Parse size from Polymarket (format: "123.45" dollars) 175 | #[inline(always)] 176 | fn parse_size(s: &str) -> SizeCents { 177 | // Parse as f64 and convert to cents 178 | s.parse::<f64>() 179 | .map(|size| (size * 100.0).round() as SizeCents) 180 | .unwrap_or(0) 181 | } 182 |
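// For reference, the subscription command sent by run_ws below serializes to
// (illustrative token ids): {"assets_ids":["1234567890","9876543210"],"type":"market"}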
183 | /// WebSocket runner 184 | pub async fn run_ws( 185 | state: Arc<GlobalState>, 186 | exec_tx: mpsc::Sender<FastExecutionRequest>, 187 | threshold_cents: PriceCents, 188 | ) -> Result<()> { 189 | let tokens: Vec<String> = state.markets.iter() 190 | .take(state.market_count()) 191 | .filter_map(|m| m.pair.as_ref()) 192 | .flat_map(|p| [p.poly_yes_token.to_string(), p.poly_no_token.to_string()]) 193 | .collect(); 194 | 195 | if tokens.is_empty() { 196 | info!("[POLY] No markets to monitor"); 197 | tokio::time::sleep(Duration::from_secs(u64::MAX)).await; 198 | return Ok(()); 199 | } 200 | 201 | let (ws_stream, _) = connect_async(POLYMARKET_WS_URL) 202 | .await 203 | .context("Failed to connect to Polymarket")?; 204 | 205 | info!("[POLY] Connected"); 206 | 207 | let (mut write, mut read) = ws_stream.split(); 208 | 209 | // Subscribe 210 | let subscribe_msg = SubscribeCmd { 211 | assets_ids: tokens.clone(), 212 | sub_type: "market", 213 | }; 214 | 215 | write.send(Message::Text(serde_json::to_string(&subscribe_msg)?)).await?; 216 | info!("[POLY] Subscribed to {} tokens", tokens.len()); 217 | 218 | let clock = NanoClock::new(); 219 | let mut ping_interval = interval(Duration::from_secs(POLY_PING_INTERVAL_SECS)); 220 | let mut last_message = Instant::now(); 221 | 222 | loop { 223 | tokio::select! { 224 | _ = ping_interval.tick() => { 225 | if let Err(e) = write.send(Message::Ping(vec![])).await { 226 | error!("[POLY] Failed to send ping: {}", e); 227 | break; 228 | } 229 | } 230 | 231 | msg = read.next() => { 232 | match msg { 233 | Some(Ok(Message::Text(text))) => { 234 | last_message = Instant::now(); 235 | 236 | // Try book snapshot first 237 | if let Ok(books) = serde_json::from_str::<Vec<BookSnapshot>>(&text) { 238 | for book in &books { 239 | process_book(&state, book, &exec_tx, threshold_cents, &clock).await; 240 | } 241 | } 242 | // Try price change event 243 | else if let Ok(event) = serde_json::from_str::<PriceChangeEvent>(&text) { 244 | if event.event_type.as_deref() == Some("price_change") { 245 | if let Some(changes) = &event.price_changes { 246 | for change in changes { 247 | process_price_change(&state, change, &exec_tx, threshold_cents, &clock).await; 248 | } 249 | } 250 | } 251 | } 252 | // Log unknown message types at trace level for debugging 253 | else { 254 | tracing::trace!("[POLY] Unknown WS message: {}...", &text[..text.len().min(100)]); 255 | } 256 | } 257 | Some(Ok(Message::Ping(data))) => { 258 | let _ = write.send(Message::Pong(data)).await; 259 | last_message = Instant::now(); 260 | } 261 | Some(Ok(Message::Pong(_))) => { 262 | last_message = Instant::now(); 263 | } 264 | Some(Ok(Message::Close(frame))) => { 265 | warn!("[POLY] Server closed: {:?}", frame); 266 | break; 267 | } 268 | Some(Err(e)) => { 269 | error!("[POLY] WebSocket error: {}", e); 270 | break; 271 | } 272 | None => { 273 | warn!("[POLY] Stream ended"); 274 | break; 275 | } 276 | _ => {} 277 | } 278 | } 279 | } 280 | 281 | if last_message.elapsed() > Duration::from_secs(120) { 282 | warn!("[POLY] Stale connection, reconnecting..."); 283 | break; 284 | } 285 | } 286 | 287 | Ok(()) 288 | } 289 | 290 | /// Process book snapshot 291 | #[inline] 292 | async fn process_book( 293 | state: &GlobalState, 294 | book: &BookSnapshot, 295 | exec_tx: &mpsc::Sender<FastExecutionRequest>, 296 | threshold_cents: PriceCents, 297 | clock: &NanoClock, 298 | ) { 299 | let token_hash = fxhash_str(&book.asset_id); 300 | 301 | // Find best ask (lowest price) 302 | let (best_ask, ask_size) = book.asks.iter() 303 | .filter_map(|l| { 304 | let price = parse_price(&l.price); 305 | let size = parse_size(&l.size); 306 | if price > 0 { Some((price, size)) } else { None } 307 | }) 308 | .min_by_key(|(p, _)| *p) 309 | .unwrap_or((0, 0)); 310 | 311 | // Check if YES token 312 | if let Some(&market_id) = state.poly_yes_to_id.get(&token_hash) { 313 | let market = &state.markets[market_id as usize]; 314 | market.poly.update_yes(best_ask, ask_size); 315 | 316 | // Check arbs 317 | let arb_mask = 
market.check_arbs(threshold_cents); 318 | if arb_mask != 0 { 319 | send_arb_request(market_id, market, arb_mask, exec_tx, clock).await; 320 | } 321 | } 322 | // Check if NO token 323 | else if let Some(&market_id) = state.poly_no_to_id.get(&token_hash) { 324 | let market = &state.markets[market_id as usize]; 325 | market.poly.update_no(best_ask, ask_size); 326 | 327 | // Check arbs 328 | let arb_mask = market.check_arbs(threshold_cents); 329 | if arb_mask != 0 { 330 | send_arb_request(market_id, market, arb_mask, exec_tx, clock).await; 331 | } 332 | } 333 | } 334 | 335 | /// Process price change 336 | #[inline] 337 | async fn process_price_change( 338 | state: &GlobalState, 339 | change: &PriceChangeItem, 340 | exec_tx: &mpsc::Sender, 341 | threshold_cents: PriceCents, 342 | clock: &NanoClock, 343 | ) { 344 | // Only process ASK side updates 345 | if !matches!(change.side.as_deref(), Some("ASK" | "ask")) { 346 | return; 347 | } 348 | 349 | let Some(price_str) = &change.price else { return }; 350 | let price = parse_price(price_str); 351 | if price == 0 { return; } 352 | 353 | let token_hash = fxhash_str(&change.asset_id); 354 | 355 | // Check YES token 356 | if let Some(&market_id) = state.poly_yes_to_id.get(&token_hash) { 357 | let market = &state.markets[market_id as usize]; 358 | let (current_yes, _, current_yes_size, _) = market.poly.load(); 359 | 360 | // Only update if new price is better (lower) 361 | if price < current_yes || current_yes == 0 { 362 | // Keep existing size - it may be stale but FAK orders handle partial fills. 363 | // Size is an upper bound anyway; better to attempt arb than miss it. 364 | market.poly.update_yes(price, current_yes_size); 365 | 366 | let arb_mask = market.check_arbs(threshold_cents); 367 | if arb_mask != 0 { 368 | send_arb_request(market_id, market, arb_mask, exec_tx, clock).await; 369 | } 370 | } 371 | } 372 | // Check NO token 373 | else if let Some(&market_id) = state.poly_no_to_id.get(&token_hash) { 374 | let market = &state.markets[market_id as usize]; 375 | let (_, current_no, _, current_no_size) = market.poly.load(); 376 | 377 | if price < current_no || current_no == 0 { 378 | market.poly.update_no(price, current_no_size); 379 | 380 | let arb_mask = market.check_arbs(threshold_cents); 381 | if arb_mask != 0 { 382 | send_arb_request(market_id, market, arb_mask, exec_tx, clock).await; 383 | } 384 | } 385 | } 386 | } 387 | 388 | /// Send arb request to execution engine 389 | #[inline] 390 | async fn send_arb_request( 391 | market_id: u16, 392 | market: &crate::types::AtomicMarketState, 393 | arb_mask: u8, 394 | exec_tx: &mpsc::Sender, 395 | clock: &NanoClock, 396 | ) { 397 | let (k_yes, k_no, k_yes_size, k_no_size) = market.kalshi.load(); 398 | let (p_yes, p_no, p_yes_size, p_no_size) = market.poly.load(); 399 | 400 | // Priority order: cross-platform arbs first (more reliable) 401 | let (yes_price, no_price, yes_size, no_size, arb_type) = if arb_mask & 1 != 0 { 402 | // Poly YES + Kalshi NO 403 | (p_yes, k_no, p_yes_size, k_no_size, ArbType::PolyYesKalshiNo) 404 | } else if arb_mask & 2 != 0 { 405 | // Kalshi YES + Poly NO 406 | (k_yes, p_no, k_yes_size, p_no_size, ArbType::KalshiYesPolyNo) 407 | } else if arb_mask & 4 != 0 { 408 | // Poly only (both sides) 409 | (p_yes, p_no, p_yes_size, p_no_size, ArbType::PolyOnly) 410 | } else if arb_mask & 8 != 0 { 411 | // Kalshi only (both sides) 412 | (k_yes, k_no, k_yes_size, k_no_size, ArbType::KalshiOnly) 413 | } else { 414 | return; 415 | }; 416 | 417 | let req = FastExecutionRequest { 418 | 
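// The snapshot built here carries prices and sizes for both legs plus detected_ns
// (a NanoClock reading), presumably so the execution side can measure handling
// latency from detection to order submission. The request is handed off with
// try_send below: if the execution channel is full, the opportunity is dropped
// rather than blocking this market-data task.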
market_id, 419 | yes_price, 420 | no_price, 421 | yes_size, 422 | no_size, 423 | arb_type, 424 | detected_ns: clock.now_ns(), 425 | }; 426 | 427 | // send! ~~ 428 | let _ = exec_tx.try_send(req); 429 | } -------------------------------------------------------------------------------- /src/position_tracker.rs: -------------------------------------------------------------------------------- 1 | // src/position_tracker.rs 2 | // Track positions, cost basis, and P&L across both platforms 3 | 4 | use anyhow::Result; 5 | use serde::{Deserialize, Serialize}; 6 | use std::collections::HashMap; 7 | use std::path::Path; 8 | use std::sync::Arc; 9 | use std::time::Duration; 10 | use tokio::sync::{mpsc, RwLock}; 11 | use tracing::{info, warn}; 12 | 13 | const POSITION_FILE: &str = "positions.json"; 14 | 15 | /// A single position leg on one platform 16 | #[derive(Debug, Clone, Serialize, Deserialize, Default)] 17 | pub struct PositionLeg { 18 | /// Number of contracts held 19 | pub contracts: f64, 20 | /// Total cost paid (in dollars) 21 | pub cost_basis: f64, 22 | /// Average price per contract 23 | pub avg_price: f64, 24 | } 25 | 26 | #[allow(dead_code)] 27 | impl PositionLeg { 28 | pub fn add(&mut self, contracts: f64, price: f64) { 29 | let new_cost = contracts * price; 30 | self.cost_basis += new_cost; 31 | self.contracts += contracts; 32 | if self.contracts > 0.0 { 33 | self.avg_price = self.cost_basis / self.contracts; 34 | } 35 | } 36 | 37 | /// Unrealized P&L based on current market price 38 | pub fn unrealized_pnl(&self, current_price: f64) -> f64 { 39 | let current_value = self.contracts * current_price; 40 | current_value - self.cost_basis 41 | } 42 | 43 | /// Value if this position wins (pays $1 per contract) 44 | pub fn value_if_win(&self) -> f64 { 45 | self.contracts * 1.0 46 | } 47 | 48 | /// Profit if this position wins 49 | pub fn profit_if_win(&self) -> f64 { 50 | self.value_if_win() - self.cost_basis 51 | } 52 | } 53 | 54 | /// A paired position (arb position spans both platforms) 55 | #[derive(Debug, Clone, Serialize, Deserialize, Default)] 56 | pub struct ArbPosition { 57 | /// Market identifier (Kalshi ticker) 58 | pub market_id: String, 59 | 60 | /// Description for logging 61 | pub description: String, 62 | 63 | /// Kalshi YES position 64 | pub kalshi_yes: PositionLeg, 65 | 66 | /// Kalshi NO position 67 | pub kalshi_no: PositionLeg, 68 | 69 | /// Polymarket YES position 70 | pub poly_yes: PositionLeg, 71 | 72 | /// Polymarket NO position 73 | pub poly_no: PositionLeg, 74 | 75 | /// Total fees paid (Kalshi fees) 76 | pub total_fees: f64, 77 | 78 | /// Timestamp when position was opened 79 | pub opened_at: String, 80 | 81 | /// Status: "open", "closed", "resolved" 82 | pub status: String, 83 | 84 | /// Realized P&L (set when position closes/resolves) 85 | pub realized_pnl: Option, 86 | } 87 | 88 | #[allow(dead_code)] 89 | impl ArbPosition { 90 | pub fn new(market_id: &str, description: &str) -> Self { 91 | Self { 92 | market_id: market_id.to_string(), 93 | description: description.to_string(), 94 | status: "open".to_string(), 95 | opened_at: chrono::Utc::now().to_rfc3339(), 96 | ..Default::default() 97 | } 98 | } 99 | 100 | /// Total contracts across all legs 101 | pub fn total_contracts(&self) -> f64 { 102 | self.kalshi_yes.contracts + self.kalshi_no.contracts + 103 | self.poly_yes.contracts + self.poly_no.contracts 104 | } 105 | 106 | /// Total cost basis across all legs 107 | pub fn total_cost(&self) -> f64 { 108 | self.kalshi_yes.cost_basis + self.kalshi_no.cost_basis + 109 
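// Worked example for PositionLeg::add above (values illustrative):
//   let mut leg = PositionLeg::default();
//   leg.add(10.0, 0.45);   // contracts = 10, cost_basis = 4.50, avg_price = 0.45
//   leg.add(10.0, 0.55);   // contracts = 20, cost_basis = 10.00, avg_price = 0.50
// avg_price is always cost_basis / contracts, so later fills at a different price
// move the average rather than overwriting it.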
| self.poly_yes.cost_basis + self.poly_no.cost_basis + 110 | self.total_fees 111 | } 112 | 113 | /// For a proper arb (YES on one platform + NO on other), one side always wins 114 | /// This calculates the guaranteed profit assuming the arb is balanced 115 | pub fn guaranteed_profit(&self) -> f64 { 116 | // In a balanced arb: we hold equal YES on platform A and NO on platform B 117 | // Regardless of outcome, we get $1 per contract pair 118 | let balanced_contracts = self.matched_contracts(); 119 | balanced_contracts - self.total_cost() 120 | } 121 | 122 | /// Number of matched contract pairs (min of YES and NO across platforms) 123 | pub fn matched_contracts(&self) -> f64 { 124 | let yes_total = self.kalshi_yes.contracts + self.poly_yes.contracts; 125 | let no_total = self.kalshi_no.contracts + self.poly_no.contracts; 126 | yes_total.min(no_total) 127 | } 128 | 129 | /// Unmatched exposure (contracts without offsetting position) 130 | pub fn unmatched_exposure(&self) -> f64 { 131 | let yes_total = self.kalshi_yes.contracts + self.poly_yes.contracts; 132 | let no_total = self.kalshi_no.contracts + self.poly_no.contracts; 133 | (yes_total - no_total).abs() 134 | } 135 | 136 | /// Mark position as resolved with outcome 137 | pub fn resolve(&mut self, outcome_yes_won: bool) { 138 | let payout = if outcome_yes_won { 139 | // YES won: Kalshi YES + Poly YES pay out 140 | self.kalshi_yes.contracts + self.poly_yes.contracts 141 | } else { 142 | // NO won: Kalshi NO + Poly NO pay out 143 | self.kalshi_no.contracts + self.poly_no.contracts 144 | }; 145 | 146 | self.realized_pnl = Some(payout - self.total_cost()); 147 | self.status = "resolved".to_string(); 148 | } 149 | } 150 | 151 | /// Summary of all positions 152 | #[derive(Debug, Clone, Serialize, Deserialize, Default)] 153 | #[allow(dead_code)] 154 | pub struct PositionSummary { 155 | /// Total cost basis across all open positions 156 | pub total_cost_basis: f64, 157 | 158 | /// Total guaranteed profit from matched arbs 159 | pub total_guaranteed_profit: f64, 160 | 161 | /// Total unmatched exposure (risk) 162 | pub total_unmatched_exposure: f64, 163 | 164 | /// Total realized P&L from closed/resolved positions 165 | pub realized_pnl: f64, 166 | 167 | /// Number of open positions 168 | pub open_positions: usize, 169 | 170 | /// Number of resolved positions 171 | pub resolved_positions: usize, 172 | 173 | /// Total contracts held 174 | pub total_contracts: f64, 175 | } 176 | 177 | /// Position tracker with persistence 178 | #[derive(Debug, Serialize, Deserialize)] 179 | pub struct PositionTracker { 180 | /// All positions keyed by market_id 181 | positions: HashMap, 182 | 183 | /// Daily realized P&L 184 | pub daily_realized_pnl: f64, 185 | 186 | /// Daily trading date (for reset) 187 | pub trading_date: String, 188 | 189 | /// Cumulative all-time P&L 190 | pub all_time_pnl: f64, 191 | } 192 | 193 | /// Data structure for serialization 194 | #[derive(Serialize)] 195 | struct SaveData { 196 | positions: HashMap, 197 | daily_realized_pnl: f64, 198 | trading_date: String, 199 | all_time_pnl: f64, 200 | } 201 | 202 | impl Default for PositionTracker { 203 | fn default() -> Self { 204 | Self::new() 205 | } 206 | } 207 | 208 | #[allow(dead_code)] 209 | impl PositionTracker { 210 | pub fn new() -> Self { 211 | Self { 212 | positions: HashMap::new(), 213 | daily_realized_pnl: 0.0, 214 | trading_date: today_string(), 215 | all_time_pnl: 0.0, 216 | } 217 | } 218 | 219 | /// Load from file or create new 220 | pub fn load() -> Self { 221 | 
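// Sketch of the positions.json layout this tracker round-trips (field names follow
// the struct definitions above; the market_id key and all values are illustrative):
// {
//   "positions": {
//     "KXEPLGAME-25DEC27CFCAVL-CFC": {
//       "market_id": "KXEPLGAME-25DEC27CFCAVL-CFC",
//       "description": "Chelsea vs Aston Villa - Chelsea",
//       "kalshi_yes": { "contracts": 0.0,  "cost_basis": 0.0, "avg_price": 0.0  },
//       "kalshi_no":  { "contracts": 10.0, "cost_basis": 5.0, "avg_price": 0.5  },
//       "poly_yes":   { "contracts": 10.0, "cost_basis": 4.5, "avg_price": 0.45 },
//       "poly_no":    { "contracts": 0.0,  "cost_basis": 0.0, "avg_price": 0.0  },
//       "total_fees": 0.07,
//       "opened_at": "2025-12-27T18:00:00+00:00",
//       "status": "open",
//       "realized_pnl": null
//     }
//   },
//   "daily_realized_pnl": 0.0,
//   "trading_date": "2025-12-27",
//   "all_time_pnl": 0.0
// }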
Self::load_from(POSITION_FILE) 222 | } 223 | 224 | pub fn load_from>(path: P) -> Self { 225 | match std::fs::read_to_string(path.as_ref()) { 226 | Ok(contents) => { 227 | match serde_json::from_str::(&contents) { 228 | Ok(mut tracker) => { 229 | // Check if we need to reset daily P&L 230 | let today = today_string(); 231 | if tracker.trading_date != today { 232 | info!("[POSITIONS] New trading day, resetting daily P&L"); 233 | tracker.daily_realized_pnl = 0.0; 234 | tracker.trading_date = today; 235 | } 236 | info!("[POSITIONS] Loaded {} positions from {:?}", 237 | tracker.positions.len(), path.as_ref()); 238 | tracker 239 | } 240 | Err(e) => { 241 | warn!("[POSITIONS] Failed to parse positions file: {}", e); 242 | Self::new() 243 | } 244 | } 245 | } 246 | Err(_) => { 247 | info!("[POSITIONS] No positions file found, starting fresh"); 248 | Self::new() 249 | } 250 | } 251 | } 252 | 253 | /// Save to file 254 | pub fn save(&self) -> Result<()> { 255 | self.save_to(POSITION_FILE) 256 | } 257 | 258 | pub fn save_to>(&self, path: P) -> Result<()> { 259 | let json = serde_json::to_string_pretty(self)?; 260 | std::fs::write(path, json)?; 261 | Ok(()) 262 | } 263 | 264 | /// Save positions 265 | pub fn save_async(&self) { 266 | // Clone data for serialization 267 | let data = SaveData { 268 | positions: self.positions.clone(), 269 | daily_realized_pnl: self.daily_realized_pnl, 270 | trading_date: self.trading_date.clone(), 271 | all_time_pnl: self.all_time_pnl, 272 | }; 273 | // Try to spawn on runtime; if no runtime, save synchronously 274 | if tokio::runtime::Handle::try_current().is_ok() { 275 | tokio::spawn(async move { 276 | if let Ok(json) = serde_json::to_string_pretty(&data) { 277 | let _ = tokio::fs::write(POSITION_FILE, json).await; 278 | } 279 | }); 280 | } else if let Ok(json) = serde_json::to_string_pretty(&data) { 281 | let _ = std::fs::write(POSITION_FILE, json); 282 | } 283 | } 284 | 285 | /// Record a fill 286 | pub fn record_fill(&mut self, fill: &FillRecord) { 287 | self.record_fill_internal(fill); 288 | self.save_async(); 289 | } 290 | 291 | /// Record a fill without saving 292 | pub fn record_fill_internal(&mut self, fill: &FillRecord) { 293 | let position = self.positions 294 | .entry(fill.market_id.clone()) 295 | .or_insert_with(|| ArbPosition::new(&fill.market_id, &fill.description)); 296 | 297 | match (fill.platform.as_str(), fill.side.as_str()) { 298 | ("kalshi", "yes") => position.kalshi_yes.add(fill.contracts, fill.price), 299 | ("kalshi", "no") => position.kalshi_no.add(fill.contracts, fill.price), 300 | ("polymarket", "yes") => position.poly_yes.add(fill.contracts, fill.price), 301 | ("polymarket", "no") => position.poly_no.add(fill.contracts, fill.price), 302 | _ => warn!("[POSITIONS] Unknown platform/side: {}/{}", fill.platform, fill.side), 303 | } 304 | 305 | position.total_fees += fill.fees; 306 | 307 | info!("[POSITIONS] Recorded fill: {} {} {} @{:.1}¢ x{:.0} (fees: ${:.4})", 308 | fill.platform, fill.side, fill.market_id, 309 | fill.price * 100.0, fill.contracts, fill.fees); 310 | } 311 | 312 | /// Get or create position for a market 313 | pub fn get_or_create(&mut self, market_id: &str, description: &str) -> &mut ArbPosition { 314 | self.positions 315 | .entry(market_id.to_string()) 316 | .or_insert_with(|| ArbPosition::new(market_id, description)) 317 | } 318 | 319 | /// Get position (if exists) 320 | pub fn get(&self, market_id: &str) -> Option<&ArbPosition> { 321 | self.positions.get(market_id) 322 | } 323 | 324 | /// Mark a position as resolved 325 | pub 
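// Illustrative call sequence (hypothetical tickers and values) for recording both
// legs of a cross-platform arb; FillRecord::new is defined later in this file:
//   tracker.record_fill(&FillRecord::new(
//       "KXEPLGAME-25DEC27CFCAVL-CFC", "Chelsea vs Aston Villa - Chelsea",
//       "polymarket", "yes", 10.0, 0.45, 0.0, "poly-order-1"));
//   tracker.record_fill(&FillRecord::new(
//       "KXEPLGAME-25DEC27CFCAVL-CFC", "Chelsea vs Aston Villa - Chelsea",
//       "kalshi", "no", 10.0, 0.50, 0.07, "kalshi-order-1"));
// Both fills land in the same ArbPosition (keyed by market_id), giving
// matched_contracts() == 10.0 and total_fees == 0.07.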
fn resolve_position(&mut self, market_id: &str, yes_won: bool) -> Option { 326 | if let Some(position) = self.positions.get_mut(market_id) { 327 | position.resolve(yes_won); 328 | let pnl = position.realized_pnl.unwrap_or(0.0); 329 | 330 | self.daily_realized_pnl += pnl; 331 | self.all_time_pnl += pnl; 332 | 333 | info!("[POSITIONS] Resolved {}: {} won, P&L: ${:.2}", 334 | market_id, if yes_won { "YES" } else { "NO" }, pnl); 335 | 336 | self.save_async(); 337 | Some(pnl) 338 | } else { 339 | None 340 | } 341 | } 342 | 343 | /// Get summary statistics 344 | pub fn summary(&self) -> PositionSummary { 345 | let mut summary = PositionSummary::default(); 346 | 347 | for position in self.positions.values() { 348 | match position.status.as_str() { 349 | "open" => { 350 | summary.open_positions += 1; 351 | summary.total_cost_basis += position.total_cost(); 352 | summary.total_guaranteed_profit += position.guaranteed_profit(); 353 | summary.total_unmatched_exposure += position.unmatched_exposure(); 354 | summary.total_contracts += position.total_contracts(); 355 | } 356 | "resolved" => { 357 | summary.resolved_positions += 1; 358 | summary.realized_pnl += position.realized_pnl.unwrap_or(0.0); 359 | } 360 | _ => {} 361 | } 362 | } 363 | 364 | summary 365 | } 366 | 367 | /// Get all open positions 368 | pub fn open_positions(&self) -> Vec<&ArbPosition> { 369 | self.positions.values() 370 | .filter(|p| p.status == "open") 371 | .collect() 372 | } 373 | 374 | /// Daily P&L (realized only) 375 | pub fn daily_pnl(&self) -> f64 { 376 | self.daily_realized_pnl 377 | } 378 | 379 | /// Reset daily counters (call at midnight) 380 | pub fn reset_daily(&mut self) { 381 | self.daily_realized_pnl = 0.0; 382 | self.trading_date = today_string(); 383 | self.save_async(); 384 | } 385 | } 386 | 387 | /// Record of a single fill 388 | #[derive(Debug, Clone)] 389 | pub struct FillRecord { 390 | pub market_id: String, 391 | pub description: String, 392 | pub platform: String, // "kalshi" or "polymarket" 393 | pub side: String, // "yes" or "no" 394 | pub contracts: f64, 395 | pub price: f64, 396 | pub fees: f64, 397 | #[allow(dead_code)] 398 | pub order_id: String, 399 | #[allow(dead_code)] 400 | pub timestamp: String, 401 | } 402 | 403 | impl FillRecord { 404 | pub fn new( 405 | market_id: &str, 406 | description: &str, 407 | platform: &str, 408 | side: &str, 409 | contracts: f64, 410 | price: f64, 411 | fees: f64, 412 | order_id: &str, 413 | ) -> Self { 414 | Self { 415 | market_id: market_id.to_string(), 416 | description: description.to_string(), 417 | platform: platform.to_string(), 418 | side: side.to_string(), 419 | contracts, 420 | price, 421 | fees, 422 | order_id: order_id.to_string(), 423 | timestamp: chrono::Utc::now().to_rfc3339(), 424 | } 425 | } 426 | } 427 | 428 | #[allow(dead_code)] 429 | pub type SharedPositionTracker = Arc>; 430 | 431 | #[allow(dead_code)] 432 | pub fn create_position_tracker() -> SharedPositionTracker { 433 | Arc::new(RwLock::new(PositionTracker::load())) 434 | } 435 | 436 | fn today_string() -> String { 437 | chrono::Utc::now().format("%Y-%m-%d").to_string() 438 | } 439 | 440 | #[derive(Clone)] 441 | pub struct PositionChannel { 442 | tx: mpsc::UnboundedSender, 443 | } 444 | 445 | impl PositionChannel { 446 | pub fn new(tx: mpsc::UnboundedSender) -> Self { 447 | Self { tx } 448 | } 449 | 450 | #[inline] 451 | pub fn record_fill(&self, fill: FillRecord) { 452 | let _ = self.tx.send(fill); 453 | } 454 | } 455 | 456 | pub fn create_position_channel() -> (PositionChannel, 
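// Illustrative wiring sketch (the helper name is hypothetical, not part of the
// original file): build the shared tracker, spawn the batched writer loop, and
// hand the cheap-to-clone channel to hot paths. Must be called from within the
// Tokio runtime because it uses tokio::spawn.
pub fn spawn_position_pipeline() -> (PositionChannel, SharedPositionTracker) {
    let tracker = create_position_tracker();
    let (channel, fill_rx) = create_position_channel();
    // Fills sent on `channel` are batched and persisted by the writer loop.
    tokio::spawn(position_writer_loop(fill_rx, tracker.clone()));
    (channel, tracker)
}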
mpsc::UnboundedReceiver) { 457 | let (tx, rx) = mpsc::unbounded_channel(); 458 | (PositionChannel::new(tx), rx) 459 | } 460 | 461 | pub async fn position_writer_loop( 462 | mut rx: mpsc::UnboundedReceiver, 463 | tracker: Arc>, 464 | ) { 465 | let mut batch = Vec::with_capacity(16); 466 | let mut interval = tokio::time::interval(Duration::from_millis(100)); 467 | 468 | loop { 469 | tokio::select! { 470 | biased; 471 | 472 | Some(fill) = rx.recv() => { 473 | batch.push(fill); 474 | if batch.len() >= 16 { 475 | let mut guard = tracker.write().await; 476 | for fill in batch.drain(..) { 477 | guard.record_fill_internal(&fill); 478 | } 479 | guard.save_async(); 480 | } 481 | } 482 | _ = interval.tick() => { 483 | if !batch.is_empty() { 484 | let mut guard = tracker.write().await; 485 | for fill in batch.drain(..) { 486 | guard.record_fill_internal(&fill); 487 | } 488 | guard.save_async(); 489 | } 490 | } 491 | } 492 | } 493 | } 494 | 495 | #[cfg(test)] 496 | mod tests { 497 | use super::*; 498 | 499 | #[test] 500 | fn test_position_leg() { 501 | let mut leg = PositionLeg::default(); 502 | leg.add(10.0, 0.45); // Buy 10 contracts at 45¢ 503 | 504 | assert_eq!(leg.contracts, 10.0); 505 | assert!((leg.cost_basis - 4.50).abs() < 0.001); 506 | assert!((leg.avg_price - 0.45).abs() < 0.001); 507 | 508 | // Profit if this leg wins 509 | assert!((leg.profit_if_win() - 5.50).abs() < 0.001); // $10 payout - $4.50 cost 510 | } 511 | 512 | #[test] 513 | fn test_arb_position_guaranteed_profit() { 514 | let mut pos = ArbPosition::new("TEST-MARKET", "Test"); 515 | 516 | // Buy 10 YES on Poly at 45¢ 517 | pos.poly_yes.add(10.0, 0.45); 518 | 519 | // Buy 10 NO on Kalshi at 50¢ 520 | pos.kalshi_no.add(10.0, 0.50); 521 | 522 | // Total cost: $4.50 + $5.00 = $9.50 523 | // Guaranteed payout: $10.00 (one side wins) 524 | // Guaranteed profit: $0.50 525 | 526 | assert!((pos.total_cost() - 9.50).abs() < 0.001); 527 | assert!((pos.matched_contracts() - 10.0).abs() < 0.001); 528 | assert!((pos.guaranteed_profit() - 0.50).abs() < 0.001); 529 | assert!((pos.unmatched_exposure() - 0.0).abs() < 0.001); 530 | } 531 | 532 | #[test] 533 | fn test_unmatched_exposure() { 534 | let mut pos = ArbPosition::new("TEST-MARKET", "Test"); 535 | 536 | // Buy 10 YES on Poly 537 | pos.poly_yes.add(10.0, 0.45); 538 | 539 | // Buy only 8 NO on Kalshi (partial fill) 540 | pos.kalshi_no.add(8.0, 0.50); 541 | 542 | // Matched: 8, Unmatched: 2 543 | assert!((pos.matched_contracts() - 8.0).abs() < 0.001); 544 | assert!((pos.unmatched_exposure() - 2.0).abs() < 0.001); 545 | } 546 | 547 | #[test] 548 | fn test_resolution() { 549 | let mut pos = ArbPosition::new("TEST-MARKET", "Test"); 550 | pos.poly_yes.add(10.0, 0.45); 551 | pos.kalshi_no.add(10.0, 0.50); 552 | 553 | // YES wins 554 | pos.resolve(true); 555 | 556 | // Payout: 10 (poly_yes wins) 557 | // Cost: 9.50 558 | // P&L: +0.50 559 | assert!((pos.realized_pnl.unwrap() - 0.50).abs() < 0.001); 560 | assert_eq!(pos.status, "resolved"); 561 | } 562 | } -------------------------------------------------------------------------------- /src/kalshi.rs: -------------------------------------------------------------------------------- 1 | // src/kalshi.rs 2 | // Kalshi WebSocket and API client 3 | 4 | use anyhow::{Context, Result}; 5 | use base64::{engine::general_purpose::STANDARD as BASE64, Engine}; 6 | use futures_util::{SinkExt, StreamExt}; 7 | use pkcs1::DecodeRsaPrivateKey; 8 | use rsa::{ 9 | pss::SigningKey, 10 | sha2::Sha256, 11 | signature::{RandomizedSigner, SignatureEncoding}, 12 | 
RsaPrivateKey, 13 | }; 14 | use serde::{Deserialize, Serialize}; 15 | use std::sync::Arc; 16 | use std::time::{Duration, SystemTime, UNIX_EPOCH}; 17 | use tokio::sync::mpsc; 18 | use tokio_tungstenite::{connect_async, tungstenite::{http::Request, Message}}; 19 | use tracing::{debug, error, info}; 20 | 21 | use crate::config::{KALSHI_WS_URL, KALSHI_API_BASE, KALSHI_API_DELAY_MS}; 22 | use crate::execution::NanoClock; 23 | use crate::types::{ 24 | KalshiEventsResponse, KalshiMarketsResponse, KalshiEvent, KalshiMarket, 25 | GlobalState, FastExecutionRequest, ArbType, PriceCents, SizeCents, fxhash_str, 26 | }; 27 | 28 | // === Order Types === 29 | 30 | use std::borrow::Cow; 31 | use std::fmt::Write; 32 | use arrayvec::ArrayString; 33 | 34 | #[derive(Debug, Clone, Serialize)] 35 | pub struct KalshiOrderRequest<'a> { 36 | pub ticker: Cow<'a, str>, 37 | pub action: &'static str, 38 | pub side: &'static str, 39 | #[serde(rename = "type")] 40 | pub order_type: &'static str, 41 | pub count: i64, 42 | #[serde(skip_serializing_if = "Option::is_none")] 43 | pub yes_price: Option, 44 | #[serde(skip_serializing_if = "Option::is_none")] 45 | pub no_price: Option, 46 | pub client_order_id: Cow<'a, str>, 47 | #[serde(skip_serializing_if = "Option::is_none")] 48 | pub expiration_ts: Option, 49 | #[serde(skip_serializing_if = "Option::is_none")] 50 | pub time_in_force: Option<&'static str>, 51 | } 52 | 53 | impl<'a> KalshiOrderRequest<'a> { 54 | /// Create an IOC (immediate-or-cancel) buy order 55 | pub fn ioc_buy(ticker: Cow<'a, str>, side: &'static str, price_cents: i64, count: i64, client_order_id: Cow<'a, str>) -> Self { 56 | let (yes_price, no_price) = if side == "yes" { 57 | (Some(price_cents), None) 58 | } else { 59 | (None, Some(price_cents)) 60 | }; 61 | 62 | Self { 63 | ticker, 64 | action: "buy", 65 | side, 66 | order_type: "limit", 67 | count, 68 | yes_price, 69 | no_price, 70 | client_order_id, 71 | expiration_ts: None, 72 | time_in_force: Some("immediate_or_cancel"), 73 | } 74 | } 75 | 76 | /// Create an IOC (immediate-or-cancel) sell order 77 | pub fn ioc_sell(ticker: Cow<'a, str>, side: &'static str, price_cents: i64, count: i64, client_order_id: Cow<'a, str>) -> Self { 78 | let (yes_price, no_price) = if side == "yes" { 79 | (Some(price_cents), None) 80 | } else { 81 | (None, Some(price_cents)) 82 | }; 83 | 84 | Self { 85 | ticker, 86 | action: "sell", 87 | side, 88 | order_type: "limit", 89 | count, 90 | yes_price, 91 | no_price, 92 | client_order_id, 93 | expiration_ts: None, 94 | time_in_force: Some("immediate_or_cancel"), 95 | } 96 | } 97 | } 98 | 99 | #[derive(Debug, Clone, Deserialize)] 100 | pub struct KalshiOrderResponse { 101 | pub order: KalshiOrderDetails, 102 | } 103 | 104 | #[allow(dead_code)] 105 | #[derive(Debug, Clone, Deserialize)] 106 | pub struct KalshiOrderDetails { 107 | pub order_id: String, 108 | pub ticker: String, 109 | pub status: String, // "resting", "canceled", "executed", "pending" 110 | #[serde(default)] 111 | pub remaining_count: Option, 112 | #[serde(default)] 113 | pub queue_position: Option, 114 | pub action: String, 115 | pub side: String, 116 | #[serde(rename = "type")] 117 | pub order_type: String, 118 | pub yes_price: Option, 119 | pub no_price: Option, 120 | pub created_time: Option, 121 | #[serde(default)] 122 | pub taker_fill_count: Option, 123 | #[serde(default)] 124 | pub maker_fill_count: Option, 125 | #[serde(default)] 126 | pub place_count: Option, 127 | #[serde(default)] 128 | pub taker_fill_cost: Option, 129 | #[serde(default)] 130 | pub 
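// Example of the JSON body produced by KalshiOrderRequest::ioc_buy above (serde
// output per the field attributes; ticker, price, count, and order id are
// illustrative):
// {
//   "ticker": "KXEPLGAME-25DEC27CFCAVL-CFC",
//   "action": "buy",
//   "side": "yes",
//   "type": "limit",
//   "count": 10,
//   "yes_price": 45,
//   "client_order_id": "a17350000000",
//   "time_in_force": "immediate_or_cancel"
// }
// no_price and expiration_ts are omitted entirely because they are None
// (skip_serializing_if = "Option::is_none").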
maker_fill_cost: Option, 131 | } 132 | 133 | #[allow(dead_code)] 134 | impl KalshiOrderDetails { 135 | /// Total filled contracts 136 | pub fn filled_count(&self) -> i64 { 137 | self.taker_fill_count.unwrap_or(0) + self.maker_fill_count.unwrap_or(0) 138 | } 139 | 140 | /// Check if order was fully filled 141 | pub fn is_filled(&self) -> bool { 142 | self.status == "executed" || self.remaining_count == Some(0) 143 | } 144 | 145 | /// Check if order was partially filled 146 | pub fn is_partial(&self) -> bool { 147 | self.filled_count() > 0 && !self.is_filled() 148 | } 149 | } 150 | 151 | // === Kalshi Auth Config === 152 | 153 | pub struct KalshiConfig { 154 | pub api_key_id: String, 155 | pub private_key: RsaPrivateKey, 156 | } 157 | 158 | impl KalshiConfig { 159 | pub fn from_env() -> Result { 160 | dotenvy::dotenv().ok(); 161 | let api_key_id = std::env::var("KALSHI_API_KEY_ID").context("KALSHI_API_KEY_ID not set")?; 162 | // Support both KALSHI_PRIVATE_KEY_PATH and KALSHI_PRIVATE_KEY_FILE for compatibility 163 | let key_path = std::env::var("KALSHI_PRIVATE_KEY_PATH") 164 | .or_else(|_| std::env::var("KALSHI_PRIVATE_KEY_FILE")) 165 | .unwrap_or_else(|_| "kalshi_private_key.txt".to_string()); 166 | let private_key_pem = std::fs::read_to_string(&key_path) 167 | .with_context(|| format!("Failed to read private key from {}", key_path))? 168 | .trim() 169 | .to_owned(); 170 | let private_key = RsaPrivateKey::from_pkcs1_pem(&private_key_pem) 171 | .context("Failed to parse private key PEM")?; 172 | Ok(Self { api_key_id, private_key }) 173 | } 174 | 175 | pub fn sign(&self, message: &str) -> Result { 176 | tracing::debug!("[KALSHI-DEBUG] Signing message: {}", message); 177 | let signing_key = SigningKey::::new(self.private_key.clone()); 178 | let signature = signing_key.sign_with_rng(&mut rand::thread_rng(), message.as_bytes()); 179 | let sig_b64 = BASE64.encode(signature.to_bytes()); 180 | tracing::debug!("[KALSHI-DEBUG] Signature (first 50 chars): {}...", &sig_b64[..50.min(sig_b64.len())]); 181 | Ok(sig_b64) 182 | } 183 | } 184 | 185 | // === Kalshi REST API Client === 186 | 187 | /// Timeout for order requests (shorter than general API timeout) 188 | const ORDER_TIMEOUT: Duration = Duration::from_secs(5); 189 | 190 | use std::sync::atomic::{AtomicU32, Ordering}; 191 | 192 | /// Global order counter for unique client_order_id generation 193 | static ORDER_COUNTER: AtomicU32 = AtomicU32::new(0); 194 | 195 | pub struct KalshiApiClient { 196 | http: reqwest::Client, 197 | pub config: KalshiConfig, 198 | } 199 | 200 | impl KalshiApiClient { 201 | pub fn new(config: KalshiConfig) -> Self { 202 | Self { 203 | http: reqwest::Client::builder() 204 | .timeout(Duration::from_secs(10)) 205 | .build() 206 | .expect("Failed to build HTTP client"), 207 | config, 208 | } 209 | } 210 | 211 | #[inline] 212 | fn next_order_id() -> ArrayString<24> { 213 | let counter = ORDER_COUNTER.fetch_add(1, Ordering::Relaxed); 214 | let ts = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); 215 | let mut buf = ArrayString::<24>::new(); 216 | let _ = write!(&mut buf, "a{}{}", ts, counter); 217 | buf 218 | } 219 | 220 | /// Generic authenticated GET request with retry on rate limit 221 | async fn get(&self, path: &str) -> Result { 222 | let mut retries = 0; 223 | const MAX_RETRIES: u32 = 5; 224 | 225 | loop { 226 | let url = format!("{}{}", KALSHI_API_BASE, path); 227 | let timestamp_ms = SystemTime::now() 228 | .duration_since(UNIX_EPOCH) 229 | .unwrap() 230 | .as_millis() as u64; 231 | // Kalshi signature 
uses FULL path including /trade-api/v2 prefix 232 | let full_path = format!("/trade-api/v2{}", path); 233 | let signature = self.config.sign(&format!("{}GET{}", timestamp_ms, full_path))?; 234 | 235 | let resp = self.http 236 | .get(&url) 237 | .header("KALSHI-ACCESS-KEY", &self.config.api_key_id) 238 | .header("KALSHI-ACCESS-SIGNATURE", &signature) 239 | .header("KALSHI-ACCESS-TIMESTAMP", timestamp_ms.to_string()) 240 | .send() 241 | .await?; 242 | 243 | let status = resp.status(); 244 | 245 | // Handle rate limit with exponential backoff 246 | if status == reqwest::StatusCode::TOO_MANY_REQUESTS { 247 | retries += 1; 248 | if retries > MAX_RETRIES { 249 | anyhow::bail!("Kalshi API rate limited after {} retries", MAX_RETRIES); 250 | } 251 | let backoff_ms = 2000 * (1 << retries); // 4s, 8s, 16s, 32s, 64s 252 | debug!("[KALSHI] Rate limited, backing off {}ms (retry {}/{})", 253 | backoff_ms, retries, MAX_RETRIES); 254 | tokio::time::sleep(Duration::from_millis(backoff_ms)).await; 255 | continue; 256 | } 257 | 258 | if !status.is_success() { 259 | let body = resp.text().await.unwrap_or_default(); 260 | anyhow::bail!("Kalshi API error {}: {}", status, body); 261 | } 262 | 263 | let data: T = resp.json().await?; 264 | tokio::time::sleep(Duration::from_millis(KALSHI_API_DELAY_MS)).await; 265 | return Ok(data); 266 | } 267 | } 268 | 269 | pub async fn get_events(&self, series_ticker: &str, limit: u32) -> Result> { 270 | let path = format!("/events?series_ticker={}&limit={}&status=open", series_ticker, limit); 271 | let resp: KalshiEventsResponse = self.get(&path).await?; 272 | Ok(resp.events) 273 | } 274 | 275 | pub async fn get_markets(&self, event_ticker: &str) -> Result> { 276 | let path = format!("/markets?event_ticker={}", event_ticker); 277 | let resp: KalshiMarketsResponse = self.get(&path).await?; 278 | Ok(resp.markets) 279 | } 280 | 281 | /// Generic authenticated POST request 282 | async fn post(&self, path: &str, body: &B) -> Result { 283 | let url = format!("{}{}", KALSHI_API_BASE, path); 284 | let timestamp_ms = SystemTime::now() 285 | .duration_since(UNIX_EPOCH) 286 | .unwrap() 287 | .as_millis() as u64; 288 | // Kalshi signature uses FULL path including /trade-api/v2 prefix 289 | let full_path = format!("/trade-api/v2{}", path); 290 | let msg = format!("{}POST{}", timestamp_ms, full_path); 291 | let signature = self.config.sign(&msg)?; 292 | 293 | let resp = self.http 294 | .post(&url) 295 | .header("KALSHI-ACCESS-KEY", &self.config.api_key_id) 296 | .header("KALSHI-ACCESS-SIGNATURE", &signature) 297 | .header("KALSHI-ACCESS-TIMESTAMP", timestamp_ms.to_string()) 298 | .header("Content-Type", "application/json") 299 | .timeout(ORDER_TIMEOUT) 300 | .json(body) 301 | .send() 302 | .await?; 303 | 304 | let status = resp.status(); 305 | if !status.is_success() { 306 | let body = resp.text().await.unwrap_or_default(); 307 | anyhow::bail!("Kalshi API error {}: {}", status, body); 308 | } 309 | 310 | let data: T = resp.json().await?; 311 | Ok(data) 312 | } 313 | 314 | /// Create an order on Kalshi 315 | pub async fn create_order(&self, order: &KalshiOrderRequest<'_>) -> Result { 316 | let path = "/portfolio/orders"; 317 | self.post(path, order).await 318 | } 319 | 320 | /// Create an IOC buy order (convenience method) 321 | pub async fn buy_ioc( 322 | &self, 323 | ticker: &str, 324 | side: &str, // "yes" or "no" 325 | price_cents: i64, 326 | count: i64, 327 | ) -> Result { 328 | debug_assert!(!ticker.is_empty(), "ticker must not be empty"); 329 | debug_assert!(price_cents >= 1 && 
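// Signing sketch (timestamp illustrative): for the authenticated GET helper above,
// a get_events call signs the string
//   "{timestamp_ms}GET/trade-api/v2/events?series_ticker=KXEPLGAME&limit=50&status=open"
// with RSA-PSS(SHA-256) and base64-encodes it; order placement signs
//   "{timestamp_ms}POST/trade-api/v2/portfolio/orders"
// The key id, signature, and timestamp then travel in the KALSHI-ACCESS-* headers.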
price_cents <= 99, "price must be 1-99"); 330 | debug_assert!(count >= 1, "count must be >= 1"); 331 | 332 | let side_static: &'static str = if side == "yes" { "yes" } else { "no" }; 333 | let order_id = Self::next_order_id(); 334 | let order = KalshiOrderRequest::ioc_buy( 335 | Cow::Borrowed(ticker), 336 | side_static, 337 | price_cents, 338 | count, 339 | Cow::Borrowed(&order_id) 340 | ); 341 | debug!("[KALSHI] IOC {} {} @{}¢ x{}", side, ticker, price_cents, count); 342 | 343 | let resp = self.create_order(&order).await?; 344 | debug!("[KALSHI] {} filled={}", resp.order.status, resp.order.filled_count()); 345 | Ok(resp) 346 | } 347 | 348 | pub async fn sell_ioc( 349 | &self, 350 | ticker: &str, 351 | side: &str, 352 | price_cents: i64, 353 | count: i64, 354 | ) -> Result { 355 | debug_assert!(!ticker.is_empty(), "ticker must not be empty"); 356 | debug_assert!(price_cents >= 1 && price_cents <= 99, "price must be 1-99"); 357 | debug_assert!(count >= 1, "count must be >= 1"); 358 | 359 | let side_static: &'static str = if side == "yes" { "yes" } else { "no" }; 360 | let order_id = Self::next_order_id(); 361 | let order = KalshiOrderRequest::ioc_sell( 362 | Cow::Borrowed(ticker), 363 | side_static, 364 | price_cents, 365 | count, 366 | Cow::Borrowed(&order_id) 367 | ); 368 | debug!("[KALSHI] SELL {} {} @{}¢ x{}", side, ticker, price_cents, count); 369 | 370 | let resp = self.create_order(&order).await?; 371 | debug!("[KALSHI] {} filled={}", resp.order.status, resp.order.filled_count()); 372 | Ok(resp) 373 | } 374 | } 375 | 376 | // === WebSocket Message Types === 377 | 378 | #[derive(Deserialize, Debug)] 379 | pub struct KalshiWsMessage { 380 | #[serde(rename = "type")] 381 | pub msg_type: String, 382 | pub msg: Option, 383 | } 384 | 385 | #[allow(dead_code)] 386 | #[derive(Deserialize, Debug)] 387 | pub struct KalshiWsMsgBody { 388 | pub market_ticker: Option, 389 | // Snapshot fields - arrays of [price_cents, quantity] 390 | pub yes: Option>>, 391 | pub no: Option>>, 392 | // Delta fields 393 | pub price: Option, 394 | pub delta: Option, 395 | pub side: Option, 396 | } 397 | 398 | #[derive(Serialize)] 399 | struct SubscribeCmd { 400 | id: i32, 401 | cmd: &'static str, 402 | params: SubscribeParams, 403 | } 404 | 405 | #[derive(Serialize)] 406 | struct SubscribeParams { 407 | channels: Vec<&'static str>, 408 | market_tickers: Vec, 409 | } 410 | 411 | // ============================================================================= 412 | // WebSocket Runner 413 | // ============================================================================= 414 | 415 | /// WebSocket runner 416 | pub async fn run_ws( 417 | config: &KalshiConfig, 418 | state: Arc, 419 | exec_tx: mpsc::Sender, 420 | threshold_cents: PriceCents, 421 | ) -> Result<()> { 422 | let tickers: Vec = state.markets.iter() 423 | .take(state.market_count()) 424 | .filter_map(|m| m.pair.as_ref().map(|p| p.kalshi_market_ticker.to_string())) 425 | .collect(); 426 | 427 | if tickers.is_empty() { 428 | info!("[KALSHI] No markets to monitor"); 429 | tokio::time::sleep(Duration::from_secs(u64::MAX)).await; 430 | return Ok(()); 431 | } 432 | 433 | let timestamp = SystemTime::now() 434 | .duration_since(UNIX_EPOCH)? 
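// Example of the subscription command serialized from the SubscribeCmd struct above
// and sent during run_ws below (tickers illustrative):
// {
//   "id": 1,
//   "cmd": "subscribe",
//   "params": {
//     "channels": ["orderbook_delta"],
//     "market_tickers": ["KXEPLGAME-25DEC27CFCAVL-CFC", "KXEPLGAME-25DEC27CFCAVL-AVL"]
//   }
// }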
435 | .as_millis() 436 | .to_string(); 437 | 438 | let signature = config.sign(&format!("{}GET/trade-api/ws/v2", timestamp))?; 439 | 440 | let request = Request::builder() 441 | .uri(KALSHI_WS_URL) 442 | .header("KALSHI-ACCESS-KEY", &config.api_key_id) 443 | .header("KALSHI-ACCESS-SIGNATURE", &signature) 444 | .header("KALSHI-ACCESS-TIMESTAMP", ×tamp) 445 | .header("Host", "api.elections.kalshi.com") 446 | .header("Connection", "Upgrade") 447 | .header("Upgrade", "websocket") 448 | .header("Sec-WebSocket-Version", "13") 449 | .header("Sec-WebSocket-Key", tokio_tungstenite::tungstenite::handshake::client::generate_key()) 450 | .body(())?; 451 | 452 | let (ws_stream, _) = connect_async(request).await.context("Failed to connect to Kalshi")?; 453 | info!("[KALSHI] Connected"); 454 | 455 | let (mut write, mut read) = ws_stream.split(); 456 | 457 | // Subscribe to all tickers 458 | let subscribe_msg = SubscribeCmd { 459 | id: 1, 460 | cmd: "subscribe", 461 | params: SubscribeParams { 462 | channels: vec!["orderbook_delta"], 463 | market_tickers: tickers.clone(), 464 | }, 465 | }; 466 | 467 | write.send(Message::Text(serde_json::to_string(&subscribe_msg)?)).await?; 468 | info!("[KALSHI] Subscribed to {} markets", tickers.len()); 469 | 470 | let clock = NanoClock::new(); 471 | 472 | while let Some(msg) = read.next().await { 473 | match msg { 474 | Ok(Message::Text(text)) => { 475 | match serde_json::from_str::(&text) { 476 | Ok(kalshi_msg) => { 477 | let ticker = kalshi_msg.msg.as_ref() 478 | .and_then(|m| m.market_ticker.as_ref()); 479 | 480 | let Some(ticker) = ticker else { continue }; 481 | let ticker_hash = fxhash_str(ticker); 482 | 483 | let Some(&market_id) = state.kalshi_to_id.get(&ticker_hash) else { continue }; 484 | let market = &state.markets[market_id as usize]; 485 | 486 | match kalshi_msg.msg_type.as_str() { 487 | "orderbook_snapshot" => { 488 | if let Some(body) = &kalshi_msg.msg { 489 | process_kalshi_snapshot(market, body); 490 | 491 | // Check for arbs 492 | let arb_mask = market.check_arbs(threshold_cents); 493 | if arb_mask != 0 { 494 | send_kalshi_arb_request(market_id, market, arb_mask, &exec_tx, &clock).await; 495 | } 496 | } 497 | } 498 | "orderbook_delta" => { 499 | if let Some(body) = &kalshi_msg.msg { 500 | process_kalshi_delta(market, body); 501 | 502 | let arb_mask = market.check_arbs(threshold_cents); 503 | if arb_mask != 0 { 504 | send_kalshi_arb_request(market_id, market, arb_mask, &exec_tx, &clock).await; 505 | } 506 | } 507 | } 508 | _ => {} 509 | } 510 | } 511 | Err(e) => { 512 | // Log at trace level - unknown message types are normal 513 | tracing::trace!("[KALSHI] WS parse error: {} (msg: {}...)", e, &text[..text.len().min(100)]); 514 | } 515 | } 516 | } 517 | Ok(Message::Ping(data)) => { 518 | let _ = write.send(Message::Pong(data)).await; 519 | } 520 | Err(e) => { 521 | error!("[KALSHI] WebSocket error: {}", e); 522 | break; 523 | } 524 | _ => {} 525 | } 526 | } 527 | 528 | Ok(()) 529 | } 530 | 531 | /// Process Kalshi orderbook snapshot 532 | /// Note: Kalshi sends BIDS - to buy YES you pay (100 - best_NO_bid), to buy NO you pay (100 - best_YES_bid) 533 | #[inline] 534 | fn process_kalshi_snapshot(market: &crate::types::AtomicMarketState, body: &KalshiWsMsgBody) { 535 | // Find best YES bid (highest price) - this determines NO ask 536 | let (no_ask, no_size) = body.yes.as_ref() 537 | .and_then(|levels| { 538 | levels.iter() 539 | .filter_map(|l| { 540 | if l.len() >= 2 && l[1] > 0 { // Has quantity 541 | Some((l[0], l[1])) // (price, qty) 542 | } else { 
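// Worked example of the bid→ask conversion used here: if the best resting YES bid
// is 62¢, buying NO costs 100 - 62 = 38¢; if the best NO bid is 55¢, buying YES
// costs 100 - 55 = 45¢. Sizes are scaled by the bid price, so a 200-contract YES
// bid at 62¢ is stored as 200 * 62 / 100 = 124.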
543 | None 544 | } 545 | }) 546 | .max_by_key(|(p, _)| *p) // Highest bid 547 | .map(|(price, qty)| { 548 | let ask = (100 - price) as PriceCents; // To buy NO, pay 100 - YES_bid 549 | let size = (qty * price / 100) as SizeCents; 550 | (ask, size) 551 | }) 552 | }) 553 | .unwrap_or((0, 0)); 554 | 555 | // Find best NO bid (highest price) - this determines YES ask 556 | let (yes_ask, yes_size) = body.no.as_ref() 557 | .and_then(|levels| { 558 | levels.iter() 559 | .filter_map(|l| { 560 | if l.len() >= 2 && l[1] > 0 { 561 | Some((l[0], l[1])) 562 | } else { 563 | None 564 | } 565 | }) 566 | .max_by_key(|(p, _)| *p) 567 | .map(|(price, qty)| { 568 | let ask = (100 - price) as PriceCents; // To buy YES, pay 100 - NO_bid 569 | let size = (qty * price / 100) as SizeCents; 570 | (ask, size) 571 | }) 572 | }) 573 | .unwrap_or((0, 0)); 574 | 575 | // Store 576 | market.kalshi.store(yes_ask, no_ask, yes_size, no_size); 577 | } 578 | 579 | /// Process Kalshi orderbook delta 580 | /// Note: Deltas update bid levels; we recompute asks from best bids 581 | #[inline] 582 | fn process_kalshi_delta(market: &crate::types::AtomicMarketState, body: &KalshiWsMsgBody) { 583 | // For deltas, recompute from snapshot-like format 584 | // Kalshi deltas have yes/no as arrays of [price, new_qty] 585 | let (current_yes, current_no, current_yes_size, current_no_size) = market.kalshi.load(); 586 | 587 | // Process YES bid updates (affects NO ask) 588 | let (no_ask, no_size) = if let Some(levels) = &body.yes { 589 | // Find best (highest) YES bid with non-zero quantity 590 | levels.iter() 591 | .filter_map(|l| { 592 | if l.len() >= 2 && l[1] > 0 { 593 | Some((l[0], l[1])) 594 | } else { 595 | None 596 | } 597 | }) 598 | .max_by_key(|(p, _)| *p) 599 | .map(|(price, qty)| { 600 | let ask = (100 - price) as PriceCents; 601 | let size = (qty * price / 100) as SizeCents; 602 | (ask, size) 603 | }) 604 | .unwrap_or((current_no, current_no_size)) 605 | } else { 606 | (current_no, current_no_size) 607 | }; 608 | 609 | // Process NO bid updates (affects YES ask) 610 | let (yes_ask, yes_size) = if let Some(levels) = &body.no { 611 | levels.iter() 612 | .filter_map(|l| { 613 | if l.len() >= 2 && l[1] > 0 { 614 | Some((l[0], l[1])) 615 | } else { 616 | None 617 | } 618 | }) 619 | .max_by_key(|(p, _)| *p) 620 | .map(|(price, qty)| { 621 | let ask = (100 - price) as PriceCents; 622 | let size = (qty * price / 100) as SizeCents; 623 | (ask, size) 624 | }) 625 | .unwrap_or((current_yes, current_yes_size)) 626 | } else { 627 | (current_yes, current_yes_size) 628 | }; 629 | 630 | market.kalshi.store(yes_ask, no_ask, yes_size, no_size); 631 | } 632 | 633 | /// Send arb request from Kalshi handler 634 | #[inline] 635 | async fn send_kalshi_arb_request( 636 | market_id: u16, 637 | market: &crate::types::AtomicMarketState, 638 | arb_mask: u8, 639 | exec_tx: &mpsc::Sender, 640 | clock: &NanoClock, 641 | ) { 642 | let (k_yes, k_no, k_yes_size, k_no_size) = market.kalshi.load(); 643 | let (p_yes, p_no, p_yes_size, p_no_size) = market.poly.load(); 644 | 645 | let (yes_price, no_price, yes_size, no_size, arb_type) = if arb_mask & 1 != 0 { 646 | (p_yes, k_no, p_yes_size, k_no_size, ArbType::PolyYesKalshiNo) 647 | } else if arb_mask & 2 != 0 { 648 | (k_yes, p_no, k_yes_size, p_no_size, ArbType::KalshiYesPolyNo) 649 | } else if arb_mask & 4 != 0 { 650 | (p_yes, p_no, p_yes_size, p_no_size, ArbType::PolyOnly) 651 | } else if arb_mask & 8 != 0 { 652 | (k_yes, k_no, k_yes_size, k_no_size, ArbType::KalshiOnly) 653 | } else { 654 | return; 655 | }; 656 | 
657 | let req = FastExecutionRequest { 658 | market_id, 659 | yes_price, 660 | no_price, 661 | yes_size, 662 | no_size, 663 | arb_type, 664 | detected_ns: clock.now_ns(), 665 | }; 666 | 667 | let _ = exec_tx.try_send(req); 668 | } -------------------------------------------------------------------------------- /src/discovery.rs: -------------------------------------------------------------------------------- 1 | // src/discovery.rs 2 | // Market discovery - matches Kalshi events to Polymarket markets 3 | 4 | use anyhow::Result; 5 | use futures_util::{stream, StreamExt}; 6 | use governor::{Quota, RateLimiter, state::NotKeyed, clock::DefaultClock, middleware::NoOpMiddleware}; 7 | use serde::{Serialize, Deserialize}; 8 | use std::num::NonZeroU32; 9 | use std::sync::Arc; 10 | use std::time::{SystemTime, UNIX_EPOCH}; 11 | use tokio::sync::Semaphore; 12 | use tracing::{info, warn}; 13 | 14 | use crate::cache::TeamCache; 15 | use crate::config::{LeagueConfig, get_league_configs, get_league_config}; 16 | use crate::kalshi::KalshiApiClient; 17 | use crate::polymarket::GammaClient; 18 | use crate::types::{MarketPair, MarketType, DiscoveryResult, KalshiMarket, KalshiEvent}; 19 | 20 | /// Max concurrent Gamma API requests 21 | const GAMMA_CONCURRENCY: usize = 20; 22 | 23 | /// Kalshi rate limit: 2 requests per second (very conservative - they rate limit aggressively) 24 | /// Must be conservative because discovery runs many leagues/series in parallel 25 | const KALSHI_RATE_LIMIT_PER_SEC: u32 = 2; 26 | 27 | /// Max concurrent Kalshi API requests GLOBALLY across all leagues/series 28 | /// This is the hard cap - prevents bursting even when rate limiter has tokens 29 | const KALSHI_GLOBAL_CONCURRENCY: usize = 1; 30 | 31 | /// Cache file path 32 | const DISCOVERY_CACHE_PATH: &str = ".discovery_cache.json"; 33 | 34 | /// Cache TTL in seconds (2 hours - new markets appear every ~2 hours) 35 | const CACHE_TTL_SECS: u64 = 2 * 60 * 60; 36 | 37 | /// Task for parallel Gamma lookup 38 | struct GammaLookupTask { 39 | event: Arc, 40 | market: KalshiMarket, 41 | poly_slug: String, 42 | market_type: MarketType, 43 | league: String, 44 | } 45 | 46 | /// Type alias for Kalshi rate limiter 47 | type KalshiRateLimiter = RateLimiter; 48 | 49 | /// Persistent cache for discovered market pairs 50 | #[derive(Debug, Clone, Serialize, Deserialize)] 51 | struct DiscoveryCache { 52 | /// Unix timestamp when cache was created 53 | timestamp_secs: u64, 54 | /// Cached market pairs 55 | pairs: Vec, 56 | /// Set of known Kalshi market tickers (for incremental updates) 57 | known_kalshi_tickers: Vec, 58 | } 59 | 60 | impl DiscoveryCache { 61 | fn new(pairs: Vec) -> Self { 62 | let known_kalshi_tickers: Vec = pairs.iter() 63 | .map(|p| p.kalshi_market_ticker.to_string()) 64 | .collect(); 65 | Self { 66 | timestamp_secs: current_unix_secs(), 67 | pairs, 68 | known_kalshi_tickers, 69 | } 70 | } 71 | 72 | fn is_expired(&self) -> bool { 73 | let now = current_unix_secs(); 74 | now.saturating_sub(self.timestamp_secs) > CACHE_TTL_SECS 75 | } 76 | 77 | fn age_secs(&self) -> u64 { 78 | current_unix_secs().saturating_sub(self.timestamp_secs) 79 | } 80 | 81 | fn has_ticker(&self, ticker: &str) -> bool { 82 | self.known_kalshi_tickers.iter().any(|t| t == ticker) 83 | } 84 | } 85 | 86 | fn current_unix_secs() -> u64 { 87 | SystemTime::now() 88 | .duration_since(UNIX_EPOCH) 89 | .unwrap_or_default() 90 | .as_secs() 91 | } 92 | 93 | /// Market discovery client 94 | pub struct DiscoveryClient { 95 | kalshi: Arc, 96 | gamma: Arc, 97 | pub 
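// Sketch of the .discovery_cache.json layout produced by DiscoveryCache above
// (MarketPair fields abbreviated; tickers and slug values are illustrative
// assumptions):
// {
//   "timestamp_secs": 1735000000,
//   "pairs": [
//     { "pair_id": "epl-che-avl-2025-12-27-che-KXEPLGAME-25DEC27CFCAVL-CFC",
//       "league": "EPL",
//       "kalshi_market_ticker": "KXEPLGAME-25DEC27CFCAVL-CFC",
//       "poly_slug": "epl-che-avl-2025-12-27-che",
//       ... }
//   ],
//   "known_kalshi_tickers": ["KXEPLGAME-25DEC27CFCAVL-CFC"]
// }
// A cache younger than CACHE_TTL_SECS (2 h) is used as-is; an older one triggers an
// incremental refresh rather than a full rediscovery.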
team_cache: Arc, 98 | kalshi_limiter: Arc, 99 | kalshi_semaphore: Arc, // Global concurrency limit for Kalshi 100 | gamma_semaphore: Arc, 101 | } 102 | 103 | impl DiscoveryClient { 104 | pub fn new(kalshi: KalshiApiClient, team_cache: TeamCache) -> Self { 105 | // Create token bucket rate limiter for Kalshi 106 | let quota = Quota::per_second(NonZeroU32::new(KALSHI_RATE_LIMIT_PER_SEC).unwrap()); 107 | let kalshi_limiter = Arc::new(RateLimiter::direct(quota)); 108 | 109 | Self { 110 | kalshi: Arc::new(kalshi), 111 | gamma: Arc::new(GammaClient::new()), 112 | team_cache: Arc::new(team_cache), 113 | kalshi_limiter, 114 | kalshi_semaphore: Arc::new(Semaphore::new(KALSHI_GLOBAL_CONCURRENCY)), 115 | gamma_semaphore: Arc::new(Semaphore::new(GAMMA_CONCURRENCY)), 116 | } 117 | } 118 | 119 | /// Load cache from disk (async) 120 | async fn load_cache() -> Option { 121 | let data = tokio::fs::read_to_string(DISCOVERY_CACHE_PATH).await.ok()?; 122 | serde_json::from_str(&data).ok() 123 | } 124 | 125 | /// Save cache to disk (async) 126 | async fn save_cache(cache: &DiscoveryCache) -> Result<()> { 127 | let data = serde_json::to_string_pretty(cache)?; 128 | tokio::fs::write(DISCOVERY_CACHE_PATH, data).await?; 129 | Ok(()) 130 | } 131 | 132 | /// Discover all market pairs with caching support 133 | /// 134 | /// Strategy: 135 | /// 1. Try to load cache from disk 136 | /// 2. If cache exists and is fresh (<2 hours), use it directly 137 | /// 3. If cache exists but is stale, load it + fetch incremental updates 138 | /// 4. If no cache, do full discovery 139 | pub async fn discover_all(&self, leagues: &[&str]) -> DiscoveryResult { 140 | // Try to load existing cache 141 | let cached = Self::load_cache().await; 142 | 143 | match cached { 144 | Some(cache) if !cache.is_expired() => { 145 | // Cache is fresh - use it directly 146 | info!("📂 Loaded {} pairs from cache (age: {}s)", 147 | cache.pairs.len(), cache.age_secs()); 148 | return DiscoveryResult { 149 | pairs: cache.pairs, 150 | kalshi_events_found: 0, // From cache 151 | poly_matches: 0, 152 | poly_misses: 0, 153 | errors: vec![], 154 | }; 155 | } 156 | Some(cache) => { 157 | // Cache is stale - do incremental discovery 158 | info!("📂 Cache expired (age: {}s), doing incremental refresh...", cache.age_secs()); 159 | return self.discover_incremental(leagues, cache).await; 160 | } 161 | None => { 162 | // No cache - do full discovery 163 | info!("📂 No cache found, doing full discovery..."); 164 | } 165 | } 166 | 167 | // Full discovery (no cache) 168 | let result = self.discover_full(leagues).await; 169 | 170 | // Save to cache 171 | if !result.pairs.is_empty() { 172 | let cache = DiscoveryCache::new(result.pairs.clone()); 173 | if let Err(e) = Self::save_cache(&cache).await { 174 | warn!("Failed to save discovery cache: {}", e); 175 | } else { 176 | info!("💾 Saved {} pairs to cache", result.pairs.len()); 177 | } 178 | } 179 | 180 | result 181 | } 182 | 183 | /// Force full discovery (ignores cache) 184 | pub async fn discover_all_force(&self, leagues: &[&str]) -> DiscoveryResult { 185 | info!("🔄 Forced full discovery (ignoring cache)..."); 186 | let result = self.discover_full(leagues).await; 187 | 188 | // Save to cache 189 | if !result.pairs.is_empty() { 190 | let cache = DiscoveryCache::new(result.pairs.clone()); 191 | if let Err(e) = Self::save_cache(&cache).await { 192 | warn!("Failed to save discovery cache: {}", e); 193 | } else { 194 | info!("💾 Saved {} pairs to cache", result.pairs.len()); 195 | } 196 | } 197 | 198 | result 199 | } 200 | 201 | /// 
Full discovery without cache 202 | async fn discover_full(&self, leagues: &[&str]) -> DiscoveryResult { 203 | let configs: Vec<_> = if leagues.is_empty() { 204 | get_league_configs() 205 | } else { 206 | leagues.iter() 207 | .filter_map(|l| get_league_config(l)) 208 | .collect() 209 | }; 210 | 211 | // Parallel discovery across all leagues 212 | let league_futures: Vec<_> = configs.iter() 213 | .map(|config| self.discover_league(config, None)) 214 | .collect(); 215 | 216 | let league_results = futures_util::future::join_all(league_futures).await; 217 | 218 | // Merge results 219 | let mut result = DiscoveryResult::default(); 220 | for league_result in league_results { 221 | result.pairs.extend(league_result.pairs); 222 | result.poly_matches += league_result.poly_matches; 223 | result.errors.extend(league_result.errors); 224 | } 225 | result.kalshi_events_found = result.pairs.len(); 226 | 227 | result 228 | } 229 | 230 | /// Incremental discovery - merge cached pairs with newly discovered ones 231 | async fn discover_incremental(&self, leagues: &[&str], cache: DiscoveryCache) -> DiscoveryResult { 232 | let configs: Vec<_> = if leagues.is_empty() { 233 | get_league_configs() 234 | } else { 235 | leagues.iter() 236 | .filter_map(|l| get_league_config(l)) 237 | .collect() 238 | }; 239 | 240 | // Discover with filter for known tickers 241 | let league_futures: Vec<_> = configs.iter() 242 | .map(|config| self.discover_league(config, Some(&cache))) 243 | .collect(); 244 | 245 | let league_results = futures_util::future::join_all(league_futures).await; 246 | 247 | // Merge cached pairs with newly discovered ones 248 | let mut all_pairs = cache.pairs; 249 | let mut new_count = 0; 250 | 251 | for league_result in league_results { 252 | for pair in league_result.pairs { 253 | if !all_pairs.iter().any(|p| *p.kalshi_market_ticker == *pair.kalshi_market_ticker) { 254 | all_pairs.push(pair); 255 | new_count += 1; 256 | } 257 | } 258 | } 259 | 260 | if new_count > 0 { 261 | info!("🆕 Found {} new market pairs", new_count); 262 | 263 | // Update cache 264 | let new_cache = DiscoveryCache::new(all_pairs.clone()); 265 | if let Err(e) = Self::save_cache(&new_cache).await { 266 | warn!("Failed to update discovery cache: {}", e); 267 | } else { 268 | info!("💾 Updated cache with {} total pairs", all_pairs.len()); 269 | } 270 | } else { 271 | info!("✅ No new markets found, using {} cached pairs", all_pairs.len()); 272 | 273 | // Just update timestamp to extend TTL 274 | let refreshed_cache = DiscoveryCache::new(all_pairs.clone()); 275 | let _ = Self::save_cache(&refreshed_cache).await; 276 | } 277 | 278 | DiscoveryResult { 279 | pairs: all_pairs, 280 | kalshi_events_found: new_count, 281 | poly_matches: new_count, 282 | poly_misses: 0, 283 | errors: vec![], 284 | } 285 | } 286 | 287 | /// Discover all market types for a single league (PARALLEL) 288 | /// If cache is provided, only discovers markets not already in cache 289 | async fn discover_league(&self, config: &LeagueConfig, cache: Option<&DiscoveryCache>) -> DiscoveryResult { 290 | info!("🔍 Discovering {} markets...", config.league_code); 291 | 292 | let market_types = [MarketType::Moneyline, MarketType::Spread, MarketType::Total, MarketType::Btts]; 293 | 294 | // Parallel discovery across market types 295 | let type_futures: Vec<_> = market_types.iter() 296 | .filter_map(|market_type| { 297 | let series = self.get_series_for_type(config, *market_type)?; 298 | Some(self.discover_series(config, series, *market_type, cache)) 299 | }) 300 | .collect(); 301 | 302 
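// Concurrency model for discovery (as configured at the top of this file): Gamma
// lookups fan out up to GAMMA_CONCURRENCY (20) at a time, while every Kalshi call
// is funnelled through a 1-permit semaphore plus a 2-request/second governor
// bucket. A Kalshi fetch in discover_series below therefore looks like (error
// handling elided):
//   let _permit = self.kalshi_semaphore.acquire().await;   // global in-flight cap
//   self.kalshi_limiter.until_ready().await;               // token-bucket pacing
//   let events = self.kalshi.get_events(series, 50).await?;
// This keeps discovery burst-free even when many leagues and series run in parallel.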
| let type_results = futures_util::future::join_all(type_futures).await; 303 | 304 | let mut result = DiscoveryResult::default(); 305 | for (pairs_result, market_type) in type_results.into_iter().zip(market_types.iter()) { 306 | match pairs_result { 307 | Ok(pairs) => { 308 | let count = pairs.len(); 309 | if count > 0 { 310 | info!(" ✅ {} {}: {} pairs", config.league_code, market_type, count); 311 | } 312 | result.poly_matches += count; 313 | result.pairs.extend(pairs); 314 | } 315 | Err(e) => { 316 | result.errors.push(format!("{} {}: {}", config.league_code, market_type, e)); 317 | } 318 | } 319 | } 320 | 321 | result 322 | } 323 | 324 | fn get_series_for_type(&self, config: &LeagueConfig, market_type: MarketType) -> Option<&'static str> { 325 | match market_type { 326 | MarketType::Moneyline => Some(config.kalshi_series_game), 327 | MarketType::Spread => config.kalshi_series_spread, 328 | MarketType::Total => config.kalshi_series_total, 329 | MarketType::Btts => config.kalshi_series_btts, 330 | } 331 | } 332 | 333 | /// Discover markets for a specific series (PARALLEL Kalshi + Gamma lookups) 334 | /// If cache is provided, skips markets already in cache 335 | async fn discover_series( 336 | &self, 337 | config: &LeagueConfig, 338 | series: &str, 339 | market_type: MarketType, 340 | cache: Option<&DiscoveryCache>, 341 | ) -> Result> { 342 | // Fetch Kalshi events 343 | { 344 | let _permit = self.kalshi_semaphore.acquire().await.map_err(|e| anyhow::anyhow!("semaphore closed: {}", e))?; 345 | self.kalshi_limiter.until_ready().await; 346 | } 347 | let events = self.kalshi.get_events(series, 50).await?; 348 | 349 | // PHASE 2: Parallel market fetching 350 | let kalshi = self.kalshi.clone(); 351 | let limiter = self.kalshi_limiter.clone(); 352 | let semaphore = self.kalshi_semaphore.clone(); 353 | 354 | // Parse events first, filtering out unparseable ones 355 | let parsed_events: Vec<_> = events.into_iter() 356 | .filter_map(|event| { 357 | let parsed = match parse_kalshi_event_ticker(&event.event_ticker) { 358 | Some(p) => p, 359 | None => { 360 | warn!(" ⚠️ Could not parse event ticker {}", event.event_ticker); 361 | return None; 362 | } 363 | }; 364 | Some((parsed, event)) 365 | }) 366 | .collect(); 367 | 368 | // Execute market fetches with GLOBAL concurrency limit 369 | let market_results: Vec<_> = stream::iter(parsed_events) 370 | .map(|(parsed, event)| { 371 | let kalshi = kalshi.clone(); 372 | let limiter = limiter.clone(); 373 | let semaphore = semaphore.clone(); 374 | let event_ticker = event.event_ticker.clone(); 375 | async move { 376 | let _permit = semaphore.acquire().await.ok(); 377 | // rate limit 378 | limiter.until_ready().await; 379 | let markets_result = kalshi.get_markets(&event_ticker).await; 380 | (parsed, Arc::new(event), markets_result) 381 | } 382 | }) 383 | .buffer_unordered(KALSHI_GLOBAL_CONCURRENCY * 2) // Allow some buffering, semaphore is the real limit 384 | .collect() 385 | .await; 386 | 387 | // Collect all (event, market) pairs 388 | let mut event_markets = Vec::with_capacity(market_results.len() * 3); 389 | for (parsed, event, markets_result) in market_results { 390 | match markets_result { 391 | Ok(markets) => { 392 | for market in markets { 393 | // Skip if already in cache 394 | if let Some(c) = cache { 395 | if c.has_ticker(&market.ticker) { 396 | continue; 397 | } 398 | } 399 | event_markets.push((parsed.clone(), event.clone(), market)); 400 | } 401 | } 402 | Err(e) => { 403 | warn!(" ⚠️ Failed to get markets for {}: {}", event.event_ticker, e); 404 
| } 405 | } 406 | } 407 | 408 | // Parallel Gamma lookups with semaphore 409 | let lookup_futures: Vec<_> = event_markets 410 | .into_iter() 411 | .map(|(parsed, event, market)| { 412 | let poly_slug = self.build_poly_slug(config.poly_prefix, &parsed, market_type, &market); 413 | 414 | GammaLookupTask { 415 | event, 416 | market, 417 | poly_slug, 418 | market_type, 419 | league: config.league_code.to_string(), 420 | } 421 | }) 422 | .collect(); 423 | 424 | // Execute lookups in parallel 425 | let pairs: Vec = stream::iter(lookup_futures) 426 | .map(|task| { 427 | let gamma = self.gamma.clone(); 428 | let semaphore = self.gamma_semaphore.clone(); 429 | async move { 430 | let _permit = semaphore.acquire().await.ok()?; 431 | match gamma.lookup_market(&task.poly_slug).await { 432 | Ok(Some((yes_token, no_token))) => { 433 | let team_suffix = extract_team_suffix(&task.market.ticker); 434 | Some(MarketPair { 435 | pair_id: format!("{}-{}", task.poly_slug, task.market.ticker).into(), 436 | league: task.league.into(), 437 | market_type: task.market_type, 438 | description: format!("{} - {}", task.event.title, task.market.title).into(), 439 | kalshi_event_ticker: task.event.event_ticker.clone().into(), 440 | kalshi_market_ticker: task.market.ticker.into(), 441 | poly_slug: task.poly_slug.into(), 442 | poly_yes_token: yes_token.into(), 443 | poly_no_token: no_token.into(), 444 | line_value: task.market.floor_strike, 445 | team_suffix: team_suffix.map(|s| s.into()), 446 | }) 447 | } 448 | Ok(None) => None, 449 | Err(e) => { 450 | warn!(" ⚠️ Gamma lookup failed for {}: {}", task.poly_slug, e); 451 | None 452 | } 453 | } 454 | } 455 | }) 456 | .buffer_unordered(GAMMA_CONCURRENCY) 457 | .filter_map(|x| async { x }) 458 | .collect() 459 | .await; 460 | 461 | Ok(pairs) 462 | } 463 | 464 | /// Build Polymarket slug from Kalshi event data 465 | fn build_poly_slug( 466 | &self, 467 | poly_prefix: &str, 468 | parsed: &ParsedKalshiTicker, 469 | market_type: MarketType, 470 | market: &KalshiMarket, 471 | ) -> String { 472 | // Convert Kalshi team codes to Polymarket codes using cache 473 | let poly_team1 = self.team_cache 474 | .kalshi_to_poly(poly_prefix, &parsed.team1) 475 | .unwrap_or_else(|| parsed.team1.to_lowercase()); 476 | let poly_team2 = self.team_cache 477 | .kalshi_to_poly(poly_prefix, &parsed.team2) 478 | .unwrap_or_else(|| parsed.team2.to_lowercase()); 479 | 480 | // Convert date from "25DEC27" to "2025-12-27" 481 | let date_str = kalshi_date_to_iso(&parsed.date); 482 | 483 | // Base slug: league-team1-team2-date 484 | let base = format!("{}-{}-{}-{}", poly_prefix, poly_team1, poly_team2, date_str); 485 | 486 | match market_type { 487 | MarketType::Moneyline => { 488 | if let Some(suffix) = extract_team_suffix(&market.ticker) { 489 | if suffix.to_lowercase() == "tie" { 490 | format!("{}-draw", base) 491 | } else { 492 | let poly_suffix = self.team_cache 493 | .kalshi_to_poly(poly_prefix, &suffix) 494 | .unwrap_or_else(|| suffix.to_lowercase()); 495 | format!("{}-{}", base, poly_suffix) 496 | } 497 | } else { 498 | base 499 | } 500 | } 501 | MarketType::Spread => { 502 | if let Some(floor) = market.floor_strike { 503 | let floor_str = format!("{:.1}", floor).replace(".", "pt"); 504 | format!("{}-spread-{}", base, floor_str) 505 | } else { 506 | format!("{}-spread", base) 507 | } 508 | } 509 | MarketType::Total => { 510 | if let Some(floor) = market.floor_strike { 511 | let floor_str = format!("{:.1}", floor).replace(".", "pt"); 512 | format!("{}-total-{}", base, floor_str) 513 | } else { 514 | 
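// Worked slug examples for this builder, assuming poly_prefix = "epl" and a team
// cache that maps CFC -> "che" and AVL -> "avl" (both assumptions), with the Kalshi
// date 25DEC27 converted to 2025-12-27:
//   moneyline, CFC suffix : epl-che-avl-2025-12-27-che
//   moneyline, TIE suffix : epl-che-avl-2025-12-27-draw
//   spread, floor 1.5     : epl-che-avl-2025-12-27-spread-1pt5
//   total, floor 2.5      : epl-che-avl-2025-12-27-total-2pt5
//   btts                  : epl-che-avl-2025-12-27-btts
// The "pt" form comes from format!("{:.1}", floor).replace(".", "pt") above.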
format!("{}-total", base) 515 | } 516 | } 517 | MarketType::Btts => { 518 | format!("{}-btts", base) 519 | } 520 | } 521 | } 522 | } 523 | 524 | // === Helpers === 525 | 526 | #[derive(Debug, Clone)] 527 | struct ParsedKalshiTicker { 528 | date: String, // "25DEC27" 529 | team1: String, // "CFC" 530 | team2: String, // "AVL" 531 | } 532 | 533 | /// Parse Kalshi event ticker like "KXEPLGAME-25DEC27CFCAVL" or "KXNCAAFGAME-25DEC27M-OHFRES" 534 | fn parse_kalshi_event_ticker(ticker: &str) -> Option { 535 | let parts: Vec<&str> = ticker.split('-').collect(); 536 | if parts.len() < 2 { 537 | return None; 538 | } 539 | 540 | // Handle two formats: 541 | // 1. "KXEPLGAME-25DEC27CFCAVL" - date+teams in parts[1] 542 | // 2. "KXNCAAFGAME-25DEC27M-OHFRES" - date in parts[1], teams in parts[2] 543 | let (date, teams_part) = if parts.len() >= 3 && parts[2].len() >= 4 { 544 | // Format 2: 3-part ticker with separate teams section 545 | // parts[1] is like "25DEC27M" (date + optional suffix) 546 | let date_part = parts[1]; 547 | let date = if date_part.len() >= 7 { 548 | date_part[..7].to_uppercase() 549 | } else { 550 | return None; 551 | }; 552 | (date, parts[2]) 553 | } else { 554 | // Format 1: 2-part ticker with combined date+teams 555 | let date_teams = parts[1]; 556 | // Minimum: 7 (date) + 2 + 2 (min team codes) = 11 557 | if date_teams.len() < 11 { 558 | return None; 559 | } 560 | let date = date_teams[..7].to_uppercase(); 561 | let teams = &date_teams[7..]; 562 | (date, teams) 563 | }; 564 | 565 | // Split team codes - try to find the best split point 566 | // Team codes range from 2-4 chars (e.g., OM, CFC, FRES) 567 | let (team1, team2) = split_team_codes(teams_part); 568 | 569 | Some(ParsedKalshiTicker { date, team1, team2 }) 570 | } 571 | 572 | /// Split a combined team string into two team codes 573 | /// Tries multiple split strategies based on string length 574 | fn split_team_codes(teams: &str) -> (String, String) { 575 | let len = teams.len(); 576 | 577 | // For 6 chars, could be 3+3, 2+4, or 4+2 578 | // For 5 chars, could be 2+3 or 3+2 579 | // For 4 chars, must be 2+2 580 | // For 7 chars, could be 3+4 or 4+3 581 | // For 8 chars, could be 4+4, 3+5, 5+3 582 | 583 | match len { 584 | 4 => (teams[..2].to_uppercase(), teams[2..].to_uppercase()), 585 | 5 => { 586 | // Prefer 2+3 (common for OM+ASM, OL+PSG) 587 | (teams[..2].to_uppercase(), teams[2..].to_uppercase()) 588 | } 589 | 6 => { 590 | // Check if it looks like 2+4 pattern (e.g., OHFRES = OH+FRES) 591 | // Common 2-letter codes: OM, OL, OH, SF, LA, NY, KC, TB, etc. 592 | let first_two = &teams[..2].to_uppercase(); 593 | if is_likely_two_letter_code(first_two) { 594 | (first_two.clone(), teams[2..].to_uppercase()) 595 | } else { 596 | // Default to 3+3 597 | (teams[..3].to_uppercase(), teams[3..].to_uppercase()) 598 | } 599 | } 600 | 7 => { 601 | // Could be 3+4 or 4+3 - prefer 3+4 602 | (teams[..3].to_uppercase(), teams[3..].to_uppercase()) 603 | } 604 | _ if len >= 8 => { 605 | // 4+4 or longer 606 | (teams[..4].to_uppercase(), teams[4..].to_uppercase()) 607 | } 608 | _ => { 609 | let mid = len / 2; 610 | (teams[..mid].to_uppercase(), teams[mid..].to_uppercase()) 611 | } 612 | } 613 | } 614 | 615 | /// Check if a 2-letter code is a known/likely team abbreviation 616 | fn is_likely_two_letter_code(code: &str) -> bool { 617 | matches!( 618 | code, 619 | // European football (Ligue 1, etc.) 
620 | "OM" | "OL" | "FC" | 621 | // US sports common abbreviations 622 | "OH" | "SF" | "LA" | "NY" | "KC" | "TB" | "GB" | "NE" | "NO" | "LV" | 623 | // Generic short codes 624 | "BC" | "SC" | "AC" | "AS" | "US" 625 | ) 626 | } 627 | 628 | /// Convert Kalshi date "25DEC27" to ISO "2025-12-27" 629 | fn kalshi_date_to_iso(kalshi_date: &str) -> String { 630 | if kalshi_date.len() != 7 { 631 | return kalshi_date.to_string(); 632 | } 633 | 634 | let year = format!("20{}", &kalshi_date[..2]); 635 | let month = match &kalshi_date[2..5].to_uppercase()[..] { 636 | "JAN" => "01", "FEB" => "02", "MAR" => "03", "APR" => "04", 637 | "MAY" => "05", "JUN" => "06", "JUL" => "07", "AUG" => "08", 638 | "SEP" => "09", "OCT" => "10", "NOV" => "11", "DEC" => "12", 639 | _ => "01", 640 | }; 641 | let day = &kalshi_date[5..7]; 642 | 643 | format!("{}-{}-{}", year, month, day) 644 | } 645 | 646 | /// Extract team suffix from market ticker (e.g., "KXEPLGAME-25DEC27CFCAVL-CFC" -> "CFC") 647 | fn extract_team_suffix(ticker: &str) -> Option { 648 | let mut splits = ticker.splitn(3, '-'); 649 | splits.next()?; // series 650 | splits.next()?; // event 651 | splits.next().map(|s| s.to_uppercase()) 652 | } 653 | 654 | #[cfg(test)] 655 | mod tests { 656 | use super::*; 657 | 658 | #[test] 659 | fn test_parse_kalshi_ticker() { 660 | let parsed = parse_kalshi_event_ticker("KXEPLGAME-25DEC27CFCAVL").unwrap(); 661 | assert_eq!(parsed.date, "25DEC27"); 662 | assert_eq!(parsed.team1, "CFC"); 663 | assert_eq!(parsed.team2, "AVL"); 664 | } 665 | 666 | #[test] 667 | fn test_kalshi_date_to_iso() { 668 | assert_eq!(kalshi_date_to_iso("25DEC27"), "2025-12-27"); 669 | assert_eq!(kalshi_date_to_iso("25JAN01"), "2025-01-01"); 670 | } 671 | } 672 | -------------------------------------------------------------------------------- /src/execution.rs: -------------------------------------------------------------------------------- 1 | // src/execution.rs 2 | // Execution Engine 3 | 4 | use anyhow::{Result, anyhow}; 5 | use std::sync::Arc; 6 | use std::sync::atomic::{AtomicU64, Ordering}; 7 | use std::time::{Duration, Instant}; 8 | use tokio::sync::mpsc; 9 | use tracing::{info, warn, error}; 10 | 11 | use crate::kalshi::KalshiApiClient; 12 | use crate::polymarket_clob::SharedAsyncClient; 13 | use crate::types::{ 14 | ArbType, MarketPair, 15 | FastExecutionRequest, GlobalState, 16 | cents_to_price, 17 | }; 18 | use crate::circuit_breaker::CircuitBreaker; 19 | use crate::position_tracker::{FillRecord, PositionChannel}; 20 | 21 | // ============================================================================= 22 | // EXECUTION ENGINE 23 | // ============================================================================= 24 | 25 | /// Monotonic nanosecond clock for latency measurement 26 | pub struct NanoClock { 27 | start: Instant, 28 | } 29 | 30 | impl NanoClock { 31 | pub fn new() -> Self { 32 | Self { start: Instant::now() } 33 | } 34 | 35 | #[inline(always)] 36 | pub fn now_ns(&self) -> u64 { 37 | self.start.elapsed().as_nanos() as u64 38 | } 39 | } 40 | 41 | impl Default for NanoClock { 42 | fn default() -> Self { 43 | Self::new() 44 | } 45 | } 46 | 47 | /// Execution engine 48 | pub struct ExecutionEngine { 49 | kalshi: Arc, 50 | poly_async: Arc, 51 | state: Arc, 52 | circuit_breaker: Arc, 53 | position_channel: PositionChannel, 54 | in_flight: Arc<[AtomicU64; 8]>, 55 | clock: NanoClock, 56 | pub dry_run: bool, 57 | test_mode: bool, 58 | } 59 | 60 | impl ExecutionEngine { 61 | pub fn new( 62 | kalshi: Arc, 63 | poly_async: Arc, 64 | state: 
Arc, 65 | circuit_breaker: Arc, 66 | position_channel: PositionChannel, 67 | dry_run: bool, 68 | ) -> Self { 69 | let test_mode = std::env::var("TEST_ARB") 70 | .map(|v| v == "1" || v == "true") 71 | .unwrap_or(false); 72 | 73 | Self { 74 | kalshi, 75 | poly_async, 76 | state, 77 | circuit_breaker, 78 | position_channel, 79 | in_flight: Arc::new(std::array::from_fn(|_| AtomicU64::new(0))), 80 | clock: NanoClock::new(), 81 | dry_run, 82 | test_mode, 83 | } 84 | } 85 | 86 | /// Process an execution request 87 | #[inline] 88 | pub async fn process(&self, req: FastExecutionRequest) -> Result { 89 | let market_id = req.market_id; 90 | 91 | // Deduplication check (512 markets via 8x u64 bitmask) 92 | if market_id < 512 { 93 | let slot = (market_id / 64) as usize; 94 | let bit = market_id % 64; 95 | let mask = 1u64 << bit; 96 | let prev = self.in_flight[slot].fetch_or(mask, Ordering::AcqRel); 97 | if prev & mask != 0 { 98 | return Ok(ExecutionResult { 99 | market_id, 100 | success: false, 101 | profit_cents: 0, 102 | latency_ns: self.clock.now_ns() - req.detected_ns, 103 | error: Some("Already in-flight"), 104 | }); 105 | } 106 | } 107 | 108 | // Get market pair 109 | let market = self.state.get_by_id(market_id) 110 | .ok_or_else(|| anyhow!("Unknown market_id {}", market_id))?; 111 | 112 | let pair = market.pair.as_ref() 113 | .ok_or_else(|| anyhow!("No pair for market_id {}", market_id))?; 114 | 115 | // Calculate profit 116 | let profit_cents = req.profit_cents(); 117 | if profit_cents < 1 { 118 | self.release_in_flight(market_id); 119 | return Ok(ExecutionResult { 120 | market_id, 121 | success: false, 122 | profit_cents: 0, 123 | latency_ns: self.clock.now_ns() - req.detected_ns, 124 | error: Some("Profit below threshold"), 125 | }); 126 | } 127 | 128 | // Calculate max contracts from size (min of both sides) 129 | let mut max_contracts = (req.yes_size.min(req.no_size) / 100) as i64; 130 | 131 | // SAFETY: In test mode, cap at 10 contracts 132 | // Note: Polymarket has $1 minimum spend, so at 40¢ price, 1 contract = $0.40 (rejected!) 
133 | // 10 contracts ensures we meet the minimum at any reasonable price 134 | if self.test_mode && max_contracts > 10 { 135 | warn!("[EXEC] ⚠️ TEST_MODE: Capping contracts from {} to 10", max_contracts); 136 | max_contracts = 10; 137 | } 138 | 139 | if max_contracts < 1 { 140 | warn!( 141 | "[EXEC] Liquidity fail: {:?} | yes_size={}¢ no_size={}¢", 142 | req.arb_type, req.yes_size, req.no_size 143 | ); 144 | self.release_in_flight(market_id); 145 | return Ok(ExecutionResult { 146 | market_id, 147 | success: false, 148 | profit_cents: 0, 149 | latency_ns: self.clock.now_ns() - req.detected_ns, 150 | error: Some("Insufficient liquidity"), 151 | }); 152 | } 153 | 154 | // Circuit breaker check 155 | if let Err(_reason) = self.circuit_breaker.can_execute(&pair.pair_id, max_contracts).await { 156 | self.release_in_flight(market_id); 157 | return Ok(ExecutionResult { 158 | market_id, 159 | success: false, 160 | profit_cents: 0, 161 | latency_ns: self.clock.now_ns() - req.detected_ns, 162 | error: Some("Circuit breaker"), 163 | }); 164 | } 165 | 166 | let latency_to_exec = self.clock.now_ns() - req.detected_ns; 167 | info!( 168 | "[EXEC] 🎯 {} | {:?} y={}¢ n={}¢ | profit={}¢ | {}x | {}µs", 169 | pair.description, 170 | req.arb_type, 171 | req.yes_price, 172 | req.no_price, 173 | profit_cents, 174 | max_contracts, 175 | latency_to_exec / 1000 176 | ); 177 | 178 | if self.dry_run { 179 | info!("[EXEC] 🏃 DRY RUN - would execute {} contracts", max_contracts); 180 | self.release_in_flight_delayed(market_id); 181 | return Ok(ExecutionResult { 182 | market_id, 183 | success: true, 184 | profit_cents, 185 | latency_ns: latency_to_exec, 186 | error: Some("DRY_RUN"), 187 | }); 188 | } 189 | 190 | // Execute both legs concurrently 191 | let result = self.execute_both_legs_async(&req, pair, max_contracts).await; 192 | 193 | // Release in-flight after delay 194 | self.release_in_flight_delayed(market_id); 195 | 196 | match result { 197 | // Note: For same-platform arbs (PolyOnly/KalshiOnly), these are YES/NO fills, not platform fills 198 | Ok((yes_filled, no_filled, yes_cost, no_cost, yes_order_id, no_order_id)) => { 199 | let matched = yes_filled.min(no_filled); 200 | let success = matched > 0; 201 | let actual_profit = matched as i16 * 100 - (yes_cost + no_cost) as i16; 202 | 203 | // === AUTO-CLOSE MISMATCHED EXPOSURE (non-blocking) === 204 | if yes_filled != no_filled && (yes_filled > 0 || no_filled > 0) { 205 | let excess = (yes_filled - no_filled).abs(); 206 | let (leg1_name, leg2_name) = match req.arb_type { 207 | ArbType::PolyYesKalshiNo => ("P_yes", "K_no"), 208 | ArbType::KalshiYesPolyNo => ("K_yes", "P_no"), 209 | ArbType::PolyOnly => ("P_yes", "P_no"), 210 | ArbType::KalshiOnly => ("K_yes", "K_no"), 211 | }; 212 | warn!("[EXEC] ⚠️ Fill mismatch: {}={} {}={} (excess={})", 213 | leg1_name, yes_filled, leg2_name, no_filled, excess); 214 | 215 | // Spawn auto-close in background (don't block hot path with 2s sleep) 216 | let kalshi = self.kalshi.clone(); 217 | let poly_async = self.poly_async.clone(); 218 | let arb_type = req.arb_type; 219 | let yes_price = req.yes_price; 220 | let no_price = req.no_price; 221 | let poly_yes_token = pair.poly_yes_token.clone(); 222 | let poly_no_token = pair.poly_no_token.clone(); 223 | let kalshi_ticker = pair.kalshi_market_ticker.clone(); 224 | let original_cost_per_contract = if yes_filled > no_filled { 225 | if yes_filled > 0 { yes_cost / yes_filled } else { 0 } 226 | } else { 227 | if no_filled > 0 { no_cost / no_filled } else { 0 } 228 | }; 229 | 230 | 
tokio::spawn(async move { 231 | Self::auto_close_background( 232 | kalshi, poly_async, arb_type, yes_filled, no_filled, 233 | yes_price, no_price, poly_yes_token, poly_no_token, 234 | kalshi_ticker, original_cost_per_contract 235 | ).await; 236 | }); 237 | } 238 | 239 | if success { 240 | self.circuit_breaker.record_success(&pair.pair_id, matched, matched, actual_profit as f64 / 100.0).await; 241 | } 242 | 243 | if matched > 0 { 244 | let (platform1, side1, platform2, side2) = match req.arb_type { 245 | ArbType::PolyYesKalshiNo => ("polymarket", "yes", "kalshi", "no"), 246 | ArbType::KalshiYesPolyNo => ("kalshi", "yes", "polymarket", "no"), 247 | ArbType::PolyOnly => ("polymarket", "yes", "polymarket", "no"), 248 | ArbType::KalshiOnly => ("kalshi", "yes", "kalshi", "no"), 249 | }; 250 | 251 | self.position_channel.record_fill(FillRecord::new( 252 | &pair.pair_id, &pair.description, platform1, side1, 253 | matched as f64, yes_cost as f64 / 100.0 / yes_filled.max(1) as f64, 254 | 0.0, &yes_order_id, 255 | )); 256 | self.position_channel.record_fill(FillRecord::new( 257 | &pair.pair_id, &pair.description, platform2, side2, 258 | matched as f64, no_cost as f64 / 100.0 / no_filled.max(1) as f64, 259 | 0.0, &no_order_id, 260 | )); 261 | } 262 | 263 | Ok(ExecutionResult { 264 | market_id, 265 | success, 266 | profit_cents: actual_profit, 267 | latency_ns: self.clock.now_ns() - req.detected_ns, 268 | error: if success { None } else { Some("Partial/no fill") }, 269 | }) 270 | } 271 | Err(_e) => { 272 | self.circuit_breaker.record_error().await; 273 | Ok(ExecutionResult { 274 | market_id, 275 | success: false, 276 | profit_cents: 0, 277 | latency_ns: self.clock.now_ns() - req.detected_ns, 278 | error: Some("Execution failed"), 279 | }) 280 | } 281 | } 282 | } 283 | 284 | async fn execute_both_legs_async( 285 | &self, 286 | req: &FastExecutionRequest, 287 | pair: &MarketPair, 288 | contracts: i64, 289 | ) -> Result<(i64, i64, i64, i64, String, String)> { 290 | match req.arb_type { 291 | // === CROSS-PLATFORM: Poly YES + Kalshi NO === 292 | ArbType::PolyYesKalshiNo => { 293 | let kalshi_fut = self.kalshi.buy_ioc( 294 | &pair.kalshi_market_ticker, 295 | "no", 296 | req.no_price as i64, 297 | contracts, 298 | ); 299 | let poly_fut = self.poly_async.buy_fak( 300 | &pair.poly_yes_token, 301 | cents_to_price(req.yes_price), 302 | contracts as f64, 303 | ); 304 | let (kalshi_res, poly_res) = tokio::join!(kalshi_fut, poly_fut); 305 | self.extract_cross_results(kalshi_res, poly_res) 306 | } 307 | 308 | // === CROSS-PLATFORM: Kalshi YES + Poly NO === 309 | ArbType::KalshiYesPolyNo => { 310 | let kalshi_fut = self.kalshi.buy_ioc( 311 | &pair.kalshi_market_ticker, 312 | "yes", 313 | req.yes_price as i64, 314 | contracts, 315 | ); 316 | let poly_fut = self.poly_async.buy_fak( 317 | &pair.poly_no_token, 318 | cents_to_price(req.no_price), 319 | contracts as f64, 320 | ); 321 | let (kalshi_res, poly_res) = tokio::join!(kalshi_fut, poly_fut); 322 | self.extract_cross_results(kalshi_res, poly_res) 323 | } 324 | 325 | // === SAME-PLATFORM: Poly YES + Poly NO === 326 | ArbType::PolyOnly => { 327 | let yes_fut = self.poly_async.buy_fak( 328 | &pair.poly_yes_token, 329 | cents_to_price(req.yes_price), 330 | contracts as f64, 331 | ); 332 | let no_fut = self.poly_async.buy_fak( 333 | &pair.poly_no_token, 334 | cents_to_price(req.no_price), 335 | contracts as f64, 336 | ); 337 | let (yes_res, no_res) = tokio::join!(yes_fut, no_fut); 338 | self.extract_poly_only_results(yes_res, no_res) 339 | } 340 | 341 | // === 
SAME-PLATFORM: Kalshi YES + Kalshi NO === 342 | ArbType::KalshiOnly => { 343 | let yes_fut = self.kalshi.buy_ioc( 344 | &pair.kalshi_market_ticker, 345 | "yes", 346 | req.yes_price as i64, 347 | contracts, 348 | ); 349 | let no_fut = self.kalshi.buy_ioc( 350 | &pair.kalshi_market_ticker, 351 | "no", 352 | req.no_price as i64, 353 | contracts, 354 | ); 355 | let (yes_res, no_res) = tokio::join!(yes_fut, no_fut); 356 | self.extract_kalshi_only_results(yes_res, no_res) 357 | } 358 | } 359 | } 360 | 361 | /// Extract results from cross-platform execution 362 | fn extract_cross_results( 363 | &self, 364 | kalshi_res: Result, 365 | poly_res: Result, 366 | ) -> Result<(i64, i64, i64, i64, String, String)> { 367 | let (kalshi_filled, kalshi_cost, kalshi_order_id) = match kalshi_res { 368 | Ok(resp) => { 369 | let filled = resp.order.filled_count(); 370 | let cost = resp.order.taker_fill_cost.unwrap_or(0) + resp.order.maker_fill_cost.unwrap_or(0); 371 | (filled, cost, resp.order.order_id) 372 | } 373 | Err(e) => { 374 | warn!("[EXEC] Kalshi failed: {}", e); 375 | (0, 0, String::new()) 376 | } 377 | }; 378 | 379 | let (poly_filled, poly_cost, poly_order_id) = match poly_res { 380 | Ok(fill) => { 381 | ((fill.filled_size as i64), (fill.fill_cost * 100.0) as i64, fill.order_id) 382 | } 383 | Err(e) => { 384 | warn!("[EXEC] Poly failed: {}", e); 385 | (0, 0, String::new()) 386 | } 387 | }; 388 | 389 | Ok((kalshi_filled, poly_filled, kalshi_cost, poly_cost, kalshi_order_id, poly_order_id)) 390 | } 391 | 392 | /// Extract results from Poly-only execution (same-platform) 393 | fn extract_poly_only_results( 394 | &self, 395 | yes_res: Result, 396 | no_res: Result, 397 | ) -> Result<(i64, i64, i64, i64, String, String)> { 398 | let (yes_filled, yes_cost, yes_order_id) = match yes_res { 399 | Ok(fill) => { 400 | ((fill.filled_size as i64), (fill.fill_cost * 100.0) as i64, fill.order_id) 401 | } 402 | Err(e) => { 403 | warn!("[EXEC] Poly YES failed: {}", e); 404 | (0, 0, String::new()) 405 | } 406 | }; 407 | 408 | let (no_filled, no_cost, no_order_id) = match no_res { 409 | Ok(fill) => { 410 | ((fill.filled_size as i64), (fill.fill_cost * 100.0) as i64, fill.order_id) 411 | } 412 | Err(e) => { 413 | warn!("[EXEC] Poly NO failed: {}", e); 414 | (0, 0, String::new()) 415 | } 416 | }; 417 | 418 | // For same-platform, return YES as "kalshi" slot and NO as "poly" slot 419 | // This keeps the existing result handling logic working 420 | Ok((yes_filled, no_filled, yes_cost, no_cost, yes_order_id, no_order_id)) 421 | } 422 | 423 | /// Extract results from Kalshi-only execution (same-platform) 424 | fn extract_kalshi_only_results( 425 | &self, 426 | yes_res: Result, 427 | no_res: Result, 428 | ) -> Result<(i64, i64, i64, i64, String, String)> { 429 | let (yes_filled, yes_cost, yes_order_id) = match yes_res { 430 | Ok(resp) => { 431 | let filled = resp.order.filled_count(); 432 | let cost = resp.order.taker_fill_cost.unwrap_or(0) + resp.order.maker_fill_cost.unwrap_or(0); 433 | (filled, cost, resp.order.order_id) 434 | } 435 | Err(e) => { 436 | warn!("[EXEC] Kalshi YES failed: {}", e); 437 | (0, 0, String::new()) 438 | } 439 | }; 440 | 441 | let (no_filled, no_cost, no_order_id) = match no_res { 442 | Ok(resp) => { 443 | let filled = resp.order.filled_count(); 444 | let cost = resp.order.taker_fill_cost.unwrap_or(0) + resp.order.maker_fill_cost.unwrap_or(0); 445 | (filled, cost, resp.order.order_id) 446 | } 447 | Err(e) => { 448 | warn!("[EXEC] Kalshi NO failed: {}", e); 449 | (0, 0, String::new()) 450 | } 451 | }; 
452 | 453 | // For same-platform, return YES as "kalshi" slot and NO as "poly" slot 454 | Ok((yes_filled, no_filled, yes_cost, no_cost, yes_order_id, no_order_id)) 455 | } 456 | 457 | /// Background auto-close for mismatched fills 458 | async fn auto_close_background( 459 | kalshi: Arc, 460 | poly_async: Arc, 461 | arb_type: ArbType, 462 | yes_filled: i64, 463 | no_filled: i64, 464 | yes_price: u16, 465 | no_price: u16, 466 | poly_yes_token: Arc, 467 | poly_no_token: Arc, 468 | kalshi_ticker: Arc, 469 | original_cost_per_contract: i64, 470 | ) { 471 | let excess = (yes_filled - no_filled).abs(); 472 | if excess == 0 { 473 | return; 474 | } 475 | 476 | // Helper to log P&L after close 477 | let log_close_pnl = |platform: &str, closed: i64, proceeds: i64| { 478 | if closed > 0 { 479 | let close_pnl = proceeds - (original_cost_per_contract * excess); 480 | info!("[EXEC] ✅ Closed {} {} contracts for {}¢ (P&L: {}¢)", 481 | closed, platform, proceeds, close_pnl); 482 | } else { 483 | warn!("[EXEC] ⚠️ Failed to close {} excess - 0 filled", platform); 484 | } 485 | }; 486 | 487 | match arb_type { 488 | ArbType::PolyOnly => { 489 | let (token, side, price) = if yes_filled > no_filled { 490 | (&poly_yes_token, "yes", yes_price) 491 | } else { 492 | (&poly_no_token, "no", no_price) 493 | }; 494 | let close_price = cents_to_price((price as i16).saturating_sub(10).max(1) as u16); 495 | 496 | info!("[EXEC] 🔄 Waiting 2s for Poly settlement before auto-close ({} {} contracts)", excess, side); 497 | tokio::time::sleep(Duration::from_secs(2)).await; 498 | 499 | match poly_async.sell_fak(token, close_price, excess as f64).await { 500 | Ok(fill) => log_close_pnl("Poly", fill.filled_size as i64, (fill.fill_cost * 100.0) as i64), 501 | Err(e) => warn!("[EXEC] ⚠️ Failed to close Poly excess: {}", e), 502 | } 503 | } 504 | 505 | ArbType::KalshiOnly => { 506 | let (side, price) = if yes_filled > no_filled { 507 | ("yes", yes_price as i64) 508 | } else { 509 | ("no", no_price as i64) 510 | }; 511 | let close_price = price.saturating_sub(10).max(1); 512 | 513 | match kalshi.sell_ioc(&kalshi_ticker, side, close_price, excess).await { 514 | Ok(resp) => { 515 | let proceeds = resp.order.taker_fill_cost.unwrap_or(0) + resp.order.maker_fill_cost.unwrap_or(0); 516 | log_close_pnl("Kalshi", resp.order.filled_count(), proceeds); 517 | } 518 | Err(e) => warn!("[EXEC] ⚠️ Failed to close Kalshi excess: {}", e), 519 | } 520 | } 521 | 522 | ArbType::PolyYesKalshiNo => { 523 | if yes_filled > no_filled { 524 | // Poly YES excess 525 | let close_price = cents_to_price((yes_price as i16).saturating_sub(10).max(1) as u16); 526 | info!("[EXEC] 🔄 Waiting 2s for Poly settlement before auto-close ({} yes contracts)", excess); 527 | tokio::time::sleep(Duration::from_secs(2)).await; 528 | 529 | match poly_async.sell_fak(&poly_yes_token, close_price, excess as f64).await { 530 | Ok(fill) => log_close_pnl("Poly", fill.filled_size as i64, (fill.fill_cost * 100.0) as i64), 531 | Err(e) => warn!("[EXEC] ⚠️ Failed to close Poly excess: {}", e), 532 | } 533 | } else { 534 | // Kalshi NO excess 535 | let close_price = (no_price as i64).saturating_sub(10).max(1); 536 | match kalshi.sell_ioc(&kalshi_ticker, "no", close_price, excess).await { 537 | Ok(resp) => { 538 | let proceeds = resp.order.taker_fill_cost.unwrap_or(0) + resp.order.maker_fill_cost.unwrap_or(0); 539 | log_close_pnl("Kalshi", resp.order.filled_count(), proceeds); 540 | } 541 | Err(e) => warn!("[EXEC] ⚠️ Failed to close Kalshi excess: {}", e), 542 | } 543 | } 544 | } 545 | 546 | 
ArbType::KalshiYesPolyNo => { 547 | if yes_filled > no_filled { 548 | // Kalshi YES excess 549 | let close_price = (yes_price as i64).saturating_sub(10).max(1); 550 | match kalshi.sell_ioc(&kalshi_ticker, "yes", close_price, excess).await { 551 | Ok(resp) => { 552 | let proceeds = resp.order.taker_fill_cost.unwrap_or(0) + resp.order.maker_fill_cost.unwrap_or(0); 553 | log_close_pnl("Kalshi", resp.order.filled_count(), proceeds); 554 | } 555 | Err(e) => warn!("[EXEC] ⚠️ Failed to close Kalshi excess: {}", e), 556 | } 557 | } else { 558 | // Poly NO excess 559 | let close_price = cents_to_price((no_price as i16).saturating_sub(10).max(1) as u16); 560 | info!("[EXEC] 🔄 Waiting 2s for Poly settlement before auto-close ({} no contracts)", excess); 561 | tokio::time::sleep(Duration::from_secs(2)).await; 562 | 563 | match poly_async.sell_fak(&poly_no_token, close_price, excess as f64).await { 564 | Ok(fill) => log_close_pnl("Poly", fill.filled_size as i64, (fill.fill_cost * 100.0) as i64), 565 | Err(e) => warn!("[EXEC] ⚠️ Failed to close Poly excess: {}", e), 566 | } 567 | } 568 | } 569 | } 570 | } 571 | 572 | #[inline(always)] 573 | fn release_in_flight(&self, market_id: u16) { 574 | if market_id < 512 { 575 | let slot = (market_id / 64) as usize; 576 | let bit = market_id % 64; 577 | let mask = !(1u64 << bit); 578 | self.in_flight[slot].fetch_and(mask, Ordering::Release); 579 | } 580 | } 581 | 582 | fn release_in_flight_delayed(&self, market_id: u16) { 583 | if market_id < 512 { 584 | let in_flight = self.in_flight.clone(); 585 | let slot = (market_id / 64) as usize; 586 | let bit = market_id % 64; 587 | tokio::spawn(async move { 588 | tokio::time::sleep(Duration::from_secs(10)).await; 589 | let mask = !(1u64 << bit); 590 | in_flight[slot].fetch_and(mask, Ordering::Release); 591 | }); 592 | } 593 | } 594 | } 595 | 596 | /// Execution result 597 | #[derive(Debug, Clone, Copy)] 598 | pub struct ExecutionResult { 599 | pub market_id: u16, 600 | pub success: bool, 601 | pub profit_cents: i16, 602 | pub latency_ns: u64, 603 | pub error: Option<&'static str>, 604 | } 605 | 606 | /// Create execution channel 607 | pub fn create_execution_channel() -> (mpsc::Sender, mpsc::Receiver) { 608 | mpsc::channel(256) 609 | } 610 | 611 | /// Execution loop 612 | pub async fn run_execution_loop( 613 | mut rx: mpsc::Receiver, 614 | engine: Arc, 615 | ) { 616 | info!("[EXEC] Execution engine started (dry_run={})", engine.dry_run); 617 | 618 | while let Some(req) = rx.recv().await { 619 | let engine = engine.clone(); 620 | 621 | // Process immediately in spawned task 622 | tokio::spawn(async move { 623 | match engine.process(req).await { 624 | Ok(result) if result.success => { 625 | info!( 626 | "[EXEC] ✅ market_id={} profit={}¢ latency={}µs", 627 | result.market_id, result.profit_cents, result.latency_ns / 1000 628 | ); 629 | } 630 | Ok(result) => { 631 | if result.error != Some("Already in-flight") { 632 | warn!( 633 | "[EXEC] ⚠️ market_id={}: {:?}", 634 | result.market_id, result.error 635 | ); 636 | } 637 | } 638 | Err(e) => { 639 | error!("[EXEC] ❌ Error: {}", e); 640 | } 641 | } 642 | }); 643 | } 644 | 645 | info!("[EXEC] Execution engine stopped"); 646 | } -------------------------------------------------------------------------------- /src/polymarket_clob.rs: -------------------------------------------------------------------------------- 1 | // src/polymarket_clob.rs 2 | // Polymarket CLOB Client 3 | 4 | use std::time::{SystemTime, UNIX_EPOCH}; 5 | 6 | use anyhow::{Result, anyhow}; 7 | use base64::Engine; 8 
| use base64::engine::general_purpose::URL_SAFE; 9 | use ethers::signers::{LocalWallet, Signer}; 10 | use ethers::types::H256; 11 | use ethers::types::transaction::eip712::{Eip712, TypedData}; 12 | use ethers::types::U256; 13 | use hmac::{Hmac, Mac}; 14 | use reqwest::header::{HeaderMap, HeaderValue}; 15 | use serde::{Deserialize, Serialize}; 16 | use serde_json::json; 17 | use sha2::Sha256; 18 | use std::collections::HashMap; 19 | use std::sync::Arc; 20 | 21 | const USER_AGENT: &str = "py_clob_client"; 22 | const MSG_TO_SIGN: &str = "This message attests that I control the given wallet"; 23 | const ZERO_ADDRESS: &str = "0x0000000000000000000000000000000000000000"; 24 | 25 | // ============================================================================ 26 | // PRE-COMPUTED EIP712 CONSTANTS 27 | // ============================================================================ 28 | 29 | type HmacSha256 = Hmac; 30 | 31 | #[derive(Debug, Clone, Serialize, Deserialize)] 32 | pub struct ApiCreds { 33 | #[serde(rename = "apiKey")] 34 | pub api_key: String, 35 | #[serde(rename = "secret")] 36 | pub api_secret: String, 37 | #[serde(rename = "passphrase")] 38 | pub api_passphrase: String, 39 | } 40 | 41 | // ============================================================================ 42 | // PREPARED CREDENTIALS 43 | // ============================================================================ 44 | 45 | #[derive(Clone)] 46 | pub struct PreparedCreds { 47 | pub api_key: String, 48 | hmac_template: HmacSha256, 49 | api_key_header: HeaderValue, 50 | passphrase_header: HeaderValue, 51 | } 52 | 53 | impl PreparedCreds { 54 | pub fn from_api_creds(creds: &ApiCreds) -> Result { 55 | let decoded_secret = URL_SAFE.decode(&creds.api_secret)?; 56 | let hmac_template = HmacSha256::new_from_slice(&decoded_secret) 57 | .map_err(|e| anyhow!("Invalid HMAC key: {}", e))?; 58 | 59 | let api_key_header = HeaderValue::from_str(&creds.api_key) 60 | .map_err(|e| anyhow!("Invalid API key for header: {}", e))?; 61 | let passphrase_header = HeaderValue::from_str(&creds.api_passphrase) 62 | .map_err(|e| anyhow!("Invalid passphrase for header: {}", e))?; 63 | 64 | Ok(Self { 65 | api_key: creds.api_key.clone(), 66 | hmac_template, 67 | api_key_header, 68 | passphrase_header, 69 | }) 70 | } 71 | 72 | /// Sign message using prewarmed HMAC 73 | #[inline] 74 | pub fn sign(&self, message: &[u8]) -> Vec { 75 | let mut mac = self.hmac_template.clone(); 76 | mac.update(message); 77 | mac.finalize().into_bytes().to_vec() 78 | } 79 | 80 | /// Sign and return base64 (for L2 headers) 81 | #[inline] 82 | pub fn sign_b64(&self, message: &[u8]) -> String { 83 | URL_SAFE.encode(self.sign(message)) 84 | } 85 | 86 | /// Get cached API key header 87 | #[inline] 88 | pub fn api_key_header(&self) -> HeaderValue { 89 | self.api_key_header.clone() 90 | } 91 | 92 | /// Get cached passphrase header 93 | #[inline] 94 | pub fn passphrase_header(&self) -> HeaderValue { 95 | self.passphrase_header.clone() 96 | } 97 | } 98 | 99 | fn add_default_headers(headers: &mut HeaderMap) { 100 | headers.insert("User-Agent", HeaderValue::from_static(USER_AGENT)); 101 | headers.insert("Accept", HeaderValue::from_static("*/*")); 102 | headers.insert("Connection", HeaderValue::from_static("keep-alive")); 103 | headers.insert("Content-Type", HeaderValue::from_static("application/json")); 104 | } 105 | 106 | #[inline(always)] 107 | fn current_unix_ts() -> u64 { 108 | SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs() 109 | } 110 | 111 | fn 
clob_auth_digest(chain_id: u64, address_str: &str, timestamp: u64, nonce: u64) -> Result { 112 | let typed_json = json!({ 113 | "types": { 114 | "EIP712Domain": [ 115 | {"name": "name", "type": "string"}, 116 | {"name": "version", "type": "string"}, 117 | {"name": "chainId", "type": "uint256"} 118 | ], 119 | "ClobAuth": [ 120 | {"name": "address", "type": "address"}, 121 | {"name": "timestamp", "type": "string"}, 122 | {"name": "nonce", "type": "uint256"}, 123 | {"name": "message", "type": "string"} 124 | ] 125 | }, 126 | "primaryType": "ClobAuth", 127 | "domain": { "name": "ClobAuthDomain", "version": "1", "chainId": chain_id }, 128 | "message": { "address": address_str, "timestamp": timestamp.to_string(), "nonce": nonce, "message": MSG_TO_SIGN } 129 | }); 130 | let typed: TypedData = serde_json::from_value(typed_json)?; 131 | Ok(typed.encode_eip712()?.into()) 132 | } 133 | 134 | #[derive(Debug, Clone)] 135 | #[allow(dead_code)] 136 | pub struct OrderArgs { 137 | pub token_id: String, 138 | pub price: f64, 139 | pub size: f64, 140 | pub side: String, 141 | pub fee_rate_bps: Option, 142 | pub nonce: Option, 143 | pub expiration: Option, 144 | pub taker: Option, 145 | } 146 | 147 | /// Order data for EIP712 signing (references to avoid clones in hot path) 148 | struct OrderData<'a> { 149 | maker: &'a str, 150 | taker: &'a str, 151 | token_id: &'a str, 152 | maker_amount: &'a str, 153 | taker_amount: &'a str, 154 | side: i32, 155 | fee_rate_bps: &'a str, 156 | nonce: &'a str, 157 | signer: &'a str, 158 | expiration: &'a str, 159 | signature_type: i32, 160 | salt: u128 161 | } 162 | 163 | #[derive(Debug, Clone, Serialize)] 164 | pub struct OrderStruct { 165 | pub salt: u128, 166 | pub maker: String, 167 | pub signer: String, 168 | pub taker: String, 169 | #[serde(rename = "tokenId")] 170 | pub token_id: String, 171 | #[serde(rename = "makerAmount")] 172 | pub maker_amount: String, 173 | #[serde(rename = "takerAmount")] 174 | pub taker_amount: String, 175 | pub expiration: String, 176 | pub nonce: String, 177 | #[serde(rename = "feeRateBps")] 178 | pub fee_rate_bps: String, 179 | pub side: i32, 180 | #[serde(rename = "signatureType")] 181 | pub signature_type: i32, 182 | } 183 | 184 | #[derive(Debug, Clone, Serialize)] 185 | pub struct SignedOrder { 186 | pub order: OrderStruct, 187 | pub signature: String 188 | } 189 | 190 | impl SignedOrder { 191 | pub fn post_body(&self, owner: &str, order_type: &str) -> String { 192 | let side_str = if self.order.side == 0 { "BUY" } else { "SELL" }; 193 | let mut buf = String::with_capacity(512); 194 | buf.push_str(r#"{"order":{"salt":"#); 195 | buf.push_str(&self.order.salt.to_string()); 196 | buf.push_str(r#","maker":""#); 197 | buf.push_str(&self.order.maker); 198 | buf.push_str(r#"","signer":""#); 199 | buf.push_str(&self.order.signer); 200 | buf.push_str(r#"","taker":""#); 201 | buf.push_str(&self.order.taker); 202 | buf.push_str(r#"","tokenId":""#); 203 | buf.push_str(&self.order.token_id); 204 | buf.push_str(r#"","makerAmount":""#); 205 | buf.push_str(&self.order.maker_amount); 206 | buf.push_str(r#"","takerAmount":""#); 207 | buf.push_str(&self.order.taker_amount); 208 | buf.push_str(r#"","expiration":""#); 209 | buf.push_str(&self.order.expiration); 210 | buf.push_str(r#"","nonce":""#); 211 | buf.push_str(&self.order.nonce); 212 | buf.push_str(r#"","feeRateBps":""#); 213 | buf.push_str(&self.order.fee_rate_bps); 214 | buf.push_str(r#"","side":""#); 215 | buf.push_str(side_str); 216 | buf.push_str(r#"","signatureType":"#); 217 | 
buf.push_str(&self.order.signature_type.to_string()); 218 | buf.push_str(r#","signature":""#); 219 | buf.push_str(&self.signature); 220 | buf.push_str(r#""},"owner":""#); 221 | buf.push_str(owner); 222 | buf.push_str(r#"","orderType":""#); 223 | buf.push_str(order_type); 224 | buf.push_str(r#""}"#); 225 | buf 226 | } 227 | } 228 | 229 | #[inline(always)] 230 | fn generate_seed() -> u128 { 231 | (SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_nanos() % u128::from(u32::MAX)) as u128 232 | } 233 | 234 | // ============================================================================ 235 | // ORDER CALCULATIONS 236 | // ============================================================================ 237 | 238 | /// Convert f64 price (0.0-1.0) to basis points (0-10000) 239 | /// e.g., 0.65 -> 6500 240 | #[inline(always)] 241 | pub fn price_to_bps(price: f64) -> u64 { 242 | ((price * 10000.0).round() as i64).max(0) as u64 243 | } 244 | 245 | /// Convert f64 size to micro-units (6 decimal places) 246 | /// e.g., 100.5 -> 100_500_000 247 | #[inline(always)] 248 | pub fn size_to_micro(size: f64) -> u64 { 249 | ((size * 1_000_000.0).floor() as i64).max(0) as u64 250 | } 251 | 252 | /// BUY order calculation 253 | /// Input: size in micro-units, price in basis points 254 | /// Output: (side=0, maker_amount, taker_amount) in token decimals (6 dp) 255 | #[inline(always)] 256 | pub fn get_order_amounts_buy(size_micro: u64, price_bps: u64) -> (i32, u128, u128) { 257 | // For BUY: taker = size (what we receive), maker = size * price (what we pay) 258 | let taker = size_micro as u128; 259 | // maker = size * price / 10000 (convert bps to ratio) 260 | let maker = (size_micro as u128 * price_bps as u128) / 10000; 261 | (0, maker, taker) 262 | } 263 | 264 | /// SELL order calculation 265 | /// Input: size in micro-units, price in basis points 266 | /// Output: (side=1, maker_amount, taker_amount) in token decimals (6 dp) 267 | #[inline(always)] 268 | pub fn get_order_amounts_sell(size_micro: u64, price_bps: u64) -> (i32, u128, u128) { 269 | // For SELL: maker = size (what we give), taker = size * price (what we receive) 270 | let maker = size_micro as u128; 271 | // taker = size * price / 10000 (convert bps to ratio) 272 | let taker = (size_micro as u128 * price_bps as u128) / 10000; 273 | (1, maker, taker) 274 | } 275 | 276 | /// Validate price is within allowed range for tick=0.01 277 | #[inline(always)] 278 | pub fn price_valid(price_bps: u64) -> bool { 279 | // For tick=0.01: price must be >= 0.01 (100 bps) and <= 0.99 (9900 bps) 280 | price_bps >= 100 && price_bps <= 9900 281 | } 282 | 283 | fn order_typed_data(chain_id: u64, exchange: &str, data: &OrderData<'_>) -> Result { 284 | let typed_json = json!({ 285 | "types": { 286 | "EIP712Domain": [ 287 | {"name": "name", "type": "string"}, 288 | {"name": "version", "type": "string"}, 289 | {"name": "chainId", "type": "uint256"}, 290 | {"name": "verifyingContract", "type": "address"} 291 | ], 292 | "Order": [ 293 | {"name":"salt","type":"uint256"}, 294 | {"name":"maker","type":"address"}, 295 | {"name":"signer","type":"address"}, 296 | {"name":"taker","type":"address"}, 297 | {"name":"tokenId","type":"uint256"}, 298 | {"name":"makerAmount","type":"uint256"}, 299 | {"name":"takerAmount","type":"uint256"}, 300 | {"name":"expiration","type":"uint256"}, 301 | {"name":"nonce","type":"uint256"}, 302 | {"name":"feeRateBps","type":"uint256"}, 303 | {"name":"side","type":"uint8"}, 304 | {"name":"signatureType","type":"uint8"} 305 | ] 306 | }, 307 | 
"primaryType": "Order", 308 | "domain": { "name": "Polymarket CTF Exchange", "version": "1", "chainId": chain_id, "verifyingContract": exchange }, 309 | "message": { 310 | "salt": U256::from(data.salt), 311 | "maker": data.maker, 312 | "signer": data.signer, 313 | "taker": data.taker, 314 | "tokenId": U256::from_dec_str(data.token_id)?, 315 | "makerAmount": U256::from_dec_str(data.maker_amount)?, 316 | "takerAmount": U256::from_dec_str(data.taker_amount)?, 317 | "expiration": U256::from_dec_str(data.expiration)?, 318 | "nonce": U256::from_dec_str(data.nonce)?, 319 | "feeRateBps": U256::from_dec_str(data.fee_rate_bps)?, 320 | "side": data.side, 321 | "signatureType": data.signature_type, 322 | } 323 | }); 324 | Ok(serde_json::from_value(typed_json)?) 325 | } 326 | 327 | fn get_exchange_address(chain_id: u64, neg_risk: bool) -> Result { 328 | match (chain_id, neg_risk) { 329 | (137, true) => Ok("0xC5d563A36AE78145C45a50134d48A1215220f80a".into()), 330 | (137, false) => Ok("0x4bFb41d5B3570DeFd03C39a9A4D8dE6Bd8B8982E".into()), 331 | (80002, true) => Ok("0xd91E80cF2E7be2e162c6513ceD06f1dD0dA35296".into()), 332 | (80002, false) => Ok("0xdFE02Eb6733538f8Ea35D585af8DE5958AD99E40".into()), 333 | _ => Err(anyhow!("unsupported chain")), 334 | } 335 | } 336 | 337 | // ============================================================================ 338 | // ORDER TYPES FOR FAK/FOK 339 | // ============================================================================ 340 | 341 | /// Order type for Polymarket 342 | #[derive(Debug, Clone, Copy)] 343 | #[allow(dead_code)] 344 | pub enum PolyOrderType { 345 | /// Good Till Cancelled (default) 346 | GTC, 347 | /// Good Till Time 348 | GTD, 349 | /// Fill Or Kill - must fill entirely or cancel 350 | FOK, 351 | /// Fill And Kill - fill what you can, cancel rest 352 | FAK, 353 | } 354 | 355 | impl PolyOrderType { 356 | pub fn as_str(&self) -> &'static str { 357 | match self { 358 | PolyOrderType::GTC => "GTC", 359 | PolyOrderType::GTD => "GTD", 360 | PolyOrderType::FOK => "FOK", 361 | PolyOrderType::FAK => "FAK", 362 | } 363 | } 364 | } 365 | 366 | // ============================================================================ 367 | // GET ORDER RESPONSE 368 | // ============================================================================ 369 | 370 | /// Response from GET /data/order/{order_id} 371 | #[derive(Debug, Clone, Deserialize)] 372 | #[allow(dead_code)] 373 | pub struct PolymarketOrderResponse { 374 | pub id: String, 375 | pub status: String, 376 | pub market: Option, 377 | pub outcome: Option, 378 | pub price: String, 379 | pub side: String, 380 | pub size_matched: String, 381 | pub original_size: String, 382 | pub maker_address: Option, 383 | pub asset_id: Option, 384 | #[serde(default)] 385 | pub associate_trades: Vec, 386 | #[serde(default)] 387 | pub created_at: Option, // Can be string or integer 388 | #[serde(default)] 389 | pub expiration: Option, // Can be string or integer 390 | #[serde(rename = "type")] 391 | pub order_type: Option, 392 | pub owner: Option, 393 | } 394 | 395 | // ============================================================================ 396 | // ASYNC CLIENT 397 | // ============================================================================ 398 | 399 | /// Async Polymarket client for execution 400 | pub struct PolymarketAsyncClient { 401 | host: String, 402 | chain_id: u64, 403 | http: reqwest::Client, // Async client with connection pooling 404 | wallet: Arc, 405 | funder: String, 406 | wallet_address_str: String, 407 | 
address_header: HeaderValue, 408 | } 409 | 410 | impl PolymarketAsyncClient { 411 | pub fn new(host: &str, chain_id: u64, private_key: &str, funder: &str) -> Result { 412 | let wallet = private_key.parse::()?.with_chain_id(chain_id); 413 | let wallet_address_str = format!("{:?}", wallet.address()); 414 | let address_header = HeaderValue::from_str(&wallet_address_str) 415 | .map_err(|e| anyhow!("Invalid wallet address for header: {}", e))?; 416 | 417 | // Build async client with connection pooling and keepalive 418 | let http = reqwest::Client::builder() 419 | .pool_max_idle_per_host(10) 420 | .pool_idle_timeout(std::time::Duration::from_secs(90)) 421 | .tcp_keepalive(std::time::Duration::from_secs(30)) 422 | .tcp_nodelay(true) 423 | .timeout(std::time::Duration::from_secs(10)) 424 | .build()?; 425 | 426 | Ok(Self { 427 | host: host.trim_end_matches('/').to_string(), 428 | chain_id, 429 | http, 430 | wallet: Arc::new(wallet), 431 | funder: funder.to_string(), 432 | wallet_address_str, 433 | address_header, 434 | }) 435 | } 436 | 437 | /// Build L1 headers for authentication (derive-api-key) 438 | /// wallet.sign_hash() is CPU-bound (~1ms), safe to call in async context 439 | fn build_l1_headers(&self, nonce: u64) -> Result { 440 | let timestamp = current_unix_ts(); 441 | let digest = clob_auth_digest(self.chain_id, &self.wallet_address_str, timestamp, nonce)?; 442 | let sig = self.wallet.sign_hash(digest)?; 443 | let mut headers = HeaderMap::new(); 444 | headers.insert("POLY_ADDRESS", self.address_header.clone()); 445 | headers.insert("POLY_SIGNATURE", HeaderValue::from_str(&format!("0x{}", sig))?); 446 | headers.insert("POLY_TIMESTAMP", HeaderValue::from_str(×tamp.to_string())?); 447 | headers.insert("POLY_NONCE", HeaderValue::from_str(&nonce.to_string())?); 448 | add_default_headers(&mut headers); 449 | Ok(headers) 450 | } 451 | 452 | /// Derive API credentials from L1 wallet signature 453 | pub async fn derive_api_key(&self, nonce: u64) -> Result { 454 | let url = format!("{}/auth/derive-api-key", self.host); 455 | let headers = self.build_l1_headers(nonce)?; 456 | let resp = self.http.get(&url).headers(headers).send().await?; 457 | if !resp.status().is_success() { 458 | let status = resp.status(); 459 | let body = resp.text().await.unwrap_or_default(); 460 | return Err(anyhow!("derive-api-key failed: {} {}", status, body)); 461 | } 462 | Ok(resp.json().await?) 
463 | } 464 | 465 | /// Build L2 headers for authenticated requests 466 | fn build_l2_headers(&self, method: &str, path: &str, body: Option<&str>, creds: &PreparedCreds) -> Result { 467 | let timestamp = current_unix_ts(); 468 | let mut message = format!("{}{}{}", timestamp, method, path); 469 | if let Some(b) = body { message.push_str(b); } 470 | 471 | let sig_b64 = creds.sign_b64(message.as_bytes()); 472 | 473 | let mut headers = HeaderMap::with_capacity(9); 474 | headers.insert("POLY_ADDRESS", self.address_header.clone()); 475 | headers.insert("POLY_SIGNATURE", HeaderValue::from_str(&sig_b64)?); 476 | headers.insert("POLY_TIMESTAMP", HeaderValue::from_str(×tamp.to_string())?); 477 | headers.insert("POLY_API_KEY", creds.api_key_header()); 478 | headers.insert("POLY_PASSPHRASE", creds.passphrase_header()); 479 | add_default_headers(&mut headers); 480 | Ok(headers) 481 | } 482 | 483 | /// Post order 484 | pub async fn post_order_async(&self, body: String, creds: &PreparedCreds) -> Result { 485 | let path = "/order"; 486 | let url = format!("{}{}", self.host, path); 487 | let headers = self.build_l2_headers("POST", path, Some(&body), creds)?; 488 | 489 | let resp = self.http 490 | .post(&url) 491 | .headers(headers) 492 | .body(body) 493 | .send() 494 | .await?; 495 | 496 | Ok(resp) 497 | } 498 | 499 | /// Get order by ID 500 | pub async fn get_order_async(&self, order_id: &str, creds: &PreparedCreds) -> Result { 501 | let path = format!("/data/order/{}", order_id); 502 | let url = format!("{}{}", self.host, path); 503 | let headers = self.build_l2_headers("GET", &path, None, creds)?; 504 | 505 | let resp = self.http 506 | .get(&url) 507 | .headers(headers) 508 | .send() 509 | .await?; 510 | 511 | if !resp.status().is_success() { 512 | let status = resp.status(); 513 | let body = resp.text().await.unwrap_or_default(); 514 | return Err(anyhow!("get_order failed {}: {}", status, body)); 515 | } 516 | 517 | Ok(resp.json().await?) 
518 | } 519 | 520 | /// Check neg_risk for token - with caching 521 | pub async fn check_neg_risk(&self, token_id: &str) -> Result { 522 | let url = format!("{}/neg-risk?token_id={}", self.host, token_id); 523 | let resp = self.http 524 | .get(&url) 525 | .header("User-Agent", USER_AGENT) 526 | .send() 527 | .await?; 528 | 529 | let val: serde_json::Value = resp.json().await?; 530 | Ok(val["neg_risk"].as_bool().unwrap_or(false)) 531 | } 532 | 533 | #[allow(dead_code)] 534 | pub fn wallet_address(&self) -> &str { 535 | &self.wallet_address_str 536 | } 537 | 538 | #[allow(dead_code)] 539 | pub fn funder(&self) -> &str { 540 | &self.funder 541 | } 542 | 543 | #[allow(dead_code)] 544 | pub fn wallet(&self) -> &LocalWallet { 545 | &self.wallet 546 | } 547 | } 548 | 549 | /// Shared async client wrapper for use in execution engine 550 | pub struct SharedAsyncClient { 551 | inner: Arc, 552 | creds: PreparedCreds, 553 | chain_id: u64, 554 | /// Pre-cached neg_risk lookups 555 | neg_risk_cache: std::sync::RwLock>, 556 | } 557 | 558 | impl SharedAsyncClient { 559 | pub fn new(client: PolymarketAsyncClient, creds: PreparedCreds, chain_id: u64) -> Self { 560 | Self { 561 | inner: Arc::new(client), 562 | creds, 563 | chain_id, 564 | neg_risk_cache: std::sync::RwLock::new(HashMap::new()), 565 | } 566 | } 567 | 568 | /// Load neg_risk cache from JSON file (output of build_sports_cache.py) 569 | pub fn load_cache(&self, path: &str) -> Result { 570 | let data = std::fs::read_to_string(path)?; 571 | let map: HashMap = serde_json::from_str(&data)?; 572 | let count = map.len(); 573 | let mut cache = self.neg_risk_cache.write().unwrap(); 574 | *cache = map; 575 | Ok(count) 576 | } 577 | 578 | /// Execute FAK buy order - 579 | pub async fn buy_fak(&self, token_id: &str, price: f64, size: f64) -> Result { 580 | debug_assert!(!token_id.is_empty(), "token_id must not be empty"); 581 | debug_assert!(price > 0.0 && price < 1.0, "price must be 0 < p < 1"); 582 | debug_assert!(size >= 1.0, "size must be >= 1"); 583 | self.execute_order(token_id, price, size, "BUY").await 584 | } 585 | 586 | /// Execute FAK sell order - 587 | pub async fn sell_fak(&self, token_id: &str, price: f64, size: f64) -> Result { 588 | debug_assert!(!token_id.is_empty(), "token_id must not be empty"); 589 | debug_assert!(price > 0.0 && price < 1.0, "price must be 0 < p < 1"); 590 | debug_assert!(size >= 1.0, "size must be >= 1"); 591 | self.execute_order(token_id, price, size, "SELL").await 592 | } 593 | 594 | async fn execute_order(&self, token_id: &str, price: f64, size: f64, side: &str) -> Result { 595 | // Check neg_risk cache first 596 | let neg_risk = { 597 | let cache = self.neg_risk_cache.read().unwrap(); 598 | cache.get(token_id).copied() 599 | }; 600 | 601 | let neg_risk = match neg_risk { 602 | Some(nr) => nr, 603 | None => { 604 | let nr = self.inner.check_neg_risk(token_id).await?; 605 | let mut cache = self.neg_risk_cache.write().unwrap(); 606 | cache.insert(token_id.to_string(), nr); 607 | nr 608 | } 609 | }; 610 | 611 | // Build signed order 612 | let signed = self.build_signed_order(token_id, price, size, side, neg_risk)?; 613 | // Owner must be the API key (not wallet address or funder!) 
614 | let body = signed.post_body(&self.creds.api_key, PolyOrderType::FAK.as_str()); 615 | 616 | // Post order 617 | let resp = self.inner.post_order_async(body, &self.creds).await?; 618 | 619 | if !resp.status().is_success() { 620 | let status = resp.status(); 621 | let body = resp.text().await.unwrap_or_default(); 622 | return Err(anyhow!("Polymarket order failed {}: {}", status, body)); 623 | } 624 | 625 | let resp_json: serde_json::Value = resp.json().await?; 626 | let order_id = resp_json["orderID"].as_str().unwrap_or("unknown").to_string(); 627 | 628 | // Query fill status 629 | let order_info = self.inner.get_order_async(&order_id, &self.creds).await?; 630 | let filled_size: f64 = order_info.size_matched.parse().unwrap_or(0.0); 631 | let order_price: f64 = order_info.price.parse().unwrap_or(price); 632 | 633 | tracing::debug!( 634 | "[POLY-ASYNC] FAK {} {}: status={}, filled={:.2}/{:.2}, price={:.4}", 635 | side, order_id, order_info.status, filled_size, size, order_price 636 | ); 637 | 638 | Ok(PolyFillAsync { 639 | order_id, 640 | filled_size, 641 | fill_cost: filled_size * order_price, 642 | }) 643 | } 644 | 645 | /// Build a signed order 646 | fn build_signed_order( 647 | &self, 648 | token_id: &str, 649 | price: f64, 650 | size: f64, 651 | side: &str, 652 | neg_risk: bool, 653 | ) -> Result { 654 | let price_bps = price_to_bps(price); 655 | let size_micro = size_to_micro(size); 656 | 657 | if !price_valid(price_bps) { 658 | return Err(anyhow!("price {} ({}bps) outside allowed range", price, price_bps)); 659 | } 660 | 661 | let (side_code, maker_amt, taker_amt) = if side.eq_ignore_ascii_case("BUY") { 662 | get_order_amounts_buy(size_micro, price_bps) 663 | } else if side.eq_ignore_ascii_case("SELL") { 664 | get_order_amounts_sell(size_micro, price_bps) 665 | } else { 666 | return Err(anyhow!("side must be BUY or SELL")); 667 | }; 668 | 669 | let salt = generate_seed(); 670 | let maker_amount_str = maker_amt.to_string(); 671 | let taker_amount_str = taker_amt.to_string(); 672 | 673 | // Use references for EIP712 signing 674 | let data = OrderData { 675 | maker: &self.inner.funder, 676 | taker: ZERO_ADDRESS, 677 | token_id, 678 | maker_amount: &maker_amount_str, 679 | taker_amount: &taker_amount_str, 680 | side: side_code, 681 | fee_rate_bps: "0", 682 | nonce: "0", 683 | signer: &self.inner.wallet_address_str, 684 | expiration: "0", 685 | signature_type: 1, 686 | salt, 687 | }; 688 | let exchange = get_exchange_address(self.chain_id, neg_risk)?; 689 | let typed = order_typed_data(self.chain_id, &exchange, &data)?; 690 | let digest = typed.encode_eip712()?; 691 | 692 | let sig = self.inner.wallet.sign_hash(H256::from(digest))?; 693 | 694 | // Only allocate strings once for the final OrderStruct (serialization needs owned) 695 | Ok(SignedOrder { 696 | order: OrderStruct { 697 | salt, 698 | maker: self.inner.funder.clone(), 699 | signer: self.inner.wallet_address_str.clone(), 700 | taker: ZERO_ADDRESS.to_string(), 701 | token_id: token_id.to_string(), 702 | maker_amount: maker_amount_str, 703 | taker_amount: taker_amount_str, 704 | expiration: "0".to_string(), 705 | nonce: "0".to_string(), 706 | fee_rate_bps: "0".to_string(), 707 | side: side_code, 708 | signature_type: 1, 709 | }, 710 | signature: format!("0x{}", sig), 711 | }) 712 | } 713 | } 714 | 715 | /// Async fill result 716 | #[derive(Debug, Clone)] 717 | pub struct PolyFillAsync { 718 | pub order_id: String, 719 | pub filled_size: f64, 720 | pub fill_cost: f64, 721 | } 
--------------------------------------------------------------------------------