├── .gitignore ├── LICENSE.md ├── README.md ├── api.py ├── bitmex.py ├── bitmex_ws.py ├── broker.py ├── data.py ├── event_types.py ├── exchange.py ├── features.py ├── messaging_clients.py ├── misc testing ├── api_order_submission_test.py ├── bitmex_auth_test.py ├── db_object_queries.py ├── feature_test.py ├── model_test.py ├── pnl_calc_test.py ├── portfolio_analytics_test.py ├── simple_api_test.py ├── snapshot_image_test.py ├── static_image_test.py ├── strategy_test.py ├── tick_parse_test.py └── timestamp_test.py ├── model.py ├── portfolio.py ├── requirements.txt ├── resample.py ├── server.py ├── server_test.py ├── setup.py ├── strategy.py └── trade_types.py /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | *.pyc 3 | op_data.csv 4 | trade.json 5 | *.png 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # trading-server 2 | A multi-asset, multi-strategy, event-driven trade execution and management platform for running many algorithms/bots at many venues simultaneously, with unified risk management and reporting. 3 | 4 | This is not a standalone trading bot. You need to install and run this on a server or VPS using your own trading algorithms. 5 | 6 | ## Installation 7 | 8 | Using python 3.9 9 | 10 | 1. Install mongodb (https://www.mongodb.com/) 11 | 2. Install TA-Lib python bindings (links to wheels here https://blog.quantinsti.com/install-ta-lib-python/) and binaries (https://mrjbq7.github.io/ta-lib/install.html) 12 | 3. Set up a telegram bot, record the bot key in enviroment variable TELEGRAM_BOT_TOKEN. 13 | 4. Create a whitelist for telegram account ID's you want to have control of the server, recorded in environment variable TELEGRAM_BOT_WHITELIST, eg [, ] 14 | 5. 
Set up accounts for all venues you will trade at, recording API keys and secret keys in environment variables _API_KEY and _API_SECRET 15 | 6. Configure what venues, instruments, models and timeframes you want to trade in server.py and model.py. 16 | 7. Install dependencies in requirements.txt 17 | 8. Run the server with python server_test.py. Note it will take some time to fetch historical data for the instruments you are trading. 18 | 19 | 20 | ## Current features 21 | Trade any API-accessible market with unified multi-strategy portfolio management, autonomously or semi-autonomously. 22 | 23 | Allocation-based risk management (allocate x% of capital to specific strategies with y% exposure per strategy). 24 | 25 | Portfolio performance metrics and tracking. Tracks the following: 26 | 27 | 28 | 29 | Feature library - assemble new strategies quickly from existing features. 30 | 31 | Trade consent via Telegram (or write your own messaging client). Accept, veto or tweak trade setups before they are actioned. 32 | 33 | 34 | 35 | ## WIP features 36 | 37 | Account multicasting - trade as many accounts on as many platforms as desired. 
38 | 39 | UI - web dashboard for portfolio stats and individual trade metrics 40 | 41 | Integration with Backtrader 42 | 43 | Blockchain-based strategy auditing - publish trade signals to IPFS and Ethereum/BSC to empirically prove win rate over time 44 | 45 | Accounting and compliance reporting 46 | 47 | ## Venue support 48 | 49 | Venue | Integration status | Instrument types 50 | ---------|-----------|------------ 51 | [](https://www.bitmex.com/register/hhGBvP) | Complete | Crypto derivatives 52 | [](https://www.binance.com/en/register?ref=39168428) | Planned | Crypto spot & derivatives 53 | IG Markets | Planned | FX, equity, commodity & index CFD's 54 | Interactive Brokers | Planned | FX, equity, commodity & index CFD's 55 | Deribit | Planned | Crypto derivatives & options 56 | 57 | ## Market data 58 | 1 minute resolution OHLCV bars for all watched instruments are stored with MongoDB. 59 | 60 | This software works with 1 minute and above resolution strategies. Tick-resolution support planned later. With this in mind, the software converts tick data to 1 min bars where live tick data is available, but doesn't store ticks locally (i.e. it handles tick data but doesn't use it as is, yet). 61 | 62 | ## Strategy modelling 63 | Individual strategy implementations are not included. A simple moving average cross model is included as an example only. 64 | Custom strategy implementations, collaboration or any other enquiries please email me at sam@sdbgroup.io. 65 | 66 | ## Collaboration 67 | Feature requests and discussion regarding new features are very welcome, please reach out. 68 | 69 | ## External libraries 70 | TA-LIB - https://mrjbq7.github.io/ta-lib/ 71 | 72 | Backtrader - https://www.backtrader.com/ 73 | 74 | ## Acknowledgements 75 | Based on architecture described by Michael Halls-Moore at QuantStart.com (qsTrader), and written works by E. Chan and M. Lopez de Prado. Thanks all. 
# Portfolio data route
@app.route("/portfolio", methods=['GET'])
def return_portfolio():
    """
    Return the stored portfolio state as JSON.

    Responds 200 with the portfolio document (Mongo _id stripped) when
    one exists, 404 when no portfolio is stored, 403 for non-GET methods.
    """
    if request.method == 'GET':
        portfolio = db_other['portfolio'].find_one({"id": 1}, {"_id": 0})
        if portfolio:
            return json.dumps(portfolio), 200, {'ContentType': 'application/json'}
        else:
            # BUG FIX: the original split this return across two lines
            # without parentheses, so only a 1-tuple (body,) was returned
            # and the 404 status/headers line was dead code. Parenthesise
            # the (body, status, headers) tuple so Flask receives all three.
            return (json.dumps({'success': False, 'message': 'Not found'}),
                    404, {'ContentType': 'application/json'})
    else:
        # Same fix as above for the invalid-method branch.
        return (json.dumps({'success': False, 'message': 'Invalid method'}),
                403, {'ContentType': 'application/json'})
# Portfolio settings route
@app.route("/portfolio/settings/", methods=['POST'])
def change_portfolio_settings():
    """
    Apply new portfolio settings.

    Responds 200 with a confirmation string for POST requests,
    403 for any other method.
    """
    if request.method == 'POST':
        # TODO: set new portfolio settings before acknowledging.
        return " Posted to /portfolio/settings/ successfully"
    else:
        # BUG FIX: the original split this return across two lines without
        # parentheses, so only a 1-tuple (body,) was returned and the 403
        # status/headers line was dead code. Parenthesise the whole
        # (body, status, headers) tuple.
        return (json.dumps({'success': False, 'message': 'Invalid method'}),
                403, {'ContentType': 'application/json'})
37 | self.MAX_SIZE = 15000 * len(symbols) 38 | self.RECONNECT_TIMEOUT = 10 39 | 40 | self.connect() 41 | 42 | def connect(self): 43 | """ 44 | Args: 45 | None 46 | 47 | Returns: 48 | Starts the websocket in a thread and connects to subscription 49 | channels. 50 | 51 | Raises: 52 | None. 53 | """ 54 | 55 | self.ws = websocket.WebSocketApp( 56 | self.URL, 57 | on_message=lambda ws, msg: self.on_message(ws, msg), 58 | on_error=lambda ws, msg: self.on_error(ws, msg), 59 | on_close=lambda ws: self.on_close(ws), 60 | on_open=lambda ws: self.on_open(ws)) 61 | 62 | thread = Thread( 63 | target=lambda: self.ws.run_forever(), 64 | daemon=True) 65 | thread.start() 66 | self.logger.info("Started websocket daemon.") 67 | 68 | timeout = self.RECONNECT_TIMEOUT 69 | while not self.ws.sock or not self.ws.sock.connected and timeout: 70 | sleep(1) 71 | timeout -= 1 72 | if not timeout: 73 | self.logger.info("Websocket connection timed out.") 74 | # Attempt to reconnect 75 | if not self.ws.sock.connected: 76 | sleep(5) 77 | self.connect() 78 | 79 | def on_message(self, ws, msg): 80 | """ 81 | Handles incoming websocket messages. 82 | 83 | Args: 84 | ws: WebSocketApp object 85 | msg: message object 86 | 87 | Returns: 88 | None. 89 | 90 | Raises: 91 | Exception("Unknown") 92 | """ 93 | 94 | msg = json.loads(msg) 95 | # self.logger.info(json.dumps(msg)) 96 | table = msg['table'] if 'table' in msg else None 97 | action = msg['action'] if 'action' in msg else None 98 | try: 99 | 100 | if 'subscribe' in msg: 101 | self.logger.info( 102 | "Subscribed to " + msg['subscribe'] + ".") 103 | 104 | elif action: 105 | if table not in self.data: 106 | self.data[table] = [] 107 | 108 | if action == 'partial': 109 | self.data[table] = msg['data'] 110 | self.keys[table] = msg['keys'] 111 | 112 | elif action == 'insert': 113 | self.data[table] += msg['data'] 114 | 115 | # Trim data table size when it exceeds MAX_SIZE. 
116 | if(table not in ['order', 'orderBookL2'] and 117 | len(self.data[table]) > self.MAX_SIZE): 118 | self.data[table] = self.data[table][self.MAX_SIZE // 2:] 119 | 120 | elif action == 'update': 121 | # Locate the item in the collection and update it. 122 | for updateData in msg['data']: 123 | item = self.find_item_by_keys( 124 | self.keys[table], 125 | self.data[table], 126 | updateData) 127 | if not item: 128 | return # No item found to update. 129 | item.update(updateData) 130 | # Remove cancelled / filled orders. 131 | if table == 'order' and not self.match_leaves_quantity(item): # noqa 132 | self.data[table].remove(item) 133 | 134 | elif action == 'delete': 135 | # Locate the item in the collection and remove it. 136 | for deleteData in msg['data']: 137 | item = self.find_item_by_keys( 138 | self.keys[table], 139 | self.data[table], 140 | deleteData) 141 | self.data[table].remove(item) 142 | else: 143 | if action is not None: 144 | raise Exception("Unknown action: %s" % action) 145 | except Exception: 146 | self.logger.info(traceback.format_exc()) 147 | 148 | def on_open(self, ws): 149 | """ 150 | Invoked when websocket starts. Used to subscribe to channels. 151 | 152 | Args: 153 | ws: WebSocketApp object 154 | 155 | Returns: 156 | None. 157 | 158 | Raises: 159 | None. 160 | """ 161 | 162 | ws.send(self.get_channel_subscription_string()) 163 | 164 | def on_error(self, ws, msg): 165 | """ 166 | Invoked when websocket encounters an error. Will attempt to 167 | reconnect websocket after an error. 168 | 169 | Args: 170 | ws: WebSocketApp object 171 | msg: message object 172 | 173 | Returns: 174 | None. 175 | 176 | Raises: 177 | None. 
178 | """ 179 | 180 | self.logger.info("BitMEX websocket error: " + str(msg)) 181 | 182 | # attempt to reconnect if ws is not connected 183 | self.ws = None 184 | self.logger.info("Attempting to reconnect.") 185 | sleep(self.RECONNECT_TIMEOUT) 186 | self.connect() 187 | 188 | def on_close(self, ws): 189 | """ 190 | Invoked when websocket closes. 191 | 192 | Args: 193 | ws: WebSocketApp object 194 | 195 | Returns: 196 | Invoked when websocket closes. 197 | 198 | Raises: 199 | None. 200 | """ 201 | 202 | ws.close() 203 | 204 | def get_orderbook(self): 205 | """ 206 | Returns the L2 orderbook. 207 | 208 | Args: 209 | None. 210 | 211 | Returns: 212 | L2 Orderbook (list). 213 | 214 | Raises: 215 | None. 216 | """ 217 | 218 | return self.data['orderBookL2'] 219 | 220 | def get_ticks(self): 221 | """ 222 | Returns ticks for the recent minute. 223 | 224 | Args: 225 | None. 226 | 227 | Returns: 228 | Ticks (list) 229 | 230 | Raises: 231 | None. 232 | """ 233 | 234 | return self.data['trade'] 235 | 236 | def find_item_by_keys(self, keys, table, match_data): 237 | """ 238 | Finds an item in the data table using the provided key. 239 | 240 | Args: 241 | keys: key array object 242 | table: data table object 243 | match_data: key to match 244 | 245 | Returns: 246 | item: matched item. 247 | 248 | Raises: 249 | None. 250 | """ 251 | 252 | for item in table: 253 | matched = True 254 | for key in keys: 255 | if item[key] != match_data[key]: 256 | matched = False 257 | if matched: 258 | return item 259 | 260 | def get_channel_subscription_string(self): 261 | """ 262 | Returns websocket channel subscription string. 263 | 264 | Args: 265 | None. 266 | 267 | Returns: 268 | Subscription payload (string) for all symbols and channels. 269 | 270 | Raises: 271 | None. 
272 | """ 273 | 274 | prefix = '{"op": "subscribe", "args": [' 275 | suffix = ']}' 276 | string = "" 277 | 278 | count = 0 279 | for symbol in self.symbols: 280 | for channel in self.channels: 281 | string += '"' + channel + ':' + str(symbol) + '"' 282 | count += 1 283 | if count < len(self.channels) * len(self.symbols): 284 | string += ", " 285 | return prefix + string + suffix 286 | 287 | def match_leaves_quantity(self, o): 288 | """ 289 | Args: 290 | o: item to match 291 | Returns: 292 | True if o['leavesQty'] is zero, False if > 0 293 | 294 | Raises: 295 | None. 296 | """ 297 | if o['leavesQty'] is None: 298 | return True 299 | return o['leavesQty'] > 0 300 | -------------------------------------------------------------------------------- /broker.py: -------------------------------------------------------------------------------- 1 | """ 2 | trading-server is a multi-asset, multi-strategy, event-driven execution 3 | and backtesting platform (OEMS) for trading common markets. 4 | 5 | Copyright (C) 2020 Sam Breznikar 6 | 7 | Licensed under GNU General Public License 3.0 or later. 8 | 9 | Some rights reserved. See LICENSE.md, AUTHORS.md. 10 | """ 11 | 12 | from messaging_clients import Telegram 13 | from event_types import FillEvent 14 | from threading import Thread 15 | from time import sleep 16 | import traceback 17 | import datetime 18 | import json 19 | import sys 20 | 21 | 22 | class Broker: 23 | """ 24 | Broker consumes Order events, executes orders, then creates and places 25 | Fill events in the main event queue post-transaction. 26 | """ 27 | 28 | def __init__(self, exchanges, logger, portfolio, db_other, db_client, 29 | live_trading, telegram): 30 | self.exchanges = {i.get_name(): i for i in exchanges} 31 | self.logger = logger 32 | self.pf = portfolio 33 | self.db_other = db_other 34 | self.db_client = db_client 35 | self.live_trading = live_trading 36 | self.tg = telegram 37 | 38 | # Container for order batches {trade_id: [order objects]}. 
39 | self.orders = {} 40 | 41 | # Start FillAgent. 42 | self.fill_agent = FillAgent(self.logger, self.pf, self.exchanges) 43 | 44 | def new_order(self, events, order_event): 45 | """ 46 | Process and store incoming order events. 47 | 48 | Args: 49 | events: event queue object. 50 | event: new market event. 51 | 52 | Returns: 53 | None. 54 | 55 | Raises: 56 | None. 57 | """ 58 | 59 | new_order = order_event.get_order_dict() 60 | 61 | # Store incoming orders under trade ID {trade_id: [orders]} 62 | try: 63 | self.orders[new_order['trade_id']].append(new_order) 64 | 65 | except KeyError: 66 | self.orders[new_order['trade_id']] = [new_order] 67 | # traceback.print_exc() 68 | 69 | def check_consent(self, events): 70 | """ 71 | Place orders if all orders present and user accepts pending trades. 72 | 73 | Args: 74 | events: event queue object. 75 | 76 | Returns: 77 | None. 78 | 79 | Raises: 80 | None. 81 | """ 82 | 83 | if self.orders.keys(): 84 | 85 | to_remove = [] 86 | 87 | for trade_id in self.orders.keys(): 88 | 89 | # Action user responses from telegram, if any 90 | self.register_telegram_responses(trade_id) 91 | 92 | # Get stored trade state from DB 93 | trade = dict(self.db_other['trades'].find_one({"trade_id": trade_id}, {"_id": 0})) 94 | 95 | # Count received orders for that trade 96 | order_count = len(self.orders[trade_id]) 97 | venue = self.orders[trade_id][0]['venue'] 98 | 99 | # User has accepted the trade. 100 | if trade['consent'] is True: 101 | if order_count == trade['order_count']: 102 | self.logger.info( 103 | "Trade " + str(trade_id) + " order batch ready.") 104 | 105 | # Place orders. 106 | order_confs = self.exchanges[venue].place_bulk_orders( 107 | self.orders[trade_id]) 108 | 109 | # Update portfolio state with order placement details. 
110 | if order_confs: 111 | self.pf.new_order_conf(order_confs, events) 112 | self.logger.info("Orders for trade " + str(trade_id) + " submitted to venue.") 113 | 114 | else: 115 | self.logger.info("Order submission for " + str(trade_id) + " may have failed or only partially succeeded.") 116 | # raise Exception("Caution: manual order and position check required for trade " + str(trade_id) + ".") 117 | 118 | to_remove.append(trade_id) 119 | 120 | else: 121 | self.logger.info("Order batch for trade " + str(trade_id) + " not yet ready.") 122 | 123 | # User has not yet made a decision. 124 | elif trade['consent'] is None: 125 | self.logger.info("Trade " + str(trade_id) + " awaiting user review.") 126 | 127 | # User has rejected the trade. 128 | elif trade['consent'] is False: 129 | self.pf.trade_complete(trade_id) 130 | to_remove.append(trade_id) 131 | 132 | # Unkown consent case 133 | else: 134 | raise Exception("Unknown case for trade consent:", trade['consent']) 135 | 136 | # Remove sent orders after iteration complete. 137 | for t_id in to_remove: 138 | del self.orders[t_id] 139 | 140 | else: 141 | pass 142 | self.logger.info("No trades awaiting review.") 143 | 144 | def check_overdue_trades(self): 145 | """ 146 | Check for trades that have not been accepted by user and dont have pending orders with Broker. 147 | This may occur if system crashes and resumes before user accepts or vetos pending trades. 148 | 149 | Args: 150 | None 151 | 152 | Returns: 153 | None. 154 | 155 | Raises: 156 | None. 157 | """ 158 | pass 159 | 160 | def register_telegram_responses(self, trade_id): 161 | """ 162 | Check telegram messages to determine acceptance/veto of trade. 163 | 164 | Update DB to reflect users choice. 165 | 166 | Args: 167 | trade_id: id of trade to check for 168 | 169 | Returns: 170 | None. 171 | 172 | Raises: 173 | None. 
174 | """ 175 | 176 | for response in self.tg.get_updates(): 177 | 178 | u_id = None 179 | msg_type = None 180 | t_id = str(trade_id) 181 | 182 | # Message field may be 'message' or 'edited_message' 183 | try: 184 | u_id = str(response['message']['from']['id']) 185 | msg_type = 'message' 186 | except KeyError: 187 | u_id = str(response['edited_message']['from']['id']) 188 | msg_type = 'edited_message' 189 | 190 | # Response must have came from a whitelisted account. 191 | try: 192 | if u_id in self.tg.whitelist: 193 | 194 | # Response ID must match trade ID. 195 | if str(response[msg_type]['text'][:len(t_id)]) == t_id: 196 | 197 | # Response timestamp must be greater than signal trigger time. 198 | trade_ts = self.db_other['trades'].find_one({"trade_id": trade_id})['signal_timestamp'] 199 | response_ts = response[msg_type]['date'] 200 | if response_ts > trade_ts: 201 | 202 | try: 203 | decision = response[msg_type]['text'].split(" - ", 1) 204 | if decision[1] == "Accept": 205 | self.db_other['trades'].update_one({"trade_id": trade_id}, {"$set": {"consent": True}}) 206 | self.pf.pf['trades'][t_id]['consent'] = True 207 | 208 | elif decision[1] == "Veto": 209 | self.db_other['trades'].update_one({"trade_id": trade_id}, {"$set": {"consent": False}}) 210 | self.pf.pf['trades'][t_id]['consent'] = False 211 | 212 | else: 213 | self.logger.info("Unknown input received as response to trade " + t_id + " consent message: " + decision[1]) 214 | 215 | except Exception: 216 | traceback.print_exc() 217 | 218 | # Unexpected response format in updates 219 | except Exception: 220 | traceback.print_exc() 221 | print(json.dumps(response)) 222 | 223 | def check_fills(self, events): 224 | """ 225 | Check orders have been filled by comparing portfolio and venue order 226 | states. Create fill events when orders have been filled. 
227 | """ 228 | 229 | if self.fill_agent.fills: 230 | for fill_event in self.fill_agent.fills: 231 | events.put(fill_event) 232 | 233 | self.fill_agent.fills = [] 234 | self.logger.info("Parsing order fill messages.") 235 | 236 | return events 237 | 238 | 239 | class FillAgent: 240 | """ 241 | Check for new fills in separate thread on specified intervals/conditions. 242 | """ 243 | 244 | # Check for fills on the (60 - CHECK_INTERVAL)th second of each minute. 245 | CHECK_INTERVAL = 25 246 | 247 | def __init__(self, logger, portfolio, exchanges): 248 | self.logger = logger 249 | self.pf = portfolio.load_portfolio() 250 | self.exchanges = exchanges 251 | 252 | self.fills = [] 253 | 254 | thread = Thread(target=lambda: self.start(portfolio), daemon=True) 255 | thread.start() 256 | 257 | self.logger.info("Started FillAgent.") 258 | 259 | def start(self, portfolio): 260 | """ 261 | """ 262 | 263 | sleep(self.seconds_til_next_minute()) 264 | 265 | while True: 266 | sleep(60 - self.CHECK_INTERVAL) 267 | 268 | self.pf = portfolio.load_portfolio() 269 | 270 | # Get snapshot of orders saved locally. 271 | active_venues = set() 272 | portfolio_order_snapshot = [] 273 | for t_id in self.pf['trades'].keys(): 274 | if self.pf['trades'][t_id]['active']: 275 | 276 | active_venues.add(self.pf['trades'][t_id]['venue']) 277 | 278 | for o_id in self.pf['trades'][t_id]['orders'].keys(): 279 | portfolio_order_snapshot.append(( 280 | # (v_id, o_id, status, venue name) 281 | self.pf['trades'][t_id]['orders'][o_id][ 282 | 'venue_id'], 283 | o_id, 284 | self.pf['trades'][t_id]['orders'][o_id]['status'], 285 | self.pf['trades'][t_id]['orders'][o_id]['venue'])) 286 | 287 | # Get orders from all venues with active trades. 288 | orders = [] 289 | for venue in list(active_venues): 290 | orders = orders + self.exchanges[venue].get_orders() 291 | 292 | # Snapshot actual order state. 
293 | actual_order_snapshot = [] 294 | for order in portfolio_order_snapshot: 295 | for conf in orders: 296 | if conf['venue_id'] == order[0]: 297 | actual_order_snapshot.append(( 298 | conf['venue_id'], 299 | conf['order_id'], 300 | conf['status'], 301 | conf)) 302 | 303 | # Compare actual order state to local portfolio state. 304 | for port, actual in zip( 305 | portfolio_order_snapshot, actual_order_snapshot): 306 | if port[0] == actual[0]: 307 | if port[2] != actual[2]: 308 | 309 | # Order has been filled or cancelled. 310 | if ( 311 | actual[2] == "FILLED" or actual[2] == "PARTIAL" 312 | or actual[2] == "CANCELLED"): 313 | 314 | # Derive the trade ID from order id. 315 | fill_conf = actual[3] 316 | fill_conf['trade_id'] = actual[1].partition("-")[0] 317 | 318 | # Store the new fill event. 319 | self.fills.append(FillEvent(fill_conf)) 320 | 321 | else: 322 | # Something wrong with code if status is wrong. 323 | raise Exception( 324 | "Order status code error:", actual[2]) 325 | 326 | else: 327 | # Something critically wrong if theres a missing venue ID. 328 | raise Exception("Order ID mistmatch. \nPortfolio v_id:", 329 | port[0], "Actual v_id:", actual[0]) 330 | 331 | # Wait til next minute elapses. 332 | sleep(self.seconds_til_next_minute()) 333 | 334 | def seconds_til_next_minute(self): 335 | now = datetime.datetime.utcnow().second 336 | delay = 60 - now 337 | return delay 338 | -------------------------------------------------------------------------------- /data.py: -------------------------------------------------------------------------------- 1 | """ 2 | trading-server is a multi-asset, multi-strategy, event-driven execution 3 | and backtesting platform (OEMS) for trading common markets. 4 | 5 | Copyright (C) 2020 Sam Breznikar 6 | 7 | Licensed under GNU General Public License 3.0 or later. 8 | 9 | Some rights reserved. See LICENSE.md, AUTHORS.md. 
10 | """ 11 | 12 | from event_types import MarketEvent 13 | from itertools import groupby, count 14 | from pymongo import MongoClient, errors 15 | from itertools import groupby, count 16 | from event_types import MarketEvent 17 | import pymongo 18 | import queue 19 | import time 20 | import json 21 | 22 | 23 | class Datahandler: 24 | """ 25 | Datahandler wraps exchange data and locally stored data with Market 26 | events and adds it to the event queue as each timeframe period elapses. 27 | 28 | Market events are created from either live or stored data (depending on 29 | if backtesting or live trading) and pushed to the event queue for the 30 | Strategy object to consume. 31 | """ 32 | 33 | def __init__(self, exchanges, logger, db, db_client): 34 | self.exchanges = exchanges 35 | self.logger = logger 36 | self.db = db 37 | self.db_client = db_client 38 | self.db_collections = { 39 | i.get_name(): db[i.get_name()] for i in self.exchanges} 40 | self.live_trading = False 41 | self.ready = False 42 | self.total_instruments = self.get_total_instruments() 43 | self.bars_save_to_db = queue.Queue(0) 44 | 45 | # Data processing performance tracking variables. 46 | self.parse_count = 0 47 | self.total_parse_time = 0 48 | self.mean_parse_time = 0 49 | self.std_dev_parse_time = 0 50 | self.var_parse_time = 0 51 | 52 | def update_market_data(self, events): 53 | """ 54 | Pushes new market events to the event queue. 55 | 56 | Args: 57 | events: empty event queue object. 58 | Returns: 59 | events: event queue object filled with new market events. 60 | Raises: 61 | None. 62 | """ 63 | 64 | if self.live_trading: 65 | market_data = self.get_new_data() 66 | 67 | else: 68 | market_data = self.get_historic_data() 69 | 70 | for event in market_data: 71 | events.put(event) 72 | 73 | return events 74 | 75 | def get_new_data(self): 76 | """ 77 | Return a list of market events (new bars) for all symbols from 78 | all exchanges for the just-elapsed time period. 
Add new bar data 79 | to queue for storage in DB, after current minutes cycle completes. 80 | 81 | Logs parse time for tick processing. 82 | 83 | Args: 84 | None. 85 | Returns: 86 | new_market_events: list containing new market events. 87 | Raises: 88 | None. 89 | """ 90 | 91 | # Record tick parse performance. 92 | self.logger.info("Started parsing new ticks.") 93 | start_parse = time.time() 94 | for exchange in self.exchanges: 95 | exchange.parse_ticks() 96 | end_parse = time.time() 97 | duration = round(end_parse - start_parse, 5) 98 | 99 | self.logger.info( 100 | "Parsed " + str(self.total_instruments) + 101 | " instruments' ticks in " + str(duration) + " seconds.") 102 | self.track_tick_processing_performance(duration) 103 | 104 | # Wrap new 1 min bars in market events. 105 | new_market_events = [] 106 | for exchange in self.exchanges: 107 | bars = exchange.get_new_bars() 108 | 109 | for symbol in exchange.get_symbols(): 110 | 111 | for bar in bars[symbol]: 112 | event = MarketEvent(exchange, bar) 113 | new_market_events.append(event) 114 | 115 | # Add bars to save-to-db-later queue. 116 | # TODO: store bars concurrently in a separate process. 117 | self.bars_save_to_db.put(event) 118 | 119 | return new_market_events 120 | 121 | def track_tick_processing_performance(self, duration): 122 | """ 123 | Track tick processing time statistics. 124 | 125 | Args: 126 | duration: (float) seconds taken to process events. 127 | 128 | Returns: 129 | None. 130 | 131 | Raises: 132 | None. 133 | """ 134 | 135 | self.parse_count += 1 136 | self.total_parse_time += duration 137 | self.mean_parse_time = self.total_parse_time / self.parse_count 138 | 139 | def run_data_diagnostics(self, output): 140 | """ 141 | Check each symbol's stored data for completeness, repair/replace 142 | missing data as needed. Once complete, set ready flag to True. 143 | 144 | Args: 145 | output: if True, print verbose report. If false, do not print. 146 | Returns: 147 | None. 148 | Raises: 149 | None. 
150 | """ 151 | 152 | # Get a status report for each symbols stored data. 153 | reports = [] 154 | if output: 155 | self.logger.info("Started data diagnostics.") 156 | for exchange in self.exchanges: 157 | for symbol in exchange.get_symbols(): 158 | reports.append(self.data_status_report( 159 | exchange, symbol, output)) 160 | 161 | # TODO: oll different venues simultaneously with a processpool 162 | 163 | # Resolve discrepancies in stored data. 164 | if output: 165 | self.logger.info("Resolving missing data.") 166 | 167 | for report in reports: 168 | self.backfill_gaps(report) 169 | self.replace_null_bars(report) 170 | 171 | if output: 172 | self.logger.info("Data diagnostics complete.") 173 | self.ready = True 174 | 175 | def save_new_bars_to_db(self): 176 | """ 177 | Save bars in storage queue to database. 178 | 179 | Args: 180 | None. 181 | Returns: 182 | None. 183 | Raises: 184 | pymongo.errors.DuplicateKeyError. 185 | """ 186 | 187 | count = 0 188 | while True: 189 | 190 | try: 191 | bar = self.bars_save_to_db.get(False) 192 | 193 | except queue.Empty: 194 | self.logger.info( 195 | "Wrote " + str(count) + " new bars to database " + 196 | str(self.db.name) + ".") 197 | break 198 | 199 | else: 200 | if bar is not None: 201 | count += 1 202 | # store bar in relevant db collection 203 | try: 204 | self.db_collections[ 205 | bar.exchange.get_name()].insert_one(bar.get_bar()) 206 | 207 | # Skip duplicates if they exist. 208 | except pymongo.errors.DuplicateKeyError: 209 | continue 210 | 211 | self.bars_save_to_db.task_done() 212 | 213 | def data_status_report(self, exchange, symbol, output=False): 214 | """ 215 | Create a stored data completness report for the given instrment. 216 | 217 | Args: 218 | exchange: exchange object. 219 | symbol: instrument ticker code (string) 220 | output: if True, print verbose report. If false, do not print. 221 | 222 | Returns: 223 | report: dict showing state and completeness of given symbols 224 | stored data. 
Contains pertinent timestamps, periods of missing bars 225 | and other relevant info. 226 | 227 | Raises: 228 | None. 229 | """ 230 | current_ts = exchange.previous_minute() 231 | max_bin_size = exchange.get_max_bin_size() 232 | result = self.db_collections[exchange.get_name()].find( 233 | {"symbol": symbol}).sort([("timestamp", pymongo.ASCENDING)]) 234 | total_stored = ( 235 | self.db_collections[exchange.get_name()].count_documents({ 236 | "symbol": symbol})) 237 | origin_ts = exchange.get_origin_timestamp(symbol) 238 | # print('Origin ts', symbol, str(origin_ts)) 239 | 240 | # Handle case where there is no existing data (e.g fresh DB). 241 | if total_stored == 0: 242 | oldest_ts = current_ts 243 | newest_ts = current_ts 244 | else: 245 | oldest_ts = result[total_stored - 1]['timestamp'] 246 | newest_ts = result[0]['timestamp'] 247 | 248 | # Make timestamps sort-agnostic, in case of sorting mixups. 249 | if oldest_ts > newest_ts: 250 | oldest_ts, newest_ts = newest_ts, oldest_ts 251 | 252 | # Find gaps (missing bars) in stored data. 253 | actual = {doc['timestamp'] for doc in result} 254 | required = {i for i in range(origin_ts, current_ts + 60, 60)} 255 | gaps = required.difference(actual) 256 | 257 | # Find bars with all null values (if ws drop out, or no trades). 258 | result = self.db_collections[exchange.get_name()].find({"$and": [ 259 | {"symbol": symbol}, 260 | {"high": None}, 261 | {"low": None}, 262 | {"open": None}, 263 | {"close": None}, 264 | {"volume": 0}]}) 265 | null_bars = [doc['timestamp'] for doc in result] 266 | 267 | if output: 268 | self.logger.info( 269 | "Exchange & instrument:......" + 270 | exchange.get_name() + ":" + str(symbol)) 271 | self.logger.info( 272 | "Total required bars:........" + str(len(required))) 273 | self.logger.info( 274 | "Total locally stored bars:.." + str(total_stored)) 275 | self.logger.info( 276 | "Total null-value bars:......" + str(len(null_bars))) 277 | self.logger.info( 278 | "Total missing bars:........." 
+ str(len(gaps))) 279 | 280 | return { 281 | "exchange": exchange, 282 | "symbol": symbol, 283 | "origin_ts": origin_ts, 284 | "oldest_ts": oldest_ts, 285 | "newest_ts": newest_ts, 286 | "current_ts": current_ts, 287 | "max_bin_size": max_bin_size, 288 | "total_stored": total_stored, 289 | "total_needed": len(required), 290 | "gaps": list(gaps), 291 | "null_bars": null_bars} 292 | 293 | def backfill_gaps(self, report): 294 | """ 295 | Get and store small bins of missing bars. Intended to be called 296 | as a data QA measure for patching missing locally saved data incurred 297 | from server downtime. 298 | 299 | Args: 300 | exchange: exchange object. 301 | symbol: instrument ticker code (string) 302 | output: if True, print verbose report. If false, do not print. 303 | 304 | Returns: 305 | report: dict showing state and completeness of given symbols 306 | stored data. Contains pertinent timestamps, periods of missing bars 307 | and other relevant info. 308 | 309 | Raises: 310 | Polling timeout error. 311 | pymongo.errors.DuplicateKeyError. 312 | Timestamp mismatch error. 313 | """ 314 | 315 | # Sort timestamps into sequential bins (to reduce # of polls). 316 | poll_count = 1 317 | if len(report['gaps']) != 0: 318 | bins = [ 319 | list(g) for k, g in groupby( 320 | sorted(report['gaps']), 321 | key=lambda n, c=count(0, 60): n - next(c))] 322 | 323 | # If any bins > max_bin_size, split them into smaller bins. 324 | bins = self.split_oversize_bins(bins, report['max_bin_size']) 325 | 326 | total_polls = str(len(bins)) 327 | 328 | delay = 1.5 # Wait time before attempting re-poll after error. 329 | stagger = 2 # Stagger request polls, increment failed polls. 330 | timeout = 10 # No. of times to repoll before exception raised. 331 | 332 | # Poll venue API for replacement bars. 333 | bars_to_store = [] 334 | for i in bins: 335 | # Progress indicator. 
336 | if poll_count: 337 | self.logger.info( 338 | "Poll " + str( 339 | poll_count) + " of " + total_polls + " " + 340 | str(report['symbol']) + " " + str( 341 | report['exchange'].get_name())) 342 | try: 343 | bars = report['exchange'].get_bars_in_period( 344 | report['symbol'], i[0], len(i)) 345 | for bar in bars: 346 | bars_to_store.append(bar) 347 | # Reset stagger to base after successful poll. 348 | stagger = 2 349 | time.sleep(stagger + 0.3) 350 | 351 | except Exception as e: 352 | # Retry polling with an exponential delay. 353 | 354 | for i in range(timeout): 355 | 356 | try: 357 | time.sleep(delay + 1) 358 | bars = report['exchange'].get_bars_in_period( 359 | report['symbol'], i[0], len(i)) 360 | for bar in bars: 361 | bars_to_store.append(bar) 362 | stagger = 2 363 | break 364 | 365 | except Exception as e: 366 | delay *= stagger 367 | if i == timeout - 1: 368 | raise Exception("Polling timeout.") 369 | poll_count += 1 370 | 371 | # Sanity check, check that the retreived bars match gaps. 372 | self.logger.info("Verifying new data...") 373 | timestamps = [i['timestamp'] for i in bars_to_store] 374 | timestamps = sorted(timestamps) 375 | bars = sorted(report['gaps']) 376 | 377 | if timestamps == bars: 378 | query = {"symbol": report['symbol']} 379 | doc_count_before = ( 380 | self.db_collections[report[ 381 | 'exchange'].get_name()].count_documents(query)) 382 | 383 | self.logger.info("Storing new data...") 384 | for bar in bars_to_store: 385 | try: 386 | self.db_collections[ 387 | report['exchange'].get_name()].insert_one(bar) 388 | except pymongo.errors.DuplicateKeyError: 389 | # Skip duplicates that exist in DB. 390 | self.logger.info( 391 | "Stored duplicate bars exist. 
Skipping.") 392 | continue 393 | 394 | doc_count_after = ( 395 | self.db_collections[report[ 396 | 'exchange'].get_name()].count_documents(query)) 397 | 398 | doc_count = doc_count_after - doc_count_before 399 | 400 | self.logger.info( 401 | "Saved " + str(doc_count) + " missing " + 402 | report['symbol'] + " bars.") 403 | return True 404 | 405 | else: 406 | # Dump the mismatched bars and timestamps to file if error. 407 | with open("bars.json", 'w', encoding='utf-8') as f1: 408 | json.dump(bars, f, ensure_ascii=False, indent=4) 409 | with open("timestamps.json", 'w', encoding='utf-8') as f2: 410 | json.dump(timestamps, f, ensure_ascii=False, indent=4) 411 | 412 | raise Exception( 413 | "Fetched bars do not match missing timestamps.") 414 | else: 415 | # Return false if there is no missing data. 416 | self.logger.info("No missing data.") 417 | return False 418 | 419 | def split_oversize_bins(self, original_bins, max_bin_size): 420 | """ 421 | Splits oversize lists into smaller lists. 422 | 423 | Args: 424 | original_bins: list of lists (timestamps in bins) 425 | max_bin_size: int, maximum items per api respons (bin). 426 | 427 | Returns: 428 | bins: list of lists (timestamps in bins) containing 429 | the timestamps from orignal_bins, but split into bins 430 | not larger than max_bin_size. 431 | 432 | Raises: 433 | None. 434 | """ 435 | 436 | bins = original_bins 437 | 438 | # Identify oversize bins and their positions in original list. 439 | to_split = [] 440 | indices_to_remove = [] 441 | for i in bins: 442 | if len(i) > max_bin_size: 443 | # Save the bins. 444 | to_split.append(bins.index(i)) 445 | # Save the indices. 446 | indices_to_remove.append(bins.index(i)) 447 | 448 | # Split into smaller bins. 
449 | split_bins = [] 450 | for i in to_split: 451 | new_bins = [(bins[i])[x:x+max_bin_size] for x in range( 452 | 0, len((bins[i])), max_bin_size)] 453 | split_bins.append(new_bins) 454 | 455 | final_bins = [] 456 | for i in split_bins: 457 | for j in i: 458 | final_bins.append(j) 459 | 460 | # Remove the oversize bins by their indices, add the smaller split bins 461 | for i in indices_to_remove: 462 | del bins[i] 463 | 464 | for i in final_bins: 465 | bins.append(i) 466 | 467 | return bins 468 | 469 | def replace_null_bars(self, report): 470 | """ 471 | Replace null bars in db with newly fetched ones. Null bar means 472 | all OHLCV values are None or zero. 473 | 474 | Args: 475 | report: dict showing state and completeness of given symbols 476 | stored data. Contains pertinent timestamps, periods of missing bars 477 | and other relevant info. 478 | 479 | Returns: 480 | True if all null bars are successfully replaces, False if not. 481 | 482 | Raises: 483 | Polling timeout error. 484 | pymongo.errors.DuplicateKeyError. 485 | Timestamp mismatch error. 486 | """ 487 | 488 | if len(report['null_bars']) != 0: 489 | # sort timestamps into sequential bins (to reduce polls) 490 | bins = [ 491 | list(g) for k, g in groupby( 492 | sorted(report['null_bars']), 493 | key=lambda n, c=count(0, 60): n - next(c))] 494 | 495 | delay = 1 # wait time before attmepting to re-poll after error 496 | stagger = 2 # delay co-efficient 497 | timeout = 10 # number of times to repoll before exception raised. 
498 | 499 | # poll exchange REST endpoint for missing bars 500 | bars_to_store = [] 501 | for i in bins: 502 | try: 503 | bars = report['exchange'].get_bars_in_period( 504 | report['symbol'], i[0], len(i)) 505 | for bar in bars: 506 | bars_to_store.append(bar) 507 | stagger = 2 # reset stagger to base after successful poll 508 | time.sleep(stagger) 509 | except Exception as e: 510 | # retry poll with an exponential delay after each error 511 | for i in range(timeout): 512 | try: 513 | time.sleep(delay) 514 | bars = report['exchange'].get_bars_in_period( 515 | report['symbol'], i[0], len(i)) 516 | for bar in bars: 517 | bars_to_store.append(bar) 518 | stagger = 2 519 | break 520 | except Exception as e: 521 | delay *= stagger 522 | if i == timeout - 1: 523 | raise Exception("Polling timeout.") 524 | 525 | # sanity check, check that the retreived bars match gaps 526 | timestamps = [i['timestamp'] for i in bars_to_store] 527 | timestamps = sorted(timestamps) 528 | bars = sorted(report['null_bars']) 529 | if timestamps == bars: 530 | doc_count = 0 531 | for bar in bars_to_store: 532 | try: 533 | query = {"$and": [ 534 | {"symbol": bar['symbol']}, 535 | {"timestamp": bar['timestamp']}]} 536 | new_values = {"$set": { 537 | "open": bar['open'], 538 | "high": bar['high'], 539 | "low": bar['low'], 540 | "close": bar['close'], 541 | "volume": bar['volume']}} 542 | self.db_collections[ 543 | report['exchange'].get_name()].update_one( 544 | query, new_values) 545 | doc_count += 1 546 | except pymongo.errors.DuplicateKeyError: 547 | continue # skip duplicates if they exist 548 | doc_count_after = ( 549 | self.db_collections[report[ 550 | 'exchange'].get_name()].count_documents( 551 | {"symbol": report['symbol']})) 552 | self.logger.info( 553 | "Replaced " + str(doc_count) + " " + report['symbol'] + 554 | " null bars.") 555 | return True 556 | else: 557 | raise Exception( 558 | "Fetched bars do not match missing timestamps.") 559 | self.logger.info( 560 | "Bars length: " + 
str(len(bars)) + 561 | " Timestamps length: " + str(len(timestamps))) 562 | else: 563 | return False 564 | 565 | def split_oversize_bins(self, original_bins, max_bin_size): 566 | """Given a list of lists (timestamp bins), if any top-level 567 | element length > max_bin_size, split that element into 568 | lists of max_bin_size, remove original element, replace with 569 | new smaller elements, then return the new modified list.""" 570 | 571 | bins = original_bins 572 | 573 | # Identify oversize bins and their positions in original list. 574 | to_split = [] 575 | indices_to_remove = [] 576 | for i in bins: 577 | if len(i) > max_bin_size: 578 | # Save the bins. 579 | to_split.append(bins.index(i)) 580 | # Save the indices. 581 | indices_to_remove.append(bins.index(i)) 582 | 583 | # split into smaller bins 584 | split_bins = [] 585 | for i in to_split: 586 | new_bins = [(bins[i])[x:x+max_bin_size] for x in range( 587 | 0, len((bins[i])), max_bin_size)] 588 | split_bins.append(new_bins) 589 | 590 | final_bins = [] 591 | for i in split_bins: 592 | for j in i: 593 | final_bins.append(j) 594 | 595 | # Remove the oversize bins by their indices, add the smaller split bins 596 | for i in indices_to_remove: 597 | del bins[i] 598 | 599 | for i in final_bins: 600 | bins.append(i) 601 | 602 | return bins 603 | 604 | def get_total_instruments(self): 605 | """ 606 | Return total number of monitored instruments. 607 | 608 | Args: 609 | None. 610 | Returns: 611 | total: int, all instruments grand total. 612 | Raises: 613 | None. 614 | """ 615 | 616 | total = 0 617 | for exchange in self.exchanges: 618 | total += len(exchange.symbols) 619 | 620 | return total 621 | 622 | def get_instrument_symbols(self): 623 | """ 624 | Return a list containing all instrument symbols. 625 | 626 | Args: 627 | None. 628 | Returns: 629 | instruments: list of all instruments ticker codes. 630 | Raises: 631 | None. 
# ------------------------------------------------------------------------------
# /event_types.py:
# ------------------------------------------------------------------------------
"""
trading-server is a multi-asset, multi-strategy, event-driven execution
and backtesting platform (OEMS) for trading common markets.

Copyright (C) 2020 Sam Breznikar

Licensed under GNU General Public License 3.0 or later.

Some rights reserved. See LICENSE.md, AUTHORS.md.
"""

from datetime import datetime
# Note: the unused 'from dateutil import parser' import was removed — no
# name in this module references it.


class Event(object):
    """
    Base class for system events.
    """


class MarketEvent(Event):
    """
    Wrapper for new market data. Consumed by Strategy object to
    produce Signal events.
    """

    # Datetime object format string.
    DTFMT = '%Y-%m-%d %H:%M'

    def __init__(self, exchange, bar):
        self.type = 'MARKET'
        self.exchange = exchange
        self.bar = bar

    def __str__(self):
        # Bug fix: bar['close'] is numeric; concatenating it directly
        # raised TypeError. Wrap it in str().
        return str("MarketEvent - Exchange: " + self.exchange.get_name() +
                   " Symbol: " + self.bar['symbol'] + " TS: " +
                   self.get_datetime() + " Close: " + str(self.bar['close']))

    def get_bar(self):
        """Return the wrapped OHLCV bar (dict)."""
        return self.bar

    def get_exchange(self):
        """Return the originating exchange object."""
        return self.exchange

    def get_datetime(self):
        """
        Return the bar timestamp formatted per DTFMT (string).

        Bug fix: a stray trailing comma made this return a 1-tuple
        instead of a string, which also broke __str__ concatenation.
        """
        return datetime.fromtimestamp(
            self.bar['timestamp']).strftime(self.DTFMT)


class SignalEvent(Event):
    """
    Entry signal. Consumed by Portfolio to produce Order events.
    """

    def __init__(self, symbol: str, entry_ts, direction: str, timeframe: str,
                 strategy: str, venue, entry_price: float, entry_type: str,
                 targets: list, stop_price: float, void_price: float,
                 trail: bool, note: str, dataset, ic=1):

        self.type = 'SIGNAL'
        self.entry_ts = entry_ts              # Entry bar timestamp.
        self.timeframe = timeframe            # Signal timeframe.
        self.strategy = strategy              # Signal strategy name.
        self.venue = venue                    # Signal venue name.
        self.symbol = symbol                  # Ticker code for instrument.
        self.direction = direction            # LONG or SHORT.
        self.entry_price = entry_price        # Trade entry price.
        self.entry_type = entry_type.upper()  # Order type for entry.
        self.targets = targets                # [(price target, int % to close)]
        self.stop_price = stop_price          # Stop-loss order price.
        self.void_price = void_price          # Invalidation price.
        self.instrument_count = ic            # No. of instruments in use.
        self.trail = trail                    # True/False for trailing stop.
        self.op_data = dataset                # Dataset used to generate signal.
        self.note = note                      # Signal notes.

    def __str__(self):
        return str("Signal Event: " + self.direction + " Symbol: " +
                   self.symbol + " Entry price: " + str(self.entry_price) +
                   " Entry timestamp: " + str(self.entry_ts) +
                   " Timeframe: " + self.timeframe + " Strategy: " +
                   self.strategy + " Venue: " + self.venue.get_name() +
                   " Order type: " + self.entry_type + " Note: " + self.note)

    def get_signal_dict(self):
        """Return the signal's full parameter set as a dict."""
        return {
            'strategy': self.strategy,
            'venue': self.venue.get_name(),
            'symbol': self.symbol,
            'entry_timestamp': self.entry_ts,
            'timeframe': self.timeframe,
            'direction': self.direction,
            'entry_price': self.entry_price,
            'entry_type': self.entry_type,
            'targets': self.targets,
            'stop_price': self.stop_price,
            'void_price': self.void_price,
            'instrument_count': self.instrument_count,
            'trail': self.trail,
            'note': self.note,
            'op_data': self.op_data}

    def inverse_direction(self):
        """
        Return the opposite of the signal's 'direction' attribute
        (case-insensitive). Returns None for any other value.
        """

        if self.direction.upper() == "LONG":
            return "SHORT"
        elif self.direction.upper() == "SHORT":
            return "LONG"


class OrderEvent(Event):
    """
    Contains trade details to be sent to a broker/exchange.
    """

    def __init__(self, order_dict):
        self.type = 'ORDER'
        self.order_dict = order_dict
        self.trade_id = order_dict['trade_id']
        self.order_id = order_dict['order_id']
        self.timestamp = order_dict['timestamp']
        self.avg_fill_price = order_dict['avg_fill_price']
        self.currency = order_dict['currency']
        self.venue_id = order_dict['venue_id']
        self.direction = order_dict['direction']
        self.size = order_dict['size']
        self.price = order_dict['price']
        self.order_type = order_dict['order_type']
        self.metatype = order_dict['metatype']
        self.void_price = order_dict['void_price']
        self.trail = order_dict['trail']
        self.reduce_only = order_dict['reduce_only']
        self.post_only = order_dict['post_only']
        self.batch_size = order_dict['batch_size']
        self.status = order_dict['status']

    def __str__(self):
        # TODO: produce a meaningful string representation.
        return str(" ")

    def get_order_dict(self):
        """Return the raw order dict this event was built from."""
        return self.order_dict


class FillEvent(Event):
    """
    Holds transaction data including fees/commissions, slippage, brokerage,
    actual fill price, timestamp, etc.
    """

    def __init__(self, order_conf):
        self.type = 'FILL'
        self.order_conf = order_conf

        # TODO: derive fees from the order confirmation.
        self.fees = None

    def __str__(self):
        # TODO: produce a meaningful string representation.
        return str(" ")

    def get_order_conf(self):
        """Return the venue's order confirmation payload."""
        return self.order_conf


# ------------------------------------------------------------------------------
# /exchange.py:
# ------------------------------------------------------------------------------
"""
trading-server is a multi-asset, multi-strategy, event-driven execution
and backtesting platform (OEMS) for trading common markets.

Copyright (C) 2020 Sam Breznikar

Licensed under GNU General Public License 3.0 or later.

Some rights reserved. See LICENSE.md, AUTHORS.md.
"""

from abc import ABC, abstractmethod
from datetime import datetime, timedelta
import os


class Exchange(ABC):
    """
    Exchange abstract class, concrete brokers/exchange classes to inherit this.
    """

    def __init__(self):
        pass

    def get_new_bars(self):
        """Return the venue's self.bars[symbol] tree (dict)."""
        return self.bars

    def get_max_bin_size(self):
        """Return max items returned per REST poll for the http api (int)."""
        return self.MAX_BARS_PER_REQUEST

    def get_symbols(self):
        """Return list of all symbol ticker code strings."""
        return self.symbols

    def get_name(self):
        """Return venue name string."""
        return self.name

    def previous_minute(self):
        """
        Return the previous whole-minute boundary as an epoch
        timestamp (int).
        """

        # Single now() snapshot: the original read the clock three times,
        # which could straddle a second boundary.
        now = datetime.now()
        boundary = now - timedelta(
            minutes=1, seconds=now.second, microseconds=now.microsecond)
        epoch = int(boundary.timestamp())

        # Guard against slow cycles leaving a 1-9s remainder: minute
        # boundaries always end in 0, so floor to 10s. (Replaces the old
        # string digit-replacement hack with equivalent arithmetic.)
        return epoch - (epoch % 10)

    def seconds_til_next_minute(self):
        """
        Return the number of seconds until the next minute starts (int).
        """

        # Bug fix: 'datetime.datetime.utcnow()' raised AttributeError
        # under 'from datetime import datetime'; call utcnow() directly.
        now = datetime.utcnow().second
        return 60 - now - 1

    def build_OHLCV(
            self, ticks: list, symbol: str, close_as_open=True, offset=60):
        """
        Aggregate a list of ticks into a single 1-min OHLCV bar.

        Args:
            ticks: list of ticks to aggregate. If close_as_open, the
                first tick must be the final tick of the PREVIOUS minute;
                it is used as the bar open price so there are no gaps
                between bars (some venues follow this practice, some
                don't). If close_as_open is False, the first tick is just
                this minute's first trade.
            symbol: instrument ticker code (string).
            offset: number of seconds to advance the bar timestamp by;
                venues timestamp their bars differently (e.g. Tradingview
                bars are 1 min behind BitMEX).

        Returns:
            A 1-min OHLCV bar (dict). A null bar (None prices, 0 volume)
            is returned when no ticks are given.

        Raises:
            Tick data timestamp mismatch error.
        """

        if not ticks:
            # No trades this minute (or ws drop-out): emit a null bar.
            return {'symbol': symbol,
                    'timestamp': self.previous_minute() + offset,
                    'open': None,
                    'high': None,
                    'low': None,
                    'close': None,
                    'volume': 0}

        if close_as_open:
            # Convert incoming timestamp format if required.
            # Deliberately 'type(...) is not datetime' rather than
            # isinstance: datetime subclasses must not be re-parsed.
            if type(ticks[0]['timestamp']) is not datetime:
                # Bug fix: 'parser' was never imported in this module
                # (NameError). Deferred import keeps dateutil optional
                # when timestamps are already datetime objects.
                from dateutil import parser
                median = parser.parse(
                    ticks[int(len(ticks) / 2)]['timestamp'])
                first = parser.parse(ticks[0]['timestamp'])
            else:
                median = ticks[int(len(ticks) / 2)]['timestamp']
                first = ticks[0]['timestamp']

            if first.minute == median.minute - 1:
                # Most common case: exclude the first (previous-minute)
                # tick from volume and price calcs.
                volume = sum(i['size'] for i in ticks) - ticks[0]['size']
                prices = [i['price'] for i in ticks][1:]
            elif first.minute == median.minute:
                # Same minute throughout: likely no early trades, proceed
                # as though close_as_open=False.
                volume = sum(i['size'] for i in ticks)
                prices = [i['price'] for i in ticks]
            else:
                # There's a timing/data problem if neither case is true.
                raise Exception(
                    "Tick data timestamp error: timestamp mismatch." +
                    "\nFirst tick minute: " + str(first) +
                    "\nMedian tick minute: " + str(median))
        else:
            volume = sum(i['size'] for i in ticks)
            prices = [i['price'] for i in ticks]

        return {'symbol': symbol,
                'timestamp': self.previous_minute() + offset,
                'open': ticks[0]['price'] if prices else None,
                'high': max(prices) if prices else None,
                'low': min(prices) if prices else None,
                'close': ticks[-1]['price'] if prices else None,
                'volume': volume}

    def finished_parsing_ticks(self):
        # NOTE(review): this returns whatever 'finished_parsing_ticks'
        # resolves to on the instance — concrete subclasses appear to set
        # a same-named instance flag that shadows this method; confirm,
        # otherwise this returns the bound method object itself.
        return self.finished_parsing_ticks

    def load_api_keys(self):
        """
        Load key and secret from environment variables.

        Keys must be stored as follows (all capitalised):
            API key:    VENUE_NAME_API_KEY
            API secret: VENUE_NAME_API_SECRET

        (Doc fix: the secret variable is <NAME>_API_SECRET, matching the
        code and README — not <NAME>_SECRET_KEY as previously stated.)

        Returns:
            key: api key matching venue name.
            secret: api secret matching venue name.
        """

        venue_name = self.get_name().upper()
        key = os.environ[venue_name + '_API_KEY']
        secret = os.environ[venue_name + '_API_SECRET']
        return key, secret

    def round_increment(self, number, symbol):
        """
        Round the given number down to the symbol's minimum increment.

        NOTE(review): values < 1 are returned unrounded — looks deliberate
        for sub-unit quotes, but confirm against venue tick sizes.
        """

        inc = self.symbol_min_increment[symbol]
        return number if number < 1 else (number // inc) * inc

    @abstractmethod
    def place_bulk_orders(self, orders: list):
        """
        Given a list of order event objects, place corresponding orders
        with the respective trading venue.

        Args:
            orders: list of order objects.
        """

    @abstractmethod
    def place_single_order(self, order):
        """
        Place a single order with the respective trading venue.

        Args:
            order: order object.
        """

    @abstractmethod
    def cancel_orders(self, order_ids: list):
        """
        Cancel all orders matching the list of given order IDs.

        Args:
            order_ids: list of order IDs.

        Returns:
            cancel_status: dict, {order_id: cancel success/fail message}.
        """

    @abstractmethod
    def close_position(self, symbol, qty, direction):
        """
        Close 'qty' units of 'symbol' in the 'direction' direction.

        Args:
            symbol: instrument symbol to close.
            qty: number of units of instrument to close.
            direction: LONG or SHORT.

        Returns:
            True if successfully closed, False if not.
        """

    @abstractmethod
    def format_orders(self, orders: list):
        """
        Convert internally-formatted orders into the relevant venue's
        order format.

        Args:
            orders: list of order objects.

        Returns:
            formatted_orders: list of venue-appropriate formatted orders.
        """

    @abstractmethod
    def get_executions(self, symbol, start_timestamp, count):
        """
        Return balance-affecting executions for the given symbol.

        Args:
            symbol: instrument ticker code (string).
            start_timestamp: epoch timestamp (int).
            count: amount of results to fetch (int).

        Returns:
            List of execution dicts, each with keys: order_id, venue_id,
            timestamp (epoch int), avg_exc_price, currency, symbol,
            direction (LONG/SHORT), size, order_type, fee_type, fee_amt
            (multiplicand to find total fee cost), total_fee (USD),
            status (FILLED, CANCELLED, NEW, PARTIAL).
        """

    @abstractmethod
    def get_bars_in_period(self, symbol: str, start_time: int, total: int):
        """
        Return 'total' historic 1-min OHLCV bars starting at 'start_time'.

        Args:
            symbol: instrument ticker code (string).
            start_time: epoch timestamp (int).
            total: amount of bars to fetch (int).
        """

    @abstractmethod
    def get_recent_bars(self, timeframe: str, symbol: str, n: int):
        """
        Return n recent bars of the specified timeframe and symbol.

        Args:
            timeframe: timeframe code (string).
            symbol: instrument ticker code (string).
            n: amount of bars.
        """

    @abstractmethod
    def get_origin_timestamp(self, symbol: str):
        """
        Return the epoch timestamp (int) of the first available (oldest)
        1-min bar.

        Args:
            symbol: instrument ticker code (string).
        """

    @abstractmethod
    def get_recent_ticks(self, symbol: str, n: int):
        """
        Return ticks for the last n minutes for the given symbol.

        Bug fix: 'self' was missing from this abstract signature.

        Args:
            symbol: instrument ticker code (string).
            n: number of minutes worth of ticks (int).

        Raises:
            Tick data timestamp mismatch error.
        """

    @abstractmethod
    def parse_ticks(self):
        """
        Convert streamed websocket tick data into 1-min OHLCV bars, then
        append the new bars to the exchange object's self.bars[symbol]
        tree.
        """

    @abstractmethod
    def get_position(self, symbol):
        """
        Return the position dict for the specified symbol.

        Args:
            symbol: instrument ticker code (string).
        """

    @abstractmethod
    def get_orders(self, symbol):
        """
        Return a list of all active and inactive orders for the symbol.
        Concrete implementations should return only orders placed by this
        program, i.e. fetch only orders with venue ids.

        Args:
            symbol: instrument ticker code (string).
        """
459 | """ 460 | -------------------------------------------------------------------------------- /features.py: -------------------------------------------------------------------------------- 1 | """ 2 | trading-server is a multi-asset, multi-strategy, event-driven trade execution 3 | and backtesting platform (OEMS) for trading common markets. 4 | 5 | Copyright (C) 2020 Sam Breznikar 6 | Copyright (C) 2020 Marc Goulding 7 | 8 | Licensed under GNU General Public License 3.0 or later. 9 | 10 | Some rights reserved. See LICENSE.md, AUTHORS.md. 11 | """ 12 | 13 | from scipy.signal import savgol_filter as smooth 14 | import matplotlib.pyplot as plt 15 | import talib as ta 16 | import pandas as pd 17 | import numpy as np 18 | 19 | 20 | class Features: 21 | """ 22 | Model feature library. 23 | """ 24 | 25 | def trending(self, lookback_period: int, bars): 26 | """ 27 | Return True if price action (bars) forming successive higher or 28 | lower swings. Return direction = -1 for downtrend, 0 for no trend, 29 | 1 for uptrend. 30 | 31 | Returns: 32 | trending 33 | """ 34 | 35 | self.check_bars_type(bars) 36 | 37 | fractals = self.fractals(bars[lookback_period:], window=window) 38 | highs = np.multiply(bars.high.values, fractals) 39 | highs = highs[highs > 0] 40 | lows = np.multiply(bars.low.values, fractals) 41 | lows = lows[lows < 0]*(-1) 42 | 43 | trending = False 44 | direction = 0 45 | 46 | if (highs[-1] > highs[-2] and highs[-2] > highs[-3] 47 | and lows[-1] > lows[-2] and lows[-2] > lows[-3]): 48 | trending = True 49 | direction = 1 50 | 51 | elif (highs[-1] < highs[-2] and highs[-2] < highs[-3] 52 | and lows[-1] < lows[-2] and lows[-2] < lows[-3]): 53 | trending = True 54 | direction = -1 55 | 56 | else: 57 | trending = False 58 | direction = 0 59 | 60 | return trending, direction 61 | 62 | def new_trend(self, bars: list): 63 | """ 64 | Return True if price has formed a new trend, False if not. 
65 | """ 66 | 67 | return new_trend 68 | 69 | def j_curve(self, bars: list): 70 | """ 71 | Identify optimal price action geometry (j-curve) for trends. 72 | """ 73 | 74 | return j_curve 75 | 76 | def small_bar(self, bars: list, n: int): 77 | """ 78 | Identify if the current bar is "small" relative to the last n bars. 79 | 80 | """ 81 | 82 | small_bar 83 | 84 | def reversal_bar(self, bars: list, n: int): 85 | """ 86 | Identify if the last n bars contain a reversal pattern. 87 | """ 88 | 89 | return reversal_bar 90 | 91 | def convergent(self, lookback_period: int, bars: list, indicator: list): 92 | """ Return True if price and indicator swings are convergent.""" 93 | 94 | self.check_bars_type(bars) 95 | 96 | convergent = False 97 | return convergent 98 | 99 | def sr_levels(bars, n=8, t=0.02, s=3, f=3): 100 | """ 101 | Find support and resistance levels using smoothed close price. 102 | 103 | Args: 104 | bars: OHLCV dataframe. 105 | n: bar window size. 106 | t: tolerance, % variance between min/maxima to be considered a level. 107 | s: smoothing factor. Lower is more sensitive. 108 | f: number of filter passes. 109 | 110 | Returns: 111 | support: list of support levels 112 | resistance: list of resistance levels 113 | 114 | Raises: 115 | None. 116 | 117 | """ 118 | 119 | # Convert n to next even number. 120 | if n % 2 != 0: 121 | n += 1 122 | 123 | # Find number of bars. 124 | n_ltp = bars.close.values.shape[0] 125 | 126 | # Smooth close data. 127 | ltp_smoothed = smooth(bars.close.values, (n + 1), s) 128 | 129 | # Find delta (difference in adjacent prices). 130 | ltp_delta = np.zeros(n_ltp) 131 | ltp_delta[1:] = np.subtract(ltp_smoothed[1:], ltp_smoothed[:-1]) 132 | 133 | resistance = [] 134 | support = [] 135 | 136 | # Identify initial levels. 137 | for i in range(n_ltp - n): 138 | 139 | # Get window for current bar. 140 | window = ltp_delta[i:(i + n)] 141 | 142 | # Split window in half. 
143 | first = window[:int((n / 2))] # first half 144 | last = window[int((n / 2)):] # second half 145 | 146 | # Find highs and lows for both halves of window. 147 | # First/last being higher or lower indicates asc/desc price. 148 | r_1 = np.sum(first > 0) 149 | r_2 = np.sum(last < 0) 150 | s_1 = np.sum(first < 0) 151 | s_2 = np.sum(last > 0) 152 | 153 | # Detect local maxima. If two points match, its a level. 154 | if r_1 == (n / 2) and r_2 == (n / 2): 155 | try: 156 | resistance.append(bars.close.values[i + (int((n / 2)) - 1)]) 157 | # Catch empty list error if no levels are present. 158 | except Exception as ex: 159 | pass 160 | 161 | # Detect local minima. If two points match, its a level. 162 | if s_1 == (n / 2) and s_2 == (n / 2): 163 | try: 164 | support.append(bars.close.values[i + (int((n / 2)) - 1)]) 165 | # Catch empty list error if no levels are present. 166 | except Exception as ex: 167 | pass 168 | 169 | # Filter levels f times. 170 | levels = np.sort(np.append(support, resistance)) 171 | filtered_levels = cluster_filter(levels, t, multipass=True) 172 | for i in range(f - 1): 173 | filtered_levels = cluster_filter(filtered_levels, t, multipass=True) 174 | 175 | return filtered_levels 176 | 177 | def cluster_filter(levels: list, t: float, multipass: bool): 178 | """ 179 | Given a list of prices, identify groups of levels within t% of each other. 180 | 181 | Args: 182 | levels: list of price levels. 183 | t: tolerance, % variance between min/maxima to be considered a level. 184 | multipass: if True, run the filter for cluster sizes=3 or more. If 185 | False, filter only once (will pick up clusters size=2). 186 | Returns: 187 | None. 188 | Raises: 189 | None. 190 | """ 191 | 192 | # Identify initial level clusters (single pass). 
193 | temp_levels = [] 194 | for lvl_1 in levels: 195 | for lvl_2 in levels: 196 | range_max = lvl_1 + lvl_1 * t 197 | range_min = lvl_1 - lvl_1 * t 198 | if lvl_2 >= range_min and lvl_2 <= range_max: 199 | cluster = sorted([lvl_1, lvl_2]) 200 | if lvl_2 != lvl_1: 201 | if cluster not in temp_levels: 202 | temp_levels.append(cluster) 203 | 204 | # Identify strong clusters of 3 or more levels (multipass). 205 | if multipass: 206 | flattened = [item for sublist in temp_levels for item in sublist] 207 | c_count = 0 208 | to_append = [] 209 | for cluster in temp_levels: 210 | for lvl_1 in cluster: 211 | range_max = lvl_1 + lvl_1 * t 212 | range_min = lvl_1 - lvl_1 * t 213 | for lvl_2 in flattened: 214 | if lvl_2 >= range_min and lvl_2 <= range_max: 215 | to_append.append([c_count, lvl_2]) 216 | c_count += 1 217 | 218 | # Add levels to their respective clusters and remove duplicates. 219 | for pair in to_append: 220 | temp_levels[pair[0]].append(pair[1]) 221 | temp_levels[pair[0]] = sorted(list(set(temp_levels[pair[0]]))) 222 | 223 | # Aggregate similar levels and remove temp levels. 224 | agg_levels = [(sum(i) / len(i)) for i in temp_levels] 225 | to_remove = [i for cluster in temp_levels for i in cluster] 226 | 227 | # Catch second-pass np.array > list conversion error. 228 | if type(levels) != list: 229 | final_levels = [i for i in levels.tolist() if i not in to_remove] 230 | else: 231 | final_levels = [i for i in levels if i not in to_remove] 232 | 233 | return final_levels + agg_levels 234 | 235 | def SMA(self, period: int, bars: int): 236 | """ 237 | Simple moving average of previous n bars close price. 238 | 239 | SMA = (sum of all closes in period) / period. 240 | """ 241 | self.check_bars_type(bars) 242 | 243 | ma = ta.MA(bars['close'], timeperiod=period, matype=0) 244 | 245 | return ma 246 | 247 | def EMA(self, period: int, bars: list): 248 | """ 249 | Exponential moving average of previous n bars close price. 
250 | 251 | EMA = price(t) * k + EMA(y) * ( 1 − k ) 252 | 253 | where: 254 | t = today (current bar for any period) 255 | y = yesterday (previous bar close price) 256 | N = number of bars (period) 257 | k = 2 / (N + 1) (weight factor) 258 | """ 259 | 260 | self.check_bars_type(bars) 261 | 262 | ema = ta.EMA(bars['close'], timeperiod=period) 263 | 264 | return ema 265 | 266 | def MACD(self, name, bars: list): 267 | """ 268 | Return MACD for given time series. Bars list must be 26 bars 269 | in length (last 26 bars for period). 270 | 271 | MACD = EMA(12) - EMA(26) 272 | 273 | Note we only use the MACD, not signal or histogram. 274 | """ 275 | 276 | self.check_bars_type(bars) 277 | 278 | macd, signal, hist = ta.MACD( 279 | bars['close'], fastperiod=12, slowperiod=26, signalperiod=9) 280 | 281 | return macd 282 | 283 | def RSI(self, bars, timeperiod: int = 14): 284 | """ 285 | Return RSI for given time series. 286 | """ 287 | 288 | self.check_bars_type(bars) 289 | 290 | rsi = ta.RSI(bars['close'], timeperiod) 291 | 292 | return rsi 293 | 294 | def CCI(self, period: int, bars: list): 295 | """ 296 | Return CCI (Commodity Chanel Index) for n bars close price. 297 | ​ 298 | CCI = (Typical Price − MA) / 0.015 * Mean Deviation 299 | 300 | where: 301 | Typical Price = ∑P((H + L + C) / 3)) 302 | P = number of bars (period) 303 | MA = Moving Average = (∑P Typical Price) / P 304 | Mean Deviation=(∑P | Typical Price - MA |) / P 305 | """ 306 | 307 | self.check_bars_type(bars) 308 | 309 | cci = ta.CCI( 310 | bars['high'], bars['low'], bars['close'], timeperiod=period) 311 | 312 | return cci 313 | 314 | def BB(self, bars, period: int): 315 | """ 316 | Return top, bottom and mid Bollinger Bands for n bars close price. 317 | 318 | It is assumed that: 319 | -- Bollinger Bands are desired at 2 standard deviation's from the mean. 
320 | -- moving average used is a simple moving average 321 | """ 322 | 323 | self.check_bars_type(bars) 324 | 325 | upperband, middleband, lowerband = ta.BBANDS( 326 | close, timeperiod=period, nbdevup=2, nbdevdn=2, matype=0) 327 | 328 | return upperband, middleband, lowerband 329 | 330 | def fractals(self, bars, window: int = 5): 331 | """ 332 | Returns a list of size len(bars) containing a value for each bar. 333 | The value will state whether its corresponding bar is a top 334 | fractal or a bottom fractal. Returns 1 for top fractals, 0 for 335 | non-fractals, -1 for bottom fractals. 336 | 337 | The Formulas for Fractals Are: 338 | Bearish Fractal (-1)= 339 | High(N)>High(N−2) and 340 | High(N)>High(N−1) and 341 | High(N)>High(N+1) and 342 | High(N)>High(N+2) 343 | 344 | Bullish Fractal (1) = 345 | Low(N) bars['high'][bar-2] 365 | and bars['high'][bar] > bars['high'][bar-1] 366 | and bars['high'][bar] > bars['high'][bar+1] 367 | and bars['high'][bar] > bars['high'][bar+2]): 368 | 369 | frac[bar] = 1 370 | 371 | elif (bars['low'][bar] < bars['low'][bar-2] 372 | and bars['low'][bar] < bars['low'][bar-1] 373 | and bars['low'][bar] < bars['low'][bar+1] 374 | and bars['low'][bar] < bars['low'][bar+2]): 375 | 376 | frac[bar] = -1 377 | 378 | return frac 379 | 380 | def check_bars_type(self, bars): 381 | 382 | assert isinstance(bars, pd.DataFrame) 383 | -------------------------------------------------------------------------------- /messaging_clients.py: -------------------------------------------------------------------------------- 1 | """ 2 | trading-server is a multi-asset, multi-strategy, event-driven trade execution 3 | and backtesting platform (OEMS) for trading common markets. 4 | 5 | Copyright (C) 2020 Sam Breznikar 6 | 7 | Licensed under GNU General Public License 3.0 or later. 8 | 9 | Some rights reserved. See LICENSE.md, AUTHORS.md. 
10 | """ 11 | 12 | from abc import ABC 13 | import json 14 | import os 15 | import requests 16 | 17 | 18 | class MessagingClient(ABC): 19 | """ 20 | """ 21 | 22 | def __init__(self): 23 | pass 24 | 25 | 26 | class Telegram(MessagingClient): 27 | 28 | URL = "https://api.telegram.org/bot" 29 | 30 | def __init__(self, logger): 31 | super().__init__() 32 | self.logger = logger 33 | self.token = self.get_token() 34 | self.whitelist = self.get_whitelist() 35 | 36 | def send_image(self, image_path, text): 37 | 38 | url = self.URL + self.token + "/sendPhoto" 39 | files = {'photo': open(image_path, 'rb')} 40 | 41 | # Send image only to whitelisted users 42 | for user_id in json.loads(self.whitelist): 43 | 44 | data = {'chat_id': user_id, 'caption': text} 45 | r = requests.post(url, files=files, data=data) 46 | 47 | if int(r.status_code) == 200: 48 | self.logger.info("Setup snapshot sent to " + str(user_id) + ".") 49 | else: 50 | self.logger.info("Sending snapshot to " + str(user_id) + " failed.") 51 | print(r.status_code) 52 | 53 | def send_option_keyboard(self, keyboard): 54 | 55 | url = self.URL + self.token + "/sendMessage" 56 | reply_markup = {"keyboard": keyboard, "one_time_keyboard": True} 57 | 58 | # Send only to whitelisted users 59 | for user_id in json.loads(self.whitelist): 60 | text = {'text': "Accept or veto trade:", 'chat_id': user_id, 'reply_markup': reply_markup} 61 | 62 | r = requests.post(url, json=text) 63 | 64 | if int(r.status_code) == 200: 65 | self.logger.info("Consent query sent to " + str(user_id) + ".") 66 | else: 67 | self.logger.info("Sending consent query to " + str(user_id) + " failed.") 68 | print(r.status_code) 69 | print(r.json()) 70 | 71 | def send_message(self, text): 72 | url = self.URL + self.token + "/sendMessage" 73 | 74 | # Send image only to whitelisted users 75 | for user_id in json.loads(self.whitelist): 76 | 77 | data = {'chat_id': user_id, 'text': text} 78 | r = requests.post(url, data=data) 79 | 80 | if int(r.status_code) == 
200: 81 | self.logger.info("Text message sent to " + str(user_id) + ".") 82 | else: 83 | self.logger.info("Sending message to " + str(user_id) + " failed.") 84 | print(r.json()) 85 | 86 | def get_updates(self): 87 | url = self.URL + self.token + "/getUpdates" 88 | r = requests.get(url).json() 89 | return r['result'] 90 | 91 | def get_token(self): 92 | """ 93 | Load bot token from environment variable. 94 | """ 95 | 96 | if os.environ['TELEGRAM_BOT_TOKEN'] is not None: 97 | return os.environ['TELEGRAM_BOT_TOKEN'] 98 | else: 99 | raise Exception("Telegram bot token missing.") 100 | 101 | def get_whitelist(self): 102 | """ 103 | Load whitelist from environment variable. 104 | """ 105 | 106 | if os.environ['TELEGRAM_BOT_WHITELIST'] is not None: 107 | return os.environ['TELEGRAM_BOT_WHITELIST'] 108 | else: 109 | raise Exception("Telegram bot token missing.") 110 | -------------------------------------------------------------------------------- /misc testing/api_order_submission_test.py: -------------------------------------------------------------------------------- 1 | from datetime import timezone, datetime, timedelta 2 | from pymongo import MongoClient, errors 3 | from requests import Request, Session 4 | from requests.auth import AuthBase 5 | from urllib.parse import urlparse 6 | from dateutil import parser 7 | import traceback 8 | import requests 9 | import hashlib 10 | import os 11 | import json 12 | import hmac 13 | import time 14 | 15 | 16 | MAX_BARS_PER_REQUEST = 750 17 | TIMESTAMP_FORMAT = '%Y-%m-%d%H:%M:%S.%f' 18 | 19 | BASE_URL = "https://www.bitmex.com/api/v1" 20 | BASE_URL_TESTNET = "https://testnet.bitmex.com/api/v1" 21 | WS_URL = "wss://www.bitmex.com/realtime" 22 | BARS_URL = "/trade/bucketed?binSize=" 23 | TICKS_URL = "/trade?symbol=" 24 | POSITIONS_URL = "/position" 25 | ORDERS_URL = "/order" 26 | BULK_ORDERS_URL = "/order/bulk" 27 | TRADE_HIST_URL = "/execution/tradeHistory" 28 | 29 | DB_URL = 'mongodb://127.0.0.1:27017/' 30 | DB_PRICES = 
'asset_price_master'
DB_OTHER = 'holdings_trades_signals_master'
DB_TIMEOUT_MS = 10

# Minimum price increment (tick size) per symbol.
symbol_min_increment = {
    'XBTUSD': 0.5,
    'ETHUSD': 0.05,
    'XRPUSD': 0.0001}

db_client = MongoClient(
    DB_URL,
    serverSelectionTimeoutMS=DB_TIMEOUT_MS)
db_prices = db_client[DB_PRICES]
db_other = db_client[DB_OTHER]


def load_api_keys():
    """
    Load BitMEX API key and secret from environment variables.

    Raises:
        KeyError: if BITMEX_API_KEY or BITMEX_API_SECRET is unset.
    """
    venue_name = "BITMEX"
    key = os.environ[venue_name + '_API_KEY']
    secret = os.environ[venue_name + '_API_SECRET']
    return key, secret


api_key, api_secret = load_api_keys()


def generate_request_signature(secret, request_type, url, nonce,
                               data):
    """
    Generate a BitMEX-compatible request signature:
    hex(HMAC_SHA256(secret, VERB + path + nonce + data)).

    Args:
        secret: API secret key.
        request_type: HTTP verb (GET, POST, etc).
        url: full request url (query string is included in the signed path).
        nonce: expiry timestamp string used as the nonce.
        data: request body (str or bytes).
    Returns:
        signature: hex digest string.
    """

    parsed_url = urlparse(url)
    path = parsed_url.path

    if parsed_url.query:
        path = path + '?' + parsed_url.query

    if isinstance(data, (bytes, bytearray)):
        data = data.decode('utf8')

    message = str(request_type).upper() + path + str(nonce) + data
    signature = hmac.new(bytes(secret, 'utf8'), bytes(message, 'utf8'),
                         digestmod=hashlib.sha256).hexdigest()

    return signature


def generate_request_headers(request, api_key, api_secret):
    """
    Add BitMEX auth headers (api-expires, api-key, api-signature) to a
    prepared request. The request is valid for ~20 seconds.

    Args:
        request: prepared requests.Request to be amended.
        api_key: API key.
        api_secret: API secret key.
    Returns:
        request: the same request object, with headers set.
    """

    # Expiry nonce: now + 20 seconds.
    nonce = str(int(round(time.time()) + 20))
    request.headers['api-expires'] = nonce
    request.headers['api-key'] = api_key
    request.headers['api-signature'] = generate_request_signature(
        api_secret, request.method, request.url, nonce, request.body or '')
    request.headers['Content-Type'] = 'application/json'
    request.headers['Accept'] = 'application/json'
    request.headers['X-Requested-With'] = 'XMLHttpRequest'

    return request


def get_positions():
    """
    Fetch current positions from the BitMEX testnet, as parsed JSON.
    """
    s = Session()
    prepared_request = Request(
        'GET',
        BASE_URL_TESTNET + POSITIONS_URL,
        params='').prepare()
    request = generate_request_headers(prepared_request, api_key,
                                       api_secret)
    response = s.send(request).json()

    return response
101 | 102 | def round_increment(number, symbol): 103 | inc = symbol_min_increment[symbol] 104 | if number < 1: 105 | quote = number 106 | else: 107 | quote = (number // inc) * inc 108 | return quote 109 | 110 | 111 | def format_orders(orders): 112 | formatted = [] 113 | for order in orders: 114 | price = round_increment(order['price'], order['symbol']) 115 | 116 | # TODO: add logic for below three fields. 117 | execInst = None 118 | stopPx = None 119 | timeInForce = None 120 | 121 | symbol = order['symbol'] 122 | side = "Buy" if order['direction'] == "LONG" else "Sell" 123 | orderQty = round_increment(order['size'], order['symbol']) 124 | clOrdID = order['order_id'] 125 | text = order['metatype'] 126 | 127 | if order['order_type'] == "LIMIT": 128 | ordType = "Limit" 129 | elif order['order_type'] == "MARKET": 130 | ordType = "Market" 131 | price = None 132 | elif order['order_type'] == "STOP_LIMIT": 133 | ordType = "StopLimit" 134 | elif order['order_type'] == "STOP": 135 | ordType = "Stop" 136 | stopPx = price 137 | price = None 138 | else: 139 | ordType = None 140 | 141 | formatted.append({ 142 | 'symbol': symbol, 143 | 'side': side, 144 | 'orderQty': orderQty, 145 | 'price': price, 146 | 'stopPx': stopPx, 147 | 'clOrdID': int(order['order_id']), 148 | 'ordType': ordType, 149 | 'timeInForce': timeInForce, 150 | 'execInst': execInst, 151 | 'text': text}) 152 | 153 | return formatted 154 | 155 | 156 | def place_single_order(order): 157 | payload = format_orders([order])[0] 158 | 159 | s = Session() 160 | 161 | prepared_request = Request( 162 | 'POST', 163 | BASE_URL_TESTNET + ORDERS_URL, 164 | json=payload, 165 | params='').prepare() 166 | 167 | request = generate_request_headers( 168 | prepared_request, 169 | api_key, 170 | api_secret) 171 | 172 | response = s.send(request) 173 | 174 | return response 175 | 176 | 177 | def place_bulk_orders(orders): 178 | 179 | # Separate market orders as BitMEX doesnt allow bulk market orders. 
180 | m_o = [o for o in orders if o['order_type'] == "MARKET"] 181 | nm_o = [o for o in orders if o not in m_o] 182 | 183 | # Send market orders individually amd store responses. 184 | responses = [place_single_order(o) for o in m_o if m_o] 185 | 186 | # Submit non-market orders in a single batch. 187 | response = None 188 | if nm_o: 189 | payload = {'orders': format_orders(nm_o)} 190 | 191 | s = Session() 192 | 193 | prepared_request = Request( 194 | 'POST', 195 | BASE_URL_TESTNET + BULK_ORDERS_URL, 196 | json=payload, 197 | params='').prepare() 198 | 199 | request = generate_request_headers( 200 | prepared_request, 201 | api_key, 202 | api_secret) 203 | 204 | response = s.send(request) 205 | 206 | # Unpack successful order confirmations and handle errors. 207 | order_confirmations = [] 208 | for r in responses + [response]: 209 | if r.status_code == 200: 210 | 211 | res = r.json() 212 | 213 | if isinstance(res, list): 214 | for item in res: 215 | order_confirmations.append(item) 216 | 217 | elif isinstance(res, dict): 218 | order_confirmations.append(res) 219 | 220 | elif 400 <= r.status_code <= 404: 221 | # Syntax, auth or system limit error messages, raise exception. 222 | raise Exception(r.status_code, r.json()['error']['message']) 223 | 224 | elif r.status_code == 503: 225 | # Server overloaded, retry after 500ms, dont raise exception. 226 | print(r.status_code, r.json()['error']['message']) 227 | 228 | # TODO: Check what orders were placed (if any) and re-submit. 
229 | 230 | else: 231 | print(r.status_code, r.json()) 232 | 233 | updated_orders = [] 234 | if order_confirmations: 235 | for res in order_confirmations: 236 | for order in orders: 237 | if int(order['order_id']) == int(res['clOrdID']): 238 | 239 | if res['ordStatus'] == 'Filled': 240 | fill = "FILLED" 241 | elif res['ordStatus'] == 'New': 242 | fill = "NEW" 243 | 244 | updated_orders.append({ 245 | 'trade_id': order['trade_id'], 246 | 'position_id': order['position_id'], 247 | 'order_id': order['order_id'], 248 | 'timestamp': res['timestamp'], 249 | 'avg_fill_price': res['avgPx'], 250 | 'currency': res['currency'], 251 | 'venue_id': res['orderID'], 252 | 'venue': order['venue'], 253 | 'symbol': order['symbol'], 254 | 'direction': order['direction'], 255 | 'size': res['orderQty'], 256 | 'price': res['price'], 257 | 'order_type': order['order_type'], 258 | 'metatype': order['metatype'], 259 | 'void_price': order['void_price'], 260 | 'trail': order['trail'], 261 | 'reduce_only': order['reduce_only'], 262 | 'post_only': order['post_only'], 263 | 'batch_size': order['batch_size'], 264 | 'status': fill}) 265 | 266 | return updated_orders 267 | 268 | 269 | def cancel_orders(order_ids: list): 270 | payload = {"orderID": order_ids} 271 | print(payload) 272 | s = Session() 273 | prepared_request = Request( 274 | "DELETE", 275 | BASE_URL_TESTNET + ORDERS_URL, 276 | json=payload, 277 | params='').prepare() 278 | 279 | request = generate_request_headers( 280 | prepared_request, 281 | api_key, 282 | api_secret) 283 | 284 | response = s.send(request).json() 285 | 286 | return response 287 | 288 | 289 | def close_position(symbol: str): 290 | positions = get_positions() 291 | for pos in positions: 292 | if pos['symbol'] == symbol: 293 | position = pos 294 | break 295 | 296 | if position: 297 | payload = { 298 | 'symbol': symbol, 299 | 'orderQty': -pos['currentQty'], 300 | 'ordType': "Market"} 301 | 302 | s = Session() 303 | 304 | prepared_request = Request( 305 | 'POST', 306 | 
BASE_URL_TESTNET + ORDERS_URL, 307 | json=payload, 308 | params='').prepare() 309 | 310 | request = generate_request_headers( 311 | prepared_request, 312 | api_key, 313 | api_secret) 314 | 315 | response = s.send(request).json() 316 | if response['ordStatus'] == "Filled": 317 | return True 318 | else: 319 | return False 320 | 321 | 322 | def get_executions(symbol, start_timestamp=None, count=500): 323 | payload = { 324 | 'symbol': symbol, 325 | 'count': count, 326 | 'start': start_timestamp, 327 | 'reverse': True} 328 | 329 | prepared_request = Request( 330 | 'GET', 331 | BASE_URL_TESTNET + TRADE_HIST_URL, 332 | json=payload, 333 | params='').prepare() 334 | 335 | request = generate_request_headers( 336 | prepared_request, 337 | api_key, 338 | api_secret) 339 | 340 | response = Session().send(request).json() 341 | 342 | executions = [] 343 | for res in response: 344 | 345 | fee_type = "TAKER" if res['lastLiquidityInd'] == "RemovedLiquidity" else "MAKER" 346 | direction = "LONG" if res['side'] == "Buy" else "SHORT" 347 | 348 | if res['ordStatus'] == "Filled": 349 | fill = "FILLED" 350 | elif res['ordStatus'] == "Cancelled": 351 | fill = "CANCELLED" 352 | elif res['ordStatus'] == "New": 353 | fill = "NEW" 354 | elif res['ordStatus'] == "PartiallyFilled": 355 | fill = "PARTIAL" 356 | else: 357 | raise Exception(res['ordStatus']) 358 | 359 | if res['ordType'] == "Limit": 360 | order_type = "LIMIT" 361 | elif res['ordType'] == "Market": 362 | order_type = "MARKET" 363 | elif res['ordType'] == "StopLimit": 364 | order_type = "STOP_LIMIT" 365 | elif res['ordType'] == "Stop": 366 | order_type = "STOP" 367 | else: 368 | raise Exception(res['ordType']) 369 | 370 | executions.append({ 371 | 'order_id': res['clOrdID'], 372 | 'venue_id': res['orderID'], 373 | 'timestamp': int(parser.parse(res['timestamp']).timestamp()), 374 | 'avg_exc_price': res['avgPx'], 375 | 'currency': res['currency'], 376 | 'symbol': res['symbol'], 377 | 'direction': direction, 378 | 'size': 
res['lastQty'], 379 | 'order_type': res['ordType'], 380 | 'fee_type': fee_type, 381 | 'fee_amt': res['commission'], 382 | 'total_fee': res['execComm'] / res['avgPx'], 383 | 'status': fill}) 384 | 385 | return executions 386 | 387 | 388 | def get_orders(symbol, start_timestamp=None, count=500): 389 | payload = { 390 | 'symbol': symbol, 391 | 'count': count, 392 | 'start': start_timestamp, 393 | 'reverse': True} 394 | 395 | prepared_request = Request( 396 | 'GET', 397 | BASE_URL_TESTNET + ORDERS_URL, 398 | params='', json=payload).prepare() 399 | 400 | request = generate_request_headers( 401 | prepared_request, 402 | api_key, 403 | api_secret) 404 | 405 | response = Session().send(request).json() 406 | 407 | # return response 408 | 409 | orders = [] 410 | for res in response: 411 | # if res['clOrdID']: 412 | 413 | direction = "LONG" if res['side'] == "Buy" else "SHORT" 414 | 415 | if res['ordStatus'] == "Filled": 416 | fill = "FILLED" 417 | elif res['ordStatus'] == "Canceled": 418 | fill = "CANCELLED" 419 | elif res['ordStatus'] == "New": 420 | fill = "NEW" 421 | elif res['ordStatus'] == "PartiallyFilled": 422 | fill = "PARTIAL" 423 | else: 424 | raise Exception(res['ordStatus']) 425 | 426 | if res['ordType'] == "Limit": 427 | order_type = "LIMIT" 428 | elif res['ordType'] == "Market": 429 | order_type = "MARKET" 430 | elif res['ordType'] == "StopLimit": 431 | order_type = "STOP_LIMIT" 432 | elif res['ordType'] == "Stop": 433 | order_type = "STOP" 434 | else: 435 | raise Exception(res['ordType']) 436 | 437 | # If "\n" in response text field, use substring after "\n". 
438 | if "\n" in res['text']: 439 | text = res['text'].split("\n") 440 | metatype = text[1] 441 | elif ( 442 | res['text'] == "ENTRY" or res['text'] == "STOP" or res['text'] == 443 | "TAKE_PROFIT" or res['text'] == "FINAL_TAKE_PROFIT"): 444 | metatype = res['text'] 445 | else: 446 | # raise Exception("Order metatype error:", res['text']) 447 | print("Order metatype error:", res['text']) 448 | metatype = res['text'] 449 | 450 | orders.append({ 451 | 'order_id': res['clOrdID'], 452 | 'venue_id': res['orderID'], 453 | 'timestamp': int(parser.parse(res['timestamp']).timestamp()), 454 | 'price': res['price'], 455 | 'avg_fill_price': res['avgPx'], 456 | 'currency': res['currency'], 457 | 'venue': "BitMEX", 458 | 'symbol': res['symbol'], 459 | 'direction': direction, 460 | 'size': res['orderQty'], 461 | 'order_type': order_type, 462 | 'metatype': metatype, 463 | 'void_price': res['stopPx'], 464 | 'status': fill}) 465 | 466 | return orders 467 | 468 | # id_pairs = { 469 | # 'e5f4bbcf-ec61-c2c5-0365-c0b1d57d4e57': '64-1', 470 | # 'd76349e3-4d27-3764-7c71-c58a8b6955f3': '63-1'} 471 | 472 | # ids_for_cancellation = [ 473 | # 'e5f4bbcf-ec61-c2c5-0365-c0b1d57d4e57', 474 | # 'd76349e3-4d27-3764-7c71-c58a8b6955f3'] 475 | 476 | # print(cancel_orders(ids_for_cancellation)) 477 | 478 | 479 | portfolio = db_other['portfolio'].find_one({"id": 1}, {"_id": 0}) 480 | 481 | print(json.dumps(portfolio, indent=2)) 482 | -------------------------------------------------------------------------------- /misc testing/bitmex_auth_test.py: -------------------------------------------------------------------------------- 1 | from requests.packages.urllib3.util.retry import Retry 2 | from requests.adapters import HTTPAdapter 3 | from requests import Request, Session 4 | from requests.auth import AuthBase 5 | from urllib.parse import urlparse 6 | 7 | import hmac 8 | import hashlib 9 | import time 10 | 11 | 12 | REQUEST_TIMEOUT = 10 13 | 14 | api_key = "" 15 | api_secret = "" 16 | 17 | BASE_URL = 
"https://www.bitmex.com/api/v1"
BASE_URL_TESTNET = "https://testnet.bitmex.com/api/v1"
ORDERS_URL = "/order"


def generate_request_signature(secret, request_type, url, nonce,
                               data):
    """
    Generate BitMEX-compatible authenticated request signature header.

    Args:
        secret: API secret key.
        request_type: Request type (GET, POST, etc).
        url: full request url.
        validity: seconds request will be valid for after creation.
    Returns:
        signature: hex(HMAC_SHA256(apiSecret, verb + path + expires + data)
    Raises:
        None.
    """

    parsed_url = urlparse(url)
    path = parsed_url.path

    # Query string is part of the signed path.
    if parsed_url.query:
        path = path + '?' + parsed_url.query

    if isinstance(data, (bytes, bytearray)):
        data = data.decode('utf8')

    message = str(request_type).upper() + path + str(nonce) + data
    signature = hmac.new(bytes(secret, 'utf8'), bytes(message, 'utf8'),
                         digestmod=hashlib.sha256).hexdigest()

    return signature


def generate_request_headers(request, api_key, api_secret):
    """
    Add BitMEX-compatible authentication headers to a request object.

    Args:
        api_key: API key.
        api_secret: API secret key.
        request: Request object to be amended.
    Returns:
        request: Modified request object.
    Raises:
        None.
    """

    # Expiry nonce: now + REQUEST_TIMEOUT seconds.
    nonce = str(int(round(time.time()) + REQUEST_TIMEOUT))
    request.headers['api-expires'] = nonce
    request.headers['api-key'] = api_key
    request.headers['api-signature'] = generate_request_signature(
        api_secret, request.method, request.url, nonce, request.body or '')  # noqa
    request.headers['Content-Type'] = 'application/json'
    request.headers['Accept'] = 'application/json'
    request.headers['X-Requested-With'] = 'XMLHttpRequest'

    return request


# Test payload: market-buy 10 contracts of ETHUSD on testnet.
payload = {
    'symbol': "ETHUSD",
    'side': 'Buy',
    'orderQty': 10,
    'price': None,
    'stopPx': None,
    'clOrdID': None,
    'ordType': 'Market',
    'timeInForce': 'ImmediateOrCancel',
    'execInst': None,
    'text': None}

prepared_request = Request(
    'POST',
    BASE_URL_TESTNET + ORDERS_URL,
    json=payload,
    params='').prepare()

request = generate_request_headers(
    prepared_request,
    api_key,
    api_secret)

# NOTE(review): `method_whitelist` was deprecated/removed in newer urllib3
# (renamed `allowed_methods`) — confirm the pinned urllib3 version.
retries = Retry(
    total=5,
    backoff_factor=0.25,
    status_forcelist=[502, 503, 504],
    method_whitelist=False)
session = Session()
session.mount('https://', HTTPAdapter(max_retries=retries))

response = session.send(request)

print(response, response.text)
-------------------------------------------------------------------------------- /misc testing/db_object_queries.py: --------------------------------------------------------------------------------
from pymongo import MongoClient
import pandas as pd
from datetime import datetime
import json


# Ad-hoc inspection script for the local trade/portfolio database.
db_client = MongoClient('mongodb://127.0.0.1:27017/')
db_other = db_client['holdings_trades_signals_master']
# coll = db_other['signals']
# coll = db_other['trades']
coll = db_other['portfolio']

# result = coll.find({}, {"_id": 0}).sort([("entry_timestamp", -1)])  # signals
result = list(coll.find({}, {"_id":
0}).sort([("id", -1)]))  # portfolio
# result = coll.find({"trade_id": 1}, {"_id": 0}).sort([("trade_id", -1)])  # trades
# coll.update_one({"trade_id": 5}, {"$set": {"consent": True}})
# result = coll.find_one({"trade_id": 1}, {"_id": 0})  # trades

# Pretty-print each fetched document.
[print((json.dumps(i, indent=4))) for i in result]

# print(json.dumps(result, indent=2))
-------------------------------------------------------------------------------- /misc testing/feature_test.py: --------------------------------------------------------------------------------
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np


def cluster_filter(levels: list, t: float, multipass: bool):
    """
    Given a list of prices, identify groups of levels within t% of each other.

    Args:
        levels: list of price levels.
        t: tolerance, % variance between min/maxima to be considered a level.
        multipass: if True, run the filter for cluster sizes=3 or more. If
            False, filter only once (will pick up clusters size=2).
    Returns:
        Levels that joined no cluster, plus one aggregate (mean) level per
        identified cluster.
    Raises:
        None.
    """

    # Identify initial level clusters (single pass).
    temp_levels = []
    for lvl_1 in levels:
        for lvl_2 in levels:
            range_max = lvl_1 + lvl_1 * t
            range_min = lvl_1 - lvl_1 * t
            if lvl_2 >= range_min and lvl_2 <= range_max:
                cluster = sorted([lvl_1, lvl_2])
                if lvl_2 != lvl_1:
                    if cluster not in temp_levels:
                        temp_levels.append(cluster)

    # Identify strong clusters of 3 or more levels (multipass).
    if multipass:
        flattened = [item for sublist in temp_levels for item in sublist]
        c_count = 0
        to_append = []
        for cluster in temp_levels:
            for lvl_1 in cluster:
                range_max = lvl_1 + lvl_1 * t
                range_min = lvl_1 - lvl_1 * t
                for lvl_2 in flattened:
                    if lvl_2 >= range_min and lvl_2 <= range_max:
                        to_append.append([c_count, lvl_2])
            # c_count indexes the current cluster within temp_levels.
            c_count += 1

        # Add levels to their respective clusters and remove duplicates.
        for pair in to_append:
            temp_levels[pair[0]].append(pair[1])
            temp_levels[pair[0]] = sorted(list(set(temp_levels[pair[0]])))

    # Aggregate similar levels and remove temp levels.
    agg_levels = [(sum(i) / len(i)) for i in temp_levels]
    to_remove = [i for cluster in temp_levels for i in cluster]

    # Catch second-pass np.array > list conversion error
    if type(levels) != list:
        final_levels = [i for i in levels.tolist() if i not in to_remove]
    else:
        final_levels = [i for i in levels if i not in to_remove]

    # print("Levels:")
    # for level in levels:
    #     print(level)

    # print("\nLevel clusters:")
    # for level in temp_levels:
    #     print(level)

    # print("\nAggregate levels:")
    # for level in sorted(list(set(agg_levels))):
    #     print(level)

    # print("\nFinal levels:")
    # for level in sorted(list(set(final_levels))):
    #     print(level)

    return final_levels + agg_levels


def sr_levels(bars, n=8, t=0.02, s=3, f=3):
    """
    Find support and resistance levels using smoothed close price.

    Args:
        bars: OHLCV dataframe.
        n: bar window size.
        t: tolerance, % variance between min/maxima to be considered a level.
        s: smoothing factor. lower is more sensitive.
        f: number of filter passes.

    Returns:
        support: list of support levels
        resistance: list of resistance levels

    Raises:
        None.
    """
    from scipy.signal import savgol_filter as smooth

    # Convert n to next even number so the window splits evenly in half.
    if n % 2 != 0:
        n += 1

    # Find number of bars.
    n_ltp = bars.close.values.shape[0]

    # Smooth close data (savgol window must be odd, hence n + 1).
    ltp_smoothed = smooth(bars.close.values, (n + 1), s)

    # Find delta (difference in adjacent prices).
    ltp_delta = np.zeros(n_ltp)
    ltp_delta[1:] = np.subtract(ltp_smoothed[1:], ltp_smoothed[:-1])

    resistance = []
    support = []

    # Identify initial levels.
    for i in range(n_ltp - n):

        # Get window for current bar.
        window = ltp_delta[i:(i + n)]

        # Split window in half.
        first = window[:int((n / 2))]  # first half
        last = window[int((n / 2)):]   # second half

        # Find highs and lows for both halves of window.
        # First/last being higher or lower indicates asc/desc price.
        r_1 = np.sum(first > 0)
        r_2 = np.sum(last < 0)
        s_1 = np.sum(first < 0)
        s_2 = np.sum(last > 0)

        # Detect local maxima. If two points match, its a level.
        if r_1 == (n / 2) and r_2 == (n / 2):
            try:
                resistance.append(bars.close.values[i + (int((n / 2)) - 1)])
            # Catch empty list error if no levels are present.
            except Exception as ex:
                pass

        # Detect local minima. If two points match, its a level.
        if s_1 == (n / 2) and s_2 == (n / 2):
            try:
                support.append(bars.close.values[i + (int((n / 2)) - 1)])
            # Catch empty list error if no levels are present.
            except Exception as ex:
                pass

    # Filter levels f times.
    levels = np.sort(np.append(support, resistance))
    filtered_levels = cluster_filter(levels, t, multipass=True)
    for i in range(f - 1):
        filtered_levels = cluster_filter(filtered_levels, t, multipass=True)

    return filtered_levels


# Visual check: plot detected levels over the last `lookback` daily bars.
lookback = 200
n = 10
t = 0.02
s = 3
f = 3

bars = pd.read_csv("XBTUSD1D.csv", delimiter=',').tail(lookback)

levels = sr_levels(bars, n, t, s, f)

# Plot high and low values.
plt.plot(bars.high.values)
plt.plot(bars.low.values)

# Plot levels.
for i in range(len(levels)):
    plt.hlines(levels[i], 0, lookback)


# plt.hlines(2900, 0, 150, colors='r')


plt.show()
-------------------------------------------------------------------------------- /misc testing/model_test.py: --------------------------------------------------------------------------------
import re


timeframes = ["1D"]


def required_timeframes(timeframes):
    """
    Add the equivalent doubled timeframe for each timeframe in
    the given list of operating timeframes.

    eg. if "1H" is present, add "2H" to the list.

    Note: mutates the given list in place and returns None.
    """

    to_add = []
    for timeframe in timeframes:

        # 1Min use 3Min as the "doubled" trigger timeframe.
        if timeframe == "1Min":
            if "3Min" not in timeframes and "3Min" not in to_add:
                to_add.append("3Min")

        # 3Min use 5Min as the "doubled" trigger timeframe.
        elif timeframe == "3Min":
            if "5Min" not in timeframes and "5Min" not in to_add:
                to_add.append("5Min")

        # 5Min use 15Min as the "doubled" trigger timeframe.
        elif timeframe == "5Min":
            if "15Min" not in timeframes and "15Min" not in to_add:
                to_add.append("15Min")

        # 12H and 16H use 1D as the "doubled" trigger timeframe.
34 | elif timeframe == "12H" or timeframe == "16H": 35 | if "1D" not in timeframes and "1D" not in to_add: 36 | to_add.append("1D") 37 | 38 | # 30Min use 1H as the "doubled" trigger timeframe. 39 | elif timeframe == "30Min": 40 | if "1H" not in timeframes and "1H" not in to_add: 41 | to_add.append("1H") 42 | 43 | # All other timeframes just double the numeric value. 44 | else: 45 | num = int(''.join(filter(str.isdigit, timeframe))) 46 | code = re.findall("[a-zA-Z]+", timeframe) 47 | to_add.append((str(num * 2) + code[0])) 48 | 49 | for new_item in to_add: 50 | timeframes.append(new_item) 51 | 52 | 53 | required_timeframes(timeframes) 54 | 55 | print(timeframes) 56 | -------------------------------------------------------------------------------- /misc testing/pnl_calc_test.py: -------------------------------------------------------------------------------- 1 | from requests.packages.urllib3.util.retry import Retry 2 | from requests.adapters import HTTPAdapter 3 | from requests import Request, Session 4 | from requests.auth import AuthBase 5 | from urllib.parse import urlparse 6 | from datetime import datetime 7 | from dateutil import parser 8 | 9 | import json 10 | import hmac 11 | import hashlib 12 | import time 13 | 14 | 15 | REQUEST_TIMEOUT = 10 16 | 17 | api_key = "" 18 | api_secret = "" 19 | 20 | BASE_URL = "https://www.bitmex.com/api/v1" 21 | BASE_URL_TESTNET = "https://testnet.bitmex.com/api/v1" 22 | ORDERS_URL = "/order" 23 | TRADE_HIST_URL = "/execution/tradeHistory" 24 | 25 | retries = Retry( 26 | total=5, 27 | backoff_factor=0.25, 28 | status_forcelist=[502, 503, 504], 29 | method_whitelist=False) 30 | session = Session() 31 | session.mount('https://', HTTPAdapter(max_retries=retries)) 32 | 33 | demo_trade = { 34 | "trade_id": 1, 35 | "signal_timestamp": 1613627160, 36 | "type": "SINGLE_INSTRUMENT", 37 | "active": False, 38 | "venue_count": 1, 39 | "instrument_count": 1, 40 | "model": "EMA Cross - Testing only", 41 | "direction": "SHORT", 42 | 
"timeframe": "1Min", 43 | "entry_price": 52288.0, 44 | "u_pnl": 0, 45 | "r_pnl": 0, 46 | "fees": 0, 47 | "exposure": None, 48 | "venue": "BitMEX", 49 | "symbol": "XBTUSD", 50 | "position": None, 51 | "order_count": 2, 52 | "orders": { 53 | "1-1": { 54 | "trade_id": 1, 55 | "order_id": "1-1", 56 | "timestamp": None, 57 | "avg_fill_price": None, 58 | "currency": None, 59 | "venue_id": None, 60 | "venue": "BitMEX", 61 | "symbol": "XBTUSD", 62 | "direction": "SHORT", 63 | "size": 49.0, 64 | "price": 52288.0, 65 | "order_type": "MARKET", 66 | "metatype": "ENTRY", 67 | "void_price": 53333.76, 68 | "trail": False, 69 | "reduce_only": False, 70 | "post_only": False, 71 | "batch_size": 0, 72 | "status": "UNFILLED" 73 | }, 74 | "1-2": { 75 | "trade_id": 1, 76 | "order_id": "1-2", 77 | "timestamp": None, 78 | "avg_fill_price": None, 79 | "currency": None, 80 | "venue_id": None, 81 | "venue": "BitMEX", 82 | "symbol": "XBTUSD", 83 | "direction": "LONG", 84 | "size": 49.0, 85 | "price": 53333.76, 86 | "order_type": "STOP", 87 | "metatype": "STOP", 88 | "void_price": None, 89 | "trail": False, 90 | "reduce_only": True, 91 | "post_only": False, 92 | "batch_size": 0, 93 | "status": "UNFILLED" 94 | } 95 | }, 96 | "consent": True 97 | } 98 | 99 | 100 | def generate_request_signature(secret, request_type, url, nonce, 101 | data): 102 | """ 103 | Generate BitMEX-compatible authenticated request signature header. 104 | 105 | Args: 106 | secret: API secret key. 107 | request_type: Request type (GET, POST, etc). 108 | url: full request url. 109 | validity: seconds request will be valid for after creation. 110 | Returns: 111 | signature: hex(HMAC_SHA256(apiSecret, verb + path + expires + data) 112 | Raises: 113 | None. 114 | """ 115 | 116 | parsed_url = urlparse(url) 117 | path = parsed_url.path 118 | 119 | if parsed_url.query: 120 | path = path + '?' 
+ parsed_url.query 121 | 122 | if isinstance(data, (bytes, bytearray)): 123 | data = data.decode('utf8') 124 | 125 | message = str(request_type).upper() + path + str(nonce) + data 126 | signature = hmac.new(bytes(secret, 'utf8'), bytes(message, 'utf8'), 127 | digestmod=hashlib.sha256).hexdigest() 128 | 129 | return signature 130 | 131 | 132 | def generate_request_headers(request, api_key, api_secret): 133 | """ 134 | Add BitMEX-compatible authentication headers to a request object. 135 | 136 | Args: 137 | api_key: API key. 138 | api_secret: API secret key. 139 | request: Request object to be amended. 140 | Returns: 141 | request: Modified request object. 142 | Raises: 143 | None. 144 | """ 145 | 146 | nonce = str(int(round(time.time()) + REQUEST_TIMEOUT)) 147 | request.headers['api-expires'] = nonce 148 | request.headers['api-key'] = api_key 149 | request.headers['api-signature'] = generate_request_signature( 150 | api_secret, request.method, request.url, nonce, request.body or '') # noqa 151 | request.headers['Content-Type'] = 'application/json' 152 | request.headers['Accept'] = 'application/json' 153 | request.headers['X-Requested-With'] = 'XMLHttpRequest' 154 | 155 | return request 156 | 157 | 158 | def get_executions(symbol, start_timestamp=None, end_timestamp=None, count=500): 159 | 160 | # Convert epoch ts's to utc human-readable 161 | start = str(datetime.utcfromtimestamp(start_timestamp)) if start_timestamp else None 162 | end = str(datetime.utcfromtimestamp(end_timestamp)) if end_timestamp else None 163 | 164 | payload = { 165 | 'symbol': symbol, 166 | 'count': count, 167 | 'startTime': start, 168 | 'endTime': end, 169 | 'reverse': True} 170 | 171 | prepared_request = Request( 172 | 'GET', 173 | BASE_URL_TESTNET + TRADE_HIST_URL, 174 | json=payload, 175 | params='').prepare() 176 | 177 | request = generate_request_headers( 178 | prepared_request, 179 | api_key, 180 | api_secret) 181 | 182 | response = session.send(request).json() 183 | 184 | executions = 
[] 185 | 186 | for res in response: 187 | 188 | fee_type = "TAKER" if res['lastLiquidityInd'] == "RemovedLiquidity" else "MAKER" 189 | direction = "LONG" if res['side'] == "Buy" else "SHORT" 190 | 191 | if res['ordStatus'] == "Filled": 192 | fill = "FILLED" 193 | elif res['ordStatus'] == "Canceled": 194 | fill = "CANCELLED" 195 | elif res['ordStatus'] == "New": 196 | fill = "NEW" 197 | elif res['ordStatus'] == "PartiallyFilled": 198 | fill = "PARTIAL" 199 | else: 200 | raise Exception(res['ordStatus']) 201 | 202 | if res['ordType'] == "Limit": 203 | order_type = "LIMIT" 204 | elif res['ordType'] == "Market": 205 | order_type = "MARKET" 206 | elif res['ordType'] == "StopLimit": 207 | order_type = "STOP_LIMIT" 208 | elif res['ordType'] == "Stop": 209 | order_type = "STOP" 210 | else: 211 | raise Exception(res['ordType']) 212 | 213 | executions.append({ 214 | 'order_id': res['clOrdID'], 215 | 'venue_id': res['orderID'], 216 | 'timestamp': int(parser.parse(res['timestamp']).timestamp()), 217 | 'avg_exc_price': res['avgPx'], 218 | 'currency': res['currency'], 219 | 'symbol': res['symbol'], 220 | 'direction': direction, 221 | 'size': res['lastQty'], 222 | 'order_type': order_type, 223 | 'fee_type': fee_type, 224 | 'fee_amt': res['commission'], 225 | 'total_fee': res['lastQty'] * res['commission'], 226 | 'status': fill}) 227 | 228 | return executions 229 | 230 | 231 | def calculate_pnl_by_trade(trade_id): 232 | 233 | trade = demo_trade 234 | t_id = str(trade_id) 235 | 236 | # Get order executions for trade in period from trade signal to now. 237 | execs = get_executions(trade['symbol'], trade['signal_timestamp'], int(datetime.now().timestamp())) 238 | 239 | # Handle two-order trades. Single exit, single entry. 
240 | if len(trade['orders']) == 2: 241 | entry_oid = trade['orders'][t_id + "-1"]['order_id'] 242 | exit_oid = trade['orders'][t_id + "-2"]['order_id'] 243 | 244 | # TODO: Handle trade types with more than 2 orders 245 | elif len(trade['orders']) >= 3: 246 | entry_oid = None 247 | exit_oid = None 248 | # tp_oids = [] 249 | 250 | # Entry executions will match direction of trade and bear the entry order id. 251 | entries = [i for i in execs if i['direction'] == trade['direction'] and i['order_id'] == entry_oid] 252 | 253 | # API-submitted exit executions should be the reverse 254 | exits = [i for i in execs if i['direction'] != trade['direction'] and i['order_id'] == exit_oid] 255 | manual_exit = False 256 | 257 | # Exit orders placed manually wont bear the order id and cant be evaluated with certainty 258 | # if there were multiple trades with executions in the same period as the current trade. 259 | # If manual exit, notify user if the exit total is differnt to entry total. 260 | if not exits: 261 | exits = [i for i in execs if i['direction'] != trade['direction']] 262 | manual_exit = True 263 | 264 | if entries and exits: 265 | avg_entry = sum(i['avg_exc_price'] for i in entries) / len(entries) 266 | avg_exit = (sum(i['avg_exc_price'] for i in exits) / len(exits)) + 5000 267 | fees = sum(i['total_fee'] for i in (entries + exits)) 268 | percent_change = abs((avg_entry - avg_exit) / avg_entry) * 100 269 | abs_pnl = abs((trade['orders'][t_id + "-1"]['size'] / 100) * percent_change) - fees 270 | if trade['direction'] == "LONG": 271 | final_pnl = abs_pnl if avg_exit > avg_entry + fees else -abs_pnl 272 | 273 | elif trade['direction'] == "SHORT": 274 | final_pnl = abs_pnl if avg_exit < avg_entry - fees else -abs_pnl 275 | 276 | print(avg_entry, avg_exit) 277 | print(percent_change) 278 | print(final_pnl) 279 | 280 | # No matching entry or exit executions exist 281 | else: 282 | pass 283 | 284 | 285 | calculate_pnl_by_trade(1) 286 | 
-------------------------------------------------------------------------------- /misc testing/portfolio_analytics_test.py: -------------------------------------------------------------------------------- 1 | from pymongo import MongoClient 2 | import pandas as pd 3 | from datetime import datetime 4 | import json 5 | 6 | 7 | db_client = MongoClient('mongodb://127.0.0.1:27017/') 8 | db_other = db_client['holdings_trades_signals_master'] 9 | # coll = db_other['signals'] 10 | # coll = db_other['trades'] 11 | coll = db_other['portfolio'] 12 | 13 | 14 | pf = coll.find_one({}, {"_id": 0}) # portfolio 15 | 16 | # orders = pf['trades'][trade_id]['orders'].values() 17 | # entry, stop = None, None 18 | # exit = pf['trades'][trade_id]['exit_price'] 19 | 20 | 21 | balance_history = [i for i in list(pf['balance_history'].values())[1:]] 22 | if balance_history: 23 | 24 | winners_r, losers_r, total_r = [], [], [] 25 | 26 | for transaction in balance_history: 27 | trade = pf['trades'][transaction['trade_id']] 28 | entry = trade['position']['avg_entry_price'] 29 | stop = list(trade['orders'].values())[-1]['price'] 30 | exit = trade["exit_price"] 31 | rr = (exit - entry) / (entry - stop) 32 | 33 | total_r.append(rr) 34 | 35 | if transaction['amt'] > 0: 36 | winners_r.append(rr) 37 | 38 | elif transaction['amt'] < 0: 39 | losers_r.append(rr) 40 | 41 | # 'avg_r_per_trade' 42 | pf['avg_r_per_trade'] = round(sum(total_r) / len(total_r), 2) 43 | 44 | # 'avg_r_per_winner' 45 | pf['avg_r_per_winner'] = round(sum(winners_r) / len(winners_r), 2) 46 | 47 | # 'avg_r_per_loser' 48 | pf['avg_r_per_loser'] = sround(sum(losers_r) / len(losers_r), 2) 49 | 50 | # 'win_loss_ratio' 51 | if pf['total_winning_trades'] and pf['total_losing_trades']: 52 | pf['win_loss_ratio'] = pf['total_winning_trades'] / pf['total_losing_trades'] 53 | elif pf['total_winning_trades'] and not pf['total_losing_trades']: 54 | pf['win_loss_ratio'] = pf['total_winning_trades'] 55 | 56 | # 'gain_to_pain_ratio' 57 | # TODO 58 | 
59 | print(pf['avg_r_per_trade'], pf['avg_r_per_winner'], pf['avg_r_per_loser']) 60 | 61 | 62 | # print(json.dumps(pf['trades'][trade_id], indent=2)) -------------------------------------------------------------------------------- /misc testing/simple_api_test.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | headers = { 4 | 'Content-Type': 'application/x-www-form-urlencoded', 5 | 'Accept': 'application/json', 6 | 'X-Requested-With': 'XMLHttpRequest', 7 | } 8 | 9 | data = { 10 | 'symbol': 'XBTUSD', 11 | 'side': 'Buy', 12 | 'orderQty': '1000', 13 | 'price': '8850', 14 | 'stopPx': 'null', 15 | 'clOrdID': 'null', 16 | 'ordType': 'Limit', 17 | 'timeInForce': 'null', 18 | 'execInst': 'null', 19 | 'text': 'null' 20 | } 21 | 22 | response = requests.post('https://testnet.bitmex.com/api/v1/order', headers=headers, data=data) 23 | print(response.content) -------------------------------------------------------------------------------- /misc testing/snapshot_image_test.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-brez/trading-server/d3b0405c62cfa28b48cca0f3e71b66fba1db4533/misc testing/snapshot_image_test.py -------------------------------------------------------------------------------- /misc testing/static_image_test.py: -------------------------------------------------------------------------------- 1 | from datetime import timezone, datetime, timedelta 2 | from pymongo import MongoClient, errors 3 | from requests import Request, Session 4 | from requests.auth import AuthBase 5 | from urllib.parse import urlparse 6 | 7 | import mplfinance as mpl 8 | from io import BytesIO 9 | from PIL import Image, ImageGrab, ImageDraw 10 | import IPython.display as IPydisplay 11 | 12 | from dateutil import parser 13 | import pandas as pd 14 | import numpy as np 15 | 16 | 17 | import traceback 18 | import requests 19 | import hashlib 20 | import json 21 
| import hmac 22 | import time 23 | import os 24 | 25 | 26 | DB_URL = 'mongodb://127.0.0.1:27017/' 27 | DB_PRICES = 'asset_price_master' 28 | DB_OTHER = 'holdings_trades_signals_master' 29 | DB_TIMEOUT_MS = 10 30 | 31 | trade = { 32 | "trade_id": 91, 33 | "signal_timestamp": 1592390100, 34 | "type": "SINGLE_INSTRUMENT", 35 | "active": False, 36 | "venue_count": 1, 37 | "instrument_count": 1, 38 | "model": "EMA Cross - Testing only", 39 | "direction": "SHORT", 40 | "u_pnl": 0, 41 | "r_pnl": 0, 42 | "fees": 0, 43 | "timeframe": "1Min", 44 | "entry_price": 9481.5, 45 | "exposure": None, 46 | "venue": "BitMEX", 47 | "symbol": "XBTUSD", 48 | "position": None, 49 | "order_count": 2, 50 | "orders": { 51 | "91-1": { 52 | "trade_id": 91, 53 | "order_id": "91-1", 54 | "timestamp": None, 55 | "avg_fill_price": None, 56 | "currency": None, 57 | "venue_id": None, 58 | "venue": "BitMEX", 59 | "symbol": "XBTUSD", 60 | "direction": "SHORT", 61 | "size": 100.0, 62 | "price": 9481.5, 63 | "order_type": "MARKET", 64 | "metatype": "ENTRY", 65 | "void_price": 9671.13, 66 | "trail": False, 67 | "reduce_only": False, 68 | "post_only": False, 69 | "batch_size": 0, 70 | "status": "UNFILLED" 71 | }, 72 | "91-2": { 73 | "trade_id": 91, 74 | "order_id": "91-2", 75 | "timestamp": None, 76 | "avg_fill_price": None, 77 | "currency": None, 78 | "venue_id": None, 79 | "venue": "BitMEX", 80 | "symbol": "XBTUSD", 81 | "direction": "LONG", 82 | "size": 100.0, 83 | "price": 9671.13, 84 | "order_type": "STOP", 85 | "metatype": "STOP", 86 | "void_price": None, 87 | "trail": False, 88 | "reduce_only": True, 89 | "post_only": False, 90 | "batch_size": 0, 91 | "status": "UNFILLED" 92 | } 93 | } 94 | } 95 | 96 | SNAPSHOT_SIZE = 50 97 | 98 | db_client = MongoClient( 99 | DB_URL, 100 | serverSelectionTimeoutMS=DB_TIMEOUT_MS) 101 | db_prices = db_client[DB_PRICES] 102 | db_other = db_client[DB_OTHER] 103 | 104 | df = pd.read_csv('op_data.csv') 105 | 106 | # Format time column. 
107 | df['timestamp'] = df['timestamp'].apply( 108 | lambda x: parser.parse(x)) 109 | 110 | # Set index 111 | df.set_index("timestamp", inplace=True) 112 | 113 | # Pad any null bars forward. 114 | df.fillna(method="pad", inplace=True) 115 | 116 | # Rename columns for mpl. 117 | df.rename({'open': 'Open', 'high': 'High', 'low': 'Low', 118 | 'close': 'Close', 'volume': 'Volume'}, axis=1, inplace=True) 119 | 120 | # Use only the last x bars for the image. 121 | df = df.tail(SNAPSHOT_SIZE) 122 | 123 | entry = datetime.utcfromtimestamp(trade['signal_timestamp']) 124 | 125 | # Add entry marker 126 | entry_marker = [np.nan for i in range(SNAPSHOT_SIZE)] 127 | entry_marker[-1] = trade['entry_price'] 128 | 129 | print(df) 130 | 131 | 132 | def create_addplots(df, mpl): 133 | """ 134 | """ 135 | 136 | adps, hlines = [], {'hlines': [], 'colors': [], 'linestyle': '--', 137 | 'linewidths': 0.75} 138 | 139 | # Add technical feature data (indicator values, etc). 140 | for col in list(df): 141 | if ( 142 | col != "Open" and col != "High" and col != "Low" 143 | and col != "Close" and col != "Volume"): 144 | adps.append(mpl.make_addplot(df[col])) 145 | 146 | # Add entry marker 147 | color = 'limegreen' if trade['direction'] == "LONG" else 'crimson' 148 | adps.append(mpl.make_addplot( 149 | entry_marker, type='scatter', markersize=200, marker='.', color=color)) 150 | 151 | # Plotting Stop and TP levels cause incorrect scaling when stop/TP are 152 | # far away from entry. Fix later. Not urgent or required 153 | 154 | # # Add stop and TP levels. 
155 | # o_ids = [i for i in trade['orders'].keys()] 156 | # for o_id in o_ids: 157 | # if trade['orders'][o_id]['metatype'] == "STOP": 158 | # hlines['hlines'].append(trade['orders'][o_id]['price']) 159 | # hlines['colors'].append('crimson') 160 | 161 | # elif trade['orders'][o_id]['metatype'] == "TAKE_PROFIT": 162 | # hlines['hlines'].append(trade['orders'][o_id]['price']) 163 | # hlines['colors'].append('limegreen') 164 | 165 | # elif trade['orders'][o_id]['metatype'] == "FINAL_TAKE_PROFIT": 166 | # hlines['hlines'].append(trade['orders'][o_id]['price']) 167 | # hlines['colors'].append('limegreen') 168 | 169 | # # Add an invisible hline to re-scale, in case stop/TP is far away. 170 | # difference = max([abs(trade['entry_price'] - i) for i in hlines['hlines']]) 171 | # if max(hlines['hlines']) > difference: 172 | # hlines['hlines'].append(trade['entry_price'] - difference) 173 | # hlines['colors'].append('white') 174 | # elif max(hlines['hlines']) < differe7nce: 175 | # hlines['hlines'].append(trade['entry_price'] + difference) 176 | # hlines['colors'].append('white') 177 | 178 | return adps, hlines 179 | 180 | 181 | adp, hlines = create_addplots(df, mpl) 182 | style = mpl.make_mpf_style(gridstyle='') 183 | 184 | filename = str(trade['trade_id']) + "_" + trade['model'] + "_" + trade['timeframe'] 185 | 186 | imgbuffer = BytesIO() 187 | 188 | plot = mpl.plot(df, type='candle', addplot=adp, style=style, hlines=hlines, 189 | title="\n" + trade['model'] + ", " + trade['timeframe'], 190 | datetime_format='%d-%m %H:%M', figscale=1, savefig=imgbuffer, 191 | tight_layout=False) 192 | 193 | img = Image.open(imgbuffer).show() 194 | -------------------------------------------------------------------------------- /misc testing/strategy_test.py: -------------------------------------------------------------------------------- 1 | from datetime import date, datetime, timedelta 2 | from model import TrendFollowing 3 | from itertools import groupby, count 4 | from pymongo import 
MongoClient, errors 5 | import pandas as pd 6 | import pymongo 7 | import pandas as pd 8 | import calendar 9 | from broker import Broker 10 | from time import sleep 11 | import time 12 | import logging 13 | import queue 14 | import datetime 15 | 16 | 17 | class Exchange: 18 | 19 | def get_name(self): 20 | return "BitMEX" 21 | 22 | def get_symbols(self): 23 | return ["XBTUSD", "ETHUSD"] 24 | 25 | 26 | class strategy_test: 27 | 28 | db_client = MongoClient('mongodb://127.0.0.1:27017/') 29 | db = db_client['asset_price_master'] 30 | coll = db['BitMEX'] 31 | 32 | ALL_TIMEFRAMES = [ 33 | "1Min", "3Min", "5Min", "15Min", "30Min", "1H", "2H", "3H", "4H", 34 | "6H", "8H", "12H", "1D", "2D", "3D", "7D", "14D", "28D"] 35 | 36 | RESAMPLE_KEY = { 37 | 'open': 'first', 'high': 'max', 'low': 'min', 38 | 'close': 'last', 'volume': 'sum'} 39 | 40 | MINUTE_TIMEFRAMES = [1, 3, 5, 15, 30] 41 | HOUR_TIMEFRAMES = [1, 2, 3, 4, 6, 8, 12] 42 | DAY_TIMEFRAMES = [1, 2, 3, 7, 14, 28] 43 | 44 | # Timeframe strings: minute int values. 45 | TF_MINS = { 46 | "1Min": 1, "3Min": 3, "5Min": 5, "15Min": 15, "30Min": 30, "1H": 60, 47 | "2H": 120, 48 | "3H": 180, "4H": 240, "6H": 360, "8H": 480, "12H": 720, "1D": 1440, 49 | "2D": 2880, "3D": 4320, "7D": 10080, "14D": 20160, "28D": 40320} 50 | 51 | def __init__(self): 52 | self.logger = self.setup_logger() 53 | self.exchanges = [Exchange()] 54 | 55 | 56 | def setup_logger(self): 57 | """Create and configure logger""" 58 | 59 | logger = logging.getLogger() 60 | logger.setLevel(logging.DEBUG) 61 | ch = logging.StreamHandler() 62 | formatter = logging.Formatter( 63 | "%(asctime)s:%(levelname)s:%(module)s - %(message)s") 64 | ch.setFormatter(formatter) 65 | logger.addHandler(ch) 66 | 67 | # supress requests/urlib3/connectionpool messages 68 | # logging.DEBUG produces messages with each https request... 
69 | logging.getLogger("urllib3").propagate = False 70 | requests_log = logging.getLogger("requests") 71 | requests_log.addHandler(logging.NullHandler()) 72 | requests_log.propagate = False 73 | 74 | return logger 75 | 76 | def get_relevant_timeframes(self, time): 77 | """Return a list of timeframes relevant to the just-elapsed period. 78 | E.g if time has just struck UTC 10:30am the list will contain "1Min", 79 | "3Min", "5Min", "m15" and "30Min" strings. The first minute of a new 80 | day or week will add daily/weekly/monthly timeframe strings. Timeframes 81 | in use are 1, 3, 5, 15 and 30 mins, 1, 2, 3, 4, 6, 8 and 12 hours, 1, 2 82 | and 3 days, weekly and monthly.""" 83 | 84 | # check against the previous minute - the just-elapsed period. 85 | ts = time 86 | if type(ts) is not datetime.datetime: 87 | ts = datetime.datetime.utcfromtimestamp(time) 88 | timestamp = ts - timedelta(hours=0, minutes=1) 89 | timeframes = [] 90 | 91 | for i in self.MINUTE_TIMEFRAMES: 92 | self.minute_timeframe(i, timestamp, timeframes) 93 | 94 | for i in self.HOUR_TIMEFRAMES: 95 | self.hour_timeframe(i, timestamp, timeframes) 96 | 97 | for i in self.DAY_TIMEFRAMES: 98 | self.day_timeframe(i, timestamp, timeframes) 99 | 100 | if (timestamp.minute == 0 and timestamp.hour == 0 and 101 | calendar.day_name[date.today().weekday()] == "Monday"): 102 | timeframes.append("1w") 103 | 104 | return timeframes 105 | 106 | def minute_timeframe(self, minutes, timestamp, timeframes): 107 | for i in range(0, 60, minutes): 108 | if timestamp.minute == i: 109 | timeframes.append(f"{minutes}Min") 110 | print("minute tf added:", f"{minutes}Min") 111 | 112 | def hour_timeframe(self, hours, timestamp, timeframes): 113 | if timestamp.minute == 0 and timestamp.hour % hours == 0: 114 | timeframes.append(f"{hours}h") 115 | print("hour tf added:", f"{hours}Min") 116 | 117 | def day_timeframe(self, days, timestamp, timeframes): 118 | if (timestamp.minute == 0 and timestamp.hour == 0 and 119 | timestamp.day % days 
== 0): 120 | timeframes.append(f"{days}d") 121 | print("day tf added:", f"{days}Min") 122 | 123 | def load_data(self, exchange): 124 | """Create and return a dictionary of dataframes for all symbols and 125 | timeframes for the given exchange.""" 126 | 127 | dicts = {} 128 | for symbol in exchange.get_symbols(): 129 | dicts[symbol] = { 130 | tf: self.build_dataframe( 131 | exchange, symbol, tf) for tf in self.ALL_TIMEFRAMES} 132 | return dicts 133 | 134 | def build_dataframe(self, exc, sym, tf, lookback=5): 135 | """Return a dataframe of size lookback for the given symbol (sym), 136 | exchange (exc) and timeframe (tf). 137 | 138 | Lookback is the number of previous bars required by a model to perform 139 | to perform its analysis. E.g for a dataframe with tf = 4h, lookback = 140 | 50, we will need to fetch and resample 4*60*50 1 min bars (12000 bars) 141 | into 50 4h bars.""" 142 | 143 | # Find the total number of 1min bars needed using TFM dict. 144 | size = self.TF_MINS[tf] * lookback 145 | 146 | # Use a projection to remove mongo "_id" field and symbol. 
147 | result = self.coll.find( 148 | {"symbol": sym}, { 149 | "_id": 0, "symbol": 0}).limit( 150 | size).sort([("timestamp", -1)]) 151 | 152 | # Pass cursor to DataFrame, format time and set index 153 | df = pd.DataFrame(result) 154 | df['timestamp'] = df['timestamp'].apply( 155 | lambda x: datetime.datetime.fromtimestamp(x)) 156 | df.set_index("timestamp", inplace=True) 157 | 158 | # Downsample 1 min data to target timeframe 159 | resampled_df = pd.DataFrame() 160 | try: 161 | resampled_df = (df.resample(tf).agg(self.RESAMPLE_KEY)) 162 | except Exception as e: 163 | print(e) 164 | print(resampled_df.sort_values(by="timestamp", ascending=False)) 165 | return resampled_df 166 | 167 | 168 | strategy = strategy_test() 169 | print(strategy.data["BitMEX"]['XBTUSD']) 170 | print(strategy.data["BitMEX"]['ETHUSD']) 171 | 172 | 173 | # print(strategy.get_relevant_timeframes(1567871460)) 174 | -------------------------------------------------------------------------------- /misc testing/tick_parse_test.py: -------------------------------------------------------------------------------- 1 | from datetime import timezone, datetime, timedelta 2 | from bitmex_ws import Bitmex_WS 3 | from dateutil.tz import gettz 4 | from dateutil import parser 5 | from time import sleep 6 | import traceback 7 | import requests 8 | import logging 9 | import sys 10 | 11 | 12 | # For debugging/testimg ing parse_ticks() using bitmex_WS. 
13 | 14 | logger = logging.getLogger() 15 | logger.setLevel(logging.DEBUG) 16 | ch = logging.StreamHandler() 17 | formatter = logging.Formatter( 18 | "%(asctime)s:%(levelname)s:%(module)s - %(message)s") 19 | ch.setFormatter(formatter) 20 | logger.addHandler(ch) 21 | logging.getLogger("urllib3").propagate = False 22 | requests_log = logging.getLogger("requests") 23 | requests_log.addHandler(logging.NullHandler()) 24 | requests_log.propagate = False 25 | 26 | BASE_URL = "https://www.bitmex.com/api/v1" 27 | BARS_URL = "/trade/bucketed?binSize=" 28 | TICKS_URL = "/trade?symbol=" 29 | 30 | WS_URL = "wss://www.bitmex.com/realtime" 31 | symbols = ["XBTUSD", "ETHUSD"] 32 | channels = ["trade"] 33 | api_key = None 34 | api_secret = None 35 | 36 | ws = Bitmex_WS( 37 | logger, symbols, channels, WS_URL, 38 | api_key, api_secret) 39 | 40 | if not ws.ws.sock.connected: 41 | logger.debug("Failed to to connect to BitMEX websocket.") 42 | 43 | 44 | def get_recent_bar(timeframe, symbol, n=1): 45 | """ Return n recent 1-min bars of desired timeframe and symbol. 
""" 46 | 47 | sleep(0.5) 48 | payload = str( 49 | BASE_URL + BARS_URL + timeframe + "&partial=false&symbol=" + 50 | symbol + "&count=" + str(n) + "&reverse=true") 51 | 52 | # print(payload)nnnnnn 53 | 54 | result = requests.get(payload).json() 55 | 56 | bars = [] 57 | for i in result: 58 | bars.append({ 59 | 'symbol': symbol, 60 | 'timestamp': i['timestamp'], 61 | 'open': i['open'], 62 | 'high': i['high'], 63 | 'low': i['low'], 64 | 'close': i['close'], 65 | 'volume': i['volume']}) 66 | return bars 67 | 68 | 69 | def seconds_til_next_minute(): 70 | """ Return number of seconds to next minute.""" 71 | 72 | now = datetime.utcnow().second 73 | delay = 60 - now 74 | return delay 75 | 76 | 77 | def previous_minute(): 78 | """ Return the previous minute UTC ms epoch timestamp.""" 79 | 80 | d1 = datetime.now().second 81 | d2 = datetime.now().microsecond 82 | timestamp = datetime.now() - timedelta( 83 | minutes=1, seconds=d1, microseconds=d2) 84 | 85 | # convert to epoch 86 | timestamp = int(timestamp.timestamp()) 87 | 88 | # Replace final digit with zero, can be 1 or more during a slow cycle. 89 | timestamp_str = list(str(timestamp)) 90 | timestamp_str[len(timestamp_str) - 1] = "0" 91 | timestamp = int(''.join(timestamp_str)) 92 | 93 | return timestamp 94 | 95 | 96 | def build_OHLCV(ticks: list, symbol: str, close_as_open=True): 97 | """ 98 | Args: 99 | ticks: A list of ticks to aggregate. Assumes the list's first tick 100 | is from the previous minute, this tick is used for open price. 101 | symbol: Ticker code. 102 | close_as_open: If true, the first tick in arg "ticks" must be the final 103 | tick from the previous minute, to be used for bar open price, 104 | resulting in no gaps between bars (some exchanges follow this 105 | practice as standard, some dont). If false, use arg "ticks" first 106 | tick as the open price. 107 | 108 | Note: Some venues use a 1 min offset for bar timestamps. Tradingview 109 | bars are timestamped 1 minute behind bitmex, for example. 
110 | 111 | Returns: 112 | 1-minute OHLCV bar (dict). 113 | 114 | Raises: 115 | Tick data timestamp mismatch error. 116 | """ 117 | 118 | if ticks: 119 | 120 | if close_as_open: 121 | 122 | # Convert incoming timestamp format if required. 123 | if type(ticks[0]['timestamp']) is not datetime: 124 | median = parser.parse( 125 | ticks[int((len(ticks) / 2))]['timestamp']) 126 | first = parser.parse(ticks[0]['timestamp']) 127 | else: 128 | median = ticks[int((len(ticks) / 2))]['timestamp'] 129 | first = ticks[0]['timestamp'] 130 | 131 | # This should be the most common case if close_as_open=True. 132 | # Dont include the first tick for volume and price calc. 133 | if first.minute == median.minute - 1: 134 | volume = sum(i['size'] for i in ticks) - ticks[0]['size'] 135 | prices = [i['price'] for i in ticks] 136 | prices.pop(0) 137 | 138 | # If the timestamps are same, may mean there were no early 139 | # trades, proceed as though close_as_open=False 140 | elif first.minute == median.minute: 141 | volume = sum(i['size'] for i in ticks) 142 | prices = [i['price'] for i in ticks] 143 | 144 | # There's a timing/data problem is neither case above is true. 145 | else: 146 | raise Exception( 147 | "Tick data timestamp error: timestamp mismatch." 
+ 148 | "\nFirst tick minute: " + str(first) + 149 | "\nMedian tick minute: " + str(median)) 150 | 151 | elif not close_as_open or close_as_open is False: 152 | volume = sum(i['size'] for i in ticks) 153 | prices = [i['price'] for i in ticks] 154 | 155 | high_price = max(prices) if len(prices) >= 1 else None 156 | low_price = min(prices) if len(prices) >= 1 else None 157 | open_price = ticks[0]['price'] if len(prices) >= 1 else None 158 | close_price = ticks[-1]['price'] if len(prices) >= 1 else None 159 | 160 | bar = {'symbol': symbol, 161 | 'timestamp': previous_minute() + 60, 162 | 'open': open_price, 163 | 'high': high_price, 164 | 'low': low_price, 165 | 'close': close_price, 166 | 'volume': volume} 167 | return bar 168 | 169 | elif ticks is None or not ticks: 170 | bar = {'symbol': symbol, 171 | 'timestamp': previous_minute() + 60, 172 | 'open': None, 173 | 'high': None, 174 | 'low': None, 175 | 'close': None, 176 | 'volume': 0} 177 | return bar 178 | 179 | def parse_ticks(): 180 | if not ws.ws: 181 | logger.debug("BitMEX websocket disconnected.") 182 | else: 183 | all_ticks = ws.get_ticks() 184 | target_minute = datetime.now().minute - 1 185 | ticks_target_minute = [] 186 | tcount = 0 187 | 188 | # search from end of tick list to grab newest ticks first 189 | for i in reversed(all_ticks): 190 | try: 191 | ts = i['timestamp'] 192 | if type(ts) is not datetime: 193 | ts = parser.parse(ts) 194 | except Exception: 195 | logger.debug(traceback.format_exc()) 196 | # scrape prev minutes ticks 197 | if ts.minute == target_minute: 198 | ticks_target_minute.append(i) 199 | ticks_target_minute[tcount]['timestamp'] = ts 200 | tcount += 1 201 | # store the previous-to-target bar's last 202 | # traded price to use as the open price for target bar 203 | if ts.minute == target_minute - 1: 204 | ticks_target_minute.append(i) 205 | ticks_target_minute[tcount]['timestamp'] = ts 206 | break 207 | 208 | ticks_target_minute.reverse() 209 | 210 | # group ticks by symbol 211 | 
ticks = {i: [] for i in symbols} 212 | for tick in ticks_target_minute: 213 | ticks[tick['symbol']].append(tick) 214 | 215 | # build bars from ticks 216 | bars = {i: [] for i in symbols} 217 | for symbol in symbols: 218 | bar = build_OHLCV(ticks[symbol], symbol) 219 | bars[symbol].append(bar) 220 | 221 | return bars, ticks 222 | 223 | 224 | def get_recent_ticks(symbol, n=1): 225 | """ 226 | Args: 227 | symbol: 228 | n: 229 | 230 | Returns: 231 | List containing n minutes of recent ticks for the desired symbol. 232 | 233 | Raises: 234 | Tick data timestamp mismatch error. 235 | """ 236 | 237 | # find difference between start and end of period 238 | delta = n * 60 239 | 240 | # find start timestamp and convert to ISO1806 241 | start_epoch = previous_minute() + 60 - delta 242 | start_iso = datetime.utcfromtimestamp(start_epoch).isoformat() 243 | 244 | # find end timestamp and convert to ISO1806 245 | end_epoch = previous_minute() + 60 246 | end_iso = datetime.utcfromtimestamp(end_epoch).isoformat() 247 | 248 | # initial poll 249 | sleep(1) 250 | payload = str( 251 | BASE_URL + TICKS_URL + symbol + "&count=" + 252 | "1000&reverse=false&startTime=" + start_iso + "&endTime" + end_iso) 253 | # print(payload) 254 | 255 | # print("Starting timestamp", start_iso) 256 | # print("End timestamp ", end_iso) 257 | ticks = [] 258 | initial_result = requests.get(payload).json() 259 | for tick in initial_result: 260 | ticks.append(tick) 261 | 262 | # if 1000 ticks in result (max size), keep polling until 263 | # we get a response with length <1000 264 | if len(initial_result) == 1000: 265 | print("Over 1000 ticks exist in the previous minute.") 266 | 267 | maxed_out = True 268 | while maxed_out: 269 | 270 | # Dont use endTime as it seems to cut off the final few ticks. 
271 | payload = str( 272 | BASE_URL + TICKS_URL + symbol + "&count=" + 273 | "1000&reverse=false&startTime=" + ticks[-1]['timestamp']) 274 | 275 | interim_result = requests.get(payload).json() 276 | for tick in interim_result: 277 | ticks.append(tick) 278 | 279 | if len(interim_result) != 1000: 280 | maxed_out = False 281 | 282 | # check median tick timestamp matches start_iso 283 | median_dt = parser.parse(ticks[int((len(ticks) / 2))]['timestamp']) 284 | match_dt = parser.parse(start_iso) 285 | if median_dt.minute != match_dt.minute: 286 | raise Exception("Tick data timestamp error: timestamp mismatch.") 287 | 288 | # populate list with matching-timestamped ticks only 289 | final_ticks = [ 290 | i for i in ticks if parser.parse( 291 | i['timestamp']).minute == match_dt.minute] 292 | 293 | return final_ticks 294 | 295 | 296 | count = 0 297 | sleep(seconds_til_next_minute()) 298 | while True: 299 | if count == 0 or count % 3: 300 | print("Waiting for full minute to elapse..") 301 | sleep(seconds_til_next_minute()) 302 | 303 | bars, pticks = parse_ticks() 304 | 305 | # print("Parsed ticks:") 306 | # for tick in pticks["XBTUSD"]: 307 | # print( 308 | # tick['timestamp'], tick['side'], 309 | # tick['size'], tick['price']) 310 | 311 | print("Parsed bars:") 312 | print(bars["XBTUSD"]) 313 | # print(bars["ETHUSD"]) 314 | 315 | # stats 316 | # print( 317 | # "Open:", pticks["XBTUSD"][0]['price']) 318 | # "High:", 319 | # "Low:", 320 | # "Close:", 321 | # "Volume:", ) 322 | 323 | # print("\nReference ticks:") 324 | ticks = get_recent_ticks("XBTUSD") 325 | # for tick in ticks: 326 | # print( 327 | # tick['timestamp'], tick['side'], tick['size'], 328 | # tick['price']) 329 | 330 | print("Reference bars:") 331 | print(get_recent_bar("1m", "XBTUSD")) 332 | # print(get_recent_bars("1m", "ETHUSD", 1)) 333 | 334 | # stats 335 | 336 | count += 1 337 | if count == 1: 338 | sys.exit(0) 339 | -------------------------------------------------------------------------------- /misc 
testing/timestamp_test.py: -------------------------------------------------------------------------------- 1 | from dateutil import parser 2 | from dateutil.tz import gettz 3 | from datetime import datetime, timezone 4 | 5 | 6 | bar = {"timestamp":"2020-02-07T12:42:00.000Z","symbol":"XBTUSD","open":9754.5,"high":9754.5,"low":9748,"close":9748,"trades":944,"volume":5269528,"vwap":9749.4394,"lastSize":17287,"turnover":54049961870,"homeNotional":540.4996187,"foreignNotional":5269528} 7 | 8 | final_datetime = parser.parse(bar['timestamp']) 9 | final_timestamp = final_datetime.replace(tzinfo=timezone.utc).timestamp() 10 | 11 | print(final_timestamp) 12 | 13 | print((final_datetime - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds()) -------------------------------------------------------------------------------- /model.py: -------------------------------------------------------------------------------- 1 | """ 2 | trading-server is a multi-asset, multi-strategy, event-driven execution 3 | and backtesting platform (OEMS) for trading common markets. 4 | 5 | Copyright (C) 2020 Sam Breznikar 6 | 7 | Licensed under GNU General Public License 3.0 or later. 8 | 9 | Some rights reserved. See LICENSE.md, AUTHORS.md. 10 | """ 11 | 12 | from abc import ABC, abstractmethod 13 | from features import Features as f 14 | from event_types import SignalEvent 15 | import traceback 16 | import sys 17 | 18 | 19 | class Model(ABC): 20 | """ 21 | Base class for strategy models. 22 | """ 23 | 24 | def __init__(self): 25 | super().__init__() 26 | 27 | def get_operating_timeframes(self): 28 | """ 29 | Return list of operating timeframes. 30 | """ 31 | 32 | return self.operating_timeframes 33 | 34 | def get_lookback(self): 35 | """ 36 | Return model's required lookback (number of 37 | previous bars to analyse) for a given timeframe. 38 | """ 39 | 40 | return self.lookback 41 | 42 | def get_features(self): 43 | """ 44 | Return list of features in use by the model. 
45 | """ 46 | 47 | return self.features 48 | 49 | def get_name(self): 50 | """ 51 | Return model name. 52 | """ 53 | 54 | return self.name 55 | 56 | def get_instruments(self): 57 | """ 58 | Return dict of instrument amd venues the model is applicable to. 59 | """ 60 | 61 | return self.instruments 62 | 63 | @abstractmethod 64 | def run(self): 65 | """ 66 | Run model with given data. 67 | """ 68 | 69 | @abstractmethod 70 | def get_required_timeframes(self, timeframes, result=False): 71 | """ 72 | Given a list of operating timeframes, append additional required 73 | timeframe strings to the list (amend in-place, no new list created). 74 | 75 | To be overwritten in each model. 76 | 77 | Args: 78 | timeframes: list of current-period operating timeframes. 79 | result: boolean, if True, return a new list. Othewise append req 80 | timeframes to the list passed in (timeframes). 81 | 82 | Returns: 83 | None. 84 | 85 | Raises: 86 | None. 87 | """ 88 | 89 | 90 | class EMACrossTestingOnly(Model): 91 | """ 92 | For testing use only. 93 | 94 | Entry: 95 | Market entry when EMA's cross 96 | 97 | Stop-loss: 98 | None. 99 | 100 | Take-profit: 101 | Close trade and re-open in opposite direction on opposing signal. 102 | """ 103 | 104 | name = "EMA Cross - Testing only" 105 | 106 | instruments = { 107 | "BitMEX": { 108 | "XBTUSD": "XBTUSD", 109 | # "ETHUSD": "ETHUSD", 110 | # "XRPUSD": "XRPUSD", 111 | }, 112 | 113 | "Binance": { 114 | 115 | }, 116 | 117 | "FTX": { 118 | 119 | }} 120 | 121 | # Timeframes the strategy runs on. 122 | operating_timeframes = [ 123 | "1Min"] 124 | 125 | # Need to tune each timeframes ideal lookback, 150 default for now. 126 | lookback = { 127 | "1Min": 150, "3Min": 150, "5Min": 150, "15Min": 150, "30Min": 150, 128 | "1H": 150, "2H": 150, "3H": 150, "4H": 150, "6H": 150, "8H": 150, 129 | "12H": 150, "16H": 150, "1D": 150, "2D": 150, "3D": 150, "4D": 150, 130 | "7D": 150, "14D": 150} 131 | 132 | # First tuple element in tuple is feature type. 
133 | # Second tuple element is feature function. 134 | # Third tuple element is feature param. 135 | features = [ 136 | ("indicator", f.EMA, 10), 137 | ("indicator", f.EMA, 20)] 138 | 139 | def __init__(self, logger): 140 | super() 141 | 142 | self.logger = logger 143 | 144 | def run(self, op_data: dict, req_data: list, timeframe: str, symbol: str, 145 | exchange): 146 | """ 147 | Run the model with the given data. 148 | 149 | Args: 150 | None: 151 | 152 | Returns: 153 | SignalEvent if signal is produced, otherwise None. 154 | 155 | Raises: 156 | None. 157 | 158 | """ 159 | 160 | self.logger.info( 161 | "Running " + str(timeframe) + " " + self.get_name() + ".") 162 | 163 | if timeframe in self.operating_timeframes: 164 | 165 | features = list(zip( 166 | op_data[timeframe].index, op_data[timeframe]['open'], 167 | op_data[timeframe].EMA10, op_data[timeframe].EMA20)) 168 | 169 | longs = {'price': [], 'time': []} 170 | shorts = {'price': [], 'time': []} 171 | 172 | # Check for EMA crosses. 173 | for i in range(len(op_data[timeframe].index)): 174 | fast = features[i][2] 175 | slow = features[i][3] 176 | fast_minus_1 = features[i - 1][2] 177 | slow_minus_1 = features[i - 1][3] 178 | fast_minus_2 = features[i - 2][2] 179 | slow_minus_2 = features[i - 2][3] 180 | 181 | if fast is not None and slow is not None: 182 | 183 | # Short cross. 184 | if slow > fast: 185 | if slow_minus_1 < fast_minus_1 and slow_minus_2 < fast_minus_2: 186 | shorts['price'].append(features[i][1]) 187 | shorts['time'].append(features[i][0]) 188 | 189 | # Long cross. 
190 | elif slow < fast: 191 | if slow_minus_1 > fast_minus_1 and slow_minus_2 > fast_minus_2: 192 | longs['price'].append(features[i][1]) 193 | longs['time'].append(features[i][0]) 194 | 195 | # print(op_data[timeframe]) 196 | # print(longs['time']) 197 | # print(shorts['time']) 198 | 199 | if len(longs['time']) > 0 or len(shorts['time']) > 0: 200 | 201 | # print(len(longs['time'])) 202 | # print(len(shorts['time'])) 203 | 204 | try: 205 | 206 | signal = False 207 | 208 | # Generate trade signal if current bar has an entry. 209 | if features[-1][0] == longs['time'][-1]: 210 | direction = "LONG" 211 | entry_price = longs['price'][-1] 212 | entry_ts = longs['time'][-1] 213 | signal = True 214 | 215 | elif features[-1][0] == shorts['time'][-1]: 216 | direction = "SHORT" 217 | entry_price = shorts['price'][-1] 218 | entry_ts = shorts['time'][-1] 219 | signal = True 220 | 221 | if signal: 222 | return SignalEvent(symbol, int(entry_ts.timestamp()), 223 | direction, timeframe, self.name, 224 | exchange, entry_price, "Market", None, 225 | None, None, False, None, 226 | op_data[timeframe]) 227 | else: 228 | return None 229 | 230 | except IndexError: 231 | traceback.print_exc() 232 | print(type(features), len(features[-1]), features[-1][0]) 233 | print(type(longs), len(longs), longs['time']) 234 | print(type(shorts), len(shorts), shorts['time']) 235 | sys.exit(0) 236 | 237 | def get_required_timeframes(self, timeframes: list, result=False): 238 | """ 239 | No additional (other than current) timeframes required for this model. 
240 | """ 241 | 242 | if result: 243 | return timeframes 244 | else: 245 | pass 246 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | TA_Lib == 0.4.17 2 | matplotlib == 3.1.3 3 | numpy == 1.17.4 4 | pandas == 0.25.3 5 | plotly == 4.5.4 6 | pymongo == 3.9.0 7 | python_dateutil == 2.8.0 8 | python_telegram_bot == 12.7 9 | requests == 2.22.0 10 | scipy == 1.3.3 11 | websocket_client == 0.56.0 12 | -------------------------------------------------------------------------------- /resample.py: -------------------------------------------------------------------------------- 1 | from datetime import date, datetime, timedelta 2 | from pymongo import MongoClient 3 | from dateutil import parser 4 | import pandas as pd 5 | import calendar 6 | import time 7 | 8 | 9 | db_client = MongoClient('mongodb://127.0.0.1:27017/') 10 | db = db_client['asset_price_master'] 11 | coll = db['BitMEX'] 12 | 13 | tf = "30Min" 14 | symbol = "XBTUSD" 15 | 16 | RESAMPLE_KEY = { 17 | 'open': 'first', 'high': 'max', 'low': 'min', 18 | 'close': 'last', 'volume': 'sum'} 19 | 20 | result = coll.find( 21 | {"symbol": symbol}, { 22 | "_id": 0, "symbol": 0}).sort([("timestamp", -1)]) 23 | 24 | # Pass cursor to DataFrame constructor 25 | df = pd.DataFrame(result) 26 | 27 | # Format time column 28 | df['timestamp'] = df['timestamp'].apply( 29 | lambda x: datetime.fromtimestamp(x)) 30 | 31 | # Set index 32 | df.set_index("timestamp", inplace=True) 33 | 34 | # Downsample 1 min data to target timeframe 35 | resampled_df = pd.DataFrame() 36 | try: 37 | resampled_df = (df.resample(tf).agg(RESAMPLE_KEY)) 38 | except Exception as exc: 39 | print("Resampling error", exc) 40 | 41 | resampled_df.to_csv(symbol + tf + ".csv") 42 | -------------------------------------------------------------------------------- /server.py: 
-------------------------------------------------------------------------------- 1 | """ 2 | trading-server is a multi-asset, multi-strategy, event-driven execution 3 | and backtesting platform (OEMS) for trading common markets. 4 | 5 | Copyright (C) 2020 Sam Breznikar 6 | 7 | Licensed under GNU General Public License 3.0 or later. 8 | 9 | Some rights reserved. See LICENSE.md, AUTHORS.md. 10 | """ 11 | 12 | from pymongo import MongoClient, errors 13 | from threading import Thread 14 | from time import sleep 15 | import subprocess 16 | import datetime 17 | 18 | import logging 19 | import time 20 | import queue 21 | 22 | from messaging_clients import Telegram 23 | from portfolio import Portfolio 24 | from strategy import Strategy 25 | from data import Datahandler 26 | from broker import Broker 27 | from bitmex import Bitmex 28 | 29 | 30 | class Server: 31 | """ 32 | Server routes system events amongst worker components via a queue in 33 | an event handling loop. Objects in queue processed at start of each minute. 34 | 35 | Event loop lifecycle: 36 | 1. A new minute begins - Tick data is parsed into 1 min bars. 37 | 2. Datahander wraps new bars and other data in Market Events. 38 | 3. Datahandler pushes Market Events into event queue. 39 | 4. Market Events are consumed by Strategy object. 40 | 5. Strategy creates a Signal event and places it in event queque. 41 | 6. Signal events consumed by Portfolio. 42 | 7. Portfolio creates Order event from Signal, places it in queue. 43 | 8. Broker executes Order events, creates Fill event post-transaction. 44 | 9. Portfolio consumes Fill event, updates values. 45 | 10. Repeat 1-9 until queue empty. 46 | 11. Strategy prepares data for the next minutes calculuations. 47 | 12. 
Sleep until current minute elapses.""" 48 | 49 | DB_URL = 'mongodb://127.0.0.1:27017/' 50 | DB_PRICES = 'asset_price_master' 51 | DB_OTHER = 'holdings_trades_signals_master' 52 | DB_TIMEOUT_MS = 10 53 | 54 | VENUES = ["Binance", "BitMEX"] 55 | DB_OTHER_COLLS = ['trades', 'portfolio', 'signals'] 56 | 57 | # Mins between recurring data diagnostics. 58 | DIAG_DELAY = 45 59 | 60 | def __init__(self): 61 | 62 | # Set False for forward testing. 63 | self.live_trading = True 64 | 65 | self.log_level = logging.INFO 66 | self.logger = self.setup_logger() 67 | 68 | # Check DB state OK before connecting to any exchanges 69 | self.db_client = MongoClient( 70 | self.DB_URL, 71 | serverSelectionTimeoutMS=self.DB_TIMEOUT_MS) 72 | self.db_prices = self.db_client[self.DB_PRICES] 73 | self.db_other = self.db_client[self.DB_OTHER] 74 | self.check_db_status(self.VENUES) 75 | 76 | self.exchanges = self.exchange_wrappers(self.logger, self.VENUES) 77 | self.telegram = Telegram(self.logger) 78 | 79 | # Main event queue. 80 | self.events = queue.Queue(0) 81 | 82 | # Producer/consumer worker classes. 83 | self.data = Datahandler(self.exchanges, self.logger, self.db_prices, 84 | self.db_client) 85 | 86 | self.strategy = Strategy(self.exchanges, self.logger, self.db_prices, 87 | self.db_other, self.db_client) 88 | 89 | self.portfolio = Portfolio(self.exchanges, self.logger, self.db_other, 90 | self.db_client, self.strategy.models, 91 | self.telegram) 92 | 93 | self.broker = Broker(self.exchanges, self.logger, self.portfolio, 94 | self.db_other, self.db_client, self.live_trading, 95 | self.telegram) 96 | 97 | self.portfolio.broker = self.broker 98 | 99 | # Start flask api in separate process 100 | # p = subprocess.Popen(["python", "api.py"]) 101 | # self.logger.info("Started flask API.") 102 | 103 | # Processing performance tracking variables. 
104 | self.start_processing = None 105 | self.end_processing = None 106 | self.cycle_count = 0 107 | 108 | def run(self): 109 | """ 110 | Core event handling loop. 111 | """ 112 | 113 | self.data.live_trading = self.live_trading 114 | self.broker.live_trading = self.live_trading 115 | 116 | # Check data is current, repair if necessary before live trading. 117 | # No need to do so if backtesting, just use existing stored data. 118 | if self.live_trading: 119 | self.data.run_data_diagnostics(1) 120 | 121 | # Run twice to account for first diag runtime 122 | self.data.run_data_diagnostics(0) 123 | 124 | self.cycle_count = 0 125 | 126 | sleep(self.seconds_til_next_minute()) 127 | 128 | while True: 129 | if self.live_trading: 130 | 131 | # Only update data after at least one minute of new data 132 | # has been collected, plus datahandler and strategy ready. 133 | if self.cycle_count >= 1 and self.data.ready: 134 | self.start_processing = time.time() 135 | 136 | # Fetch and queue events for processing. 137 | self.events = self.broker.check_fills(self.events) 138 | self.events = self.data.update_market_data(self.events) 139 | self.clear_event_queue() 140 | 141 | # Run diagnostics at 3 and 7 mins to be sure missed 142 | # bars are rectified before ongoing system operation. 143 | # if (self.cycle_count == 2 or self.cycle_count == 5): 144 | # thread = Thread( 145 | # target=lambda: self.data.run_data_diagnostics(0)) 146 | # thread.daemon = True 147 | # thread.start() 148 | 149 | # # Check data integrity periodically thereafter. 150 | # if (self.cycle_count % self.DIAG_DELAY == 0): 151 | # thread = Thread( 152 | # target=lambda: self.data.run_data_diagnostics(0)) 153 | # thread.daemon = True 154 | # thread.start() 155 | 156 | # Sleep til the next minute begins. 157 | sleep(self.seconds_til_next_minute()) 158 | self.cycle_count += 1 159 | 160 | # Update data w/o delay when backtesting, no diagnostics. 
161 | elif not self.live_trading: 162 | self.events = self.data.update_market_data(self.events) 163 | self.clear_event_queue() 164 | 165 | def clear_event_queue(self): 166 | """ 167 | Routes events to worker classes for processing. 168 | """ 169 | 170 | count = 0 171 | 172 | while True: 173 | 174 | try: 175 | 176 | # Check for user commands 177 | 178 | # Get events from queue 179 | event = self.events.get(False) 180 | 181 | except queue.Empty: 182 | # Log processing performance stats 183 | self.end_processing = time.time() 184 | duration = round( 185 | self.end_processing - self.start_processing, 5) 186 | self.logger.info( 187 | "Processed " + str(count) + " events in " + 188 | str(duration) + " seconds.") 189 | 190 | # Do non-time critical work now that events are processed. 191 | self.data.save_new_bars_to_db() 192 | self.strategy.trim_datasets() 193 | self.strategy.save_new_signals_to_db() 194 | # self.portfolio.save_new_trades_to_db() 195 | self.broker.check_consent(self.events) 196 | 197 | break 198 | 199 | else: 200 | if event is not None: 201 | count += 1 202 | 203 | # Signal Event generation. 204 | if event.type == "MARKET": 205 | self.strategy.new_data( 206 | self.events, event, self.cycle_count) 207 | self.portfolio.update_price(self.events, event) 208 | 209 | # Order Event generation. 210 | elif event.type == "SIGNAL": 211 | self.logger.info("Processing signal event.") 212 | self.portfolio.new_signal(self.events, event) 213 | 214 | # Order placement and Fill Event generation. 215 | elif event.type == "ORDER": 216 | self.logger.info("Processing order event.") 217 | self.broker.new_order(self.events, event) 218 | 219 | # Final portolio update. 220 | elif event.type == "FILL": 221 | self.logger.info("Processing fill event.") 222 | self.portfolio.new_fill(event) 223 | 224 | # Finished all jobs in queue. 225 | self.events.task_done() 226 | 227 | def setup_logger(self): 228 | """ 229 | Create and configure logger. 230 | 231 | Args: 232 | None. 
233 | 234 | Returns: 235 | logger: configured logger object. 236 | 237 | Raises: 238 | None. 239 | """ 240 | 241 | logger = logging.getLogger() 242 | logger.setLevel(self.log_level) 243 | ch = logging.StreamHandler() 244 | formatter = logging.Formatter( 245 | "%(asctime)s:%(levelname)s:%(module)s - %(message)s", 246 | datefmt="%d-%m-%Y %H:%M:%S") 247 | ch.setFormatter(formatter) 248 | logger.addHandler(ch) 249 | 250 | return logger 251 | 252 | def exchange_wrappers(self, logger, op_venues): 253 | """ 254 | Create and return a list of exchange wrappers. 255 | 256 | Args: 257 | op_venues: list of exchange/venue names to int. 258 | 259 | Returns: 260 | exchanges: list of exchange connector objects. 261 | 262 | Raises: 263 | None. 264 | """ 265 | 266 | # TODO: load exchange wrappers from 'op_venues' list param 267 | 268 | venues = [Bitmex(logger)] 269 | self.logger.info("Initialised exchange connectors.") 270 | 271 | return venues 272 | 273 | def seconds_til_next_minute(self: int): 274 | """ 275 | Args: 276 | None. 277 | 278 | Returns: 279 | Number of second to next minute (int). 280 | 281 | Raises: 282 | None. 283 | """ 284 | 285 | now = datetime.datetime.utcnow().second 286 | delay = 60 - now 287 | return delay 288 | 289 | def check_db_status(self, op_venues): 290 | """ 291 | Check DB connection, set up collections and indexing. 292 | 293 | Args: 294 | op_venues: list of operating venue names. 295 | 296 | Returns: 297 | None. 298 | 299 | Raises: 300 | Database connection failure error. 
301 | """ 302 | 303 | try: 304 | 305 | # If no exception, DBs exist 306 | time.sleep(self.DB_TIMEOUT_MS) 307 | self.db_client.server_info() 308 | self.logger.info("Connected to DB client at " + self.DB_URL + ".") 309 | 310 | price_colls = self.db_prices.list_collection_names() 311 | other_colls = self.db_other.list_collection_names() 312 | 313 | # Check price DB collections and indexing 314 | for venue_name in op_venues: 315 | if venue_name not in price_colls: 316 | 317 | self.logger.info("Creating indexing for " + venue_name + 318 | " in " + self.DB_PRICES + ".") 319 | 320 | self.db_prices[venue_name].create_index( 321 | [('timestamp', 1), ('symbol', 1)], 322 | name='timestamp_1_symbol_1', 323 | **{'unique': True, 'background': False}) 324 | 325 | # Check other DB collections and indexing 326 | for coll_name in self.DB_OTHER_COLLS: 327 | if coll_name not in other_colls: 328 | 329 | self.logger.info("Creating indexing for " + coll_name + 330 | " in " + self.DB_OTHER + ".") 331 | 332 | # No indexing required for other DB categories (yet) 333 | # Add here if required later 334 | 335 | except errors.ServerSelectionTimeoutError as e: 336 | self.logger.info("Failed to connect to " + self.DB_PRICES + 337 | " at " + self.DB_URL + ".") 338 | raise Exception() 339 | 340 | def db_indices(self): 341 | """ 342 | Return index information as a list of dicts. 
343 | 344 | """ 345 | 346 | indices = [] 347 | for venue_name in self.VENUES: 348 | for name, index_info in self.db_prices[venue_name].index_information().items(): 349 | keys = index_info['key'] 350 | del(index_info['ns']) 351 | del(index_info['v']) 352 | del(index_info['key']) 353 | indices.append({'db': self.DB_PRICES, 'collection': venue_name, 354 | 'keys': keys, 'info': index_info}) 355 | 356 | for coll_name in self.DB_OTHER_COLLS: 357 | for name, index_info in self.db_prices[coll_name].index_information().items(): 358 | keys = index_info['key'] 359 | del(index_info['ns']) 360 | del(index_info['v']) 361 | del(index_info['key']) 362 | indices.append({'db': self.DB_OTHER, 'collection': coll_name, 363 | 'keys': keys, 'info': index_info}) 364 | 365 | return indices 366 | -------------------------------------------------------------------------------- /server_test.py: -------------------------------------------------------------------------------- 1 | from urllib3.exceptions import NewConnectionError, MaxRetryError, TimeoutError 2 | from requests.exceptions import ConnectionError 3 | from time import sleep 4 | from os import system 5 | import subprocess 6 | import platform 7 | import socket 8 | 9 | from server import Server 10 | 11 | RETRY_TIME = 60 12 | 13 | host_os = platform.system() 14 | server = Server() 15 | 16 | try: 17 | server.run() 18 | 19 | except (ConnectionError, NewConnectionError, MaxRetryError, TimeoutError): 20 | 21 | # kill all python proccesses, wait and restart 22 | if host_os == "Windows": 23 | system('cmd /k "taskkill /IM python.exe /F"') 24 | print("Server restart in 1 minute.") 25 | sleep(RETRY_TIME) 26 | system('cmd /k "python server_test.py"') 27 | 28 | elif host_os == "Linux": 29 | subprocess.check_output(["pkill" "-9" "python"]) 30 | print("Server restart in 1 minute.") 31 | sleep(RETRY_TIME) 32 | subprocess.check_output(["python", "server_test.py"]) 33 | 34 | else: 35 | print("Unknown kernel. 
Terminating.") 36 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-brez/trading-server/d3b0405c62cfa28b48cca0f3e71b66fba1db4533/setup.py -------------------------------------------------------------------------------- /strategy.py: -------------------------------------------------------------------------------- 1 | """ 2 | trading-server is a multi-asset, multi-strategy, event-driven execution 3 | and backtesting platform (OEMS) for trading common markets. 4 | 5 | Copyright (C) 2020 Sam Breznikar 6 | 7 | Licensed under GNU General Public License 3.0 or later. 8 | 9 | Some rights reserved. See LICENSE.md, AUTHORS.md. 10 | """ 11 | 12 | from datetime import date, datetime, timedelta 13 | from model import EMACrossTestingOnly 14 | from pymongo import MongoClient, errors 15 | from features import Features 16 | from dateutil import parser 17 | import pandas as pd 18 | import calendar 19 | import pymongo 20 | import queue 21 | import time 22 | import copy 23 | 24 | 25 | class Strategy: 26 | """ 27 | Ccontrol layer for all individual strategy models. Consumes 28 | market events from the event queue, updates strategy models with new data 29 | and generating Signal events. 30 | 31 | Signal events are pushed to the main event-handling queue, and also put 32 | into a save-later queue for db storage, after time-intensive work is done. 33 | """ 34 | 35 | # For reampling with pandas. 
36 | ALL_TIMEFRAMES = [ 37 | "1Min", "3Min", "5Min", "15Min", "30Min", "1H", "2H", "3H", "4H", 38 | "6H", "8H", "12H", "16H", "1D", "2D", "3D", "4D", "7D", "14D", "28D"] 39 | 40 | PREVIEW_TIMEFRAMES = ["1H", "1D"] 41 | 42 | RESAMPLE_KEY = { 43 | 'open': 'first', 'high': 'max', 'low': 'min', 44 | 'close': 'last', 'volume': 'sum'} 45 | 46 | MINUTE_TIMEFRAMES = [1, 3, 5, 15, 30] 47 | HOUR_TIMEFRAMES = [1, 2, 3, 4, 6, 8, 12, 16] 48 | DAY_TIMEFRAMES = [1, 2, 3, 4, 7, 14, 28] 49 | 50 | TF_MINS = { 51 | "1Min": 1, "3Min": 3, "5Min": 5, "15Min": 15, "30Min": 30, "1H": 60, 52 | "2H": 120, "3H": 180, "4H": 240, "6H": 360, "8H": 480, "12H": 720, 53 | "16H": 960, "1D": 1440, "2D": 2880, "3D": 4320, "4D": 5760, 54 | "7D": 10080, "14D": 20160, "28D": 40320} 55 | 56 | # Extra bars to include in resample requests to account for indicator lag. 57 | LOOKBACK_PAD = 50 58 | 59 | # Maximum lookback in use by any strategy. 60 | MAX_LOOKBACK = 150 61 | 62 | def __init__(self, exchanges, logger, db_prices, db_other, db_client): 63 | self.exchanges = exchanges 64 | self.logger = logger 65 | self.db_prices = db_prices 66 | self.db_other = db_other 67 | self.db_client = db_client 68 | self.db_collections_price = { 69 | i.get_name(): db_prices[i.get_name()] for i in self.exchanges} 70 | 71 | # Save-later queue. 72 | self.signals_save_to_db = queue.Queue(0) 73 | 74 | # DataFrame container: data[exchange][symbol][timeframe]. 75 | self.data = {} 76 | self.init_dataframes(empty=True) 77 | 78 | # Strategy models. 79 | self.models = self.load_models(self.logger) 80 | 81 | # Signal container: signals[exchange][symbol][timeframe]. 82 | self.signals = {} 83 | 84 | # persistent reference to features library. 85 | self.feature_ref = Features() 86 | 87 | def new_data(self, events, event, count): 88 | """ 89 | Process incoming market data and update all models with new data. 90 | 91 | Args: 92 | events: event queue object. 93 | event: new market event. 94 | 95 | Returns: 96 | None. 
97 | 98 | Raises: 99 | None. 100 | """ 101 | 102 | # Wait for 1 mins of operation to clear up any null bars. 103 | if count >= 1: 104 | 105 | # Get operating timeframes for the current period. 106 | 107 | timestamp = event.get_bar()['timestamp'] 108 | timeframes = self.get_relevant_timeframes(timestamp) 109 | 110 | self.logger.info("Event timestamp just in: " + str( 111 | datetime.utcfromtimestamp(timestamp))) 112 | 113 | # Store trigger timeframes (operating timeframes). 114 | op_timeframes = copy.deepcopy(timeframes) 115 | 116 | # Get additional timeframes required by models. 117 | for model in self.models: 118 | model.get_required_timeframes(timeframes) 119 | 120 | # Update datasets for all required timeframes. 121 | self.update_dataframes(event, timeframes, op_timeframes) 122 | 123 | # Calculate new feature values. 124 | self.calculate_features(event, timeframes) 125 | 126 | # Run models with new data. 127 | self.run_models(event, op_timeframes, events) 128 | 129 | def update_dataframes(self, event, timeframes, op_timeframes): 130 | """ 131 | Update dataframes for the given event and list of timeframes. 132 | 133 | Args: 134 | event: new market event. 135 | timeframes: list of relevant timeframes to the just-elapsed period. 136 | 137 | Returns: 138 | None. 139 | 140 | Raises: 141 | None. 142 | """ 143 | 144 | sym = event.get_bar()['symbol'] 145 | bar = self.remove_element(event.get_bar(), "symbol") 146 | exc = event.get_exchange() 147 | venue = exc.get_name() 148 | 149 | timestamp = datetime.utcfromtimestamp(bar['timestamp']) 150 | 151 | # Update each relevant dataframe. 152 | for tf in timeframes: 153 | 154 | size = len(self.data[venue][sym][tf].index) 155 | 156 | # If dataframe already populated, append the new bar. Only update 157 | # op_timeframes if appending, as required tf data will be mid-bar. 
158 | if size > 0 and tf in op_timeframes: 159 | 160 | new_row = self.single_bar_resample( 161 | venue, sym, tf, bar, timestamp) 162 | 163 | # If timestamps out of order, rebuild the dataset. 164 | 165 | # if existing row timestamp not tf period from current, rebuild 166 | 167 | # Append. 168 | self.data[venue][sym][tf] = self.data[venue][sym][tf].append( 169 | new_row) 170 | 171 | # If dataframe is empty, populate a new one. 172 | elif size == 0: 173 | self.data[venue][sym][tf] = self.build_dataframe( 174 | venue, sym, tf, bar) 175 | 176 | # Final pad in case of null bars. 177 | self.data[venue][sym][tf].fillna(method="pad", inplace=True) 178 | 179 | # TODO: df.append() is slow and copies the whole dataframe. Later 180 | # need to swap to a data structure other than a dataframe for live 181 | # data addition. Like an in-memory csv/DB, or list of dicts, etc. 182 | 183 | # Log model and timeframe details. 184 | for model in self.models: 185 | 186 | venue = exc.get_name() 187 | inst = model.get_instruments()[venue][sym] 188 | 189 | if inst == sym: 190 | self.logger.info( 191 | model.get_name() + ": " + venue + ": " + inst) 192 | self.logger.info( 193 | "Operating timeframes: " + str(op_timeframes)) 194 | self.logger.info( 195 | "Required timeframes: " + str(timeframes)) 196 | 197 | def calculate_features(self, event, timeframes): 198 | """ 199 | Calculate features required for each model, append the values to each 200 | timeframe dataset. 201 | 202 | Args: 203 | None. 204 | Returns: 205 | None. 206 | Raises: 207 | None. 208 | """ 209 | sym = event.get_bar()['symbol'] 210 | exc = event.get_exchange() 211 | 212 | # Calculate feature data for each model/feature/timeframe. 213 | for model in self.models: 214 | 215 | lb = model.get_lookback() 216 | venue = exc.get_name() 217 | inst = model.get_instruments()[venue][sym] 218 | 219 | # Check if model is applicable to the event. 
220 | if inst == sym: 221 | for tf in timeframes: 222 | 223 | features = model.get_features() 224 | data = self.data[venue][sym][tf] 225 | 226 | # Calculate feature data. 227 | for feature in features: 228 | 229 | # f[0] is feature type 230 | # f[1] is feature function 231 | # f[2] is feature param 232 | f = feature[1]( 233 | self.feature_ref, 234 | feature[2], 235 | data) 236 | 237 | # Handle indicator and time-series feature data. 238 | if (f[0] == "indicator" or 239 | (type(f) == pd.core.series.Series) or 240 | (type(f) == pd.Series)): 241 | 242 | # Use feature param as dataframe col name. 243 | ID = "" if feature[2] is None else str(feature[2]) 244 | 245 | # Round and append to dataframe. 246 | self.data[venue][sym][tf][ 247 | feature[1].__name__ + 248 | ID] = f.round(6) 249 | 250 | # Handle boolean feature data. 251 | elif f[0] == "boolean": 252 | pass 253 | 254 | # TODO 255 | 256 | def run_models(self, event, op_timeframes: list, events): 257 | """ 258 | Run models for the just-elpased period. 259 | 260 | Args: 261 | event: new market event. 262 | op_timeframes: relevant timeframes to the just-elapsed period. 263 | 264 | Returns: 265 | None. 266 | 267 | Raises: 268 | None. 269 | 270 | """ 271 | sym = event.get_bar()['symbol'] 272 | exc = event.get_exchange() 273 | 274 | for model in self.models: 275 | 276 | venue = exc.get_name() 277 | inst = model.get_instruments()[venue][sym] 278 | 279 | if inst == sym: 280 | for tf in op_timeframes: 281 | if tf in model.get_operating_timeframes(): 282 | 283 | # Get non-op, but still required timeframe codes. 284 | req_tf = model.get_required_timeframes( 285 | [tf], result=True) 286 | 287 | # Get non-trigger data as list of {tf : dataframe}. 288 | req_data = [ 289 | {i: self.data[venue][sym][i]} for i in req_tf] 290 | 291 | # Run model. 292 | result = model.run(self.data[venue][sym], req_data, tf, 293 | sym, exc) 294 | 295 | # Put generated signal in the main event queue. 
296 | if result: 297 | events.put(result) 298 | 299 | # Put signal in separate save-later queue. 300 | self.signals_save_to_db.put(result) 301 | 302 | def build_dataframe(self, exc, sym, tf, current_bar=None, lookback=150): 303 | """ 304 | Return a dataframe of size lookback for the given symbol, 305 | exchange and timeframe. If "curent_bar" param is passed in, 306 | construct the dataframe using current_bar as first row of dataframe. 307 | 308 | E.g 1 (no current_bar) for a dataframe with tf = 4h, lookback = 50, we 309 | need to fetch and resample 4*60*50 1 min bars (12000 bars) into 50 4h 310 | bars. 311 | 312 | E.g 2 (with current_bar) for dataframe with tf = 4h, lookback = 50, we 313 | need to fetch and resample 4*60*50 - 1 1 min bars (11999 bars) into 50 314 | 4h bars, using current_bar as the first bar (total 12000 bars). 315 | 316 | Args: 317 | exc: exchange name (string). 318 | symb: instrument ticker code (string) 319 | tf: timeframe code (string). 320 | current_bar: bar to insert first, if using 321 | lookback: number of final bars required for the model to use. 322 | 323 | Returns: 324 | dataframe: dataframe containing resampled price data. 325 | 326 | Raises: 327 | Resampling error. 328 | """ 329 | 330 | # Find the total number of 1min bars needed using TFM dict. 331 | if lookback > 1: 332 | # Increase the size of lookback by 50 to account for feature lag. 333 | size = int(self.TF_MINS[tf] * (lookback + self.LOOKBACK_PAD)) 334 | else: 335 | # Dont adjust lookback for single bar requests. 336 | size = self.TF_MINS[tf] * (lookback) 337 | 338 | # Create Dataframe using current_bar and stored bars. 339 | if current_bar: 340 | 341 | # Reduce size to account for current_bar. 342 | size = size - 1 343 | 344 | # Use a projection to remove mongo "_id" field and symbol. 
345 | result = self.db_collections_price[exc].find( 346 | {"symbol": sym}, { 347 | "_id": 0, "symbol": 0}).limit( 348 | size).sort([("timestamp", -1)]) 349 | 350 | # Add current_bar and DB results to a list. 351 | rows = [current_bar] 352 | for doc in result: 353 | rows.append(doc) 354 | 355 | # Create Dataframe using only stored bars 356 | if not current_bar: 357 | 358 | # Use a projection to remove mongo "_id" field and symbol. 359 | rows = self.db_collections_price[exc].find( 360 | {"symbol": sym}, { 361 | "_id": 0, "symbol": 0}).limit( 362 | size).sort([("timestamp", -1)]) 363 | 364 | # Pass cursor to DataFrame constructor. 365 | df = pd.DataFrame(rows) 366 | 367 | # Format time column. 368 | df['timestamp'] = df['timestamp'].apply( 369 | lambda x: datetime.utcfromtimestamp(x)) 370 | 371 | # Set index. 372 | df.set_index("timestamp", inplace=True) 373 | 374 | # Pad any null bars forward. 375 | df.fillna(method="pad", inplace=True) 376 | 377 | # Downsample 1 min data to target timeframe 378 | resampled_df = pd.DataFrame() 379 | try: 380 | resampled_df = (df.resample(tf).agg(self.RESAMPLE_KEY)) 381 | except Exception as exc: 382 | print("Resampling error", exc) 383 | 384 | return resampled_df.sort_values(by="timestamp", ascending=True) 385 | 386 | def single_bar_resample(self, venue, sym, tf, bar, timestamp): 387 | """ 388 | Return a pd.Series containing a single bar of timeframe "tf" for 389 | the given venue and symbol. 390 | 391 | Args: 392 | venue: exchange name (string). 393 | sym: instrument ticker code (string) 394 | tf: timeframe code (string). 395 | bar: newest 1-min bar. 396 | 397 | Returns: new_row: pd.Series containing a single bar of timeframe "tf" 398 | for the given venue and symbol. 399 | 400 | Raises: 401 | Resampling error. 402 | """ 403 | 404 | if tf == "1Min": 405 | # Don't need to do any resampling for 1 min bars. 406 | rows = [bar] 407 | 408 | else: 409 | # Determine how many bars to fetch for resampling. 
410 | size = self.TF_MINS[tf] - 1 411 | 412 | # Use a projection to remove mongo "_id" field and symbol. 413 | result = self.db_collections_price[venue].find( 414 | {"symbol": sym}, { 415 | "_id": 0, "symbol": 0}).limit( 416 | size).sort([("timestamp", -1)]) 417 | 418 | # Add current_bar and DB results to a list. 419 | rows = [bar] 420 | for doc in result: 421 | rows.append(doc) 422 | 423 | # Pass cursor to DataFrame constructor. 424 | df = pd.DataFrame(rows) 425 | 426 | # Format time column. 427 | df['timestamp'] = df['timestamp'].apply( 428 | lambda x: datetime.utcfromtimestamp(x)) 429 | 430 | # Set index. 431 | df.set_index("timestamp", inplace=True) 432 | 433 | # Pad any null bars forward. 434 | df.fillna(method="pad", inplace=True) 435 | 436 | # Downsample 1 min data to target timeframe. 437 | resampled = pd.DataFrame() 438 | try: 439 | resampled = (df.resample(tf).agg(self.RESAMPLE_KEY)) 440 | except Exception as exc: 441 | print("Resampling error", exc) 442 | 443 | # Must be ascending=True to grab the first value with iloc[]. 444 | resampled.sort_values(by="timestamp", ascending=False, inplace=True) 445 | 446 | new_row = resampled.iloc[0] 447 | 448 | return new_row 449 | 450 | def remove_element(self, dictionary, element): 451 | """ 452 | Return a shallow copy of dictionary less the given element. 453 | 454 | Args: 455 | dictionary: dictionary to be copied. 456 | element: element to be removed. 457 | 458 | Returns: 459 | new_dict: copy of dictionary less element. 460 | 461 | Raises: 462 | 463 | """ 464 | 465 | new_dict = dict(dictionary) 466 | del new_dict[element] 467 | 468 | return new_dict 469 | 470 | def load_models(self, logger): 471 | """ 472 | Create and return a list of trade strategy models. 473 | 474 | Args: 475 | logger: logger object. 476 | 477 | Returns: 478 | models: list of models. 479 | 480 | Raises: 481 | None. 
482 | """ 483 | 484 | models = [] 485 | models.append(EMACrossTestingOnly(logger)) 486 | self.logger.info("Initialised models.") 487 | return models 488 | 489 | def init_dataframes(self, empty=False): 490 | """ 491 | Create working datasets (self.data dict). 492 | 493 | Args: 494 | None. 495 | 496 | Returns: 497 | empty: boolean flag. If True, will return empty dataframes. 498 | 499 | Raises: 500 | None. 501 | """ 502 | 503 | start = time.time() 504 | 505 | self.data = { 506 | i.get_name(): self.load_local_data( 507 | i, empty) for i in self.exchanges} 508 | 509 | end = time.time() 510 | duration = round(end - start, 5) 511 | 512 | symbolcount = 0 513 | for i in self.exchanges: 514 | symbolcount += len(i.get_symbols()) 515 | 516 | # Only log output if data is loaded. 517 | if not empty: 518 | self.logger.info( 519 | "Initialised " + str(symbolcount * len(self.ALL_TIMEFRAMES)) + 520 | " timeframe datasets in " + str(duration) + " seconds.") 521 | 522 | def load_local_data(self, exchange, empty=False): 523 | 524 | """ 525 | Create and return a dictionary of dataframes for all symbols and 526 | timeframes for the given venue. 527 | 528 | Args: 529 | exchange: exchange object. 530 | empty: boolean flag. If True, will return empty dataframes. 531 | 532 | Returns: 533 | dicts: tree containing a dataframe for all symbols and 534 | timeframes for the given exchange. If "empty" is true, 535 | dont load any data. 536 | 537 | Raises: 538 | None. 539 | """ 540 | 541 | dicts = {} 542 | for symbol in exchange.get_symbols(): 543 | 544 | # Return empty dataframes. 545 | if empty: 546 | dicts[symbol] = { 547 | tf: pd.DataFrame() for tf in self.ALL_TIMEFRAMES} 548 | 549 | # Return dataframes with data. 550 | elif not empty: 551 | dicts[symbol] = { 552 | tf: self.build_dataframe( 553 | exchange, symbol, tf) for tf in self.ALL_TIMEFRAMES} 554 | 555 | return dicts 556 | 557 | def trim_datasets(self): 558 | """ 559 | Reduce size of datasets if length > MAX_LOOKBACK + LOOKBACK_PAD. 
560 | Args: 561 | None. 562 | 563 | Returns: 564 | None. 565 | 566 | Raises: 567 | None. 568 | """ 569 | 570 | for exc in self.exchanges: 571 | 572 | venue = exc.get_name() 573 | 574 | for sym in exc.get_symbols(): 575 | for tf in self.ALL_TIMEFRAMES: 576 | 577 | size = len(self.data[venue][sym][tf].index) 578 | 579 | if size > self.MAX_LOOKBACK + self.LOOKBACK_PAD: 580 | diff = size - (self.MAX_LOOKBACK + self.LOOKBACK_PAD) 581 | 582 | # Get list of indicies to drop. 583 | to_drop = [i for i in range(diff)] 584 | 585 | # Drop rows by index in-place. 586 | self.data[venue][sym][tf].drop( 587 | self.data[venue][sym][tf].index[to_drop], 588 | inplace=True) 589 | 590 | # print("Timeframe:", tf, " \n", self.data[e][s][tf]) 591 | 592 | def get_relevant_timeframes(self, time): 593 | """ 594 | Return a list of timeframes relevant to the just-elapsed period. 595 | E.g if time has just struck UTC 10:30am the list will contain "1min", 596 | "3Min", "5Min", "15Min" and "30Min" strings. The first minute of a new 597 | day or week will add daily/weekly/monthly timeframe strings. 598 | 599 | Args: 600 | time: datetime object 601 | 602 | Returns: 603 | timeframes: list containing relevant timeframe string codes. 604 | 605 | Raises: 606 | None. 607 | 608 | """ 609 | 610 | # Check against the previous minute - the just-elapsed period. 
611 | if type(time) is not datetime: 612 | time = datetime.utcfromtimestamp(time) 613 | 614 | timestamp = time - timedelta(hours=0, minutes=1) 615 | timeframes = [] 616 | 617 | self.logger.info("Timestamp just elapsed: " + str(timestamp)) 618 | 619 | for i in self.MINUTE_TIMEFRAMES: 620 | self.minute_timeframe(i, timestamp, timeframes) 621 | for i in self.HOUR_TIMEFRAMES: 622 | self.hour_timeframe(i, timestamp, timeframes) 623 | for i in self.DAY_TIMEFRAMES: 624 | self.day_timeframe(i, timestamp, timeframes) 625 | 626 | # if (timestamp.minute == 0 and timestamp.hour == 0 and 627 | # calendar.day_name[date.today().weekday()] == "Monday"): 628 | # timeframes.append("7D") 629 | 630 | return timeframes 631 | 632 | def minute_timeframe(self, minutes, timestamp, timeframes): 633 | """ 634 | Adds minute timeframe codes to timeframes list if the relevant 635 | period has just elapsed. 636 | """ 637 | 638 | for i in range(0, 60, minutes): 639 | if timestamp.minute == i: 640 | timeframes.append(str(minutes) + "Min") 641 | 642 | def hour_timeframe(self, hours, timestamp, timeframes): 643 | """ 644 | Adds hourly timeframe codes to timeframes list if the relevant 645 | period has just elapsed. 646 | """ 647 | 648 | if timestamp.minute == 0 and timestamp.hour % hours == 0: 649 | timeframes.append(str(hours) + "H") 650 | 651 | def day_timeframe(self, days, timestamp, timeframes): 652 | """ 653 | Adds daily timeframe codes to timeframes list if the relevant 654 | period has just elapsed. 655 | """ 656 | 657 | if (timestamp.minute == 0 and timestamp.hour == 0 and 658 | timestamp.day % days == 0): 659 | timeframes.append(str(days) + "D") 660 | 661 | def save_new_signals_to_db(self): 662 | """ 663 | Save signals in save-later queue to database. 664 | 665 | Args: 666 | None. 667 | Returns: 668 | None. 669 | Raises: 670 | pymongo.errors.DuplicateKeyError. 
671 | """ 672 | 673 | count = 0 674 | while True: 675 | 676 | try: 677 | signal = self.signals_save_to_db.get(False) 678 | 679 | except queue.Empty: 680 | if count: 681 | self.logger.info( 682 | "Wrote " + str(count) + " new signals to database " + 683 | str(self.db_other.name) + ".") 684 | break 685 | 686 | else: 687 | if signal is not None: 688 | count += 1 689 | # Store signal in relevant db collection. 690 | try: 691 | self.db_other['signals'].insert_one( 692 | self.remove_element(signal.get_signal_dict(), "op_data")) 693 | 694 | # Skip duplicates if they exist. 695 | except pymongo.errors.DuplicateKeyError: 696 | continue 697 | 698 | self.signals_save_to_db.task_done() 699 | -------------------------------------------------------------------------------- /trade_types.py: -------------------------------------------------------------------------------- 1 | """ 2 | trading-server is a multi-asset, multi-strategy, event-driven trade execution 3 | and backtesting platform (OEMS) for trading common markets. 4 | 5 | Copyright (C) 2020 Sam Breznikar 6 | 7 | Licensed under GNU General Public License 3.0 or later. 8 | 9 | Some rights reserved. See LICENSE.md, AUTHORS.md. 10 | """ 11 | 12 | from abc import ABC, abstractmethod 13 | 14 | 15 | class Trade(ABC): 16 | """ 17 | Trade parent class, different types of trade subclasses must inherit this. 18 | 19 | Trade subclasses are used to generalise a collective set of orders and 20 | positions that make up a trades management from start to finish. 21 | 22 | Child trade classes may be composed of positons and orders across one or 23 | multiple instruments and venues. 24 | """ 25 | 26 | def __init__(self): 27 | self.trade_id = None # Must be set before saving to DB. 28 | self.order_count = 0 # Number of component orders. 29 | self.signal_timestamp = None # Epoch timestamp of parent signal. 30 | self.active = False # True/False. 31 | self.venue_count = 0 # Number of venues in use. 
32 | self.instrument_count = 0 # Number of instruments in use. 33 | self.model = None # Name of model that triggered trade. 34 | self.u_pnl = 0 # Total unrealised pnl. 35 | self.r_pnl = 0 # Total realised pnl. 36 | self.fees = 0 # Total fees/commisions paid. 37 | self.exposure = None # Percentage of possible loss remaining at risk. 38 | self.consent = None # If or not user consents to trade. 39 | self.systematic_close = None # If or not trade was closed properly. 40 | 41 | @abstractmethod 42 | def get_trade_dict(self): 43 | """ 44 | Return all trade variables as a dict for DB storage. 45 | """ 46 | 47 | def set_batch_size_and_id(self, trade_id): 48 | """ 49 | Must be called after trade object has been prepared. 50 | Sets the trade ID and order count, and assigns unique ID's to orders. 51 | """ 52 | 53 | self.order_count = len(self.orders) 54 | self.trade_id = trade_id 55 | 56 | 57 | class SingleInstrumentTrade(Trade): 58 | """ 59 | Models the state of a single-instrument, single venue trade. 60 | 61 | Used when trading a single instrument directionally, with take profit 62 | and stop loss orders. 63 | """ 64 | 65 | def __init__(self, logger, direction, venue, symbol, model, s_ts=None, 66 | timeframe=None, entry_price=None, position=None, orders=None): 67 | super().__init__() 68 | self.logger = logger 69 | self.type = "SINGLE_INSTRUMENT" 70 | self.venue_count = 1 71 | self.instrument_count = 1 72 | self.direction = direction # LONG or SHORT. 73 | self.signal_timestamp = s_ts # Epoch timestamp of parent signal. 74 | self.timeframe = timeframe # Trade timeframe. 75 | self.entry_price = entry_price # Trade entry price. 76 | self.exit_price = None # Trade exit price. 77 | self.venue = venue # Exchange or broker traded with. 78 | self.symbol = symbol # Instrument ticker code. 79 | self.model = model # Name of triggerstrategy. 80 | self.position = position # Position object, if positioned. 81 | self.orders = orders # Dict of component orders. 
82 | 83 | def get_trade_dict(self): 84 | return { 85 | 'trade_id': self.trade_id, 86 | 'signal_timestamp': self.signal_timestamp, 87 | 'type': self.type, 88 | 'active': self.active, 89 | 'venue_count': self.venue_count, 90 | 'instrument_count': self.instrument_count, 91 | 'model': self.model, 92 | 'direction': self.direction, 93 | 'timeframe': self.timeframe, 94 | 'entry_price': self.entry_price, 95 | 'exit_price': self.exit_price, 96 | 'systematic_close': self.systematic_close, 97 | 'u_pnl': self.u_pnl, 98 | 'r_pnl': self.r_pnl, 99 | 'fees': self.fees, 100 | 'exposure': self.exposure, 101 | 'venue': self.venue, 102 | 'symbol': self.symbol, 103 | 'position': self.position, 104 | 'consent': self.consent, 105 | 'order_count': self.order_count, 106 | 'orders': self.orders} 107 | 108 | 109 | class Position: 110 | """ 111 | Models a single active position, as part of a parent trade. 112 | """ 113 | 114 | def __init__(self, fill_conf): 115 | self.fill_conf = fill_conf 116 | 117 | # TODO 118 | self.fees = None 119 | 120 | def __str__(self): 121 | return str(" ") 122 | 123 | def get_fill_conf(self): 124 | return self.fill_conf 125 | 126 | def get_pos_dict(self): 127 | return { 128 | 'trade_id': self.fill_conf['trade_id'], 129 | 'size': self.fill_conf['size'], 130 | 'avg_entry_price': self.fill_conf['avg_fill_price'], 131 | 'symbol': self.fill_conf['symbol'], 132 | 'direction': self.fill_conf['direction'], 133 | 'currency': self.fill_conf['currency'], 134 | 'opening_timestamp': self.fill_conf['timestamp'], 135 | 'opening_size': self.fill_conf['size'], 136 | 'status': "OPEN"} 137 | 138 | 139 | class Order: 140 | """ 141 | Models a single order, as part of parent trade. 142 | """ 143 | 144 | def __init__(self, logger, trade_id, order_id, symbol, venue, 145 | direction, size, price, order_type, metatype, void_price, 146 | trail, reduce_only, post_only, status="UNFILLED"): 147 | self.logger = logger 148 | self.trade_id = trade_id # Parent trade ID. 
149 | self.order_id = None # Internal use order ID. 150 | self.timestamp = None # Order placement timestamp. 151 | self.avg_fill_price = None # Actual fill price 152 | self.currency = None # Instrument denomination currency. 153 | self.venue_id = None # Order ID as used by venue. 154 | self.symbol = symbol # Instrument ticker code. 155 | self.venue = venue # Venue or exchange traded at. 156 | self.direction = direction.upper() # LONG, SHORT. 157 | self.size = size # Size in local asset/contract. 158 | self.price = price # Order price. 159 | self.order_type = order_type.upper() # LIMIT MARKET STOP_LIMIT STOP. 160 | self.metatype = metatype.upper() # ENTRY, STOP, TAKE_PROFIT, FINAL_TAKE_PROFIT. 161 | self.void_price = void_price # Order invalidation price. 162 | self.trail = trail # True or False, only for stops. 163 | self.reduce_only = reduce_only # True or False. 164 | self.post_only = post_only # True of False. 165 | self.batch_size = 0 # Batch size for all related orders. 166 | self.status = status # FILLED, NEW, PARTIAL. 167 | 168 | def get_order_dict(self): 169 | """ 170 | Return all order variables as a dict for DB storage. 171 | """ 172 | return { 173 | 'trade_id': self.trade_id, 174 | 'order_id': self.order_id, 175 | 'timestamp': self.timestamp, 176 | 'avg_fill_price': self.avg_fill_price, 177 | 'currency': self.currency, 178 | 'venue_id': self.venue_id, 179 | 'venue': self.venue, 180 | 'symbol': self.symbol, 181 | 'direction': self.direction, 182 | 'size': self.size, 183 | 'price': self.price, 184 | 'order_type': self.order_type, 185 | 'metatype': self.metatype, 186 | 'void_price': self.void_price, 187 | 'trail': self.trail, 188 | 'reduce_only': self.reduce_only, 189 | 'post_only': self.post_only, 190 | 'batch_size': self.batch_size, 191 | 'status': self.status} 192 | 193 | 194 | class TradeID(): 195 | """ 196 | Utility class for generating sequential trade ID's from database. 
197 | """ 198 | 199 | def __init__(self, db): 200 | self.db = db 201 | 202 | def new_id(self): 203 | result = list(self.db['trades'].find({}).sort([("trade_id", -1)])) 204 | return (int(result[0]['trade_id']) + 1) if result else 1 205 | --------------------------------------------------------------------------------