├── .gitignore ├── LICENSE ├── README.md ├── TradingStrategyTemplate.py ├── _config.yml ├── auquanToolbox ├── __init__.py ├── dataloader.py ├── metrics.py ├── resultviewer.py ├── toolbox.py └── version.py └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | runLogs 3 | nasdaq 4 | nyse 5 | historicalData 6 | .pypirc 7 | auquanToolbox.egg-info 8 | build 9 | dist 10 | test*.py 11 | 12 | # Windows image file caches 13 | Thumbs.db 14 | ehthumbs.db 15 | 16 | # Folder config file 17 | Desktop.ini 18 | 19 | # Recycle Bin used on file shares 20 | $RECYCLE.BIN/ 21 | 22 | # Windows Installer files 23 | *.cab 24 | *.msi 25 | *.msm 26 | *.msp 27 | 28 | # Windows shortcuts 29 | *.lnk 30 | 31 | # ========================= 32 | # Operating System Files 33 | # ========================= 34 | 35 | # OSX 36 | # ========================= 37 | 38 | .DS_Store 39 | .AppleDouble 40 | .LSOverride 41 | 42 | # Thumbnails 43 | ._* 44 | 45 | # Files that might appear in the root of a volume 46 | .DocumentRevisions-V100 47 | .fseventsd 48 | .Spotlight-V100 49 | .TemporaryItems 50 | .Trashes 51 | .VolumeIcon.icns 52 | 53 | # Directories potentially created on remote AFP share 54 | .AppleDB 55 | .AppleDesktop 56 | Network Trash Folder 57 | Temporary Items 58 | .apdisk 59 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016 Auquan 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software 4 | and associated documentation files (the "Software"), to deal in the Software without restriction, 5 | including without limitation the rights to use, copy, modify, merge, publish, distribute, 6 | sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is 7 | furnished to do so, subject to the following conditions: 8 | 9 | The 
above copyright notice and this permission notice shall be included in all copies or substantial 10 | portions of the Software. 11 | 12 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT 13 | NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 14 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, 15 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT 16 | OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Note: Deprecation notice 2 | **This is the old Auquan Toolbox, and is no longer maintained. For all purposes find [the new repo for the toolbox](https://links.auquan.com/auquan-toolbox). All the issues and Pull requests should be created in the new repo** 3 | -------------------------------------------------------------------------------- /TradingStrategyTemplate.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function, unicode_literals 2 | import pandas as pd 3 | import numpy as np 4 | import auquanToolbox as at 5 | 6 | 7 | def settings(): 8 | exchange = "nasdaq" # Exchange to download data for (only nasdaq for now) 9 | markets = ['A', 'AAPL', 'IBM', 'GOOG', 'C'] # Stocks to download data for. 10 | # markets = [] Leave empty array to download all stocks for the exchange (~900 stocks) 11 | # To have a look at all possible stocks go here: 12 | # https://raw.githubusercontent.com/Auquan/auquan-historical-data/master/nasdaq/nasdaq.txt 13 | # Based on data avalaible, some markets might be dropped. 
Only the non dropped markets will appear 14 | # in lookback_data for trading_strategy 15 | 16 | date_start = '2015-01-03' # Date to start the backtest 17 | date_end = '2016-11-06' # Date to end the backtest 18 | lookback = 120 # Number of days you want historical data for 19 | 20 | """ To make a decision for day t, your algorithm will have historical data 21 | from t-lookback to t-1 days""" 22 | return [exchange, markets, date_start, date_end, lookback] 23 | 24 | 25 | def trading_strategy(lookback_data): 26 | """ 27 | :param lookback_data: Historical Data for the past "lookback" number of days as set in the main settings. 28 | It is a dictionary of features such as, 29 | 'OPEN', 'CLOSE', 'HIGH', 'LOW', 'VOLUME', 'SLIPPAGE', 'POSITION', 'ORDER', 30 | 'FILLED_ORDER', 'DAILY_PNL', 'TOTAL_PNL', 'FUNDS', 'VALUE' 31 | Any feature data can be accessed as:lookback_data['OPEN'] 32 | The output is a pandas dataframe with dates as the index (row) 33 | and markets as columns. 34 | """""""""""""""""""""""""" 35 | """""""""""""""""""""""""" 36 | To see a complete list of features, uncomment the line below""" 37 | # print(lookback_data.keys()) 38 | 39 | """"""""""""""""""""""""""" 40 | :return: A pandas dataframe with markets you are trading as index(row) and 41 | signal, price and quantity as columns 42 | order['SIGNAL']:buy (+1), hold (0) or sell (-1) trading signals for all securities in markets[] 43 | order['PRICE']: The price where you want to trade each security. Buy orders are executed at or below the price and sell orders are executed at or above the price 44 | order['WEIGHTS']: The normalized set of weights for the markets 45 | System will buy the specified quantity of stock if it's price <= price specified here 46 | System will sell the specified quantity of a stock if it's price >= price specified here 47 | """ 48 | 49 | """IMPORTANT: Please make sure you have enough funds to buy or sell. 
50 | Order is cancelled if order_value > available funds(both buy and short sell)""" 51 | 52 | order = pd.DataFrame(0, index=lookback_data['POSITION'].columns, columns=['SIGNAL', 'WEIGHTS', 'PRICE']) 53 | 54 | # YOUR CODE HERE 55 | 56 | return order 57 | 58 | 59 | if __name__ == '__main__': 60 | [exchange, markets, date_start, date_end, lookback] = settings() 61 | at.backtest(exchange, markets, trading_strategy, date_start, date_end, lookback) # ,verbose=True) 62 | -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-tactile -------------------------------------------------------------------------------- /auquanToolbox/__init__.py: -------------------------------------------------------------------------------- 1 | try: 2 | from .dataloader import * 3 | from .toolbox import * 4 | from .resultviewer import * 5 | from .metrics import * 6 | except: 7 | raise 8 | -------------------------------------------------------------------------------- /auquanToolbox/dataloader.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function, unicode_literals 2 | try: 3 | from urllib2 import urlopen 4 | except ImportError: 5 | from urllib.request import urlopen 6 | import numpy as np 7 | import pandas as pd 8 | from pandas.tseries.offsets import BDay 9 | import os 10 | 11 | 12 | def download(exchange, ticker, file_name, logger): 13 | url = 'https://raw.githubusercontent.com/Auquan/auquan-historical-data/master/%s/historicalData/%s.csv' % ( 14 | exchange.lower(), ticker.lower()) 15 | response = urlopen(url) 16 | status = response.getcode() 17 | if status == 200: 18 | logger.info('Downloading %s data to file: %s' % (ticker, file_name)) 19 | with open(file_name, 'w') as f: 20 | f.write(response.read()) 21 | return True 22 | else: 23 | 
def download(exchange, ticker, file_name, logger):
    """Download the historical CSV for `ticker` on `exchange` into `file_name`.

    Returns True on success, False when the file is not on the server.
    """
    url = 'https://raw.githubusercontent.com/Auquan/auquan-historical-data/master/%s/historicalData/%s.csv' % (
        exchange.lower(), ticker.lower())
    response = urlopen(url)
    status = response.getcode()
    if status == 200:
        logger.info('Downloading %s data to file: %s' % (ticker, file_name))
        with open(file_name, 'w') as f:
            # response.read() returns bytes on Python 3; decode before writing
            # to a text-mode file (writing raw bytes raised TypeError on Py3).
            f.write(response.read().decode('utf-8'))
        return True
    else:
        logger.info('File not found. Please check settings!')
        return False


def data_available(exchange, markets, logger):
    """Ensure a local CSV exists for every market, downloading any that are missing.

    Raises AssertionError (after logging) when a market cannot be downloaded.
    """
    dir_name = '%s/historicalData/' % exchange.lower()
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    for m in markets:
        file_name = '%s%s.csv' % (dir_name, m.lower())
        if not os.path.exists(file_name):
            try:
                assert(download(exchange, m, file_name, logger)
                       ), "%s not found. Please check settings!" % file_name
            except AssertionError:
                logger.exception(
                    "%s not found. Please check settings!" % file_name)
                raise
    return True


def download_security_list(exchange, logger):
    """Download the exchange's security list (<exchange>/<exchange>.txt) if missing.

    Returns True when the list is available locally, False otherwise.
    """
    dir_name = '%s/' % exchange.lower()
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)

    file_name = '%s%s.txt' % (dir_name, exchange.lower())
    if not os.path.exists(file_name):
        url = 'https://raw.githubusercontent.com/Auquan/auquan-historical-data/master/%s' % (
            file_name)
        response = urlopen(url)
        status = response.getcode()
        if status == 200:
            logger.info('Downloading data to file: %s' % file_name)
            with open(file_name, 'w') as f:
                # Decode: urlopen().read() yields bytes on Python 3.
                f.write(response.read().decode('utf-8'))
            return True
        else:
            logger.info('File not found. Please check exchange settings!')
            return False
    else:
        return True


def compatibleDictKeyCheck(dict, key):
    """Return True if `key` is in `dict`; works on both Python 2 and 3.

    Python 2 dicts expose has_key(); on Python 3 that AttributeError falls
    through to the `in` operator.
    """
    try:
        return dict.has_key(key)
    except:
        return key in dict


def load_data(exchange, markets, start, end, lookback, budget, logger, random=False):
    """Load historical data for `markets` and initialize the backtest frames.

    :param exchange: exchange name, used both for URLs and local directories.
    :param markets: list of tickers; empty list means the whole exchange.
    :param start, end: backtest date strings (parseable by pandas).
    :param lookback: number of extra historical days needed before `start`.
    :param budget: starting capital used to seed the FUNDS/VALUE series.
    :param logger: destination for progress/diagnostic messages.
    :param random: when True, fill features with random integers (no downloads).
    :return: (back_data dict of DataFrames/Series, surviving business-day index)
    :raises ValueError: when start/end do not parse as dates.
    :raises AssertionError: when start >= end or data cannot be fetched.
    """
    logger.info("Loading Data from %s to %s...." % (start, end))

    # Because there are some holidays, add some cushion to lookback.
    # pandas date offsets require an integer n, so floor the cushioned value.
    try:
        dates = [pd.to_datetime(start) - BDay(int(lookback * 1.10) + 10),
                 pd.to_datetime(end)]
    except ValueError:
        logger.exception(
            "%s or %s is not valid date. Please check settings!" % (start, end))
        raise ValueError(
            "%s or %s is not valid date. Please check settings!" % (start, end))

    try:
        assert(dates[1] > dates[0]), "Start Date is after End Date"
    except AssertionError:
        logger.exception("Start Date is after End Date")
        raise

    # Download list of securities
    assert(download_security_list(exchange, logger))
    if len(markets) == 0:
        file_name = '%s/%s.txt' % (exchange.lower(), exchange.lower())
        with open(file_name) as f:  # context manager: the handle was leaked before
            markets = [line.strip() for line in f]

    markets = [m.upper() for m in markets]
    features = ['OPEN', 'CLOSE', 'HIGH', 'LOW', 'VOLUME']
    date_range = pd.date_range(start=dates[0], end=dates[1], freq='B')
    back_data = {}
    if random:
        for feature in features:
            back_data[feature] = pd.DataFrame(np.random.randint(10, 50, size=(date_range.size, len(markets))),
                                              index=date_range,
                                              columns=markets)
    else:
        for feature in features:
            back_data[feature] = pd.DataFrame(
                index=date_range, columns=markets)
        assert data_available(exchange, markets, logger)
        market_to_drop = []
        for market in markets:
            logger.info('Reading %s.csv' % market)
            csv = pd.read_csv('%s/historicalData/%s.csv' %
                              (exchange.lower(), market.lower()), index_col=0)
            csv.index = pd.to_datetime(csv.index)
            csv.columns = [col.upper() for col in csv.columns]
            csv = csv.reindex(index=csv.index[::-1])  # file is newest-first; flip ascending
            features = [col.upper() for col in csv.columns]
            market_first_date = csv.index[0]
            # `dates[0] - BDay(1) + BDay(1)` rolls dates[0] onto a business day.
            if (market_first_date > (dates[0] - BDay(1) + BDay(1))):
                market_to_drop.append(market)
                logger.info(
                    'Dropping %s. This stock did not start trading before (start date -lookback days)' % market)
                continue
            market_last_date = csv.index[-1]
            if (market_last_date < (dates[0] - BDay(1) + BDay(1))):
                market_to_drop.append(market)
                logger.info(
                    'Dropping %s. This stock terminated before (start date -lookback days)' % market)
                continue

            back_fill_data = False
            if market_last_date in date_range:
                back_fill_data = True
                logger.info(
                    'The market %s doesnt have data for the whole duration. Subsituting missing dates with the last known data' % market)

            for feature in features:
                if not compatibleDictKeyCheck(back_data, feature):
                    back_data[feature] = pd.DataFrame(
                        index=date_range, columns=markets)
                back_data[feature][market] = csv[feature][date_range]
                if back_fill_data:
                    # Carry the last known value forward to the end of the range.
                    back_data[feature].loc[market_last_date:date_range[-1],
                                           market] = back_data[feature].at[market_last_date, market]

        for m in market_to_drop:
            logger.info('Dropping %s. Not Enough Data' % m)
            markets.remove(m)

        for feature in features:
            back_data[feature].drop(market_to_drop, axis=1, inplace=True)
        # Drop any date on which at least one surviving market has no data.
        dates_to_drop = pd.Series(False, index=date_range)
        for feature in features:
            dates_to_drop |= pd.isnull(back_data[feature]).any(axis=1)

        dropped_dates = date_range[dates_to_drop]
        date_range = date_range[~dates_to_drop]
        for feature in features:
            back_data[feature] = back_data[feature].drop(dropped_dates)

    # Zero-initialized bookkeeping frames for the backtester.
    back_data['COST TO TRADE'] = pd.DataFrame(
        0, index=date_range, columns=markets)
    back_data['POSITION'] = pd.DataFrame(0, index=date_range, columns=markets)
    back_data['ORDER'] = pd.DataFrame(0, index=date_range, columns=markets)
    back_data['FILLED_ORDER'] = pd.DataFrame(
        0, index=date_range, columns=markets)
    back_data['DAILY_PNL'] = pd.DataFrame(0, index=date_range, columns=markets)
    back_data['TOTAL_PNL'] = pd.DataFrame(0, index=date_range, columns=markets)
    back_data['FUNDS'] = pd.Series(budget, index=date_range)
    back_data['VALUE'] = pd.Series(budget, index=date_range)
    back_data['MARGIN'] = pd.Series(0, index=date_range)

    return back_data, date_range

# TODO: Refactor this so it shares its download/load logic with load_data()


def load_data_nologs(exchange, markets, start, end, lookback=2):
    """Logger-free variant of load_data(): fetch and align feature data only.

    :return: dict of feature DataFrames (no backtest bookkeeping frames).
    :raises ValueError: when start/end do not parse as dates.
    :raises AssertionError: when start >= end.
    """
    # Because there are some holidays, add some cushion to lookback.
    # pandas date offsets require an integer n, so floor the cushioned value.
    try:
        dates = [pd.to_datetime(start) - BDay(int(lookback * 1.10)),
                 pd.to_datetime(end)]
    except ValueError:
        raise ValueError(
            "%s or %s is not valid date. Please check settings!" % (start, end))

    assert(dates[1] > dates[0]), "Start Date is after End Date"

    dir_name = '%s/' % exchange.lower()
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)

    file_name = '%s%s.txt' % (dir_name, exchange.lower())
    if not os.path.exists(file_name):
        url = 'https://raw.githubusercontent.com/Auquan/auquan-historical-data/master/%s' % (
            file_name)
        response = urlopen(url)
        status = response.getcode()
        if status == 200:
            with open(file_name, 'w') as f:
                # Decode: urlopen().read() yields bytes on Python 3.
                f.write(response.read().decode('utf-8'))
        else:
            print('File not found. Please check exchange name!')

    if len(markets) == 0:
        file_name = '%s/%s.txt' % (exchange.lower(), exchange.lower())
        with open(file_name) as f:  # context manager: the handle was leaked before
            markets = [line.strip() for line in f]

    markets = [m.upper() for m in markets]
    features = ['OPEN', 'CLOSE', 'HIGH', 'LOW', 'VOLUME']
    date_range = pd.date_range(start=dates[0], end=dates[1], freq='B')
    back_data = {}
    for feature in features:
        back_data[feature] = pd.DataFrame(index=date_range, columns=markets)
    dir_name = '%s/historicalData/' % exchange.lower()
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    for m in markets:
        file_name = '%s%s.csv' % (dir_name, m.lower())
        if not os.path.exists(file_name):
            url = 'https://raw.githubusercontent.com/Auquan/auquan-historical-data/master/%s/historicalData/%s.csv' % (
                exchange.lower(), m.lower())
            response = urlopen(url)
            status = response.getcode()
            if status == 200:
                with open(file_name, 'w') as f:
                    # Decode: urlopen().read() yields bytes on Python 3.
                    f.write(response.read().decode('utf-8'))
            else:
                print('File not found. Please check settings!')

    market_to_drop = []
    for market in markets:
        csv = pd.read_csv('%s/historicalData/%s.csv' %
                          (exchange.lower(), market.lower()), index_col=0)
        csv.index = pd.to_datetime(csv.index)
        csv.columns = [col.upper() for col in csv.columns]
        csv = csv.reindex(index=csv.index[::-1])  # file is newest-first; flip ascending
        features = [col.upper() for col in csv.columns]
        market_first_date = csv.index[0]
        if (market_first_date > (dates[0] - BDay(1) + BDay(1))):
            market_to_drop.append(market)
            continue
        market_last_date = csv.index[-1]
        if (market_last_date < (dates[0] - BDay(1) + BDay(1))):
            market_to_drop.append(market)
            continue

        back_fill_data = False
        if market_last_date in date_range:
            back_fill_data = True

        for feature in features:
            if not compatibleDictKeyCheck(back_data, feature):
                back_data[feature] = pd.DataFrame(
                    index=date_range, columns=markets)
            back_data[feature][market] = csv[feature][date_range]
            if back_fill_data:
                # Carry the last known value forward to the end of the range.
                back_data[feature].loc[market_last_date:date_range[-1],
                                       market] = back_data[feature].at[market_last_date, market]

    for m in market_to_drop:
        markets.remove(m)

    for feature in features:
        back_data[feature].drop(market_to_drop, axis=1, inplace=True)
    # Drop any date on which at least one surviving market has no data.
    dates_to_drop = pd.Series(False, index=date_range)
    for feature in features:
        dates_to_drop |= pd.isnull(back_data[feature]).any(axis=1)

    dropped_dates = date_range[dates_to_drop]
    date_range = date_range[~dates_to_drop]
    for feature in features:
        back_data[feature] = back_data[feature].drop(dropped_dates)

    return back_data
print_function, unicode_literals 2 | import numpy as np 3 | import pandas as pd 4 | from auquanToolbox.dataloader import data_available 5 | import matplotlib.pyplot as plt 6 | 7 | 8 | def metrics(daily_pnl, total_pnl, baseline_data, base_index): 9 | 10 | stats = {} 11 | daily_return = daily_pnl.sum(axis=1) 12 | 13 | stats['Total Pnl'] = (total_pnl.iloc[total_pnl.index.size - 1].sum()) 14 | stats['Annual Return'] = annualized_return(daily_return) 15 | stats['Annual Vol'] = annual_vol(daily_return) 16 | stats['Sharpe Ratio'] = sharpe_ratio(daily_return) 17 | stats['Sortino Ratio'] = sortino_ratio(daily_return) 18 | stats['Max Drawdown'] = max_drawdown(daily_return) 19 | stats['Profit Factor'] = profit_factor(daily_return) 20 | stats['Profitablity (%)'] = profit_percent(daily_return) 21 | if base_index: 22 | stats['Base Return(%)'] = annualized_return(baseline_data['DAILY_PNL']) 23 | stats['Beta'] = beta(daily_return, baseline_data['DAILY_PNL']) 24 | 25 | for x in stats.keys(): 26 | if np.isnan(stats[x]): 27 | del stats[x] 28 | 29 | return stats 30 | 31 | 32 | def annualized_return(daily_return): 33 | total_return = daily_return.sum() 34 | total_days = daily_return.index.size 35 | if total_return < -1: 36 | total_return = -1 37 | return ((1 + total_return)**(252 / total_days) - 1) 38 | 39 | 40 | def annualized_std(daily_return): 41 | return np.sqrt(252) * np.std(daily_return) 42 | 43 | 44 | def annualized_downside_std(daily_return): 45 | downside_return = daily_return.copy() 46 | downside_return[downside_return > 0] = 0 47 | return np.sqrt(252) * np.std(downside_return) 48 | 49 | 50 | def annual_vol(daily_return): 51 | return annualized_std(daily_return) 52 | 53 | 54 | def sharpe_ratio(daily_return): 55 | stdev = annualized_std(daily_return) 56 | if stdev == 0: 57 | return np.nan 58 | else: 59 | return annualized_return(daily_return) / stdev 60 | 61 | 62 | def sortino_ratio(daily_return): 63 | stdev = annualized_downside_std(daily_return) 64 | if stdev == 0: 65 | 
return np.nan 66 | else: 67 | return annualized_return(daily_return) / stdev 68 | 69 | 70 | def max_drawdown(daily_return): 71 | return np.max(np.maximum.accumulate(daily_return) - daily_return) 72 | 73 | 74 | def beta(daily_return, baseline_daily_return): 75 | stdev = np.std(baseline_daily_return) 76 | if stdev == 0: 77 | return np.nan 78 | else: 79 | return np.corrcoef(daily_return, baseline_daily_return)[0, 1] * np.std(daily_return) / stdev 80 | 81 | 82 | def alpha(daily_return, baseline_daily_return, beta): 83 | return annualized_return(daily_return) - beta * annualized_return(baseline_daily_return) 84 | 85 | 86 | def profit_factor(daily_return): 87 | downside_return = daily_return.copy() 88 | downside_return[downside_return > 0] = 0 89 | upside_return = daily_return.copy() 90 | upside_return[upside_return < 0] = 0 91 | if downside_return.sum() == 0: 92 | return 0 93 | return -(upside_return.sum()) / (downside_return.sum()) 94 | 95 | 96 | def profit_percent(daily_return): 97 | total_return = daily_return.copy() 98 | total_return[total_return != 0] = 1 99 | upside_return = daily_return.copy() 100 | upside_return[upside_return < 0] = 0 101 | upside_return[upside_return > 0] = 1 102 | if total_return.sum() == 0: 103 | return 0 104 | return upside_return.sum() / total_return.sum() 105 | 106 | 107 | def baseline(exchange, base_index, date_range, logger): 108 | features = ['OPEN', 'CLOSE'] 109 | baseline_data = {} 110 | 111 | assert data_available(exchange, [base_index], logger) 112 | csv = pd.read_csv('%s/historicalData/%s.csv' % 113 | (exchange.lower(), base_index.lower()), index_col=0) 114 | csv.index = pd.to_datetime(csv.index) 115 | csv.columns = [col.upper() for col in csv.columns] 116 | csv = csv.reindex(index=csv.index[::-1]) 117 | # features = [col.upper() for col in csv.columns] 118 | 119 | for feature in features: 120 | baseline_data[feature] = pd.Series(0, index=date_range) 121 | baseline_data[feature][base_index] = csv[feature][date_range] 122 | 123 | 
baseline_data['DAILY_PNL'] = pd.Series(0, index=date_range) 124 | baseline_data['TOTAL_PNL'] = pd.Series(0, index=date_range) 125 | 126 | open_start = baseline_data['OPEN'][base_index].iloc[1] 127 | for end in range(1, date_range.size): 128 | close_curr = baseline_data['CLOSE'][base_index].iloc[end] 129 | close_last = baseline_data['CLOSE'][base_index].iloc[end - 1] 130 | if end == 1: 131 | close_last = open_start 132 | pnl_curr = (close_curr - close_last) / open_start 133 | 134 | baseline_data['DAILY_PNL'].iloc[end] = pnl_curr 135 | baseline_data['TOTAL_PNL'].iloc[end] = pnl_curr + \ 136 | baseline_data['TOTAL_PNL'].iloc[end - 1] 137 | 138 | return baseline_data 139 | 140 | 141 | def analyze(exchange, markets, back_data): 142 | plt.close('all') 143 | f, plot_arr = plt.subplots(2, sharex=True) 144 | plot_arr[0].set_title('Open') 145 | plot_arr[1].set_title('Close') 146 | for m in markets: 147 | plot_arr[0].plot(back_data['OPEN'].index, 148 | back_data['OPEN'][m], label=m) 149 | plot_arr[1].plot(back_data['OPEN'].index, 150 | back_data['CLOSE'][m], label=m) 151 | plot_arr[0].legend(loc='upper center') 152 | plot_arr[1].legend(loc='upper center') 153 | plt.show() 154 | -------------------------------------------------------------------------------- /auquanToolbox/resultviewer.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function, unicode_literals 2 | import numpy as np 3 | import pandas as pd 4 | import matplotlib 5 | matplotlib.use("TkAgg") # important to call this right after 6 | from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg 7 | from matplotlib import style 8 | import matplotlib.pyplot as plt 9 | import matplotlib.dates as mdates 10 | import matplotlib.ticker as mtick 11 | 12 | try: 13 | import Tkinter as tk 14 | import ttk 15 | import tkFont 16 | import tkMessageBox 17 | except: 18 | import tkinter as tk 19 | from tkinter 
import ttk 20 | from tkinter import font as tkFont 21 | from tkinter import messagebox as tkMessageBox 22 | 23 | from auquanToolbox.metrics import metrics, baseline 24 | 25 | 26 | def loadgui(back_data, exchange, base_index, budget, logger): 27 | 28 | ###################### 29 | # Setup data 30 | ###################### 31 | 32 | position = back_data['POSITION'] 33 | close = back_data['CLOSE'] 34 | 35 | # position as % of total portfolio 36 | 37 | long_position = (position * close).div(back_data['VALUE'], axis=0) 38 | short_position = long_position.copy() 39 | long_position[long_position < 0] = 0 40 | short_position[short_position > 0] = 0 41 | 42 | daily_pnl = back_data['DAILY_PNL'] / budget 43 | total_pnl = back_data['TOTAL_PNL'] / budget 44 | 45 | if base_index: 46 | baseline_data = baseline(exchange, base_index, total_pnl.index, logger) 47 | stats = metrics(daily_pnl, total_pnl, baseline_data, base_index) 48 | else: 49 | baseline_data = {} 50 | stats = metrics(daily_pnl, total_pnl, {}, base_index) 51 | 52 | daily_return = daily_pnl.sum(axis=1) 53 | total_return = total_pnl.sum(axis=1) 54 | long_exposure = long_position.sum(axis=1) 55 | short_exposure = short_position.sum(axis=1) 56 | zero_line = np.zeros(daily_pnl.index.size) 57 | 58 | # print to logger 59 | for x in stats.keys(): 60 | logger.info('%s : %0.2f' % (x, stats[x])) 61 | 62 | def isDate(val): 63 | 64 | # Function to validate if a given entry is valid date 65 | try: 66 | d = pd.to_datetime(val) 67 | if d > daily_pnl.index[0] and d < daily_pnl.index[-1]: 68 | return True 69 | else: 70 | return False 71 | except ValueError: 72 | raise ValueError("Not a Valid Date") 73 | return False 74 | 75 | def newselection(event): 76 | 77 | # Function to autoupdate chart on new selection from dropdown 78 | i = dropdown.current() 79 | market = ['TOTAL PORTFOLIO'] + daily_pnl.columns.values.tolist() 80 | plot(daily_pnl, total_pnl, long_position, short_position, baseline_data, base_index, market[i], box_value2.get(), 
box_value3.get()) 81 | 82 | def plot(daily_pnl, total_pnl, long_position, short_position, 83 | baseline_data, base_index, market='TOTAL PORTFOLIO', 84 | start=daily_pnl.index.format()[0], 85 | end=daily_pnl.index.format()[-1]): 86 | 87 | # New plot when custom fields are changed 88 | 89 | plt.clf() 90 | 91 | # plt.style.use("seaborn-whitegrid") 92 | daily_pnl = daily_pnl.loc[start:end] 93 | total_pnl = total_pnl.loc[start:end] 94 | long_position = long_position.loc[start:end] 95 | short_position = short_position.loc[start:end] 96 | 97 | if market == 'TOTAL PORTFOLIO': 98 | daily_return = daily_pnl.sum(axis=1) 99 | total_return = total_pnl.sum(axis=1) 100 | long_exposure = long_position.sum(axis=1) 101 | short_exposure = short_position.sum(axis=1) 102 | else: 103 | daily_return = daily_pnl[market] 104 | total_return = total_pnl[market] 105 | long_exposure = long_position[market] 106 | short_exposure = short_position[market] 107 | 108 | zero_line = np.zeros(daily_pnl.index.size) 109 | # f, plot_arr = plt.subplots(3, sharex=True) 110 | 111 | total_plot = plt.subplot2grid((10, 8), (0, 0), colspan=12, rowspan=4) 112 | daily_plot = plt.subplot2grid((10, 8), (5, 0), colspan=12, rowspan=2, sharex=total_plot) 113 | position_plot = plt.subplot2grid((10, 8), (8, 0), colspan=12, rowspan=2, sharex=total_plot) 114 | ind = np.arange(len(daily_pnl.index)) 115 | 116 | total_plot.set_title('Total PnL') 117 | total_plot.plot(ind, zero_line, 'k') 118 | total_plot.plot(ind, total_return.values, 'b', linewidth=0.5, label='strategy') 119 | total_plot.legend(loc='upper left') 120 | total_plot.autoscale(tight=True) 121 | plt.setp(total_plot.get_xticklabels(), visible=False) 122 | total_plot.yaxis.set_major_formatter(mtick.FuncFormatter(format_perc)) 123 | total_plot.set_ylabel('Cumulative Performance') 124 | total_plot.legend(bbox_to_anchor=(0.03, 0.97), loc='lower left', borderaxespad=0.) 
125 | if base_index: 126 | total_plot.plot(ind, baseline_data['TOTAL_PNL'], 'g', linewidth=0.5, label=base_index) 127 | 128 | daily_plot.set_title('Daily PnL') 129 | daily_plot.plot(ind, zero_line, 'k') 130 | daily_plot.bar(ind, daily_return.values, 0.2, align='center', color='c', label='strategy') 131 | daily_plot.legend(loc='upper left') 132 | daily_plot.autoscale(tight=True) 133 | plt.setp(daily_plot.get_xticklabels(), visible=False) 134 | daily_plot.yaxis.set_major_formatter(mtick.FuncFormatter(format_perc)) 135 | daily_plot.set_ylabel('Daily Performance') 136 | daily_plot.legend(bbox_to_anchor=(0.03, 0.97), loc='lower left', borderaxespad=0.) 137 | 138 | position_plot.set_title('Daily Exposure') 139 | position_plot.plot(ind, zero_line, 'k') 140 | position_plot.bar(ind, short_exposure.values, 0.3, linewidth=0, align='center', color='r', label='short') 141 | position_plot.bar(ind, long_exposure.values, 0.3, linewidth=0, align='center', color='b', label='long') 142 | position_plot.legend(loc='upper left') 143 | position_plot.autoscale(tight=True) 144 | position_plot.xaxis.set_major_formatter(mtick.FuncFormatter(format_date)) 145 | position_plot.yaxis.set_major_formatter(mtick.FuncFormatter(format_perc)) 146 | position_plot.set_ylabel('Long/Short %') 147 | position_plot.legend(bbox_to_anchor=(0.03, 0.97), loc='lower left', borderaxespad=0.) 
148 | 149 | plt.gcf().canvas.draw() 150 | 151 | def update_plot(): 152 | 153 | # Callback Function for plot button 154 | try: 155 | d1 = pd.to_datetime(box_value2.get()) 156 | d2 = pd.to_datetime(box_value3.get()) 157 | if d1 >= daily_pnl.index[0] and d2 <= daily_pnl.index[-1]: 158 | plot(daily_pnl, total_pnl, long_position, short_position, baseline_data, base_index, box_value.get(), box_value2.get(), box_value3.get()) 159 | else: 160 | tkMessageBox.showinfo("Date out of Range", "Please enter a date from %s to %s" % (daily_pnl.index[0].strftime('%Y-%m-%d'), daily_pnl.index[-1].strftime('%Y-%m-%d'))) 161 | except ValueError: 162 | raise ValueError("Not a Valid Date") 163 | 164 | def close_window(): 165 | 166 | # Callback function for Quit Button 167 | GUI.destroy() 168 | GUI.quit() 169 | 170 | def format_date(x, pos=None): 171 | 172 | # Format axis ticklabels to dates 173 | thisind = np.clip(int(x + 0.5), 0, len(daily_pnl.index) - 1) 174 | return daily_pnl.index[thisind].strftime('%b-%y') 175 | 176 | def format_perc(y, pos=None): 177 | 178 | # Format axis ticklabels to % 179 | if budget > 1: 180 | return '{percent:.2%}'.format(percent=y) 181 | else: 182 | return y 183 | 184 | def onFrameConfigure(canvas): 185 | canvas.configure(scrollregion=canvas.bbox("all")) 186 | 187 | ###################### 188 | # GUI mainloop 189 | ###################### 190 | 191 | # Create widget 192 | GUI = tk.Tk() 193 | GUI.title('Backtest Results') 194 | 195 | winCanvas = tk.Canvas(GUI, borderwidth=0, background="#ffffff", width=1500, height=1000) 196 | frame = tk.Frame(winCanvas, background="#ffffff") 197 | vsb = tk.Scrollbar(GUI, orient="vertical", command=winCanvas.yview) 198 | hsb = tk.Scrollbar(GUI, orient="horizontal", command=winCanvas.xview) 199 | winCanvas.configure(yscrollcommand=vsb.set) 200 | winCanvas.configure(xscrollcommand=hsb.set) 201 | 202 | vsb.pack(side="left", fill="y") 203 | hsb.pack(side="bottom", fill="x") 204 | winCanvas.pack(side="right", fill="both", 
expand=True) 205 | winCanvas.create_window((50, 50), window=frame, anchor="nw") 206 | 207 | frame.bind("", lambda event, canvas=winCanvas: onFrameConfigure(winCanvas)) 208 | 209 | # Create dropdown for market 210 | 211 | Label_1 = tk.Label(frame, text="Trading Performance:") 212 | Label_1.grid(row=0, column=0, sticky=tk.EW) 213 | 214 | box_value = tk.StringVar() 215 | dropdown = ttk.Combobox(frame, textvariable=box_value, state='readonly') 216 | dropdown['values'] = ['TOTAL PORTFOLIO'] + daily_pnl.columns.values.tolist() 217 | dropdown.grid(row=0, column=1, sticky=tk.EW) 218 | dropdown.current(0) 219 | dropdown.bind('<>', newselection) 220 | 221 | # Create entry field for start date 222 | 223 | Label_2 = tk.Label(frame, text="Start Date") 224 | Label_2.grid(row=0, column=2, sticky=tk.EW) 225 | 226 | box_value2 = tk.StringVar(frame, value=daily_pnl.index.format()[0]) 227 | start = tk.Entry(frame, textvariable=box_value2, validate='key', validatecommand=(GUI.register(isDate), '%P')) 228 | start.grid(row=0, column=3, sticky=tk.EW) 229 | 230 | # Create entry field for end date 231 | 232 | Label_3 = tk.Label(frame, text="End Date") 233 | Label_3.grid(row=0, column=4, sticky=tk.EW) 234 | 235 | box_value3 = tk.StringVar(frame, value=daily_pnl.index.format()[-1]) 236 | end = tk.Entry(frame, textvariable=box_value3, validate='key', validatecommand=(GUI.register(isDate), '%P')) 237 | end.grid(row=0, column=5, sticky=tk.EW) 238 | 239 | # Create Plot button to reload chart 240 | 241 | button1 = tk.Button(frame, text='PLOT', command=update_plot) 242 | button1.grid(row=0, column=6, sticky=tk.EW) 243 | 244 | # Create text widget with backtest results 245 | 246 | customFont1 = tkFont.Font(family="Helvetica", size=9, weight="bold") 247 | customFont2 = tkFont.Font(family="Helvetica", size=12) 248 | 249 | text = tk.Text(frame, height=3, width=50, wrap=tk.WORD, bd=5, padx=10, pady=5) 250 | text.grid(row=1, column=0, columnspan=7, sticky=tk.EW) 251 | String1 = '' 252 | String2 = '' 253 
| for y in stats.keys(): 254 | String1 = String1 + y + '\t\t' 255 | x = stats[y] 256 | if budget > 1 and 'Ratio' not in y: 257 | String2 = String2 + '{percent:.2%}'.format(percent=x) + '\t\t' 258 | else: 259 | String2 = String2 + '%0.2f' % x + '\t\t' 260 | text.insert(tk.END, String1) 261 | text.insert(tk.END, '\n') 262 | text.insert(tk.END, String2) 263 | text.tag_add("keys", "1.0", "1.end") 264 | text.tag_config("keys", font=customFont1) 265 | text.tag_add("values", "2.0", "2.end") 266 | text.tag_config("values", foreground="red", font=customFont2) 267 | 268 | # Create canvas to plot chart 269 | 270 | f = plt.figure(figsize=(16, 8)) 271 | canvas = FigureCanvasTkAgg(f, master=frame) 272 | canvas.get_tk_widget().grid(row=2, column=0, columnspan=7, rowspan=1, sticky=tk.NSEW) 273 | toolbar_frame = tk.Frame(frame) 274 | toolbar_frame.grid(row=4, column=0, columnspan=7) 275 | 276 | # plot 3 subplots for total position, daily position and exposure 277 | 278 | plt.style.use("seaborn-whitegrid") 279 | total_plot = plt.subplot2grid((10, 8), (0, 0), colspan=12, rowspan=4) 280 | daily_plot = plt.subplot2grid((10, 8), (5, 0), colspan=12, rowspan=2, sharex=total_plot) 281 | position_plot = plt.subplot2grid((10, 8), (8, 0), colspan=12, rowspan=2, sharex=total_plot) 282 | ind = np.arange(len(daily_pnl.index)) 283 | 284 | total_plot.set_title('Total PnL') 285 | total_plot.plot(ind, zero_line, 'k') 286 | total_plot.plot(ind, total_return.values, 'b', linewidth=0.5, label='strategy') 287 | total_plot.legend(loc='upper left') 288 | total_plot.autoscale(tight=True) 289 | plt.setp(total_plot.get_xticklabels(), visible=False) 290 | total_plot.yaxis.set_major_formatter(mtick.FuncFormatter(format_perc)) 291 | total_plot.set_ylabel('Cumulative Performance') 292 | total_plot.legend(bbox_to_anchor=(0.03, 0.97), loc='lower left', borderaxespad=0.) 
293 | if base_index: 294 | total_plot.plot(ind, baseline_data['TOTAL_PNL'], 'g', linewidth=0.5, label=base_index) 295 | 296 | daily_plot.set_title('Daily PnL') 297 | daily_plot.plot(ind, zero_line, 'k') 298 | daily_plot.bar(ind, daily_return.values, 0.2, align='center', color='c', label='strategy') 299 | daily_plot.legend(loc='upper left') 300 | daily_plot.autoscale(tight=True) 301 | plt.setp(daily_plot.get_xticklabels(), visible=False) 302 | daily_plot.yaxis.set_major_formatter(mtick.FuncFormatter(format_perc)) 303 | daily_plot.set_ylabel('Daily Performance') 304 | daily_plot.legend(bbox_to_anchor=(0.03, 0.97), loc='lower left', borderaxespad=0.) 305 | 306 | position_plot.set_title('Daily Exposure') 307 | position_plot.plot(ind, zero_line, 'k') 308 | position_plot.bar(ind, short_exposure.values, 0.3, linewidth=0, align='center', color='r', label='short') 309 | position_plot.bar(ind, long_exposure.values, 0.3, linewidth=0, align='center', color='b', label='long') 310 | position_plot.legend(loc='upper left') 311 | position_plot.autoscale(tight=True) 312 | position_plot.xaxis.set_major_formatter(mtick.FuncFormatter(format_date)) 313 | position_plot.yaxis.set_major_formatter(mtick.FuncFormatter(format_perc)) 314 | position_plot.set_ylabel('Long/Short') 315 | position_plot.legend(bbox_to_anchor=(0.03, 0.97), loc='lower left', borderaxespad=0.) 
316 | 317 | plt.gcf().canvas.draw() 318 | 319 | # Create Quit Button 320 | 321 | button2 = tk.Button(frame, text='QUIT', command=close_window) 322 | button2.grid(row=4, column=6, sticky=tk.EW) 323 | 324 | GUI.mainloop() 325 | -------------------------------------------------------------------------------- /auquanToolbox/toolbox.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function, unicode_literals 2 | import numpy as np 3 | import pandas as pd 4 | import os 5 | import json 6 | import logging 7 | import datetime as dt 8 | from auquanToolbox.dataloader import load_data 9 | from auquanToolbox.resultviewer import loadgui 10 | from auquanToolbox.metrics import metrics, baseline 11 | import urllib2 12 | 13 | 14 | def backtest(exchange, markets, trading_strategy, date_start, date_end, lookback, budget=1000000, verbose=False, base_index='SPX', trading_costs=True, isJson=False): 15 | 16 | logger = get_logger() 17 | 18 | if updateCheck(): 19 | logger.warn('Your version of auquanToolbox is not the most updated.' + 20 | ' If you are using pip, please use \'pip install -U auquanToolbox\'.' 
+ 21 | ' If you downloaded the package, you need to go to https://github.com/Auquan/auquan-toolbox-python' + 22 | ' to redownload that package.') 23 | 24 | # Verify Settings 25 | 26 | try: 27 | assert(isinstance(lookback, int)), "Lookback is invalid" 28 | except AssertionError: 29 | logger.exception("Lookback is invalid") 30 | raise 31 | 32 | # Load data for backtest 33 | 34 | (back_data, date_range) = load_data(exchange, markets, 35 | date_start, date_end, lookback, budget, logger) 36 | logger.info('Initial funds: %0.2f' % budget) 37 | logger.info('------------------------------------') 38 | logger.info('Evaluating...') 39 | 40 | budget_curr = budget 41 | 42 | position_curr = None 43 | margin_curr = None 44 | cost_to_trade = None 45 | 46 | start_index = -1 47 | 48 | for startDate in pd.date_range(start=date_start, end=date_end, freq='B'): 49 | if startDate not in date_range: 50 | logger.info(startDate.strftime( 51 | 'Trading date is a Holiday or data not present :%d %b %Y')) 52 | continue 53 | end = date_range.get_loc(startDate) 54 | if start_index < 0: 55 | start_index = end 56 | 57 | start = end - lookback 58 | if start < 0: 59 | start = 0 60 | 61 | if position_curr is None: 62 | position_curr = back_data['POSITION'].iloc[end - 1] 63 | margin_curr = back_data['MARGIN'].iloc[end - 1] 64 | cost_to_trade = position_curr * 0 65 | 66 | # get order and verify 67 | lookback_data = {feature: data[start: end] 68 | for feature, data in back_data.items()} 69 | order = trading_strategy(lookback_data) 70 | try: 71 | assert((order['SIGNAL'].isin([-1, 0, 1])).all()) 72 | except AssertionError: 73 | logger.info("Signal can only be -1(sell), 0(hold) or 1(buy)") 74 | raise 75 | try: 76 | assert((order['PRICE'] >= 0).all()) 77 | except AssertionError: 78 | logger.info("Price cannot be negative") 79 | raise 80 | try: 81 | assert(order['WEIGHTS'] >= 0).all() 82 | except AssertionError: 83 | logger.info( 84 | "Please check weights. 
Weights cannot be negative and should sum to <= 1") 85 | raise 86 | 87 | if order['WEIGHTS'].sum() > 1: 88 | order['WEIGHTS'] = order['WEIGHTS'] / order['WEIGHTS'].sum() 89 | 90 | # evaluate new position based on order and budget 91 | 92 | try: 93 | price_curr = back_data['OPEN'].iloc[end].astype(float) 94 | open_curr = back_data['OPEN'].iloc[end].astype(float) 95 | close_curr = back_data['CLOSE'].iloc[end].astype(float) 96 | close_last = back_data['CLOSE'].iloc[end - 1].astype(float) 97 | high = back_data['HIGH'].iloc[end - 1].astype(float) 98 | low = back_data['LOW'].iloc[end - 1].astype(float) 99 | except ValueError: 100 | logger.info("Data not formatted properly") 101 | raise 102 | 103 | slippage = (high - low) * 0.05 104 | position_last = back_data['POSITION'].iloc[end - 1].astype(int) 105 | value = budget_curr + margin_curr + (position_last * open_curr).sum() 106 | order['QUANTITY'] = getquantity( 107 | order, price_curr, slippage, value, position_last, logger) 108 | (position_curr, budget_curr, margin_curr, cost_to_trade) = execute_order( 109 | order, position_last, slippage, price_curr, budget_curr, margin_curr, logger, trading_costs) 110 | 111 | # set info in back data 112 | back_data['POSITION'].iloc[end] = position_curr 113 | back_data['ORDER'].iloc[end] = order['QUANTITY'] 114 | filled_order = position_curr - position_last 115 | back_data['FILLED_ORDER'].iloc[end] = filled_order 116 | 117 | # calculate pnl 118 | pnl_curr = (position_curr * (close_curr - open_curr) + 119 | position_last * (open_curr - close_last)) - cost_to_trade 120 | back_data['DAILY_PNL'].iloc[end] = pnl_curr 121 | back_data['TOTAL_PNL'].iloc[end] = pnl_curr + \ 122 | back_data['TOTAL_PNL'].iloc[end - 1] 123 | 124 | # available funds 125 | back_data['FUNDS'].iloc[end] = budget_curr 126 | 127 | # funds used as margin 128 | back_data['MARGIN'].iloc[ 129 | end] = -(position_curr[position_curr < 0] * close_curr[position_curr < 0]).sum() 130 | 131 | # portfolio value 132 | value_curr = 
budget_curr + margin_curr + (margin_curr - back_data['MARGIN'].iloc[end]) + ( 133 | position_curr[position_curr > 0] * close_curr[position_curr > 0]).sum() 134 | back_data['VALUE'].iloc[end] = value_curr 135 | 136 | # cost 137 | back_data['COST TO TRADE'].iloc[end] = cost_to_trade 138 | 139 | # print to STDOUT 140 | logger.info(date_range[end].strftime('Trading date :%d %b %Y')) 141 | if verbose: 142 | s = 'stocks : %s' % markets + '\n' +\ 143 | 'today open : %s' % open_curr.values + '\n' +\ 144 | 'today close : %s' % close_curr.values + '\n' +\ 145 | 'order : %s' % order['QUANTITY'].values + '\n' +\ 146 | 'position : %s' % position_curr.values + '\n' +\ 147 | 'cost to trade : %0.2f' % cost_to_trade.sum() + '\n' +\ 148 | 'Available funds: %0.2f' % budget_curr + '\n' +\ 149 | 'Margin funds : %0.2f' % margin_curr + '\n' +\ 150 | 'pnl : %0.2f' % pnl_curr.sum() + '\n' +\ 151 | 'Portfolio Value: %0.2f' % value_curr + '\n' +\ 152 | '------------------------------------' 153 | logger.info(s) 154 | 155 | if value_curr <= 0: 156 | logger.info('Out of funds. 
Exiting!') 157 | break 158 | 159 | logger.info('Final Portfolio Value: %0.2f' % value_curr) 160 | 161 | if isJson: 162 | if base_index: 163 | baseline_data = baseline(exchange, base_index, date_range, logger) 164 | return writejson({feature: data[start_index - 1: end + 1] for feature, data in back_data.items()}, budget, {feature: data[start_index - 1: end + 1] for feature, data in baseline_data.items()}, base_index) 165 | else: 166 | return writejson({feature: data[start_index - 1: end + 1] for feature, data in back_data.items()}, budget, {}, base_index) 167 | else: 168 | writecsv({feature: data[start_index - 1: end + 1] 169 | for feature, data in back_data.items()}, budget) 170 | 171 | logger.info('Plotting Results...') 172 | 173 | loadgui({feature: data[start_index - 1: end + 1] for feature, 174 | data in back_data.items()}, exchange, base_index, budget, logger) 175 | 176 | 177 | def commission(): 178 | return 0.1 179 | 180 | 181 | def margin_perc(): 182 | return 1 183 | 184 | 185 | def getquantity(order, price, slippage, value, position, logger): 186 | weights = order['WEIGHTS'] 187 | cost_to_trade = slippage + commission() 188 | if weights.sum() > 0: 189 | new_portfolio_value = (weights.sum() * value) / \ 190 | (weights * (price + cost_to_trade) / price).sum() 191 | desired_position = weights * new_portfolio_value / price 192 | quantity = (order['SIGNAL'] * desired_position) - position 193 | else: 194 | new_portfolio_value = 0 195 | quantity = - position 196 | quantity.fillna(0) 197 | return quantity.astype(int) 198 | 199 | 200 | def execute_order(order, position, slippage, price, budget, margin, logger, trading_costs): 201 | 202 | trade_criteria = (np.sign(order['QUANTITY']) * price[order.index] <= np.sign(order['QUANTITY']) * order['PRICE']) 203 | trade_criteria[np.sign(order['QUANTITY']) * price[order.index] > 204 | np.sign(order['QUANTITY']) * order['PRICE']] = order['PRICE'] == 0 205 | 206 | position_curr = position.copy() 207 | total_commission = 0 * 
position_curr 208 | adj_slippage = 0 * position_curr 209 | 210 | position_curr[trade_criteria] += order['QUANTITY'][trade_criteria] 211 | margin_curr = -(position_curr[position_curr < 0] * price[position_curr < 0]).sum() 212 | if trading_costs: 213 | total_commission = np.abs(position_curr - position) * commission() 214 | slippage_adjusted_price = price + \ 215 | (np.sign(order['QUANTITY']) * slippage) 216 | slippage_adjusted_price[slippage_adjusted_price < 0] = 0 217 | adj_slippage = np.abs(position_curr - position) * \ 218 | np.abs(price - slippage_adjusted_price) 219 | margin_call = margin_curr - margin 220 | order_value = ((position_curr - position) * price).sum() + margin_call 221 | cost_to_trade = total_commission + adj_slippage 222 | return position_curr, budget - order_value - margin_call - cost_to_trade.sum(), margin_curr, cost_to_trade 223 | 224 | 225 | def get_logger(): 226 | logger_name = dt.datetime.now().strftime('%Y-%m-%d %H-%M-%S') 227 | logger = logging.getLogger(logger_name) 228 | logger.setLevel(logging.DEBUG) 229 | logger_dir = 'runLogs/' 230 | logger_file = '%srun-%s.txt' % (logger_dir, logger_name) 231 | if not os.path.exists(logger_dir): 232 | os.makedirs(logger_dir) 233 | formatter = logging.Formatter('%(message)s') 234 | file_handler = logging.FileHandler(logger_file) 235 | console_handler = logging.StreamHandler() 236 | file_handler.setFormatter(formatter) 237 | console_handler.setFormatter(formatter) 238 | logger.addHandler(file_handler) 239 | logger.addHandler(console_handler) 240 | return logger 241 | 242 | 243 | class noop_logger: 244 | def info(self, str): 245 | print(str) 246 | 247 | def exception(self, str): 248 | print(str) 249 | 250 | def warn(self, str): 251 | print(str) 252 | 253 | 254 | def get_noop_logger(): 255 | return noop_logger() 256 | 257 | 258 | def writecsv(back_data, budget): 259 | 260 | results = pd.DataFrame( 261 | 0, index=back_data['DAILY_PNL'].index, columns=['Daily Returns']) 262 | results['Daily Returns'] = 
back_data[ 263 | 'DAILY_PNL'].sum(axis=1) * 100 / budget 264 | results['Total Returns'] = back_data[ 265 | 'TOTAL_PNL'].sum(axis=1) * 100 / budget 266 | results['Funds'] = back_data['FUNDS'] 267 | results['Margin'] = back_data['MARGIN'] 268 | results['Portfolio Value'] = back_data['VALUE'] 269 | for stock in back_data['DAILY_PNL'].columns.tolist(): 270 | results['%s Position' % stock] = back_data['POSITION'][stock] 271 | results['%s Order' % stock] = back_data['ORDER'][stock] 272 | results['%s Filled Order' % stock] = back_data['FILLED_ORDER'][stock] 273 | results['%s Trade Price' % stock] = back_data['OPEN'][stock] 274 | results['%s Cost to Trade' % stock] = back_data['COST TO TRADE'][stock] 275 | results['%s PnL' % stock] = back_data['DAILY_PNL'][stock] 276 | 277 | results = results.sort_index(axis=0, ascending=False) 278 | csv_dir = 'runLogs/' 279 | try: 280 | csv_file = open('%srun-%s.csv' % (csv_dir, 281 | dt.datetime.now().strftime('%Y-%m-%d %H-%M-%S')), 'wb') 282 | results.to_csv(csv_file) 283 | except: 284 | csv_file = open('%srun-%s.csv' % (csv_dir, 285 | dt.datetime.now().strftime('%Y-%m-%d %H-%M-%S')), 'w') 286 | results.to_csv(csv_file) 287 | # writer = csv.writer(csv_file) 288 | # writer.writerow(['Dates']+back_data['DAILY_PNL'].index.format()) 289 | # writer.writerow(['Daily Pnl']+daily_return.sum(axis=1).values.tolist()) 290 | # writer.writerow(['Total PnL']+total_return.sum(axis=1).values.tolist()) 291 | # writer.writerow(['Funds']+back_data['FUNDS'].values.tolist()) 292 | # writer.writerow(['Portfolio Value']+back_data['VALUE'].values.tolist()) 293 | # for stock in back_data['DAILY_PNL'].columns.tolist(): 294 | # writer.writerow(['%s Position'%stock]+back_data['POSITION'][stock].values.tolist()) 295 | # writer.writerow(['%s Order'%stock]+back_data['ORDER'][stock].values.tolist()) 296 | # writer.writerow(['%s Filled Order'%stock]+back_data['FILLED_ORDER'][stock].values.tolist()) 297 | # writer.writerow(['%s 
Slippage'%stock]+back_data['SLIPPAGE'][stock].values.tolist()) 298 | # writer.writerow(['%s PnL'%stock]+back_data['DAILY_PNL'][stock].values.tolist()) 299 | csv_file.close() 300 | 301 | 302 | def writejson(back_data, budget, baseline_data, base_index): 303 | 304 | daily_return = back_data['DAILY_PNL'] / budget 305 | total_return = back_data['TOTAL_PNL'] / budget 306 | stats = metrics(daily_return, total_return, baseline_data, base_index) 307 | # multiply by 100 for readability purposes 308 | daily_return_percent = daily_return * 100 309 | total_return_percent = total_return * 100 310 | 311 | d = {'dates': back_data['DAILY_PNL'].index.format(), 312 | 'daily_pnl': daily_return_percent.sum(axis=1).values.tolist(), 313 | 'total_pnl': total_return_percent.sum(axis=1).values.tolist(), 314 | 'stocks': back_data['DAILY_PNL'].columns.tolist(), 315 | 'stock_pnl': daily_return_percent.values.tolist(), 316 | 'stock_position': back_data['POSITION'].values.tolist(), 317 | 'metrics': stats.keys(), 318 | 'metrics_values': stats.values()} 319 | return d 320 | 321 | 322 | def updateCheck(): 323 | ''' checks for new version of toolbox 324 | Returns: 325 | returns True if the version of the toolox on PYPI is not the same as the current version 326 | returns False if version is the same 327 | ''' 328 | 329 | from auquanToolbox.version import __version__ 330 | try: 331 | toolboxJson = urllib2.urlopen( 332 | 'https://pypi.python.org/pypi/auquanToolbox/json') 333 | except: 334 | return False 335 | 336 | toolboxDict = json.loads(toolboxJson.read()) 337 | 338 | if __version__ != toolboxDict['info']['version']: 339 | return True 340 | else: 341 | return False 342 | -------------------------------------------------------------------------------- /auquanToolbox/version.py: -------------------------------------------------------------------------------- 1 | __version__ = '1.16.0' 2 | -------------------------------------------------------------------------------- /setup.py: 
"""setuptools packaging script for the Auquan Toolbox."""
from setuptools import setup

from auquanToolbox.version import __version__


setup(
    name='auquanToolbox',
    version=__version__,
    description='The Auquan Toolbox for trading system development',
    url='http://auquan.com/',
    author='Auquan',
    author_email='info@auquan.com',
    license='MIT',
    packages=['auquanToolbox'],
    scripts=['TradingStrategyTemplate.py'],
    include_package_data=True,
    # Core scientific/plotting stack required at runtime.
    install_requires=[
        'pandas',
        'numpy',
        'matplotlib',
    ],
    zip_safe=False,
)