├── .gitignore ├── MANIFEST.in ├── README.rst ├── VisualPortfolio ├── Env.py ├── Miscellaneous.py ├── Plottings.py ├── Tears.py ├── Timeseries.py ├── Transactions.py └── __init__.py ├── img ├── 1.png ├── 2.png ├── 3.png └── 4.png ├── notebooks ├── Overview of VisualPortfolio.ipynb └── data │ └── positions.csv ├── requirements ├── py2.txt └── py3.txt ├── setup.cfg └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | .ipynb_checkpoints/* 2 | notebooks/.ipynb_checkpoints/* 3 | .idea/* 4 | dist/* 5 | build/* 6 | *.pyc 7 | *.csv 8 | *.log 9 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.* 2 | include requirements/*.txt 3 | include img/*.png 4 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ------------------------------------------------------------ 2 | |Join the chat at https://gitter.im/chinaquants/algotrading| 3 | ------------------------------------------------------------ 4 | 5 | VisualPortfolio 6 | ========================= 7 | 8 | 将策略或者资产包的表现可视化。大量参考以及模仿自 `pyfolio <https://github.com/quantopian/pyfolio>`_ ,包括图的配置以及代码。 9 | 10 | This tool is used to visualize the performance of a portfolio. Much of the code and samples come from the original referenced project: `pyfolio <https://github.com/quantopian/pyfolio>`_ 11 | 12 | 依赖 13 | ---------------------- 14 | 15 | :: 16 | 17 | lxml 18 | matplotlib 19 | numpy 20 | pandas 21 | seaborn 22 | tushare 23 | 24 | 25 | 安装 26 | ---------------------- 27 | 28 | 首先将代码 ``clone`` 至本地: 29 | 30 | :: 31 | 32 | git clone https://github.com/ChinaQuants/VisualPortfolio.git (如果你是从github获取) 33 | 34 | 35 | 安装 36 | 37 | :: 38 | 39 | cd VisualPortfolio 40 | python setup.py install 41 | 42 | 例子 43 | ---------------------- 44 | 45 | ..
code:: python 46 | 47 | In [1]: %matplotlib inline 48 | In [2]: from VisualPortfolio import createPerformanceTearSheet 49 | In [3]: from pandas_datareader import data 50 | In [4]: prices = data.get_data_yahoo('600000.ss')['Close'] 51 | In [5]: benchmark = data.get_data_yahoo('000300.ss')['Close'] 52 | ......: benchmark.name = "000300.ss" 53 | In [6]: perf_matric, perf_df, rollingRisk = createPerformanceTearSheet(prices=prices, benchmark=benchmark) 54 | 55 | 56 | .. image:: img/1.png 57 | :align: center 58 | 59 | .. image:: img/2.png 60 | :align: center 61 | 62 | .. image:: img/3.png 63 | :align: center 64 | 65 | .. image:: img/4.png 66 | :align: center 67 | 68 | .. |Join the chat at https://gitter.im/chinaquants/algotrading| image:: https://badges.gitter.im/Join%20Chat.svg 69 | :target: https://gitter.im/chinaquants/algotrading?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge 70 | -------------------------------------------------------------------------------- /VisualPortfolio/Env.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | u""" 3 | Created on 2016-1-18 4 | 5 | @author: cheng.li 6 | """ 7 | 8 | from enum import IntEnum 9 | from enum import unique 10 | 11 | 12 | @unique 13 | class DataSource(IntEnum): 14 | DataYes = 1 15 | DXDataCenter = 2 16 | 17 | 18 | class SettingsFactory: 19 | 20 | def __init__(self): 21 | self._data_source = DataSource.DataYes 22 | 23 | def set_source(self, data_source): 24 | self._data_source = data_source 25 | 26 | @property 27 | def data_source(self): 28 | return self._data_source 29 | 30 | 31 | Settings = SettingsFactory() 32 | -------------------------------------------------------------------------------- /VisualPortfolio/Miscellaneous.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | u""" 4 | Created on 2016-1-8 5 | 6 | @author: cheng.li 7 | """ 8 | 9 | from 
VisualPortfolio.Plottings import plotting_context 10 | from VisualPortfolio.Tears import createPerformanceTearSheet 11 | from VisualPortfolio.Env import DataSource 12 | from VisualPortfolio.Env import Settings 13 | 14 | 15 | def get_equity_eod(instruments, start_date, end_date): 16 | if Settings.data_source == DataSource.DXDataCenter: 17 | from DataAPI import api 18 | data = api.GetEquityBarEOD(instrumentIDList=instruments, 19 | startDate=start_date, 20 | endDate=end_date, 21 | field='closePrice', 22 | instrumentIDasCol=True, 23 | baseDate='end') 24 | elif Settings.data_source == DataSource.DataYes: 25 | import os 26 | import tushare as ts 27 | 28 | try: 29 | ts.set_token(os.environ['DATAYES_TOKEN']) 30 | except KeyError: 31 | raise 32 | 33 | mt = ts.Market() 34 | res = [] 35 | for ins in instruments: 36 | data = mt.MktEqud(ticker=ins, 37 | beginDate=start_date.replace('-', ''), 38 | endDate=end_date.replace('-', ''), 39 | field='tradeDate,ticker,closePrice') 40 | res.append(data) 41 | 42 | data = pd.concat(res) 43 | data['tradeDate'] = pd.to_datetime(data['tradeDate'], format='%Y-%m-%d') 44 | data['ticker'] = data['ticker'].apply(lambda x: '{0:06d}'.format(x)) 45 | data.set_index(['tradeDate', 'ticker'], inplace=True, verify_integrity=True) 46 | data = data.unstack(level=-1) 47 | 48 | return data 49 | 50 | 51 | @plotting_context 52 | def portfolioAnalysis(posDF, 53 | startDate, 54 | endDate, 55 | notional=10000000., 56 | benchmark='000300.zicn', 57 | isweight=False): 58 | 59 | secIDs = posDF['instrumentID'] 60 | 61 | data = get_equity_eod(instruments=secIDs, 62 | start_date=startDate, 63 | end_date=endDate) 64 | 65 | close_data = data['closePrice'] 66 | close_data = close_data.fillna(method='pad') 67 | close_data.fillna(value=0., inplace=True) 68 | columns = close_data.columns 69 | 70 | for instrument in columns: 71 | 72 | if isweight and notional: 73 | invest_value = posDF[posDF.instrumentID == instrument]['position'].iloc[0] * notional 74 | volume = 
int(invest_value / close_data[instrument].values[0]) 75 | else: 76 | volume = posDF[posDF.instrumentID == instrument]['position'].iloc[0] 77 | 78 | close_data[instrument] *= volume 79 | 80 | prices = close_data.sum(axis=1) 81 | 82 | perf_metric, perf_df, rollingRisk = createPerformanceTearSheet(prices=prices, benchmark=benchmark) 83 | return perf_metric, perf_df, rollingRisk 84 | 85 | 86 | if __name__ == "__main__": 87 | import pandas as pd 88 | data = pd.read_excel('d:/basket.xlsx') 89 | data.instrumentID = data.instrumentID.apply(lambda x: "{0:06d}".format(x)) 90 | 91 | Settings.set_source(DataSource.DataYes) 92 | res = portfolioAnalysis(data, '2006-01-01', '2016-01-15') -------------------------------------------------------------------------------- /VisualPortfolio/Plottings.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | u""" 3 | Created on 2015-11-9 4 | 5 | @author: cheng.li 6 | """ 7 | 8 | from functools import wraps 9 | import seaborn as sns 10 | import matplotlib 11 | from matplotlib.ticker import FuncFormatter 12 | from statsmodels.tsa.stattools import acf 13 | import pandas as pd 14 | import numpy as np 15 | from VisualPortfolio.Timeseries import aggregateReturns 16 | from VisualPortfolio.Transactions import getTurnOver 17 | from VisualPortfolio.Timeseries import aggregatePositons 18 | 19 | 20 | def get_color_list(): 21 | return ['#0000CD', '#F08080', '#8B0000', '#EE82EE', '#8B4513', '#008B8B', 22 | '#7B68EE', '#FF1493', '#FF6347', '#DC143C', '#E9967A', '#FF4500', 23 | '#DA70D6', '#FFA07A', '#8B008B', '#66CDAA', '#3CB371', '#191970', 24 | '#4B0082', '#0000FF', '#BC8F8F', '#FF8C00', '#FFB6C1', '#4682B4', 25 | '#FF00FF', '#DB7093', '#FF7F50', '#20B2AA', '#2E8B57', '#DAA520', 26 | '#FA8072', '#1E90FF', '#BA55D3', '#000000', '#87CEEB', '#5F9EA0', 27 | '#00BFFF', '#556B2F', '#CD853F', '#FFFF00', '#6495ED', '#483D8B', 28 | '#A52A2A', '#2F4F4F', '#B22222', '#C71585', '#FF0000', '#9932CC', 29 | 
'#00008B', '#00FFFF', '#FFA500', '#FFD700', '#D8BFD8', '#800080', 30 | '#00CED1', '#FF00FF', '#4169E1', '#9400D3', '#40E0D0', '#B8860B', 31 | '#808000', '#8FBC8F'] 32 | 33 | 34 | def plotting_context(func): 35 | @wraps(func) 36 | def call_w_context(*args, **kwargs): 37 | set_context = kwargs.pop('set_context', True) 38 | if set_context: 39 | with context(): 40 | return func(*args, **kwargs) 41 | else: 42 | return func(*args, **kwargs) 43 | 44 | return call_w_context 45 | 46 | 47 | def context(context='notebook', font_scale=1.5, rc=None): 48 | if rc is None: 49 | rc = {} 50 | 51 | rc_default = {'lines.linewidth': 1.5, 52 | 'axes.facecolor': '0.995', 53 | 'figure.facecolor': '0.97'} 54 | 55 | # Add defaults if they do not exist 56 | for name, val in rc_default.items(): 57 | rc.setdefault(name, val) 58 | 59 | return sns.plotting_context(context=context, font_scale=font_scale, 60 | rc=rc) 61 | 62 | 63 | def integer_format(x, pos): 64 | return '%d' % x 65 | 66 | 67 | def two_dec_places(x, pos): 68 | return '%.2f' % x 69 | 70 | 71 | def percentage(x, pos): 72 | return '%.2f%%' % (x * 100) 73 | 74 | 75 | def zero_dec_percentage(x, pos): 76 | return '%.1f%%' % (x * 100) 77 | 78 | 79 | def plottingRollingReturn(cumReturns, 80 | cumReturnsWithoutTC, 81 | benchmarkReturns, 82 | other_curves, 83 | ax, 84 | title='Strategy Cumulative Returns'): 85 | y_axis_formatter = FuncFormatter(two_dec_places) 86 | ax.yaxis.set_major_formatter(FuncFormatter(y_axis_formatter)) 87 | 88 | cumReturns.plot(lw=3, 89 | color='forestgreen', 90 | alpha=0.6, 91 | label='Strategy', 92 | ax=ax) 93 | 94 | if cumReturnsWithoutTC is not None: 95 | cumReturnsWithoutTC.plot(lw=3, 96 | color='red', 97 | alpha=0.6, 98 | label='Strategy (w/o tc)', 99 | ax=ax) 100 | 101 | color_names = get_color_list() 102 | 103 | if benchmarkReturns is not None: 104 | benchmarkReturns.plot(lw=2, 105 | color='gray', 106 | alpha=0.6, 107 | label=benchmarkReturns.name, 108 | ax=ax) 109 | 110 | if other_curves is not None: 111 | 
for i, curve_info in enumerate(zip(*other_curves)): 112 | marker = curve_info[0] 113 | line_style = curve_info[1] 114 | label = curve_info[2] 115 | series = curve_info[3] 116 | series.plot(lw=2, 117 | marker=marker, 118 | markersize=12, 119 | linestyle=line_style, 120 | color=color_names[i], 121 | alpha=0.6, 122 | label=label, 123 | ax=ax) 124 | 125 | ax.axhline(0.0, linestyle='--', color='black', lw=2) 126 | ax.set_ylabel('Cumulative returns') 127 | ax.set_title(title) 128 | ax.legend(loc='best') 129 | return ax 130 | 131 | 132 | def plottingRollingBeta(rb, bmName, ax): 133 | y_axis_formatter = FuncFormatter(two_dec_places) 134 | ax.yaxis.set_major_formatter(FuncFormatter(y_axis_formatter)) 135 | 136 | ax.set_title("Rolling Portfolio Beta to " + bmName) 137 | ax.set_ylabel('Beta') 138 | 139 | rb['beta_1m'].plot(color='steelblue', lw=3, alpha=0.6, ax=ax) 140 | rb['beta_3m'].plot(color='grey', lw=3, alpha=0.4, ax=ax) 141 | rb['beta_6m'].plot(color='yellow', lw=3, alpha=0.5, ax=ax) 142 | ax.axhline(rb['beta_1m'].mean(), color='steelblue', linestyle='--', lw=3) 143 | ax.axhline(0.0, color='black', linestyle='-', lw=2) 144 | ax.set_xlabel('') 145 | ax.legend(['1-m', 146 | '3-m', 147 | '6-m', 148 | 'average 1-m'], 149 | loc='best') 150 | 151 | return ax 152 | 153 | 154 | def plottingRollingSharp(rs, ax): 155 | y_axis_formatter = FuncFormatter(two_dec_places) 156 | ax.yaxis.set_major_formatter(FuncFormatter(y_axis_formatter)) 157 | 158 | ax.set_title('Rolling Sharpe ratio') 159 | ax.set_ylabel('Sharp') 160 | 161 | rs['sharp_1m'].plot(color='steelblue', lw=3, alpha=0.6, ax=ax) 162 | rs['sharp_3m'].plot(color='grey', lw=3, alpha=0.4, ax=ax) 163 | rs['sharp_6m'].plot(color='yellow', lw=3, alpha=0.5, ax=ax) 164 | ax.axhline(rs['sharp_1m'].mean(), color='steelblue', linestyle='--', lw=3) 165 | ax.axhline(0.0, color='black', linestyle='-', lw=2) 166 | ax.set_xlabel('') 167 | ax.legend(['1-m', 168 | '3-m', 169 | '6-m', 170 | 'average 1-m'], 171 | loc='best') 172 | return ax 173 
| 174 | 175 | def plottingDrawdownPeriods(cumReturns, 176 | drawDownTable, 177 | top, 178 | ax, 179 | title='Top 5 Drawdown Periods'): 180 | y_axis_formatter = FuncFormatter(two_dec_places) 181 | ax.yaxis.set_major_formatter(FuncFormatter(y_axis_formatter)) 182 | cumReturns.plot(ax=ax) 183 | lim = ax.get_ylim() 184 | 185 | tmp = drawDownTable.sort_values(by='draw_down') 186 | topDrawdown = tmp.groupby('recovery').first() 187 | topDrawdown = topDrawdown.sort_values(by='draw_down')[:top] 188 | colors = sns.cubehelix_palette(len(topDrawdown))[::-1] 189 | for i in range(len(colors)): 190 | recovery = topDrawdown.index[i] 191 | ax.fill_between((topDrawdown['peak'][i], recovery), 192 | lim[0], 193 | lim[1], 194 | alpha=.4, 195 | color=colors[i]) 196 | 197 | ax.set_title(title) 198 | ax.set_ylabel('Cumulative returns') 199 | ax.legend(['Cumulative returns'], loc='best') 200 | ax.set_xlabel('') 201 | return ax 202 | 203 | 204 | def plottingUnderwater(drawDownSeries, ax, title='Underwater Plot'): 205 | y_axis_formatter = FuncFormatter(percentage) 206 | ax.yaxis.set_major_formatter(FuncFormatter(y_axis_formatter)) 207 | drawDownSeries.plot(ax=ax, kind='area', color='coral', alpha=0.7) 208 | ax.set_ylabel('Drawdown') 209 | ax.set_title(title) 210 | ax.legend(loc='best') 211 | ax.set_xlabel('') 212 | return ax 213 | 214 | 215 | def plottingMonthlyReturnsHeapmap(returns, ax, title='Monthly Returns (%)'): 216 | x_axis_formatter = FuncFormatter(integer_format) 217 | ax.xaxis.set_major_formatter(FuncFormatter(x_axis_formatter)) 218 | monthlyRetTable = pd.DataFrame(aggregateReturns(returns, convert='monthly')[0]) 219 | monthlyRetTable = monthlyRetTable.unstack() 220 | monthlyRetTable.columns = monthlyRetTable.columns.droplevel() 221 | sns.heatmap((np.exp(monthlyRetTable.fillna(0)) - 1.0) * 100.0, 222 | annot=True, 223 | fmt=".1f", 224 | annot_kws={"size": 9}, 225 | alpha=1.0, 226 | center=0.0, 227 | cbar=False, 228 | cmap=matplotlib.cm.RdYlGn_r, 229 | ax=ax) 230 | 
ax.set_ylabel('Year') 231 | ax.set_xlabel('Month') 232 | ax.set_title(title) 233 | return ax 234 | 235 | 236 | def plottingAnnualReturns(returns, ax, title='Annual Returns'): 237 | x_axis_formatter = FuncFormatter(zero_dec_percentage) 238 | ax.xaxis.set_major_formatter(FuncFormatter(x_axis_formatter)) 239 | ax.tick_params(axis='x', which='major', labelsize=10) 240 | 241 | annulaReturns = pd.DataFrame(aggregateReturns(returns, convert='yearly')[0]) 242 | annulaReturns = np.exp(annulaReturns) - 1. 243 | 244 | ax.axvline(annulaReturns.values.mean(), 245 | color='steelblue', 246 | linestyle='--', 247 | lw=4, 248 | alpha=0.7) 249 | 250 | annulaReturns.sort_index(ascending=False).plot( 251 | ax=ax, 252 | kind='barh', 253 | alpha=0.7 254 | ) 255 | 256 | ax.axvline(0.0, color='black', linestyle='-', lw=3) 257 | 258 | ax.set_ylabel('Year') 259 | ax.set_xlabel('Returns') 260 | ax.set_title(title) 261 | ax.legend(['mean'], loc='best') 262 | return ax 263 | 264 | 265 | def plottingMonthlyRetDist(returns, 266 | ax, 267 | title="Distribution of Monthly Returns"): 268 | x_axis_formatter = FuncFormatter(zero_dec_percentage) 269 | ax.xaxis.set_major_formatter(FuncFormatter(x_axis_formatter)) 270 | ax.tick_params(axis='x', which='major', labelsize=10) 271 | 272 | monthlyRetTable = aggregateReturns(returns, convert='monthly')[0] 273 | monthlyRetTable = np.exp(monthlyRetTable) - 1. 
274 | 275 | if len(monthlyRetTable) > 1: 276 | ax.hist( 277 | monthlyRetTable, 278 | color='orange', 279 | alpha=0.8, 280 | bins=20 281 | ) 282 | 283 | ax.axvline( 284 | monthlyRetTable.mean(), 285 | color='steelblue', 286 | linestyle='--', 287 | lw=4, 288 | alpha=1.0 289 | ) 290 | 291 | ax.axvline(0.0, color='black', linestyle='-', lw=3, alpha=0.75) 292 | ax.legend(['mean'], loc='best') 293 | ax.set_ylabel('Number of months') 294 | ax.set_xlabel('Returns') 295 | ax.set_title(title) 296 | return ax 297 | 298 | 299 | def plottingExposure(positions, ax, title="Total non cash exposure (%)"): 300 | positions = aggregatePositons(positions, convert='daily') 301 | y_axis_formatter = FuncFormatter(two_dec_places) 302 | ax.yaxis.set_major_formatter(FuncFormatter(y_axis_formatter)) 303 | if 'cash' in positions: 304 | positions_without_cash = positions.drop('cash', axis='columns') 305 | else: 306 | positions_without_cash = positions 307 | longs = positions_without_cash[positions_without_cash > 0] \ 308 | .sum(axis=1).fillna(0) * 100 309 | shorts = positions_without_cash[positions_without_cash < 0] \ 310 | .abs().sum(axis=1).fillna(0) * 100 311 | df_long_short = pd.DataFrame({'long': longs, 312 | 'short': shorts}) 313 | df_long_short.plot(kind='area', 314 | stacked=True, 315 | color=['blue', 'green'], 316 | linewidth=0., ax=ax) 317 | ax.set_title(title) 318 | return ax 319 | 320 | 321 | def plottingTopExposure(positions, 322 | ax, 323 | top=10, 324 | title="Top 10 securities exposure (%)"): 325 | positions = aggregatePositons(positions, convert='daily') 326 | y_axis_formatter = FuncFormatter(two_dec_places) 327 | ax.yaxis.set_major_formatter(FuncFormatter(y_axis_formatter)) 328 | df_mean = positions.abs().mean() 329 | df_top = df_mean.nlargest(top) 330 | (positions[df_top.index] * 100.).plot(ax=ax) 331 | ax.legend(loc='upper center', 332 | frameon=True, 333 | bbox_to_anchor=(0.5, -0.14), 334 | ncol=5) 335 | ax.set_title(title) 336 | return ax 337 | 338 | 339 | def 
plottingHodings(positions, ax, freq='M', title="Holdings Analysis"): 340 | positions = aggregatePositons(positions, convert='daily') 341 | if 'cash' in positions: 342 | positions = positions.drop('cash', axis='columns') 343 | df_holdings = positions.apply(lambda x: np.sum(x != 0), axis='columns') 344 | df_holdings_by_freq = df_holdings.resample(freq).mean() 345 | df_holdings.plot(color='steelblue', alpha=0.6, lw=0.5, ax=ax) 346 | 347 | if freq == 'M': 348 | freq = 'monthly' 349 | else: 350 | freq = 'daily' 351 | 352 | df_holdings_by_freq.plot( 353 | color='orangered', 354 | alpha=0.5, 355 | lw=2, 356 | ax=ax) 357 | ax.axhline( 358 | df_holdings.values.mean(), 359 | color='steelblue', 360 | ls='--', 361 | lw=3, 362 | alpha=1.0) 363 | 364 | ax.set_xlim((positions.index[0], positions.index[-1])) 365 | 366 | ax.legend(['Holdings on each bar', 367 | 'Average {0} holdings'.format(freq), 368 | 'Average whole peirod {0} holdings'.format(freq)], 369 | loc="best") 370 | ax.set_title(title) 371 | ax.set_ylabel('Number of securities holdings') 372 | ax.set_xlabel('') 373 | return ax 374 | 375 | 376 | def plottingPositionACF(positions, ax, title='Position auto correlation function'): 377 | positions = aggregatePositons(positions, convert='raw') 378 | if 'cash' in positions: 379 | positions = positions.drop('cash', axis='columns') 380 | 381 | nlags = 100 382 | acf_mat = np.zeros((len(positions.columns), nlags+1)) 383 | cols = positions.columns 384 | 385 | for i, col in enumerate(cols): 386 | acfs = acf(positions[col], nlags=nlags) 387 | acf_mat[i, 0:len(acfs)] = acfs 388 | 389 | acf_mean = np.nanmean(acf_mat, axis=0) 390 | ax.plot(acf_mean, 391 | color='orangered', 392 | alpha=0.5, 393 | lw=2,) 394 | ax.set_title(title) 395 | ax.set_ylabel('Auto correlation') 396 | ax.set_xlabel('lags') 397 | return ax 398 | 399 | 400 | def plottingTurnover(transactions, positions, turn_over=None, freq='M', ax=None, title="Turnover Analysis"): 401 | if turn_over is None: 402 | df_turnover = 
getTurnOver(transactions, positions) 403 | else: 404 | df_turnover = turn_over 405 | 406 | df_turnover_agreagted = df_turnover.resample(freq).sum().dropna() 407 | 408 | if freq == 'M': 409 | freq = 'monthly' 410 | else: 411 | freq = 'daily' 412 | 413 | if ax: 414 | y_axis_formatter = FuncFormatter(two_dec_places) 415 | ax.yaxis.set_major_formatter(FuncFormatter(y_axis_formatter)) 416 | df_turnover.plot(color='steelblue', alpha=1.0, lw=0.5, ax=ax) 417 | df_turnover_agreagted.plot( 418 | color='orangered', 419 | alpha=0.5, 420 | lw=2, 421 | ax=ax) 422 | ax.axhline( 423 | df_turnover_agreagted.mean(), 424 | color='steelblue', 425 | linestyle='--', 426 | lw=3, 427 | alpha=1.0) 428 | ax.legend(['turnover', 429 | 'Aggregated {0} turnover'.format(freq), 430 | 'Average {0} turnover'.format(freq)], 431 | loc="best") 432 | ax.set_title(title + ' (aggregated {0})'.format(freq)) 433 | ax.set_ylabel('Turnover') 434 | return ax, df_turnover 435 | 436 | 437 | if __name__ == "__main__": 438 | 439 | from matplotlib import pyplot as plt 440 | from FactorMiner.runner.simplebarrunner import SimpleBarRunner 441 | from PyFin.api import * 442 | 443 | alpha_list = ['alpha101_40_4', 444 | 'alpha101_225_10', 445 | 'alpha101_540_20', 446 | 'alpha102_40_4', 447 | 'alpha111_40_10', 448 | 'alpha111_225_10', 449 | 'alpha111_600_20', 450 | 'alpha121_40_15', 451 | 'alpha121_275_15', 452 | 'alpha121_600_20', 453 | 'alpha121_900_20', 454 | 'alpha122_140_4', 455 | 'alpha151_40_15', 456 | 'alpha151_275_15', 457 | 'alpha151_600_20', 458 | 'alpha151_900_20', 459 | 'alpha152_140_5'] 460 | weights_list = [1., 461 | 3., 462 | 6., 463 | 12., 464 | 1., 465 | 3., 466 | 6., 467 | 1., 3., 6., 3., 468 | 12., 469 | 1., 3., 6., 3., 12.] 
470 | 471 | weights_list = np.array(weights_list) / np.sum(np.array(weights_list)) 472 | 473 | huty_factor = None 474 | for i, f_name in enumerate(alpha_list): 475 | if huty_factor: 476 | huty_factor = huty_factor + weights_list[i] * LAST(f_name) 477 | else: 478 | huty_factor = weights_list[i] * LAST(f_name) 479 | 480 | runner = SimpleBarRunner(None, 481 | huty_factor, 482 | '2014-01-01', 483 | '2017-02-01', 484 | username='sa', 485 | password='A12345678!', 486 | server_name='test_w', 487 | account='test_mssql_sa', 488 | freq=5) 489 | 490 | turn_over, daily_return, positions, risk_stats, detail_series, factor_values \ 491 | = runner.simulate(leverage=None, tc_cost=0.) 492 | 493 | plt.show() -------------------------------------------------------------------------------- /VisualPortfolio/Tears.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | u""" 3 | Created on 2015-11-9 4 | 5 | @author: cheng.li 6 | """ 7 | 8 | import numpy as np 9 | import pandas as pd 10 | import matplotlib.gridspec as gridspec 11 | from matplotlib import pyplot as plt 12 | from VisualPortfolio.Timeseries import aggregateReturns 13 | from VisualPortfolio.Timeseries import drawDown 14 | from VisualPortfolio.Plottings import plottingRollingReturn 15 | from VisualPortfolio.Plottings import plottingDrawdownPeriods 16 | from VisualPortfolio.Plottings import plottingRollingBeta 17 | from VisualPortfolio.Plottings import plottingRollingSharp 18 | from VisualPortfolio.Plottings import plottingUnderwater 19 | from VisualPortfolio.Plottings import plottingMonthlyReturnsHeapmap 20 | from VisualPortfolio.Plottings import plottingAnnualReturns 21 | from VisualPortfolio.Plottings import plottingMonthlyRetDist 22 | from VisualPortfolio.Plottings import plotting_context 23 | from VisualPortfolio.Timeseries import annualReturn 24 | from VisualPortfolio.Timeseries import annualVolatility 25 | from VisualPortfolio.Timeseries import sortinoRatio 26 
| from VisualPortfolio.Timeseries import sharpRatio 27 | from VisualPortfolio.Timeseries import aggregatePositons 28 | from VisualPortfolio.Timeseries import calculatePosWeight 29 | from VisualPortfolio.Timeseries import aggregateTranscations 30 | from VisualPortfolio.Plottings import plottingExposure 31 | from VisualPortfolio.Plottings import plottingTopExposure 32 | from VisualPortfolio.Plottings import plottingHodings 33 | from VisualPortfolio.Plottings import plottingPositionACF 34 | from VisualPortfolio.Plottings import plottingTurnover 35 | from VisualPortfolio.Timeseries import APPROX_BDAYS_PER_MONTH 36 | from VisualPortfolio.Timeseries import RollingBeta 37 | from VisualPortfolio.Timeseries import RollingSharp 38 | from PyFin.api import advanceDateByCalendar 39 | from PyFin.Enums import BizDayConventions 40 | from PyFin.api import bizDatesList 41 | 42 | 43 | def get_benchmark_data(benchmark, start_date, end_data): 44 | 45 | if Settings.data_source == DataSource.DXDataCenter: 46 | benchmark_data = api.GetIndexBarEOD(instrumentIDList=benchmark, 47 | startDate=start_date, 48 | endDate=end_data, 49 | field=['closePrice']) 50 | elif Settings.data_source == DataSource.DataYes: 51 | import os 52 | import tushare as ts 53 | 54 | try: 55 | ts.set_token(os.environ['DATAYES_TOKEN']) 56 | except KeyError: 57 | raise 58 | 59 | mt = ts.Market() 60 | 61 | benchmark_data = mt.MktIdxd(benchmark, 62 | beginDate=start_date.replace('-', ''), 63 | endDate=end_data.replace('-', ''), 64 | field='closeIndex,tradeDate') 65 | benchmark_data = benchmark_data.set_index('tradeDate') 66 | benchmark_data = benchmark_data.rename(columns={'closeIndex': 'closePrice'}) 67 | benchmark_data.index = pd.to_datetime(benchmark_data.index, format="%Y-%m-%d") 68 | 69 | return benchmark_data 70 | 71 | 72 | @plotting_context 73 | def createPerformanceTearSheet(prices=None, 74 | returns=None, 75 | benchmark=None, 76 | benchmarkReturns=None, 77 | other_curves=None, 78 | turn_over=None, 79 | tc_cost=0., 
80 | plot=True): 81 | 82 | if prices is not None and not isinstance(prices, pd.Series): 83 | raise TypeError("prices series should be a pandas time series.") 84 | elif returns is not None and prices is not None: 85 | raise ValueError("prices series and returns series can't be both set.") 86 | 87 | if benchmark is not None and not (isinstance(benchmark, pd.Series) or isinstance(benchmark, str)): 88 | raise TypeError("benchmark series should be a pandas time series or a string ticker.") 89 | 90 | if returns is None: 91 | returns = np.log(prices / prices.shift(1)) 92 | returns.fillna(0, inplace=True) 93 | returns = returns[~np.isinf(returns)] 94 | 95 | if benchmark is not None and isinstance(benchmark, str) and benchmarkReturns is None: 96 | startDate = advanceDateByCalendar("China.SSE", prices.index[0], '-1b', BizDayConventions.Preceding) 97 | 98 | benchmarkPrices = get_benchmark_data(benchmark, 99 | start_date=startDate.strftime('%Y-%m-%d'), 100 | end_data=returns.index[-1].strftime("%Y-%m-%d")) 101 | 102 | # do the linear interpolation on the target time line 103 | date_index = prices.index 104 | new_index = benchmarkPrices.index.union(date_index) 105 | benchmarkPrices = benchmarkPrices.reindex(new_index) 106 | benchmarkPrices = benchmarkPrices.interpolate().ix[date_index].dropna() 107 | 108 | benchmarkReturns = np.log(benchmarkPrices['closePrice'] / benchmarkPrices['closePrice'].shift(1)) 109 | benchmarkReturns.name = benchmark 110 | benchmarkReturns.fillna(0, inplace=True) 111 | benchmarkReturns.index = pd.to_datetime(benchmarkReturns.index.date) 112 | elif benchmark is not None and isinstance(benchmark, pd.Series): 113 | benchmarkReturns = np.log(benchmark / benchmark.shift(1)) 114 | try: 115 | benchmarkReturns.name = benchmark.name 116 | except AttributeError: 117 | benchmarkReturns.name = "benchmark" 118 | benchmarkReturns.dropna(inplace=True) 119 | benchmarkReturns.index = pd.to_datetime(benchmarkReturns.index.date) 120 | 121 | aggregateDaily, 
aggregateDailyAfterTC = aggregateReturns(returns, turn_over, tc_cost) 122 | 123 | if aggregateDailyAfterTC is not None: 124 | aggregateDailyBeforeTC = aggregateDaily 125 | aggregateDaily = aggregateDailyAfterTC 126 | else: 127 | aggregateDailyBeforeTC = aggregateDaily 128 | 129 | drawDownDaily = drawDown(aggregateDaily) 130 | 131 | # perf metric 132 | annualRet = annualReturn(aggregateDaily) 133 | annualVol = annualVolatility(aggregateDaily) 134 | sortino = sortinoRatio(aggregateDaily) 135 | sharp = sharpRatio(aggregateDaily) 136 | maxDrawDown = np.min(drawDownDaily['draw_down']) 137 | winningDays = np.sum(aggregateDaily > 0.) 138 | lossingDays = np.sum(aggregateDaily < 0.) 139 | 140 | perf_metric = pd.DataFrame([annualRet, annualVol, sortino, sharp, maxDrawDown, winningDays, lossingDays], 141 | index=['annual_return', 142 | 'annual_volatiltiy', 143 | 'sortino_ratio', 144 | 'sharp_ratio', 145 | 'max_draw_down', 146 | 'winning_days', 147 | 'lossing_days'], 148 | columns=['metrics']) 149 | 150 | perf_df = pd.DataFrame(index=aggregateDaily.index) 151 | perf_df['daily_return'] = aggregateDaily 152 | perf_df['daily_return (w/o tc)'] = aggregateDailyBeforeTC 153 | perf_df['daily_cum_return'] = np.exp(aggregateDaily.cumsum()) - 1.0 154 | perf_df['daily_cum_return (w/o tc)'] = np.exp(aggregateDailyBeforeTC.cumsum()) - 1.0 155 | perf_df['daily_draw_down'] = drawDownDaily['draw_down'] 156 | 157 | if benchmarkReturns is not None: 158 | perf_df['benchmark_return'] = benchmarkReturns 159 | perf_df['benchmark_cum_return'] = benchmarkReturns.cumsum() 160 | perf_df.fillna(0.0, inplace=True) 161 | perf_df['benchmark_cum_return'] = np.exp(perf_df['benchmark_cum_return'] 162 | - perf_df['benchmark_cum_return'][0]) - 1.0 163 | perf_df['access_return'] = aggregateDaily - perf_df['benchmark_return'] 164 | perf_df['access_cum_return'] = (1.0 + perf_df['daily_cum_return']) \ 165 | / (1.0 + perf_df['benchmark_cum_return']) - 1.0 166 | accessDrawDownDaily = 
drawDown(perf_df['access_return']) 167 | else: 168 | accessDrawDownDaily = None 169 | 170 | if 'benchmark_cum_return' in perf_df: 171 | benchmarkCumReturns = perf_df['benchmark_cum_return'] 172 | benchmarkCumReturns.name = benchmarkReturns.name 173 | accessCumReturns = perf_df['access_cum_return'] 174 | accessReturns = perf_df['access_return'] 175 | 176 | index = perf_df.index 177 | 178 | length1 = len(bizDatesList('China.SSE', index[0], index[-1])) 179 | length2 = len(perf_df) 180 | factor = length1 / float(length2) 181 | 182 | rb = RollingBeta(perf_df['daily_return'], perf_df['benchmark_return'], [1, 3, 6], factor=factor) 183 | rs = RollingSharp(perf_df['daily_return'], [1, 3, 6], factor=factor) 184 | else: 185 | benchmarkCumReturns = None 186 | accessReturns = None 187 | accessCumReturns = None 188 | 189 | if len(perf_df['daily_return']) > APPROX_BDAYS_PER_MONTH and benchmarkCumReturns is not None: 190 | rollingRisk = pd.concat([pd.concat(rs, axis=1), pd.concat(rb, axis=1)], axis=1) 191 | else: 192 | rollingRisk = None 193 | 194 | if plot: 195 | verticalSections = 2 196 | plt.figure(figsize=(16, 7 * verticalSections)) 197 | gs = gridspec.GridSpec(verticalSections, 3, wspace=0.5, hspace=0.5) 198 | 199 | axRollingReturns = plt.subplot(gs[0, :]) 200 | axDrawDown = plt.subplot(gs[1, :]) 201 | 202 | plottingRollingReturn(perf_df['daily_cum_return'], 203 | perf_df['daily_cum_return (w/o tc)'], 204 | benchmarkCumReturns, 205 | other_curves, axRollingReturns) 206 | plottingDrawdownPeriods(perf_df['daily_cum_return'], drawDownDaily, 5, axDrawDown) 207 | 208 | if rollingRisk is not None: 209 | plt.figure(figsize=(16, 7 * verticalSections)) 210 | gs = gridspec.GridSpec(verticalSections, 3, wspace=0.5, hspace=0.5) 211 | axRollingBeta = plt.subplot(gs[0, :]) 212 | axRollingSharp = plt.subplot(gs[1, :]) 213 | 214 | bmName = benchmarkReturns.name 215 | plottingRollingBeta(rb, bmName, ax=axRollingBeta) 216 | plottingRollingSharp(rs, ax=axRollingSharp) 217 | 218 | 
plt.figure(figsize=(16, 7 * verticalSections)) 219 | gs = gridspec.GridSpec(verticalSections, 3, wspace=0.5, hspace=0.5) 220 | 221 | axUnderwater = plt.subplot(gs[0, :]) 222 | axMonthlyHeatmap = plt.subplot(gs[1, 0]) 223 | axAnnualReturns = plt.subplot(gs[1, 1]) 224 | axMonthlyDist = plt.subplot(gs[1, 2]) 225 | 226 | plottingUnderwater(drawDownDaily['draw_down'], axUnderwater) 227 | plottingMonthlyReturnsHeapmap(aggregateDaily, axMonthlyHeatmap) 228 | plottingAnnualReturns(aggregateDaily, axAnnualReturns) 229 | plottingMonthlyRetDist(aggregateDaily, axMonthlyDist) 230 | 231 | if accessReturns is not None and plot: 232 | plt.figure(figsize=(16, 7 * verticalSections)) 233 | gs = gridspec.GridSpec(verticalSections, 3, wspace=0.5, hspace=0.5) 234 | axRollingAccessReturns = plt.subplot(gs[0, :]) 235 | axAccessDrawDown = plt.subplot(gs[1, :], sharex=axRollingAccessReturns) 236 | plottingRollingReturn(accessCumReturns, None, None, None, axRollingAccessReturns, title='Access Cumulative Returns w.r.t. ' + benchmarkReturns.name) 237 | plottingDrawdownPeriods(accessCumReturns, accessDrawDownDaily, 5, axAccessDrawDown, title=('Top 5 Drawdown periods w.r.t. ' + benchmarkReturns.name)) 238 | 239 | plt.figure(figsize=(16, 7 * verticalSections)) 240 | gs = gridspec.GridSpec(verticalSections, 3, wspace=0.5, hspace=0.5) 241 | 242 | axAccessUnderwater = plt.subplot(gs[0, :]) 243 | axAccessMonthlyHeatmap = plt.subplot(gs[1, 0]) 244 | axAccessAnnualReturns = plt.subplot(gs[1, 1]) 245 | axAccessMonthlyDist = plt.subplot(gs[1, 2]) 246 | 247 | plottingUnderwater(accessDrawDownDaily['draw_down'], axAccessUnderwater, title='Underwater Plot w.r.t. 
@plotting_context
def createPostionTearSheet(positions, freq='M', plot=True, convert='raw'):
    """Build the position tear sheet for a position book.

    Parameters
    ----------
    positions : pd.DataFrame
        Position book, one column per instrument.
    freq : str
        Resampling frequency forwarded to the holdings plot.
    plot : bool
        When False, skip all figure creation and only return the input.
    convert : str
        Kept for interface compatibility.
        NOTE(review): this argument is currently unused in the body — confirm
        whether it was meant to be forwarded to an aggregation step.

    Returns
    -------
    pd.DataFrame
        The ``positions`` argument, unchanged.
    """
    pos_weights = calculatePosWeight(positions)

    if plot:
        n_sections = 2

        # first figure: exposure panels share the x axis
        plt.figure(figsize=(16, 7 * n_sections))
        grid = gridspec.GridSpec(n_sections, 3, wspace=0.5, hspace=0.5)
        axExposure = plt.subplot(grid[0, :])
        axTopExposure = plt.subplot(grid[1, :], sharex=axExposure)

        # second figure: holdings count and position autocorrelation
        plt.figure(figsize=(16, 7 * n_sections))
        grid = gridspec.GridSpec(n_sections, 3, wspace=0.5, hspace=0.5)
        axHoldings = plt.subplot(grid[0, :])
        axPosACFs = plt.subplot(grid[1, :])

        plottingExposure(pos_weights, axExposure)
        plottingTopExposure(pos_weights, axTopExposure)
        plottingHodings(pos_weights, axHoldings, freq=freq)
        plottingPositionACF(pos_weights, axPosACFs)

    return positions


@plotting_context
def createTranscationTearSheet(transactions, positions, turn_over=None, freq='M', plot=True):
    """Build the transaction/turnover tear sheet.

    Parameters
    ----------
    transactions : pd.DataFrame
        Trade records; aggregated to daily sums when ``turn_over`` is None.
    positions : pd.DataFrame
        Position book; aggregated to daily snapshots when ``turn_over`` is None.
    turn_over : pd.Series, optional
        Precomputed turnover; when given, the raw inputs are used as-is.
    freq : str
        Frequency forwarded to the turnover plot.
    plot : bool
        When False, no figure is produced but the rate is still computed.

    Returns
    -------
    pd.DataFrame
        Single-column frame named ``turnover_rate`` indexed by ``date``.
    """
    if turn_over is None:
        # no precomputed turnover: derive it from daily-aggregated books
        positions = aggregatePositons(positions)
        transactions = aggregateTranscations(transactions)

    axTurnOver = None
    if plot:
        plt.figure(figsize=(16, 7))
        grid = gridspec.GridSpec(1, 3, wspace=0.5, hspace=0.5)
        axTurnOver = plt.subplot(grid[0, :])

    # plottingTurnover returns a tuple; element [1] is the rate series
    turnOverRate = plottingTurnover(transactions, positions, turn_over, freq, axTurnOver)[1]
    turnOverRate.name = 'turnover_rate'
    turnOverRate.index.name = 'date'
    return pd.DataFrame(turnOverRate)


@plotting_context
def createAllTearSheet(positions, transcations=None, prices=None, returns=None, benchmark=None, turn_over=None, tc_cost=0., freq='M', plot=True):
    """Convenience wrapper producing performance, position and (optionally)
    transaction tear sheets in one call.

    Returns
    -------
    tuple
        ``(perf_metric, perf_df, rollingRisk)`` from the performance sheet.
    """
    perf_metric, perf_df, rollingRisk = createPerformanceTearSheet(
        prices=prices,
        returns=returns,
        benchmark=benchmark,
        turn_over=turn_over,
        tc_cost=tc_cost,
        plot=plot)

    createPostionTearSheet(positions=positions, plot=plot, freq=freq, convert='raw')

    # transaction sheet only makes sense with trades or a precomputed turnover
    if transcations is not None or turn_over is not None:
        createTranscationTearSheet(positions=positions,
                                   transactions=transcations,
                                   turn_over=turn_over,
                                   freq=freq,
                                   plot=plot)

    return perf_metric, perf_df, rollingRisk
def aggregatePositons(positionBooks, convert='daily'):
    """Aggregate a position book to the requested granularity.

    :param positionBooks: pd.DataFrame indexed by timestamp, one column
                          per instrument.
    :param convert: 'daily' keeps the last snapshot of each calendar day
                    (end-of-day book); 'raw' returns an untouched copy.
    :return: aggregated pd.DataFrame.
    :raises ValueError: for any other ``convert`` value.  The original code
                        fell through and died later with UnboundLocalError.
    """
    if convert == 'daily':
        # the last record of a day represents the end-of-day position
        return positionBooks.groupby(
            lambda x: dt.datetime(x.year, x.month, x.day)).last()
    elif convert == 'raw':
        return positionBooks.copy()
    raise ValueError("convert must be 'daily' or 'raw', got {0!r}".format(convert))


def aggregateTranscations(transcations, convert='daily'):
    """Aggregate absolute traded volume and value.

    :param transcations: pd.DataFrame with 'turnover_volume' and
                         'turnover_value' columns; signs are discarded so
                         buys and sells both add to turnover.
    :param convert: only 'daily' is supported (sum per calendar day).
    :return: daily-summed pd.DataFrame.
    :raises ValueError: for any unsupported ``convert`` value.  The original
                        code fell through and died with UnboundLocalError.
    """
    transcations = transcations[['turnover_volume', 'turnover_value']].abs()
    if convert == 'daily':
        return transcations.groupby(
            lambda x: dt.datetime(x.year, x.month, x.day)).sum()
    raise ValueError("convert must be 'daily', got {0!r}".format(convert))


def calculatePosWeight(pos):
    """Convert a position book of market values into portfolio weights.

    :param pos: pd.DataFrame of position market values; an optional 'cash'
                column is treated as the cash balance.
    :return: pd.DataFrame of the same shape, each row divided by that day's
             gross exposure (longs + |shorts|) plus cash; NaNs become 0.
    """
    if 'cash' in pos:
        pos_wo_cash = pos.drop('cash', axis=1)
        cash = pos.cash
    else:
        pos_wo_cash = pos
        cash = 0.

    longs = pos_wo_cash[pos_wo_cash > 0].sum(axis=1).fillna(0)
    shorts = pos_wo_cash[pos_wo_cash < 0].abs().sum(axis=1).fillna(0)

    # denominator is gross exposure plus cash, so long and short books
    # both consume capacity
    net_liquidation = longs + shorts + cash

    return pos.divide(
        net_liquidation,
        axis='index'
    ).fillna(0.)
def aggregateReturns(returns, turn_over=None, tc_cost=0., convert='daily'):
    """Aggregate simple returns to daily / monthly / yearly buckets.

    :param returns: pd.Series of simple returns indexed by timestamp.
    :param turn_over: optional pd.Series of turnover; when given, a second
                      series net of transaction costs is also returned.
    :param tc_cost: proportional transaction cost applied to turnover.
    :param convert: 'daily', 'monthly' or 'yearly'.
    :return: tuple ``(aggregated, aggregated_after_tc)``; the second element
             is None when ``turn_over`` is None.
    :raises ValueError: for an unsupported ``convert``.

    Bug fixed: the original code built ``ValueError(...)`` without ``raise``,
    so an unsupported ``convert`` silently returned None.  The message also
    advertised 'weekly', which was never implemented.
    """
    def cumulateReturns(x):
        # simple sum within the bucket (returns are treated as additive)
        return x.sum()

    if convert == 'daily':
        group_keys = lambda x: dt.datetime(x.year, x.month, x.day)
    elif convert == 'monthly':
        group_keys = [lambda x: x.year, lambda x: x.month]
    elif convert == 'yearly':
        group_keys = [lambda x: x.year]
    else:
        raise ValueError('convert must be daily, monthly or yearly')

    aggregated = returns.groupby(group_keys).apply(cumulateReturns)

    if turn_over is not None:
        # net of transaction costs: each period pays turnover * tc_cost
        returns_after_tc = returns.sub(turn_over * tc_cost, fill_value=0.).dropna()
        return aggregated, returns_after_tc.groupby(group_keys).apply(cumulateReturns)
    return aggregated, None


def drawDown(returns):
    """Running drawdown statistics of a return series.

    :param returns: pd.Series of (log-space, per PyFin's accumulator) returns.
    :return: pd.DataFrame indexed like ``returns`` with columns
             'draw_down' (simple-return drawdown, <= 0),
             'peak' (date of the peak preceding the drawdown),
             'valley' and 'recovery' (first later date where the drawdown
             closes; defaults to the last date when never recovered).
    """
    length = len(returns)
    ddCal = MovingDrawDown(length, 'ret')
    ddSeries = [0.0] * length
    peakSeries = [0] * length
    valleySeries = [0] * length
    recoverySeries = [returns.index[-1]] * length

    for i, value in enumerate(returns):
        ddCal.push({'ret': value})
        res = ddCal.value
        # res[0] is the drawdown in log space; map back to simple returns
        ddSeries[i] = exp(res[0]) - 1.0
        peakSeries[i] = returns.index[res[2]]
        # NOTE(review): valley is recorded as the *current* bar, not taken
        # from the accumulator — confirm this is intended
        valleySeries[i] = returns.index[i]

    # recovery = first k >= i with zero drawdown; a single reverse sweep
    # replaces the original O(n^2) forward rescan with identical results
    nearest_zero = returns.index[-1]
    for i in range(length - 1, -1, -1):
        if ddSeries[i] == 0.0:
            nearest_zero = returns.index[i]
        recoverySeries[i] = nearest_zero

    return pd.DataFrame(list(zip(ddSeries, peakSeries, valleySeries, recoverySeries)),
                        index=returns.index,
                        columns=['draw_down', 'peak', 'valley', 'recovery'])


def annualReturn(returns):
    """Annualized mean return assuming ~252 business days per year."""
    return returns.mean() * APPROX_BDAYS_PER_YEAR


def annualVolatility(returns):
    """Annualized volatility assuming ~252 business days per year."""
    return returns.std() * sqrt(APPROX_BDAYS_PER_YEAR)


def sortinoRatio(returns):
    """Annual return over downside (negative-return) volatility; NaN when
    there is no downside volatility."""
    annualRet = annualReturn(returns)
    annualNegativeVol = annualVolatility(returns[returns < 0.0])
    if annualNegativeVol != 0.:
        return annualRet / annualNegativeVol
    return np.nan


def sharpRatio(returns):
    """Annual return over annual volatility (risk-free rate assumed 0);
    NaN when volatility is zero."""
    annualRet = annualReturn(returns)
    annualVol = annualVolatility(returns)
    if annualVol != 0.:
        return annualRet / annualVol
    return np.nan


def RollingBeta(returns, benchmarkReturns, month_windows, factor):
    """Rolling beta of ``returns`` against ``benchmarkReturns``.

    :param month_windows: iterable of window lengths in months.
    :param factor: calendar-to-sample scaling (business days / samples),
                   used to translate month windows into sample counts.
    :return: dict mapping 'beta_{k}m' to a pd.Series, each truncated to drop
             the warm-up period of the shortest window.
    """
    def calculateSingalWindowBete(returns, benchmarkReturns, window):
        res = []
        rbcalc = MovingAlphaBeta(window=int(window * APPROX_BDAYS_PER_MONTH))
        for pRet, mRet in zip(returns, benchmarkReturns):
            rbcalc.push({'pRet': pRet, 'mRet': mRet, 'riskFree': 0})
            try:
                # result() yields (alpha, beta); keep beta
                res.append(rbcalc.result()[1])
            except ZeroDivisionError:
                # not enough data in the window yet
                res.append(np.nan)
        return res

    rtn = [pd.Series(calculateSingalWindowBete(returns, benchmarkReturns, window / factor),
                     index=returns.index)
           for window in month_windows]

    return {"beta_{0}m".format(window): res[int(APPROX_BDAYS_PER_MONTH * min(month_windows) / factor):]
            for window, res in zip(month_windows, rtn)}


def RollingSharp(returns, month_windows, factor):
    """Rolling annualized Sharpe ratio of ``returns``.

    :param month_windows: iterable of window lengths in months.
    :param factor: calendar-to-sample scaling, as in :func:`RollingBeta`.
    :return: dict mapping 'sharp_{k}m' to a pd.Series, each truncated to drop
             the warm-up period of the shortest window.
    """
    def calculateSingalWindowSharp(returns, window, factor):
        res = []
        rscalc = MovingSharp(window=int(window * APPROX_BDAYS_PER_MONTH))
        for ret in returns:
            rscalc.push({'ret': ret, 'riskFree': 0})
            try:
                # in PyFin, sharp is not annualized; scale it here
                res.append(rscalc.result() * sqrt(APPROX_BDAYS_PER_YEAR / factor))
            except ZeroDivisionError:
                res.append(np.nan)
        return res

    rtn = [pd.Series(calculateSingalWindowSharp(returns, window / factor, factor),
                     index=returns.index)
           for window in month_windows]

    return {"sharp_{0}m".format(window): res[int(APPROX_BDAYS_PER_MONTH * min(month_windows) / factor):]
            for window, res in zip(month_windows, rtn)}
def getTurnOver(transactions, positions, period=None, average=True):
    """Turnover rate: traded value relative to portfolio value.

    :param transactions: object with a ``turnover_value`` series (per-period
                         traded value).
    :param positions: pd.DataFrame of position market values.
    :param period: optional pandas resample rule (e.g. 'M'); traded value is
                   summed and portfolio value averaged over each period.
    :param average: when True, divide traded value by 2 so a round trip
                    counts once.
    :return: pd.Series of turnover rates, NaNs filled with 0.

    Bug fixed: ``resample(period, how=...)`` was removed from pandas
    (deprecated in 0.18, gone in 0.25); use the ``.resample(...).agg`` style.
    """
    tradedValue = transactions.turnover_value
    portfolioValue = positions.abs().sum(axis=1)
    # zero-value days (e.g. fully in cash) would blow up the ratio; replace
    # them with the series mean (note: the mean still includes the zeros,
    # preserving the original behaviour)
    portfolioValue[portfolioValue == 0] = portfolioValue.mean()
    if period:
        tradedValue = tradedValue.resample(period).sum()
        portfolioValue = portfolioValue.resample(period).mean()

    turnover = tradedValue / 2.0 if average else tradedValue
    turnoverRate = turnover.div(portfolioValue, axis='index')
    turnoverRate.fillna(0.0, inplace=True)
    return turnoverRate
createTranscationTearSheet 11 | from VisualPortfolio.Tears import createAllTearSheet 12 | from VisualPortfolio.Miscellaneous import portfolioAnalysis 13 | 14 | 15 | from VisualPortfolio.Env import DataSource 16 | from VisualPortfolio.Env import Settings 17 | 18 | __version__ = '0.2.5' 19 | -------------------------------------------------------------------------------- /img/1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wegamekinglc/VisualPortfolio/6d047069e771ef930c32d1a5c534de41995175fc/img/1.png -------------------------------------------------------------------------------- /img/2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wegamekinglc/VisualPortfolio/6d047069e771ef930c32d1a5c534de41995175fc/img/2.png -------------------------------------------------------------------------------- /img/3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wegamekinglc/VisualPortfolio/6d047069e771ef930c32d1a5c534de41995175fc/img/3.png -------------------------------------------------------------------------------- /img/4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wegamekinglc/VisualPortfolio/6d047069e771ef930c32d1a5c534de41995175fc/img/4.png -------------------------------------------------------------------------------- /notebooks/Overview of VisualPortfolio.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": false, 8 | "scrolled": true 9 | }, 10 | "outputs": [], 11 | "source": [ 12 | "%matplotlib inline\n", 13 | "from VisualPortfolio import createPerformanceTearSheet\n", 14 | "from DataAPI import api" 15 | ] 16 | }, 17 | { 18 | 
"cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "1. DataAPI与VisualPortfolio结合使用\n", 22 | "=========================\n", 23 | "***\n", 24 | "\n", 25 | "> 我们可以使用DataAPI获取某只证券的价格数据,简详细的绘制它的表现:" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": { 32 | "collapsed": true, 33 | "scrolled": true 34 | }, 35 | "outputs": [], 36 | "source": [ 37 | "sample_prices = api.GetEquityBarMin1('600000', '2012-10-01', '2015-11-09')" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": null, 43 | "metadata": { 44 | "collapsed": false, 45 | "scrolled": true 46 | }, 47 | "outputs": [], 48 | "source": [ 49 | "sample_prices.tail()" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": null, 55 | "metadata": { 56 | "collapsed": false, 57 | "scrolled": false 58 | }, 59 | "outputs": [], 60 | "source": [ 61 | "sample_prices[['closePrice']].plot(figsize=(14,8))" 62 | ] 63 | }, 64 | { 65 | "cell_type": "markdown", 66 | "metadata": {}, 67 | "source": [ 68 | "1.1 收益表现\n", 69 | "---------------" 70 | ] 71 | }, 72 | { 73 | "cell_type": "markdown", 74 | "metadata": {}, 75 | "source": [ 76 | "* ``prices``:需要进行分析的价格序列;\n", 77 | "* ``benchmark``:与之进行对比的指数;" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": null, 83 | "metadata": { 84 | "collapsed": false, 85 | "scrolled": false 86 | }, 87 | "outputs": [], 88 | "source": [ 89 | "perf_metric, perf_df, rollingRisk = createPerformanceTearSheet(prices=sample_prices['closePrice'], benchmark='000300.zicn')" 90 | ] 91 | }, 92 | { 93 | "cell_type": "markdown", 94 | "metadata": {}, 95 | "source": [ 96 | "2. 
证券头寸变化分析\n", 97 | "-----------------------\n", 98 | "***" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": null, 104 | "metadata": { 105 | "collapsed": true 106 | }, 107 | "outputs": [], 108 | "source": [ 109 | "import pandas as pd\n", 110 | "from VisualPortfolio import createPostionTearSheet" 111 | ] 112 | }, 113 | { 114 | "cell_type": "markdown", 115 | "metadata": {}, 116 | "source": [ 117 | "我们可以读入一个我们已有的position book(表中的数据都是改证券当日净值):" 118 | ] 119 | }, 120 | { 121 | "cell_type": "code", 122 | "execution_count": null, 123 | "metadata": { 124 | "collapsed": false 125 | }, 126 | "outputs": [], 127 | "source": [ 128 | "positions = pd.read_csv('data/positions.csv', index_col=0, parse_dates=[0])" 129 | ] 130 | }, 131 | { 132 | "cell_type": "code", 133 | "execution_count": null, 134 | "metadata": { 135 | "collapsed": false, 136 | "scrolled": false 137 | }, 138 | "outputs": [], 139 | "source": [ 140 | "positions[50:60]" 141 | ] 142 | }, 143 | { 144 | "cell_type": "markdown", 145 | "metadata": {}, 146 | "source": [ 147 | "绘制头寸分析图:" 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": null, 153 | "metadata": { 154 | "collapsed": false, 155 | "scrolled": false 156 | }, 157 | "outputs": [], 158 | "source": [ 159 | "positions = createPostionTearSheet(positions)" 160 | ] 161 | }, 162 | { 163 | "cell_type": "markdown", 164 | "metadata": {}, 165 | "source": [ 166 | "当然这个内容还需要补充。" 167 | ] 168 | }, 169 | { 170 | "cell_type": "markdown", 171 | "metadata": {}, 172 | "source": [ 173 | "3. 
交易行为分析\n", 174 | "==================\n", 175 | "***" 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "execution_count": null, 181 | "metadata": { 182 | "collapsed": false 183 | }, 184 | "outputs": [], 185 | "source": [ 186 | "from VisualPortfolio import createTranscationTearSheet\n", 187 | "transactions = pd.read_csv('data/transactions.csv', index_col=0, parse_dates=[0])" 188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": null, 193 | "metadata": { 194 | "collapsed": false 195 | }, 196 | "outputs": [], 197 | "source": [ 198 | "transactions.head()" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": null, 204 | "metadata": { 205 | "collapsed": false 206 | }, 207 | "outputs": [], 208 | "source": [ 209 | "turnover_rate = createTranscationTearSheet(positions=positions, transactions=transactions)" 210 | ] 211 | }, 212 | { 213 | "cell_type": "code", 214 | "execution_count": null, 215 | "metadata": { 216 | "collapsed": false 217 | }, 218 | "outputs": [], 219 | "source": [ 220 | "turnover_rate.tail()" 221 | ] 222 | }, 223 | { 224 | "cell_type": "markdown", 225 | "metadata": {}, 226 | "source": [ 227 | "4. 
所有的都已经整合入``AlgoTrading``\n", 228 | "=======================\n", 229 | "***" 230 | ] 231 | }, 232 | { 233 | "cell_type": "code", 234 | "execution_count": null, 235 | "metadata": { 236 | "collapsed": true 237 | }, 238 | "outputs": [], 239 | "source": [ 240 | "import datetime as dt\n", 241 | "\n", 242 | "from AlgoTrading.Strategy.Strategy import Strategy\n", 243 | "from AlgoTrading.Backtest import strategyRunner\n", 244 | "from AlgoTrading.Backtest import DataSource\n", 245 | "from AlgoTrading.Data import set_universe\n", 246 | "from PyFin.API import MA\n", 247 | "from PyFin.API import MAX\n", 248 | "from PyFin.API import MIN\n", 249 | "\n", 250 | "\n", 251 | "class MovingAverageCrossStrategy(Strategy):\n", 252 | " def __init__(self):\n", 253 | " filtering = (MAX(10, 'close') / MIN(10, 'close')) > 1.00\n", 254 | " indicator = MA(10, 'close') - MA(120, 'close')\n", 255 | " self.signal = indicator[filtering]\n", 256 | "\n", 257 | " def handle_data(self):\n", 258 | " for s in self.universe:\n", 259 | " amount = self.avaliableForSale(s)\n", 260 | " if self.signal[s] > 0. and self.secPos[s] == 0:\n", 261 | " self.order(s, 1, quantity=100)\n", 262 | " elif self.signal[s] < 0. 
and amount != 0:\n", 263 | " self.order(s, -1, quantity=amount)\n", 264 | "\n", 265 | "\n", 266 | "def run_example():\n", 267 | " universe = set_universe('000300.zicn')\n", 268 | " initialCapital = 100000.0\n", 269 | " startDate = dt.datetime(2006, 10, 1)\n", 270 | " endDate = dt.datetime(2015, 10, 1)\n", 271 | "\n", 272 | " strategyRunner(userStrategy=MovingAverageCrossStrategy,\n", 273 | " initialCapital=initialCapital,\n", 274 | " symbolList=universe,\n", 275 | " startDate=startDate,\n", 276 | " endDate=endDate,\n", 277 | " dataSource=DataSource.DXDataCenter,\n", 278 | " freq=0,\n", 279 | " benchmark='000300.zicn',\n", 280 | " logLevel='critical',\n", 281 | " saveFile=True,\n", 282 | " plot=True)" 283 | ] 284 | }, 285 | { 286 | "cell_type": "code", 287 | "execution_count": null, 288 | "metadata": { 289 | "collapsed": false, 290 | "scrolled": false 291 | }, 292 | "outputs": [], 293 | "source": [ 294 | "startTime = dt.datetime.now()\n", 295 | "print(\"Start: %s\" % startTime)\n", 296 | "res = run_example()\n", 297 | "endTime = dt.datetime.now()\n", 298 | "print(\"End : %s\" % endTime)\n", 299 | "print(\"Elapsed: %s\" % (endTime - startTime))" 300 | ] 301 | }, 302 | { 303 | "cell_type": "code", 304 | "execution_count": null, 305 | "metadata": { 306 | "collapsed": true 307 | }, 308 | "outputs": [], 309 | "source": [] 310 | } 311 | ], 312 | "metadata": { 313 | "kernelspec": { 314 | "display_name": "Python 2", 315 | "language": "python", 316 | "name": "python2" 317 | }, 318 | "language_info": { 319 | "codemirror_mode": { 320 | "name": "ipython", 321 | "version": 2 322 | }, 323 | "file_extension": ".py", 324 | "mimetype": "text/x-python", 325 | "name": "python", 326 | "nbconvert_exporter": "python", 327 | "pygments_lexer": "ipython2", 328 | "version": "2.7.10" 329 | } 330 | }, 331 | "nbformat": 4, 332 | "nbformat_minor": 0 333 | } 334 | -------------------------------------------------------------------------------- /requirements/py2.txt: 
# -*- coding: utf-8 -*-
u"""
Created on 2015-11-10

@author: cheng.li

Packaging script for VisualPortfolio.
"""
import sys
import io
from setuptools import setup

PACKAGE = "VisualPortfolio"
NAME = "VisualPortfolio"
VERSION = "0.2.5"
DESCRIPTION = "VisualPortfolio " + VERSION
AUTHOR = "cheng li"
AUTHOR_EMAIL = "wegamekinglc@hotmail.com"
URL = 'https://github.com/ChinaQuants/VisualPortfolio'

# py2 additionally needs the enum34 backport (see requirements/py2.txt)
if sys.version_info > (3, 0, 0):
    requirements = "requirements/py3.txt"
else:
    requirements = "requirements/py2.txt"

# Bug fixed: the file handle was never closed and the raw file content was
# passed as one string; setuptools expects a list of requirement specifiers.
with io.open(requirements, encoding='utf8') as req_file:
    install_requires = [line.strip() for line in req_file if line.strip()]

setup(
    name=NAME,
    version=VERSION,
    description=DESCRIPTION,
    author=AUTHOR,
    author_email=AUTHOR_EMAIL,
    url=URL,
    packages=['VisualPortfolio'],
    install_requires=install_requires,
)