├── pyfolio
│   ├── py.typed
│   ├── _tests
│   │   ├── __init__.py
│   │   ├── test_data
│   │   │   ├── __init__.py
│   │   │   ├── intercepts.csv
│   │   │   ├── test_pos.csv.gz
│   │   │   ├── test_txn.csv.gz
│   │   │   ├── test_returns.csv.gz
│   │   │   ├── test_gross_lev.csv.gz
│   │   │   ├── residuals.csv
│   │   │   ├── positions.csv
│   │   │   └── returns.csv
│   │   ├── matplotlibrc
│   │   ├── test_txn.py
│   │   ├── test_capacity.py
│   │   ├── test_pos.py
│   │   ├── test_tears.py
│   │   ├── test_round_trips.py
│   │   ├── test_quantrocket_moonshot.py
│   │   └── test_timeseries.py
│   ├── examples
│   │   └── pydata_stack-4-finance.jpg
│   ├── _seaborn.py
│   ├── ipycompat.py
│   ├── deprecate.py
│   ├── quantrocket_utils.py
│   ├── __init__.py
│   ├── interesting_periods.py
│   ├── txn.py
│   ├── quantrocket_zipline.py
│   ├── pos.py
│   ├── capacity.py
│   ├── quantrocket_moonshot.py
│   ├── utils.py
│   └── round_trips.py
├── .gitattributes
├── MANIFEST.in
├── setup.cfg
├── .gitignore
├── setup.py
├── README.md
├── LICENSE
└── WHATSNEW.md
/pyfolio/py.typed:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/pyfolio/_tests/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/pyfolio/_tests/test_data/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/pyfolio/_tests/matplotlibrc:
--------------------------------------------------------------------------------
1 | backend : Agg
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | pyfolio/_version.py export-subst
2 | 
--------------------------------------------------------------------------------
/pyfolio/_tests/test_data/intercepts.csv:
--------------------------------------------------------------------------------
1 | 19001,0.0
2 | 19002,0.0
3 | 
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include versioneer.py
2 | include pyfolio/_version.py
3 | include LICENSE
4 | 
--------------------------------------------------------------------------------
/pyfolio/_tests/test_data/test_pos.csv.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/quantrocket-llc/pyfolio/HEAD/pyfolio/_tests/test_data/test_pos.csv.gz
--------------------------------------------------------------------------------
/pyfolio/_tests/test_data/test_txn.csv.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/quantrocket-llc/pyfolio/HEAD/pyfolio/_tests/test_data/test_txn.csv.gz
--------------------------------------------------------------------------------
/pyfolio/_tests/test_data/test_returns.csv.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/quantrocket-llc/pyfolio/HEAD/pyfolio/_tests/test_data/test_returns.csv.gz
--------------------------------------------------------------------------------
/pyfolio/examples/pydata_stack-4-finance.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/quantrocket-llc/pyfolio/HEAD/pyfolio/examples/pydata_stack-4-finance.jpg
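The gzipped CSVs above are binary test fixtures, linked rather than inlined. The test modules load them with `gzip.open` plus `pandas.read_csv` and localize them to UTC, e.g. (a sketch mirroring `_tests/test_pos.py`; the path assumes the repo root as working directory):

```python
import gzip
from pandas import read_csv
from pyfolio.utils import to_utc, to_series

# Load the returns fixture; it parses as a one-column DataFrame
test_returns = read_csv(
    gzip.open('pyfolio/_tests/test_data/test_returns.csv.gz'),
    index_col=0, parse_dates=True)
# Convert to a tz-aware (UTC) Series, as the test suite does
test_returns = to_series(to_utc(test_returns))
```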
-------------------------------------------------------------------------------- /pyfolio/_tests/test_data/test_gross_lev.csv.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/quantrocket-llc/pyfolio/HEAD/pyfolio/_tests/test_data/test_gross_lev.csv.gz -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | license_file = LICENSE 3 | 4 | # See the docstring in versioneer.py for instructions. Note that you must 5 | # re-run 'versioneer.py setup' after changing this section, and commit the 6 | # resulting files. 7 | [versioneer] 8 | VCS=git 9 | style=pep440 10 | versionfile_source=pyfolio/_version.py 11 | versionfile_build=pyfolio/_version.py 12 | tag_prefix= 13 | parentdir_prefix=pyfolio- 14 | -------------------------------------------------------------------------------- /pyfolio/_seaborn.py: -------------------------------------------------------------------------------- 1 | """Wrapper module around seaborn to suppress warnings on import. 2 | 3 | This should be removed when seaborn stops raising: 4 | 5 | UserWarning: axes.color_cycle is deprecated and replaced with axes.prop_cycle; 6 | please use the latter. 7 | """ 8 | import warnings 9 | 10 | 11 | with warnings.catch_warnings(): 12 | warnings.filterwarnings( 13 | 'ignore', 14 | 'axes.color_cycle is deprecated', 15 | UserWarning, 16 | 'matplotlib', 17 | ) 18 | from seaborn import * # noqa 19 | -------------------------------------------------------------------------------- /pyfolio/ipycompat.py: -------------------------------------------------------------------------------- 1 | import IPython 2 | 3 | IPY_MAJOR = IPython.version_info[0] 4 | if IPY_MAJOR < 3: 5 | raise ImportError("IPython version %d is not supported." % IPY_MAJOR) 6 | 7 | IPY3 = (IPY_MAJOR == 3) 8 | 9 | # IPython underwent a major refactor between versions 3 and 4. Many of the 10 | # imports in version 4 have aliases to their old locations in 3, but they raise 11 | # noisy deprecation warnings. By conditionally importing here, we can support 12 | # older versions without triggering warnings for users on new versions. 13 | if IPY3: 14 | from IPython.nbformat import read 15 | else: 16 | from nbformat import read 17 | 18 | 19 | __all__ = ['read'] 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | 26 | # PyInstaller 27 | # Usually these files are written by a python script from a template 28 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
29 | *.manifest 30 | *.spec 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | 36 | # Unit test / coverage reports 37 | htmlcov/ 38 | .tox/ 39 | .coverage 40 | .coverage.* 41 | .cache 42 | nosetests.xml 43 | coverage.xml 44 | *,cover 45 | 46 | # Translations 47 | *.mo 48 | *.pot 49 | 50 | # Django stuff: 51 | *.log 52 | 53 | # Sphinx documentation 54 | docs/_build/ 55 | 56 | # PyBuilder 57 | target/ 58 | 59 | # VIM 60 | *.sw? 61 | 62 | # IPython notebook checkpoints 63 | .ipynb_checkpoints/ 64 | -------------------------------------------------------------------------------- /pyfolio/deprecate.py: -------------------------------------------------------------------------------- 1 | """Utilities for marking deprecated functions.""" 2 | # Copyright 2018 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | import warnings 17 | from functools import wraps 18 | 19 | 20 | def deprecated(msg=None, stacklevel=2): 21 | """ 22 | Used to mark a function as deprecated. 23 | Parameters 24 | ---------- 25 | msg : str 26 | The message to display in the deprecation warning. 27 | stacklevel : int 28 | How far up the stack the warning needs to go, before 29 | showing the relevant calling lines. 30 | Usage 31 | ----- 32 | @deprecated(msg='function_a is deprecated! Use function_b instead.') 33 | def function_a(*args, **kwargs): 34 | """ 35 | def deprecated_dec(fn): 36 | @wraps(fn) 37 | def wrapper(*args, **kwargs): 38 | warnings.warn( 39 | msg or "Function %s is deprecated." % fn.__name__, 40 | category=DeprecationWarning, 41 | stacklevel=stacklevel 42 | ) 43 | return fn(*args, **kwargs) 44 | return wrapper 45 | return deprecated_dec 46 | -------------------------------------------------------------------------------- /pyfolio/quantrocket_utils.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 QuantRocket LLC - All Rights Reserved 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import pandas as pd 16 | 17 | def pad_initial(df): 18 | """ 19 | Pads returns with leading zeroes to ensure at least 126 days, as required 20 | for pyfolio 6-month rolling windows. 
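    If the input already spans more than 126 dates, it is returned
    unchanged; otherwise leading business days with zero values are
    prepended to bring the index up to 127 dates.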
21 | 
22 |     Parameters
23 |     ----------
24 |     df : pd.DataFrame or pd.Series
25 |         returns (or other daily time series) indexed by date
26 | 
27 |     Returns
28 |     -------
29 |     pd.DataFrame or pd.Series, padded with leading zeros when needed
30 |     """
31 |     if df.index.size > 126:
32 |         return df
33 | 
34 |     num_dates = (127 - df.index.size) + 1 # +1 b/c the union index will have 1 overlapping date
35 |     import warnings
36 |     msg = (
37 |         "{0} index has only {1} dates ({2} - {3}) but must "
38 |         "have at least 127 dates for pyfolio 6-month rolling windows "
39 |         "to chart properly, padding {0} with {4} initial zeros").format(
40 |             df.name or "DataFrame",
41 |             df.index.size,
42 |             df.index.min().isoformat(),
43 |             df.index.max().isoformat(),
44 |             127 - df.index.size)
45 |     warnings.warn(msg)
46 | 
47 |     pad_idx = pd.bdate_range(end=df.index.min(), periods=num_dates)
48 |     idx = pad_idx.union(df.index)
49 |     return df.reindex(index=idx).fillna(0)
--------------------------------------------------------------------------------
/pyfolio/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Performance and risk analysis library for financial portfolios.
3 | 
4 | Functions
5 | ---------
6 | from_zipline_csv
7 |     Create a full tear sheet from a Zipline backtest results CSV.
8 | 
9 | from_moonshot_csv
10 |     Create a full tear sheet from a Moonshot backtest results CSV.
11 | 
12 | create_full_tear_sheet
13 |     Generate a number of tear sheets that are useful for analyzing a
14 |     strategy's performance.
15 | 
16 | create_capacity_tear_sheet
17 |     Generate a report detailing portfolio size constraints set by the
18 |     least liquid tickers.
19 | 
20 | create_interesting_times_tear_sheet
21 |     Generate a number of returns plots around interesting points in time,
22 |     like the flash crash and 9/11.
23 | 
24 | create_position_tear_sheet
25 |     Generate a number of plots for analyzing a strategy's positions and holdings.
26 | 
27 | create_returns_tear_sheet
28 |     Generate a number of plots for analyzing a strategy's returns.
29 | 
30 | create_round_trip_tear_sheet
31 |     Generate a number of figures and plots describing the duration,
32 |     frequency, and profitability of trade "round trips."
33 | 
34 | create_simple_tear_sheet
35 |     Simpler version of `create_full_tear_sheet`; generate summary performance
36 |     statistics and important plots as a single image.
37 | 
38 | create_txn_tear_sheet
39 |     Generate a number of plots for analyzing a strategy's transactions.
40 | """
41 | from . import utils
42 | from . import timeseries
43 | from . import pos
44 | from . import txn
45 | from . import interesting_periods
46 | from . import capacity
47 | from . import round_trips
48 | 
49 | from .tears import *  # noqa
50 | from .plotting import *  # noqa
51 | from ._version import get_versions
52 | from .quantrocket_moonshot import *  # noqa
53 | from .quantrocket_zipline import from_zipline_csv  # noqa
54 | 
55 | __version__ = get_versions()['version']
56 | del get_versions
57 | 
58 | __all__ = [
59 |     'utils',
60 |     'timeseries',
61 |     'pos',
62 |     'txn',
63 |     'interesting_periods',
64 |     'capacity',
65 |     'round_trips',
66 |     'from_zipline_csv',
67 |     'from_moonshot_csv'
68 | ]
69 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import sys
3 | 
4 | from setuptools import setup, find_packages
5 | 
6 | import versioneer
7 | 
8 | DISTNAME = 'pyfolio'
9 | DESCRIPTION = ("pyfolio is a Python library for performance "
10 |                "and risk analysis of financial portfolios")
11 | LONG_DESCRIPTION = """pyfolio is a Python library for performance and risk analysis of
12 | financial portfolios developed by `Quantopian Inc`_. It works well with the
13 | `Zipline`_ open source backtesting library.
14 | 
15 | At the core of pyfolio is a so-called tear sheet that consists of
16 | various individual plots that provide a comprehensive performance
17 | overview of a portfolio.
18 | 
19 | .. _Quantopian Inc: https://www.quantopian.com
20 | .. _Zipline: http://zipline.io
21 | """
22 | MAINTAINER = 'Quantopian Inc'
23 | MAINTAINER_EMAIL = 'opensource@quantopian.com'
24 | AUTHOR = 'Quantopian Inc'
25 | AUTHOR_EMAIL = 'opensource@quantopian.com'
26 | URL = "https://github.com/quantopian/pyfolio"
27 | LICENSE = "Apache License, Version 2.0"
28 | 
29 | classifiers = ['Development Status :: 4 - Beta',
30 |                'Programming Language :: Python',
31 |                'Programming Language :: Python :: 2',
32 |                'Programming Language :: Python :: 3',
33 |                'Programming Language :: Python :: 2.7',
34 |                'Programming Language :: Python :: 3.4',
35 |                'Programming Language :: Python :: 3.5',
36 |                'License :: OSI Approved :: Apache Software License',
37 |                'Intended Audience :: Science/Research',
38 |                'Topic :: Scientific/Engineering',
39 |                'Topic :: Scientific/Engineering :: Mathematics',
40 |                'Operating System :: OS Independent']
41 | 
42 | if __name__ == "__main__":
43 |     setup(
44 |         name=DISTNAME,
45 |         cmdclass=versioneer.get_cmdclass(),
46 |         version=versioneer.get_version(),
47 |         maintainer=MAINTAINER,
48 |         maintainer_email=MAINTAINER_EMAIL,
49 |         description=DESCRIPTION,
50 |         license=LICENSE,
51 |         url=URL,
52 |         long_description=LONG_DESCRIPTION,
53 |         packages=find_packages(include=['pyfolio', 'pyfolio.*']),
54 |         package_data={
55 |             'pyfolio._tests.test_data': ['*.csv', '*.gz'],
56 |             "pyfolio": ["py.typed"],
57 |         },
58 |         classifiers=classifiers
59 |     )
60 | 
--------------------------------------------------------------------------------
/pyfolio/interesting_periods.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2016 Quantopian, Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | 
16 | """Generates a list of historical event dates that may have had
17 | significant impact on markets. See extract_interesting_date_ranges."""
18 | 
19 | import pandas as pd
20 | 
21 | from collections import OrderedDict
22 | 
23 | PERIODS = OrderedDict()
24 | # Dotcom bubble
25 | PERIODS['Dotcom'] = (pd.Timestamp('20000310'), pd.Timestamp('20000910'))
26 | 
27 | # Lehman Brothers
28 | PERIODS['Lehman'] = (pd.Timestamp('20080801'), pd.Timestamp('20081001'))
29 | 
30 | # 9/11
31 | PERIODS['9/11'] = (pd.Timestamp('20010911'), pd.Timestamp('20011011'))
32 | 
33 | # US downgrade and European Debt Crisis (Aug 5, 2011)
34 | PERIODS[
35 |     'US downgrade/European Debt Crisis'] = (pd.Timestamp('20110805'),
36 |                                             pd.Timestamp('20110905'))
37 | 
38 | # Fukushima meltdown (Mar 16, 2011)
39 | PERIODS['Fukushima'] = (pd.Timestamp('20110316'), pd.Timestamp('20110416'))
40 | 
41 | # US Housing Bubble (Jan 8, 2003)
42 | PERIODS['US Housing'] = (
43 |     pd.Timestamp('20030108'), pd.Timestamp('20030208'))
44 | 
45 | # ECB (EZB) interest rate event, September 2012
46 | PERIODS['EZB IR Event'] = (
47 |     pd.Timestamp('20120910'), pd.Timestamp('20121010'))
48 | 
49 | # August 2007, March and September of 2008, Q1 & Q2 2009
50 | PERIODS['Aug07'] = (pd.Timestamp('20070801'), pd.Timestamp('20070901'))
51 | PERIODS['Mar08'] = (pd.Timestamp('20080301'), pd.Timestamp('20080401'))
52 | PERIODS['Sept08'] = (pd.Timestamp('20080901'), pd.Timestamp('20081001'))
53 | PERIODS['2009Q1'] = (pd.Timestamp('20090101'), pd.Timestamp('20090301'))
54 | PERIODS['2009Q2'] = (pd.Timestamp('20090301'), pd.Timestamp('20090601'))
55 | 
56 | # Flash Crash (May 6, 2010 + 1 week post)
57 | PERIODS['Flash Crash'] = (
58 |     pd.Timestamp('20100505'), pd.Timestamp('20100510'))
59 | 
60 | # April and October 2014
61 | PERIODS['Apr14'] = (pd.Timestamp('20140401'), pd.Timestamp('20140501')) 62 | PERIODS['Oct14'] = (pd.Timestamp('20141001'), pd.Timestamp('20141101')) 63 | 64 | # Market down-turn in August/Sept 2015 65 | PERIODS['Fall2015'] = (pd.Timestamp('20150815'), pd.Timestamp('20150930')) 66 | 67 | # Market down-turn in late 2018 68 | PERIODS['2018 Bear Market'] = (pd.Timestamp('20180901'), pd.Timestamp('20190131')) 69 | 70 | # Covid-19 crash and recovery 71 | PERIODS['COVID-19'] = (pd.Timestamp('20200201'), pd.Timestamp('20200930')) 72 | 73 | # 2022 inflation and rate tightening 74 | PERIODS['2022 Inflation'] = (pd.Timestamp('20220103'), pd.Timestamp('20230428')) 75 | 76 | # Market regimes 77 | PERIODS['Low Volatility Bull Market'] = (pd.Timestamp('20050101'), 78 | pd.Timestamp('20070801')) 79 | 80 | PERIODS['GFC Crash'] = (pd.Timestamp('20070801'), 81 | pd.Timestamp('20090401')) 82 | 83 | PERIODS['GFC Recovery'] = (pd.Timestamp('20090401'), 84 | pd.Timestamp('20130101')) 85 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![pyfolio](https://media.quantopian.com/logos/open_source/pyfolio-logo-03.png "pyfolio") 2 | 3 | # pyfolio 4 | 5 | [![Join the chat at https://gitter.im/quantopian/pyfolio](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/quantopian/pyfolio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) 6 | [![build status](https://travis-ci.org/quantopian/pyfolio.png?branch=master)](https://travis-ci.org/quantopian/pyfolio) 7 | 8 | pyfolio is a Python library for performance and risk analysis of 9 | financial portfolios developed by 10 | [Quantopian Inc](https://www.quantopian.com). It works well with the 11 | [Zipline](https://www.zipline.io/) open source backtesting library. 12 | Quantopian also offers a [fully managed service for professionals](https://factset.quantopian.com) 13 | that includes Zipline, Alphalens, Pyfolio, FactSet data, and more. 14 | 15 | At the core of pyfolio is a so-called tear sheet that consists of 16 | various individual plots that provide a comprehensive image of the 17 | performance of a trading algorithm. Here's an example of a simple tear 18 | sheet analyzing a strategy: 19 | 20 | ![simple tear 0](https://github.com/quantopian/pyfolio/raw/master/docs/simple_tear_0.png "Example tear sheet created from a Zipline algo") 21 | ![simple tear 1](https://github.com/quantopian/pyfolio/raw/master/docs/simple_tear_1.png "Example tear sheet created from a Zipline algo") 22 | 23 | Also see [slides of a talk about 24 | pyfolio](https://nbviewer.jupyter.org/format/slides/github/quantopian/pyfolio/blob/master/pyfolio/examples/pyfolio_talk_slides.ipynb#/). 25 | 26 | ## Installation 27 | 28 | To install pyfolio, run: 29 | 30 | ```bash 31 | pip install pyfolio 32 | ``` 33 | 34 | #### Development 35 | 36 | For development, you may want to use a [virtual environment](https://docs.python-guide.org/en/latest/dev/virtualenvs/) to avoid dependency conflicts between pyfolio and other Python projects you have. To get set up with a virtual env, run: 37 | ```bash 38 | mkvirtualenv pyfolio 39 | ``` 40 | 41 | Next, clone this git repository and run `python setup.py develop` 42 | and edit the library files directly. 
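To sanity-check a development install, generating a returns tear sheet from synthetic data should work (a sketch; any daily returns series will do):

```python
import numpy as np
import pandas as pd
import pyfolio as pf

# Toy daily returns, just to verify the install
dates = pd.date_range('2020-01-01', periods=300, tz='UTC')
returns = pd.Series(np.random.randn(300) / 100, index=dates)
pf.create_returns_tear_sheet(returns)
```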
43 | 
44 | #### Matplotlib on OSX
45 | 
46 | If you are on OSX and using a non-framework build of Python, you may need to set your backend:
47 | ```bash
48 | echo "backend: TkAgg" > ~/.matplotlib/matplotlibrc
49 | ```
50 | 
51 | ## Usage
52 | 
53 | A good way to get started is to run the pyfolio examples in
54 | a [Jupyter notebook](https://jupyter.org/). To do this, you first want to
55 | start a Jupyter notebook server:
56 | 
57 | ```bash
58 | jupyter notebook
59 | ```
60 | 
61 | From the notebook list page, navigate to the pyfolio examples directory
62 | and open a notebook. Execute the code in a notebook cell by clicking on it
63 | and hitting Shift+Enter.
64 | 
65 | 
66 | ## Questions?
67 | 
68 | If you find a bug, feel free to [open an issue](https://github.com/quantopian/pyfolio/issues) in this repository.
69 | 
70 | You can also join our [mailing list](https://groups.google.com/forum/#!forum/pyfolio) or
71 | our [Gitter channel](https://gitter.im/quantopian/pyfolio).
72 | 
73 | ## Support
74 | 
75 | Please [open an issue](https://github.com/quantopian/pyfolio/issues/new) for support.
76 | 
77 | ## Contributing
78 | 
79 | If you'd like to contribute, a great place to look is the [issues marked with help-wanted](https://github.com/quantopian/pyfolio/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22).
80 | 
81 | For a list of core developers and outside collaborators, see [the GitHub contributors list](https://github.com/quantopian/pyfolio/graphs/contributors).
82 | 
--------------------------------------------------------------------------------
/pyfolio/_tests/test_txn.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | 
3 | from pandas import (
4 |     Series,
5 |     DataFrame,
6 |     date_range
7 | )
8 | from pandas.testing import (assert_series_equal)
9 | 
10 | from pyfolio.txn import (get_turnover,
11 |                          adjust_returns_for_slippage)
12 | 
13 | 
14 | class TransactionsTestCase(TestCase):
15 | 
16 |     def test_get_turnover(self):
17 |         """
18 |         Tests turnover using a 20 day period.
19 | 
20 |         With no transactions, the turnover should be 0.
21 | 
22 |         With $20 traded each day against an average AGB of $25,
23 |         the daily turnover rate should be 0.8.
24 |         """
25 |         dates = date_range(start='2015-01-01', freq='D', periods=20, tz="UTC")
26 | 
27 |         # In this test, there is one sid (0) and a cash column
28 |         positions = DataFrame([[10.0, 10.0]]*len(dates),
29 |                               columns=[0, 'cash'], index=dates)
30 | 
31 |         # Set every other non-cash position to 40
32 |         positions.iloc[::2, 0] = 40
33 | 
34 |         transactions = DataFrame(data=[],
35 |                                  columns=['sid', 'amount', 'price', 'symbol'],
36 |                                  index=dates)
37 | 
38 |         # Test with no transactions
39 |         expected = Series([0.0]*len(dates), index=dates)
40 |         result = get_turnover(positions, transactions)
41 |         assert_series_equal(result, expected)
42 | 
43 |         transactions = DataFrame(data=[[1, 1, 10, 0]]*len(dates) +
44 |                                  [[2, -1, 10, 0]]*len(dates),
45 |                                  columns=['sid', 'amount', 'price', 'symbol'],
46 |                                  index=dates.append(dates)).sort_index()
47 | 
48 |         # Turnover is more on day 1, because the day 0 AGB is set to zero
49 |         # in get_turnover. On most days, we get 0.8 because we have 20
50 |         # transacted and mean(10, 40) = 25, so 20/25.
51 | expected = Series([1.0] + [0.8] * (len(dates) - 1), index=dates) 52 | expected.index.freq = None 53 | result = get_turnover(positions, transactions) 54 | 55 | assert_series_equal(result, expected) 56 | 57 | # Test with denominator = 'portfolio_value' 58 | result = get_turnover(positions, transactions, 59 | denominator='portfolio_value') 60 | 61 | # Our portfolio value alternates between $20 and $50 so turnover 62 | # should alternate between 20/20 = 1.0 and 20/50 = 0.4. 63 | expected = Series([0.4, 1.0] * (int((len(dates) - 1) / 2) + 1), 64 | index=dates) 65 | 66 | assert_series_equal(result, expected) 67 | 68 | def test_adjust_returns_for_slippage(self): 69 | dates = date_range(start='2015-01-01', freq='D', periods=20, tz="UTC") 70 | 71 | positions = DataFrame([[0.0, 10.0]]*len(dates), 72 | columns=[0, 'cash'], index=dates) 73 | 74 | # 100% total, 50% average daily turnover 75 | transactions = DataFrame(data=[[1, 1, 10, 'A']]*len(dates), 76 | columns=['sid', 'amount', 'price', 'symbol'], 77 | index=dates) 78 | 79 | returns = Series([0.05]*len(dates), index=dates) 80 | # 0.001% slippage per dollar traded 81 | slippage_bps = 10 82 | expected = Series([0.049]*len(dates), index=dates) 83 | 84 | result = adjust_returns_for_slippage(returns, positions, 85 | transactions, slippage_bps) 86 | 87 | assert_series_equal(result, expected) 88 | -------------------------------------------------------------------------------- /pyfolio/_tests/test_capacity.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | from unittest import TestCase 3 | from parameterized import parameterized 4 | 5 | from pandas import ( 6 | Series, 7 | DataFrame, 8 | date_range, 9 | concat 10 | ) 11 | from datetime import datetime 12 | from pandas.testing import (assert_frame_equal, 13 | assert_series_equal) 14 | 15 | from pyfolio.capacity import (days_to_liquidate_positions, 16 | get_max_days_to_liquidate_by_ticker, 17 | get_low_liquidity_transactions, 18 | daily_txns_with_bar_data, 19 | apply_slippage_penalty) 20 | 21 | 22 | class CapacityTestCase(TestCase): 23 | dates = date_range(start='2015-01-01', freq='D', periods=3) 24 | dates.freq = None 25 | 26 | positions = DataFrame([[1.0, 3.0, 0.0], 27 | [0.0, 1.0, 1.0], 28 | [3.0, 0.0, 1.0]], 29 | columns=['A', 'B', 'cash'], index=dates) 30 | 31 | transactions = DataFrame(data=[[1, 100000, 10, 'A']] * len(dates), 32 | columns=['sid', 'amount', 'price', 'symbol'], 33 | index=dates) 34 | 35 | volume = DataFrame([[1.0, 3.0], 36 | [2.0, 2.0], 37 | [3.0, 1.0]], 38 | columns=['A', 'B'], index=dates) 39 | volume.index.name = 'dt' 40 | volume = volume * 1000000 41 | volume['market_data'] = 'volume' 42 | price = DataFrame([[1.0, 1.0]] * len(dates), 43 | columns=['A', 'B'], index=dates) 44 | price.index.name = 'dt' 45 | price['market_data'] = 'price' 46 | market_data = concat([volume, price]).reset_index().set_index( 47 | ['dt', 'market_data']) 48 | 49 | def test_days_to_liquidate_positions(self): 50 | dtlp = days_to_liquidate_positions(self.positions, 51 | self.market_data, 52 | max_bar_consumption=1, 53 | capital_base=1e6, 54 | mean_volume_window=1) 55 | 56 | expected = DataFrame([[0.0, .5/3], 57 | [0.75/2, 0.0]], 58 | columns=['A', 'B'], 59 | index=self.dates[1:]) 60 | assert_frame_equal(dtlp, expected) 61 | 62 | def test_get_max_days_to_liquidate_by_ticker(self): 63 | 64 | mdtl = get_max_days_to_liquidate_by_ticker(self.positions, 65 | self.market_data, 66 | max_bar_consumption=1, 67 | capital_base=1e6, 68 | 
mean_volume_window=1) 69 | 70 | expected = DataFrame([[datetime(2015, 1, 3), .75/2, 75.], 71 | [datetime(2015, 1, 2), .5/3, 50.]], 72 | columns=[ 73 | 'date', 'days_to_liquidate', 'pos_alloc_pct'], 74 | index=['A', 'B']) 75 | expected.index.name = 'symbol' 76 | 77 | assert_frame_equal(mdtl, expected) 78 | 79 | @parameterized.expand([(DataFrame([[datetime(2015, 1, 1), 100.], 80 | [datetime(2015, 1, 2), 100]], 81 | columns=['date', 'max_pct_bar_consumed'], 82 | index=['A', 'B']), None), 83 | (DataFrame([[datetime(2015, 1, 3), (1/3)*100.]], 84 | columns=['date', 'max_pct_bar_consumed'], 85 | index=['A']), 1)]) 86 | def test_get_low_liquidity_transactions(self, expected, last_n_days): 87 | txn_daily = DataFrame(data=[[1, 1000000, 1, 'A'], 88 | [2, 2000000, 1, 'B'], 89 | [1, 1000000, 1, 'A']], 90 | columns=['sid', 'amount', 'price', 'symbol'], 91 | index=self.dates) 92 | 93 | llt = get_low_liquidity_transactions(txn_daily, self.market_data, 94 | last_n_days=last_n_days) 95 | expected.index.name = 'symbol' 96 | assert_frame_equal(llt, expected) 97 | 98 | def test_daily_txns_with_bar_data(self): 99 | daily_txn = daily_txns_with_bar_data( 100 | self.transactions, self.market_data) 101 | expected = DataFrame(data=[['A', 100000, 1.0, 1000000.], 102 | ['A', 100000, 1.0, 2000000.], 103 | ['A', 100000, 1.0, 3000000.]], 104 | columns=['symbol', 'amount', 'price', 'volume'], 105 | index=self.dates) 106 | 107 | assert_frame_equal(daily_txn, expected) 108 | 109 | @parameterized.expand([(1000000, 1, [0.9995, 0.9999375, 0.99998611]), 110 | (10000000, 1, [0.95, 0.99375, 0.998611]), 111 | (100000, 1, [0.999995, 0.999999375, 0.9999998611]), 112 | (1000000, .1, [0.99995, 0.99999375, 0.999998611])]) 113 | def test_apply_slippage_penalty(self, starting_base, impact, 114 | expected_adj_returns): 115 | returns = Series([1., 1., 1.], index=self.dates) 116 | daily_txn = daily_txns_with_bar_data( 117 | self.transactions, self.market_data) 118 | 119 | adj_returns = apply_slippage_penalty( 120 | returns, daily_txn, starting_base, 1000000, impact=impact) 121 | expected_adj_returns = Series(expected_adj_returns, index=self.dates) 122 | 123 | assert_series_equal(adj_returns, expected_adj_returns) 124 | -------------------------------------------------------------------------------- /pyfolio/txn.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2016 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | from __future__ import division 16 | 17 | import pandas as pd 18 | 19 | 20 | def map_transaction(txn): 21 | """ 22 | Maps a single transaction row to a dictionary. 23 | 24 | Parameters 25 | ---------- 26 | txn : pd.DataFrame 27 | A single transaction object to convert to a dictionary. 28 | 29 | Returns 30 | ------- 31 | dict 32 | Mapped transaction. 
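
    Examples
    --------
    A sketch with a minimal transaction record (these are the keys the
    function reads)::

        >>> map_transaction({'sid': 24, 'price': 10.0, 'order_id': 'a1',
        ...                  'amount': 100, 'resulting_amount': 100,
        ...                  'commission': 1.0, 'dt': '2020-01-02'})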
33 |     """
34 | 
35 |     if isinstance(txn['sid'], dict):
36 |         sid = txn['sid']['real_sid']
37 |         symbol = txn['sid']['symbol']
38 |     else:
39 |         sid = txn['sid']
40 |         symbol = txn['sid']
41 | 
42 |     return {'sid': sid,
43 |             'symbol': symbol,
44 |             'price': txn['price'],
45 |             'order_id': txn['order_id'],
46 |             'amount': txn['amount'],
47 |             'resulting_amount': txn['resulting_amount'],
48 |             'commission': txn['commission'],
49 |             'dt': txn['dt']}
50 | 
51 | 
52 | def make_transaction_frame(transactions):
53 |     """
54 |     Formats a transaction DataFrame.
55 | 
56 |     Parameters
57 |     ----------
58 |     transactions : pd.DataFrame
59 |         Contains improperly formatted transactional data.
60 | 
61 |     Returns
62 |     -------
63 |     df : pd.DataFrame
64 |         Daily transaction volume and dollar amount.
65 |         - See full explanation in tears.create_full_tear_sheet.
66 |     """
67 | 
68 |     transaction_list = []
69 |     for dt in transactions.index:
70 |         txns = transactions.loc[dt]
71 |         if len(txns) == 0:
72 |             continue
73 | 
74 |         for txn in txns:
75 |             txn = map_transaction(txn)
76 |             transaction_list.append(txn)
77 |     df = pd.DataFrame(sorted(transaction_list, key=lambda x: x['dt']))
78 |     df['txn_dollars'] = -df['amount'] * df['price']
79 | 
80 |     df.index = list(map(pd.Timestamp, df.dt.values))
81 |     return df
82 | 
83 | 
84 | def get_txn_vol(transactions):
85 |     """
86 |     Extract daily transaction data from set of transaction objects.
87 | 
88 |     Parameters
89 |     ----------
90 |     transactions : pd.DataFrame
91 |         Time series containing one row per symbol (and potentially
92 |         duplicate datetime indices) and columns for amount and
93 |         price.
94 | 
95 |     Returns
96 |     -------
97 |     pd.DataFrame
98 |         Daily transaction volume and number of shares.
99 |         - See full explanation in tears.create_full_tear_sheet.
100 |     """
101 | 
102 |     txn_norm = transactions.copy()
103 |     txn_norm.index = txn_norm.index.normalize()
104 |     amounts = txn_norm.amount.abs()
105 |     prices = txn_norm.price
106 |     values = amounts * prices
107 |     daily_amounts = amounts.groupby(amounts.index).sum()
108 |     daily_values = values.groupby(values.index).sum()
109 |     daily_amounts.name = "txn_shares"
110 |     daily_values.name = "txn_volume"
111 |     return pd.concat([daily_values, daily_amounts], axis=1)
112 | 
113 | 
114 | def adjust_returns_for_slippage(returns, positions, transactions,
115 |                                 slippage_bps):
116 |     """
117 |     Apply a slippage penalty for every dollar traded.
118 | 
119 |     Parameters
120 |     ----------
121 |     returns : pd.Series
122 |         Daily returns of the strategy, noncumulative.
123 |         - See full explanation in create_full_tear_sheet.
124 |     positions : pd.DataFrame
125 |         Daily net position values.
126 |         - See full explanation in create_full_tear_sheet.
127 |     transactions : pd.DataFrame
128 |         Prices and amounts of executed trades. One row per trade.
129 |         - See full explanation in create_full_tear_sheet.
130 |     slippage_bps: int/float
131 |         Basis points of slippage to apply.
132 | 
133 |     Returns
134 |     -------
135 |     pd.Series
136 |         Time series of daily returns, adjusted for slippage.
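
    Examples
    --------
    A sketch, assuming ``returns``, ``positions``, and ``transactions``
    are already loaded (e.g. from a backtest result)::

        >>> adjusted = adjust_returns_for_slippage(
        ...     returns, positions, transactions, slippage_bps=5)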
137 | """ 138 | 139 | slippage = 0.0001 * slippage_bps 140 | portfolio_value = positions.sum(axis=1) 141 | pnl = portfolio_value * returns 142 | traded_value = get_txn_vol(transactions).txn_volume 143 | slippage_dollars = traded_value * slippage 144 | adjusted_pnl = pnl.add(-slippage_dollars, fill_value=0) 145 | adjusted_returns = returns * adjusted_pnl / pnl 146 | 147 | return adjusted_returns 148 | 149 | 150 | def get_turnover(positions, transactions, denominator='AGB'): 151 | """ 152 | - Value of purchases and sales divided 153 | by either the actual gross book or the portfolio value 154 | for the time step. 155 | 156 | Parameters 157 | ---------- 158 | positions : pd.DataFrame 159 | Contains daily position values including cash. 160 | - See full explanation in tears.create_full_tear_sheet 161 | transactions : pd.DataFrame 162 | Prices and amounts of executed trades. One row per trade. 163 | - See full explanation in tears.create_full_tear_sheet 164 | denominator : str, optional 165 | Either 'AGB' or 'portfolio_value', default AGB. 166 | - AGB (Actual gross book) is the gross market 167 | value (GMV) of the specific algo being analyzed. 168 | Swapping out an entire portfolio of stocks for 169 | another will yield 200% turnover, not 100%, since 170 | transactions are being made for both sides. 171 | - We use average of the previous and the current end-of-period 172 | AGB to avoid singularities when trading only into or 173 | out of an entire book in one trading period. 174 | - portfolio_value is the total value of the algo's 175 | positions end-of-period, including cash. 176 | 177 | Returns 178 | ------- 179 | turnover_rate : pd.Series 180 | timeseries of portfolio turnover rates. 181 | """ 182 | 183 | txn_vol = get_txn_vol(transactions) 184 | traded_value = txn_vol.txn_volume 185 | 186 | if denominator == 'AGB': 187 | # Actual gross book is the same thing as the algo's GMV 188 | # We want our denom to be avg(AGB previous, AGB current) 189 | AGB = positions.drop('cash', axis=1).abs().sum(axis=1) 190 | denom = AGB.rolling(2).mean() 191 | 192 | # Since the first value of pd.rolling returns NaN, we 193 | # set our "day 0" AGB to 0. 194 | denom.iloc[0] = AGB.iloc[0] / 2 195 | elif denominator == 'portfolio_value': 196 | denom = positions.sum(axis=1) 197 | else: 198 | raise ValueError( 199 | "Unexpected value for denominator '{}'. The " 200 | "denominator parameter must be either 'AGB'" 201 | " or 'portfolio_value'.".format(denominator) 202 | ) 203 | 204 | denom.index = denom.index.normalize() 205 | turnover = traded_value.div(denom, axis='index') 206 | turnover = turnover.fillna(0) 207 | return turnover 208 | -------------------------------------------------------------------------------- /pyfolio/quantrocket_zipline.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 QuantRocket LLC - All Rights Reserved 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
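"""Tear sheet creation from QuantRocket Zipline backtest result CSVs.

Example (a sketch; assumes ``results.csv`` holds results downloaded from
a QuantRocket Zipline backtest)::

    import pyfolio as pf
    pf.from_zipline_csv("results.csv")
"""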
14 | 15 | from typing import Union, TextIO 16 | import pandas as pd 17 | from .tears import create_full_tear_sheet 18 | from .quantrocket_utils import pad_initial 19 | from quantrocket.zipline import ZiplineBacktestResult 20 | 21 | __all__ = [ 22 | "from_zipline_csv", 23 | ] 24 | 25 | def from_zipline_csv( 26 | filepath_or_buffer: Union[str, TextIO], 27 | slippage: float = None, 28 | live_start_date: pd.Timestamp = None, 29 | sector_mappings: Union[dict[str, str], 'pd.Series[str]'] = None, 30 | round_trips: bool = False, 31 | estimate_intraday: Union[bool, str] = 'infer', 32 | hide_positions: bool = False, 33 | cone_std: Union[float, tuple[float, float, float]] = (1.0, 1.5, 2.0), 34 | bootstrap: bool = False, 35 | unadjusted_returns: 'pd.Series[float]' = None, 36 | turnover_denom: str = 'AGB', 37 | set_context: bool = True, 38 | header_rows: dict[str, str] = None, 39 | start_date: Union[str, pd.Timestamp] = None, 40 | end_date: Union[str, pd.Timestamp] = None 41 | ) -> None: 42 | """ 43 | Create a full tear sheet from a zipline backtest results CSV. 44 | 45 | Parameters 46 | ---------- 47 | filepath_or_buffer : str or file-like object 48 | filepath or file-like object of the CSV 49 | 50 | slippage : int/float, optional 51 | Basis points of slippage to apply to returns before generating 52 | tearsheet stats and plots. 53 | If a value is provided, slippage parameter sweep 54 | plots will be generated from the unadjusted returns. 55 | Transactions and positions must also be passed. 56 | 57 | - See txn.adjust_returns_for_slippage for more details. 58 | 59 | live_start_date : datetime, optional 60 | The point in time when the strategy began live trading, 61 | after its backtest period. This datetime should be normalized. 62 | 63 | hide_positions : bool, optional 64 | If True, will not output any symbol names. 65 | 66 | round_trips: boolean, optional 67 | If True, causes the generation of a round trip tear sheet. 68 | 69 | sector_mappings : dict or pd.Series, optional 70 | Security identifier to sector mapping. 71 | Security ids as keys, sectors as values. 72 | 73 | estimate_intraday: boolean or str, optional 74 | Instead of using the end-of-day positions, use the point in the day 75 | where we have the most $ invested. This will adjust positions to 76 | better approximate and represent how an intraday strategy behaves. 77 | By default, this is 'infer', and an attempt will be made to detect 78 | an intraday strategy. Specifying this value will prevent detection. 79 | 80 | cone_std : float, or tuple, optional 81 | If float, The standard deviation to use for the cone plots. 82 | If tuple, Tuple of standard deviation values to use for the cone plots 83 | 84 | - The cone is a normal distribution with this standard deviation 85 | centered around a linear regression. 86 | 87 | bootstrap : boolean (optional) 88 | Whether to perform bootstrap analysis for the performance 89 | metrics. Takes a few minutes longer. 90 | 91 | turnover_denom : str 92 | Either AGB or portfolio_value, default AGB. 93 | 94 | - See full explanation in txn.get_turnover. 95 | 96 | header_rows : dict or OrderedDict, optional 97 | Extra rows to display at the top of the perf stats table. 98 | 99 | set_context : boolean, optional 100 | If True, set default plotting style context. 101 | 102 | - See plotting.context(). 
103 | 104 | start_date : str or datetime, optional 105 | Truncate at this start date (otherwise include entire date range) 106 | 107 | end_date : str or datetime, optional 108 | Truncate at this end date (otherwise include entire date range) 109 | 110 | Returns 111 | ------- 112 | None 113 | 114 | Notes 115 | ----- 116 | Usage Guide: 117 | 118 | * Zipline backtesting: https://qrok.it/dl/pf/zipline-backtest 119 | """ 120 | results = ZiplineBacktestResult.from_csv(filepath_or_buffer) 121 | 122 | returns = results.returns 123 | returns.name = "returns" 124 | returns = pad_initial(returns) 125 | 126 | positions = results.positions 127 | transactions = results.transactions 128 | 129 | benchmark_rets = results.benchmark_returns 130 | if benchmark_rets is not None: 131 | benchmark_rets.name = "benchmark" 132 | benchmark_rets = pad_initial(benchmark_rets) 133 | 134 | commissions = None 135 | fees = None 136 | pnl = None 137 | if "commissions" in results.perf.columns and results.perf.commissions.sum() > 0: 138 | commissions = results.perf.commissions 139 | if "fees" in results.perf.columns and results.perf.fees.sum() > 0: 140 | fees = results.perf.fees 141 | if fees is not None or commissions is not None: 142 | pnl = results.perf.pnl 143 | 144 | if start_date: 145 | returns = returns.loc[start_date:] 146 | positions = positions.loc[start_date:] 147 | transactions = transactions.loc[start_date:] 148 | if benchmark_rets is not None: 149 | benchmark_rets = benchmark_rets.loc[start_date:] 150 | if commissions is not None: 151 | commissions = commissions.loc[start_date:] 152 | if fees is not None: 153 | fees = fees.loc[start_date:] 154 | if pnl is not None: 155 | pnl = pnl.loc[start_date:] 156 | 157 | if end_date: 158 | returns = returns.loc[:end_date] 159 | positions = positions.loc[:end_date] 160 | transactions = transactions.loc[:end_date] 161 | if benchmark_rets is not None: 162 | benchmark_rets = benchmark_rets.loc[:end_date] 163 | if commissions is not None: 164 | commissions = commissions.loc[:end_date] 165 | if fees is not None: 166 | fees = fees.loc[:end_date] 167 | if pnl is not None: 168 | pnl = pnl.loc[:end_date] 169 | 170 | return create_full_tear_sheet( 171 | returns, 172 | positions=positions, 173 | transactions=transactions, 174 | benchmark_rets=benchmark_rets, 175 | slippage=slippage, 176 | live_start_date=live_start_date, 177 | sector_mappings=sector_mappings, 178 | round_trips=round_trips, 179 | estimate_intraday=estimate_intraday, 180 | hide_positions=hide_positions, 181 | cone_std=cone_std, 182 | bootstrap=bootstrap, 183 | unadjusted_returns=unadjusted_returns, 184 | turnover_denom=turnover_denom, 185 | set_context=set_context, 186 | header_rows=header_rows, 187 | pnl=pnl, 188 | commissions=commissions, 189 | fees=fees 190 | ) 191 | -------------------------------------------------------------------------------- /pyfolio/_tests/test_pos.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from parameterized import parameterized 3 | from collections import OrderedDict 4 | import os 5 | import gzip 6 | 7 | from pandas import ( 8 | Series, 9 | DataFrame, 10 | date_range, 11 | Timestamp, 12 | read_csv 13 | ) 14 | from pandas.testing import assert_frame_equal 15 | 16 | from numpy import ( 17 | arange, 18 | zeros_like, 19 | nan, 20 | ) 21 | 22 | import warnings 23 | 24 | from pyfolio.utils import (to_utc, to_series, check_intraday, 25 | detect_intraday, estimate_intraday) 26 | from pyfolio.pos import (get_percent_alloc, 
27 | extract_pos, 28 | get_sector_exposures, 29 | get_max_median_position_concentration) 30 | 31 | 32 | class PositionsTestCase(TestCase): 33 | dates = date_range(start='2015-01-01', freq='D', periods=20) 34 | 35 | def test_get_percent_alloc(self): 36 | raw_data = arange(15, dtype=float).reshape(5, 3) 37 | # Make the first column negative to test absolute magnitudes. 38 | raw_data[:, 0] *= -1 39 | 40 | frame = DataFrame( 41 | raw_data, 42 | index=date_range('01-01-2015', freq='D', periods=5), 43 | columns=['A', 'B', 'C'] 44 | ) 45 | 46 | result = get_percent_alloc(frame) 47 | expected_raw = zeros_like(raw_data) 48 | for idx, row in enumerate(raw_data): 49 | expected_raw[idx] = row / row.sum() 50 | 51 | expected = DataFrame( 52 | expected_raw, 53 | index=frame.index, 54 | columns=frame.columns, 55 | ) 56 | 57 | assert_frame_equal(result, expected) 58 | 59 | def test_extract_pos(self): 60 | index_dup = [Timestamp('2015-06-08', tz='UTC'), 61 | Timestamp('2015-06-08', tz='UTC'), 62 | Timestamp('2015-06-09', tz='UTC'), 63 | Timestamp('2015-06-09', tz='UTC')] 64 | index = [Timestamp('2015-06-08', tz='UTC'), 65 | Timestamp('2015-06-09', tz='UTC')] 66 | 67 | positions = DataFrame( 68 | {'amount': [100., 200., 300., 400.], 69 | 'last_sale_price': [10., 20., 30., 40.], 70 | 'sid': [1, 2, 1, 2]}, 71 | index=index_dup 72 | ) 73 | cash = Series([100., 200.], index=index) 74 | 75 | result = extract_pos(positions, cash) 76 | 77 | expected = DataFrame(OrderedDict([ 78 | (1, [100.*10., 300.*30.]), 79 | (2, [200.*20., 400.*40.]), 80 | ('cash', [100., 200.])]), 81 | index=index 82 | ) 83 | expected.index.name = 'index' 84 | expected.columns.name = 'sid' 85 | 86 | assert_frame_equal(result, expected) 87 | 88 | @parameterized.expand([ 89 | (DataFrame([[1.0, 2.0, 3.0, 10.0]]*len(dates), 90 | columns=[0, 1, 2, 'cash'], index=dates), 91 | {0: 'A', 1: 'B', 2: 'A'}, 92 | DataFrame([[4.0, 2.0, 10.0]]*len(dates), 93 | columns=['A', 'B', 'cash'], index=dates), 94 | False), 95 | (DataFrame([[1.0, 2.0, 3.0, 10.0]]*len(dates), 96 | columns=[0, 1, 2, 'cash'], index=dates), 97 | Series(index=[0, 1, 2], data=['A', 'B', 'A']), 98 | DataFrame([[4.0, 2.0, 10.0]]*len(dates), 99 | columns=['A', 'B', 'cash'], index=dates), 100 | False), 101 | (DataFrame([[1.0, 2.0, 3.0, 10.0]]*len(dates), 102 | columns=[0, 1, 2, 'cash'], index=dates), 103 | {0: 'A', 1: 'B'}, 104 | DataFrame([[1.0, 2.0, 10.0]]*len(dates), 105 | columns=['A', 'B', 'cash'], index=dates), 106 | True) 107 | ]) 108 | def test_sector_exposure(self, positions, mapping, 109 | expected_sector_exposure, 110 | warning_expected): 111 | """ 112 | Tests sector exposure mapping and rollup. 
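        A single UserWarning is expected when some position columns have
        no sector mapping; those columns are dropped from the result.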
113 | 114 | """ 115 | with warnings.catch_warnings(record=True) as w: 116 | result_sector_exposure = get_sector_exposures(positions, 117 | mapping) 118 | 119 | assert_frame_equal(result_sector_exposure, 120 | expected_sector_exposure) 121 | if warning_expected: 122 | self.assertEqual(len(w), 1) 123 | else: 124 | self.assertEqual(len(w), 0) 125 | 126 | @parameterized.expand([ 127 | (DataFrame([[1.0, 2.0, 3.0, 14.0]]*len(dates), 128 | columns=[0, 1, 2, 'cash'], index=dates), 129 | DataFrame([[0.15, 0.1, nan, nan]]*len(dates), 130 | columns=['max_long', 'median_long', 131 | 'median_short', 'max_short'], index=dates)), 132 | (DataFrame([[1.0, -2.0, -13.0, 15.0]]*len(dates), 133 | columns=[0, 1, 2, 'cash'], index=dates), 134 | DataFrame([[1.0, 1.0, -7.5, -13.0]]*len(dates), 135 | columns=['max_long', 'median_long', 136 | 'median_short', 'max_short'], index=dates)), 137 | (DataFrame([[nan, 2.0, nan, 8.0]]*len(dates), 138 | columns=[0, 1, 2, 'cash'], index=dates), 139 | DataFrame([[0.2, 0.2, nan, nan]]*len(dates), 140 | columns=['max_long', 'median_long', 141 | 'median_short', 'max_short'], index=dates)) 142 | ]) 143 | def test_max_median_exposure(self, positions, expected): 144 | alloc_summary = get_max_median_position_concentration(positions) 145 | assert_frame_equal(expected, alloc_summary) 146 | 147 | __location__ = os.path.realpath( 148 | os.path.join(os.getcwd(), os.path.dirname(__file__))) 149 | 150 | test_returns = read_csv( 151 | gzip.open( 152 | __location__ + '/test_data/test_returns.csv.gz'), 153 | index_col=0, parse_dates=True) 154 | test_returns = to_series(to_utc(test_returns)) 155 | test_txn = to_utc(read_csv( 156 | gzip.open( 157 | __location__ + '/test_data/test_txn.csv.gz'), 158 | index_col=0, parse_dates=True)) 159 | test_pos = to_utc(read_csv( 160 | gzip.open(__location__ + '/test_data/test_pos.csv.gz'), 161 | index_col=0, parse_dates=True)) 162 | 163 | @parameterized.expand([ 164 | (test_pos, test_txn, False), 165 | (test_pos.resample('1W').last(), test_txn, True) 166 | ]) 167 | def test_detect_intraday(self, positions, transactions, expected): 168 | detected = detect_intraday(positions, transactions, threshold=0.25) 169 | assert detected == expected 170 | 171 | @parameterized.expand([ 172 | ('infer', test_returns, test_pos, test_txn, test_pos), 173 | (False, test_returns, test_pos, test_txn, test_pos) 174 | ]) 175 | def test_check_intraday(self, estimate, returns, 176 | positions, transactions, expected): 177 | detected = check_intraday(estimate, returns, positions, transactions) 178 | assert_frame_equal(detected, expected) 179 | 180 | @parameterized.expand([ 181 | (test_returns, test_pos, test_txn, (1506, 8)), 182 | (test_returns, test_pos.resample('1W').last(), test_txn, (1819, 8)) 183 | ]) 184 | def test_estimate_intraday(self, returns, positions, 185 | transactions, expected): 186 | intraday_pos = estimate_intraday(returns, positions, transactions) 187 | assert intraday_pos.shape == expected 188 | -------------------------------------------------------------------------------- /pyfolio/_tests/test_tears.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import warnings 3 | import inspect 4 | import functools 5 | from unittest import TestCase 6 | from parameterized import parameterized 7 | 8 | import os 9 | import gzip 10 | 11 | import matplotlib 12 | import matplotlib.pyplot as plt 13 | from pandas import read_csv 14 | 15 | from pyfolio.utils import (to_utc, to_series) 16 | from pyfolio.tears import 
(create_full_tear_sheet,
17 |                            create_simple_tear_sheet,
18 |                            create_returns_tear_sheet,
19 |                            create_position_tear_sheet,
20 |                            create_txn_tear_sheet,
21 |                            create_round_trip_tear_sheet,
22 |                            create_interesting_times_tear_sheet,)
23 | 
24 | # The following code is copied from matplotlib, having been deprecated in mpl 3.6:
25 | # https://github.com/matplotlib/matplotlib/blob/v3.7.2/lib/matplotlib/testing/decorators.py#L24C1-L89C28
26 | @contextlib.contextmanager
27 | def _cleanup_cm():
28 |     orig_units_registry = matplotlib.units.registry.copy()
29 |     try:
30 |         with warnings.catch_warnings(), matplotlib.rc_context():
31 |             yield
32 |     finally:
33 |         matplotlib.units.registry.clear()
34 |         matplotlib.units.registry.update(orig_units_registry)
35 |         plt.close("all")
36 | 
37 | def cleanup(style=None):
38 |     """
39 |     A decorator to ensure that any global state is reset before
40 |     running a test.
41 | 
42 |     Parameters
43 |     ----------
44 |     style : str, dict, or list, optional
45 |         The style(s) to apply. Defaults to ``["classic",
46 |         "_classic_test_patch"]``.
47 |     """
48 | 
49 |     # If cleanup is used without arguments, *style* will be a callable, and we
50 |     # pass it directly to the wrapper generator. If cleanup is called with an
51 |     # argument, it is a string naming a style, and the function will be passed
52 |     # as an argument to what we return. This is a confusing, but somewhat
53 |     # standard, pattern for writing a decorator with optional arguments.
54 | 
55 |     def make_cleanup(func):
56 |         if inspect.isgeneratorfunction(func):
57 |             @functools.wraps(func)
58 |             def wrapped_callable(*args, **kwargs):
59 |                 with _cleanup_cm(), matplotlib.style.context(style):
60 |                     yield from func(*args, **kwargs)
61 |         else:
62 |             @functools.wraps(func)
63 |             def wrapped_callable(*args, **kwargs):
64 |                 with _cleanup_cm(), matplotlib.style.context(style):
65 |                     func(*args, **kwargs)
66 | 
67 |         return wrapped_callable
68 | 
69 |     if callable(style):
70 |         result = make_cleanup(style)
71 |         # Default of mpl_test_settings fixture and image_comparison too.
72 | style = ["classic", "_classic_test_patch"] 73 | return result 74 | else: 75 | return make_cleanup 76 | 77 | class PositionsTestCase(TestCase): 78 | __location__ = os.path.realpath( 79 | os.path.join(os.getcwd(), os.path.dirname(__file__))) 80 | 81 | test_returns = read_csv( 82 | gzip.open( 83 | __location__ + '/test_data/test_returns.csv.gz'), 84 | index_col=0, parse_dates=True) 85 | test_returns = to_series(to_utc(test_returns)) 86 | test_txn = to_utc(read_csv( 87 | gzip.open( 88 | __location__ + '/test_data/test_txn.csv.gz'), 89 | index_col=0, parse_dates=True)) 90 | test_pos = to_utc(read_csv( 91 | gzip.open(__location__ + '/test_data/test_pos.csv.gz'), 92 | index_col=0, parse_dates=True)) 93 | 94 | @parameterized.expand([({},), 95 | ({'slippage': 1},), 96 | ({'live_start_date': test_returns.index[-20]},), 97 | ({'round_trips': True},), 98 | ({'hide_positions': True},), 99 | ({'cone_std': 1},), 100 | ({'bootstrap': True},), 101 | ]) 102 | @cleanup 103 | def test_create_full_tear_sheet_breakdown(self, kwargs): 104 | create_full_tear_sheet(self.test_returns, 105 | positions=self.test_pos, 106 | transactions=self.test_txn, 107 | benchmark_rets=self.test_returns, 108 | **kwargs 109 | ) 110 | 111 | @parameterized.expand([({},), 112 | ({'slippage': 1},), 113 | ({'live_start_date': test_returns.index[-20]},), 114 | ]) 115 | @cleanup 116 | def test_create_simple_tear_sheet_breakdown(self, kwargs): 117 | create_simple_tear_sheet(self.test_returns, 118 | positions=self.test_pos, 119 | transactions=self.test_txn, 120 | **kwargs 121 | ) 122 | 123 | @parameterized.expand([({},), 124 | ({'live_start_date': 125 | test_returns.index[-20]},), 126 | ({'cone_std': 1},), 127 | ({'bootstrap': True},), 128 | ]) 129 | @cleanup 130 | def test_create_returns_tear_sheet_breakdown(self, kwargs): 131 | create_returns_tear_sheet(self.test_returns, 132 | benchmark_rets=self.test_returns, 133 | **kwargs 134 | ) 135 | 136 | @parameterized.expand([({},), 137 | ({'hide_positions': True},), 138 | ({'show_and_plot_top_pos': 0},), 139 | ({'show_and_plot_top_pos': 1},), 140 | ]) 141 | @cleanup 142 | def test_create_position_tear_sheet_breakdown(self, kwargs): 143 | create_position_tear_sheet(self.test_returns, 144 | self.test_pos, 145 | **kwargs 146 | ) 147 | 148 | @parameterized.expand([({},), 149 | ({'unadjusted_returns': test_returns},), 150 | ]) 151 | @cleanup 152 | def test_create_txn_tear_sheet_breakdown(self, kwargs): 153 | create_txn_tear_sheet(self.test_returns, 154 | self.test_pos, 155 | self.test_txn, 156 | **kwargs 157 | ) 158 | 159 | @parameterized.expand([({},), 160 | ({'sector_mappings': {}},), 161 | ]) 162 | @cleanup 163 | def test_create_round_trip_tear_sheet_breakdown(self, kwargs): 164 | create_round_trip_tear_sheet(self.test_returns, 165 | self.test_pos, 166 | self.test_txn, 167 | **kwargs 168 | ) 169 | 170 | @parameterized.expand([({},), 171 | ({'legend_loc': 1},), 172 | ]) 173 | @cleanup 174 | def test_create_interesting_times_tear_sheet_breakdown(self, 175 | kwargs): 176 | create_interesting_times_tear_sheet(self.test_returns, 177 | self.test_returns, 178 | **kwargs 179 | ) 180 | -------------------------------------------------------------------------------- /pyfolio/pos.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2016 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | from __future__ import division 16 | 17 | import pandas as pd 18 | import numpy as np 19 | import warnings 20 | 21 | try: 22 | from zipline.assets import Equity, Future 23 | ZIPLINE = True 24 | except ImportError: 25 | ZIPLINE = False 26 | warnings.warn( 27 | 'Module "zipline.assets" not found; multipliers will not be applied' 28 | ' to position notionals.' 29 | ) 30 | 31 | 32 | def get_percent_alloc(values): 33 | """ 34 | Determines a portfolio's allocations. 35 | 36 | Parameters 37 | ---------- 38 | values : pd.DataFrame 39 | Contains position values or amounts. 40 | 41 | Returns 42 | ------- 43 | allocations : pd.DataFrame 44 | Positions and their allocations. 45 | """ 46 | 47 | return values.divide( 48 | values.sum(axis='columns'), 49 | axis='rows' 50 | ) 51 | 52 | 53 | def get_top_long_short_abs(positions, top=10): 54 | """ 55 | Finds the top long, short, and absolute positions. 56 | 57 | Parameters 58 | ---------- 59 | positions : pd.DataFrame 60 | The positions that the strategy takes over time. 61 | top : int, optional 62 | How many of each to find (default 10). 63 | 64 | Returns 65 | ------- 66 | df_top_long : pd.DataFrame 67 | Top long positions. 68 | df_top_short : pd.DataFrame 69 | Top short positions. 70 | df_top_abs : pd.DataFrame 71 | Top absolute positions. 72 | """ 73 | 74 | positions = positions.drop('cash', axis='columns') 75 | df_max = positions.max() 76 | df_min = positions.min() 77 | df_abs_max = positions.abs().max() 78 | df_top_long = df_max[df_max > 0].nlargest(top) 79 | df_top_short = df_min[df_min < 0].nsmallest(top) 80 | df_top_abs = df_abs_max.nlargest(top) 81 | return df_top_long, df_top_short, df_top_abs 82 | 83 | 84 | def get_max_median_position_concentration(positions): 85 | """ 86 | Finds the max and median long and short position concentrations 87 | in each time period specified by the index of positions. 88 | 89 | Parameters 90 | ---------- 91 | positions : pd.DataFrame 92 | The positions that the strategy takes over time. 93 | 94 | Returns 95 | ------- 96 | pd.DataFrame 97 | Columns are max long, max short, median long, and median short 98 | position concentrations. Rows are timeperiods. 99 | """ 100 | 101 | expos = get_percent_alloc(positions) 102 | expos = expos.drop('cash', axis=1) 103 | 104 | longs = expos.where(expos.map(lambda x: x > 0)) 105 | shorts = expos.where(expos.map(lambda x: x < 0)) 106 | 107 | alloc_summary = pd.DataFrame() 108 | alloc_summary['max_long'] = longs.max(axis=1) 109 | alloc_summary['median_long'] = longs.median(axis=1) 110 | alloc_summary['median_short'] = shorts.median(axis=1) 111 | alloc_summary['max_short'] = shorts.min(axis=1) 112 | 113 | return alloc_summary 114 | 115 | 116 | def extract_pos(positions, cash): 117 | """ 118 | Extract position values from backtest object as returned by 119 | get_backtest() on the Quantopian research platform. 120 | 121 | Parameters 122 | ---------- 123 | positions : pd.DataFrame 124 | timeseries containing one row per symbol (and potentially 125 | duplicate datetime indices) and columns for amount and 126 | last_sale_price. 
127 | cash : pd.Series 128 | timeseries containing cash in the portfolio. 129 | 130 | Returns 131 | ------- 132 | pd.DataFrame 133 | Daily net position values. 134 | - See full explanation in tears.create_full_tear_sheet. 135 | """ 136 | 137 | positions = positions.copy() 138 | if positions.empty: 139 | positions = pd.DataFrame( 140 | np.nan, 141 | index=[], 142 | columns=[ 143 | 'sid', 'amount', 'cost_basis', 144 | 'last_sale_price', 'last_sale_date']) 145 | positions['values'] = positions.amount * positions.last_sale_price 146 | 147 | cash.name = 'cash' 148 | 149 | values = positions.reset_index().pivot_table(index='index', 150 | columns='sid', 151 | values='values') 152 | 153 | if ZIPLINE: 154 | for asset in values.columns: 155 | if type(asset) in [Equity, Future]: 156 | values[asset] = values[asset] * asset.price_multiplier 157 | 158 | values = values.join(cash).fillna(0) 159 | 160 | # NOTE: Set name of DataFrame.columns to sid, to match the behavior 161 | # of DataFrame.join in earlier versions of pandas. 162 | values.columns.name = 'sid' 163 | 164 | return values 165 | 166 | 167 | def get_sector_exposures(positions, symbol_sector_map): 168 | """ 169 | Sum position exposures by sector. 170 | 171 | Parameters 172 | ---------- 173 | positions : pd.DataFrame 174 | Contains position values or amounts. 175 | - Example 176 | index 'AAPL' 'MSFT' 'CHK' cash 177 | 2004-01-09 13939.380 -15012.993 -403.870 1477.483 178 | 2004-01-12 14492.630 -18624.870 142.630 3989.610 179 | 2004-01-13 -13853.280 13653.640 -100.980 100.000 180 | symbol_sector_map : dict or pd.Series 181 | Security identifier to sector mapping. 182 | Security ids as keys/index, sectors as values. 183 | - Example: 184 | {'AAPL' : 'Technology' 185 | 'MSFT' : 'Technology' 186 | 'CHK' : 'Natural Resources'} 187 | 188 | Returns 189 | ------- 190 | sector_exp : pd.DataFrame 191 | Sectors and their allocations. 192 | - Example: 193 | index 'Technology' 'Natural Resources' cash 194 | 2004-01-09 -1073.613 -403.870 1477.4830 195 | 2004-01-12 -4132.240 142.630 3989.6100 196 | 2004-01-13 -199.640 -100.980 100.0000 197 | """ 198 | 199 | cash = positions['cash'] 200 | positions = positions.drop('cash', axis=1) 201 | 202 | unmapped_pos = np.setdiff1d(positions.columns.values, 203 | list(symbol_sector_map.keys())) 204 | if len(unmapped_pos) > 0: 205 | warn_message = """Warning: Symbols {} have no sector mapping. 206 | They will not be included in sector allocations""".format( 207 | ", ".join(map(str, unmapped_pos))) 208 | warnings.warn(warn_message, UserWarning) 209 | 210 | sector_exp = positions.T.groupby( 211 | by=symbol_sector_map).sum().T 212 | 213 | sector_exp['cash'] = cash 214 | 215 | return sector_exp 216 | 217 | 218 | def get_long_short_pos(positions): 219 | """ 220 | Determines the long and short allocations in a portfolio. 221 | 222 | Parameters 223 | ---------- 224 | positions : pd.DataFrame 225 | The positions that the strategy takes over time. 
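        - Example (illustrative only; symbols as columns plus a 'cash'
          column, matching the format shown in get_sector_exposures above):
            index       'AAPL'      'MSFT'     cash
            2004-01-09  13939.380  -15012.993  1477.483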
226 | 227 | Returns 228 | ------- 229 | df_long_short : pd.DataFrame 230 | Long and short allocations as a decimal 231 | percentage of the total net liquidation 232 | """ 233 | 234 | pos_wo_cash = positions.drop('cash', axis=1) 235 | longs = pos_wo_cash[pos_wo_cash > 0].sum(axis=1).fillna(0) 236 | shorts = pos_wo_cash[pos_wo_cash < 0].sum(axis=1).fillna(0) 237 | cash = positions.cash 238 | net_liquidation = longs + shorts + cash 239 | df_pos = pd.DataFrame({'long': longs.divide(net_liquidation, axis='index'), 240 | 'short': shorts.divide(net_liquidation, 241 | axis='index')}) 242 | df_pos['net exposure'] = df_pos['long'] + df_pos['short'] 243 | return df_pos 244 | -------------------------------------------------------------------------------- /pyfolio/_tests/test_round_trips.py: -------------------------------------------------------------------------------- 1 | from parameterized import parameterized 2 | 3 | from unittest import TestCase 4 | 5 | from pandas import ( 6 | Series, 7 | DataFrame, 8 | DatetimeIndex, 9 | date_range, 10 | Timedelta, 11 | read_csv 12 | ) 13 | from pandas.testing import (assert_frame_equal) 14 | 15 | import os 16 | import gzip 17 | 18 | from pyfolio.round_trips import (extract_round_trips, 19 | add_closing_transactions, 20 | _groupby_consecutive, 21 | ) 22 | 23 | 24 | class RoundTripTestCase(TestCase): 25 | dates = date_range(start='2015-01-01', freq='D', periods=20) 26 | dates_intraday = date_range(start='2015-01-01', 27 | freq='2BH', periods=8) 28 | 29 | @parameterized.expand([ 30 | (DataFrame(data=[[2, 10., 'A'], 31 | [2, 20., 'A'], 32 | [-2, 20., 'A'], 33 | [-2, 10., 'A'], 34 | ], 35 | columns=['amount', 'price', 'symbol'], 36 | index=dates_intraday[:4]), 37 | DataFrame(data=[[4, 15., 'A'], 38 | [-4, 15., 'A'], 39 | ], 40 | columns=['amount', 'price', 'symbol'], 41 | index=dates_intraday[[0, 2]]) 42 | .rename_axis('dt', axis='index') 43 | ), 44 | (DataFrame(data=[[2, 10., 'A'], 45 | [2, 20., 'A'], 46 | [2, 20., 'A'], 47 | [2, 10., 'A'], 48 | ], 49 | columns=['amount', 'price', 'symbol'], 50 | index=dates_intraday[[0, 1, 4, 5]]), 51 | DataFrame(data=[[4, 15., 'A'], 52 | [4, 15., 'A'], 53 | ], 54 | columns=['amount', 'price', 'symbol'], 55 | index=dates_intraday[[0, 4]]) 56 | .rename_axis('dt', axis='index') 57 | ), 58 | ]) 59 | def test_groupby_consecutive(self, transactions, expected): 60 | grouped_txn = _groupby_consecutive(transactions) 61 | assert_frame_equal(grouped_txn.sort_index(axis='columns'), 62 | expected.sort_index(axis='columns')) 63 | 64 | @parameterized.expand([ 65 | # Simple round-trip 66 | (DataFrame(data=[[2, 10., 'A'], 67 | [-2, 15., 'A']], 68 | columns=['amount', 'price', 'symbol'], 69 | index=dates[:2]), 70 | DataFrame(data=[[dates[0], dates[1], 71 | Timedelta(days=1), 10., .5, 72 | True, 'A']], 73 | columns=['open_dt', 'close_dt', 74 | 'duration', 'pnl', 'rt_returns', 75 | 'long', 'symbol'], 76 | index=[0]) 77 | ), 78 | # Round-trip with left-over txn that shouldn't be counted 79 | (DataFrame(data=[[2, 10., 'A'], 80 | [2, 15., 'A'], 81 | [-9, 10., 'A']], 82 | columns=['amount', 'price', 'symbol'], 83 | index=dates[:3]), 84 | DataFrame(data=[[dates[0], dates[2], 85 | Timedelta(days=2), -10., -.2, 86 | True, 'A']], 87 | columns=['open_dt', 'close_dt', 88 | 'duration', 'pnl', 'rt_returns', 89 | 'long', 'symbol'], 90 | index=[0]) 91 | ), 92 | # Round-trip with sell that crosses 0 and should be split 93 | (DataFrame(data=[[2, 10., 'A'], 94 | [-4, 15., 'A'], 95 | [3, 20., 'A']], 96 | columns=['amount', 'price', 'symbol'], 97 | index=dates[:3]), 
98 | DataFrame(data=[[dates[0], dates[1], 99 | Timedelta(days=1), 10., .5, 100 | True, 'A'], 101 | [dates[1], dates[2], 102 | Timedelta(days=1), 103 | -10, (-1. / 3), 104 | False, 'A']], 105 | columns=['open_dt', 'close_dt', 106 | 'duration', 'pnl', 'rt_returns', 107 | 'long', 'symbol'], 108 | index=[0, 1]) 109 | ), 110 | # Round-trip that does not cross 0 111 | (DataFrame(data=[[4, 10., 'A'], 112 | [-2, 15., 'A'], 113 | [2, 20., 'A']], 114 | columns=['amount', 'price', 'symbol'], 115 | index=dates[:3]), 116 | DataFrame(data=[[dates[0], dates[1], 117 | Timedelta(days=1), 10., .5, 118 | True, 'A']], 119 | columns=['open_dt', 'close_dt', 120 | 'duration', 'pnl', 'rt_returns', 121 | 'long', 'symbol'], 122 | index=[0]) 123 | ), 124 | # Round-trip that does not cross 0 and has portfolio value 125 | (DataFrame(data=[[4, 10., 'A'], 126 | [-2, 15., 'A'], 127 | [2, 20., 'A']], 128 | columns=['amount', 'price', 'symbol'], 129 | index=dates[:3]), 130 | DataFrame(data=[[dates[0], dates[1], 131 | Timedelta(days=1), 10., .5, 132 | True, 'A', 0.1]], 133 | columns=['open_dt', 'close_dt', 134 | 'duration', 'pnl', 'rt_returns', 135 | 'long', 'symbol', 'returns'], 136 | index=[0]), 137 | Series([100., 100., 100.], index=dates[:3]), 138 | ), 139 | 140 | ]) 141 | def test_extract_round_trips(self, transactions, expected, 142 | portfolio_value=None): 143 | round_trips = extract_round_trips(transactions, 144 | portfolio_value=portfolio_value) 145 | 146 | assert_frame_equal(round_trips.sort_index(axis='columns'), 147 | expected.sort_index(axis='columns')) 148 | 149 | def test_add_closing_trades(self): 150 | dates = date_range(start='2015-01-01', periods=20) 151 | transactions = DataFrame(data=[[2, 10, 'A'], 152 | [-5, 10, 'A'], 153 | [-1, 10, 'B']], 154 | columns=['amount', 'price', 'symbol'], 155 | index=dates[:3]) 156 | positions = DataFrame(data=[[20, 10, 0], 157 | [-30, 10, 30], 158 | [-60, 0, 30]], 159 | columns=['A', 'B', 'cash'], 160 | index=dates[:3]) 161 | 162 | expected_ix = dates[:3].append(DatetimeIndex([dates[2] + 163 | Timedelta(seconds=1)])) 164 | expected = DataFrame(data=[['A', 2, 10], 165 | ['A', -5, 10], 166 | ['B', -1, 10.], 167 | ['A', 3, 20.]], 168 | columns=['symbol', 'amount', 'price'], 169 | index=expected_ix) 170 | 171 | transactions_closed = add_closing_transactions(positions, transactions) 172 | assert_frame_equal(transactions_closed, expected) 173 | 174 | def test_txn_pnl_matches_round_trip_pnl(self): 175 | __location__ = os.path.realpath( 176 | os.path.join(os.getcwd(), os.path.dirname(__file__))) 177 | 178 | test_txn = read_csv(gzip.open( 179 | __location__ + '/test_data/test_txn.csv.gz'), 180 | index_col=0, parse_dates=True) 181 | test_pos = read_csv(gzip.open( 182 | __location__ + '/test_data/test_pos.csv.gz'), 183 | index_col=0, parse_dates=True) 184 | 185 | transactions_closed = add_closing_transactions(test_pos, test_txn) 186 | transactions_closed['txn_dollars'] = transactions_closed.amount * \ 187 | -1. 
* transactions_closed.price 188 | round_trips = extract_round_trips(transactions_closed) 189 | 190 | self.assertAlmostEqual(round_trips.pnl.sum(), 191 | transactions_closed.txn_dollars.sum()) 192 | -------------------------------------------------------------------------------- /pyfolio/_tests/test_quantrocket_moonshot.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 QuantRocket LLC - All Rights Reserved 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # To run: python3 -m unittest discover -s _tests/ -p test_quantrocket*.py -t . -v 16 | 17 | import matplotlib as mpl 18 | mpl.use("Agg") 19 | import unittest 20 | from unittest.mock import patch 21 | import io 22 | import pandas as pd 23 | import pyfolio 24 | import numpy as np 25 | 26 | MOONSHOT_RESULTS = { 27 | 'Field': [ 28 | 'Benchmark', 29 | 'Benchmark', 30 | 'Benchmark', 31 | 'NetExposure', 32 | 'NetExposure', 33 | 'NetExposure', 34 | 'Return', 35 | 'Return', 36 | 'Return'], 37 | 'Date': [ 38 | '2018-05-07', 39 | '2018-05-08', 40 | '2018-05-09', 41 | '2018-05-07', 42 | '2018-05-08', 43 | '2018-05-09', 44 | '2018-05-07', 45 | '2018-05-08', 46 | '2018-05-09'], 47 | 'AAPL(265598)': [ 48 | 0.0, 49 | 0.0048067, 50 | 0.0063961, 51 | 0.25, 52 | 0.2, 53 | -0.5, 54 | 0.0018087363324810761, 55 | 0.0012016634262259631, 56 | 0.0015990325181403089], 57 | 'AMZN(3691937)': [ 58 | 0.0, 59 | 0.0, 60 | 0.0, 61 | 0.25, 62 | 0.3, 63 | 0.5, 64 | 0.0030345678231443185, 65 | -0.0012108315522391664, 66 | 0.0022937220153353977] 67 | } 68 | 69 | MOONSHOT_INTRADAY_RESULTS = { 70 | 'Field': [ 71 | 'NetExposure', 72 | 'NetExposure', 73 | 'NetExposure', 74 | 'NetExposure', 75 | 'Return', 76 | 'Return', 77 | 'Return', 78 | 'Return'], 79 | 'Date': [ 80 | '2018-05-07', 81 | '2018-05-07', 82 | '2018-05-08', 83 | '2018-05-08', 84 | '2018-05-07', 85 | '2018-05-07', 86 | '2018-05-08', 87 | '2018-05-08'], 88 | 'Time': [ 89 | '10:00:00', 90 | '11:00:00', 91 | '10:00:00', 92 | '11:00:00', 93 | '10:00:00', 94 | '11:00:00', 95 | '10:00:00', 96 | '11:00:00'], 97 | 'AAPL(265598)': [ 98 | 0.25, 99 | 0.2, 100 | -0.5, 101 | 0.4, 102 | 0.0018087363324810761, 103 | 0.0012016634262259631, 104 | 0.0015990325181403089, 105 | -0.0015790325181403089], 106 | 'AMZN(3691937)': [ 107 | 0.25, 108 | 0.3, 109 | 0.5, 110 | -0.25, 111 | 0.0030345678231443185, 112 | -0.0012108315522391664, 113 | 0.0022937220153353977, 114 | 0.0062937220153353977] 115 | } 116 | 117 | class PyFolioFromMoonshotTestCase(unittest.TestCase): 118 | 119 | def setUp(self): 120 | self.maxDiff = None 121 | 122 | @patch("pyfolio.quantrocket_moonshot.create_full_tear_sheet") 123 | def test_from_moonshot_csv(self, mock_create_full_tear_sheet): 124 | 125 | f = io.StringIO() 126 | moonshot_results = pd.DataFrame(MOONSHOT_RESULTS) 127 | moonshot_results.to_csv(f,index=False) 128 | f.seek(0) 129 | 130 | pyfolio.from_moonshot_csv(f) 131 | 132 | tear_sheet_call = mock_create_full_tear_sheet.mock_calls[0] 133 | 134 | _, args, kwargs = 
tear_sheet_call 135 | self.assertEqual(len(args), 1) 136 | returns = args[0] 137 | self.assertEqual(returns.index.tz.tzname(None), "UTC") 138 | # returns were padded to len 127 (more than 6 months=126 days) 139 | self.assertEqual(returns.index.size, 127) 140 | self.assertTrue((returns.iloc[:124] == 0).all()) 141 | self.assertDictEqual( 142 | returns.iloc[124:].to_dict(), 143 | { 144 | pd.Timestamp('2018-05-07 00:00:00+0000', tz='UTC'): 0.0048433041556253, 145 | pd.Timestamp('2018-05-08 00:00:00+0000', tz='UTC'): -9.168126013200028e-06, 146 | pd.Timestamp('2018-05-09 00:00:00+0000', tz='UTC'): 0.0038927545334756 147 | }) 148 | 149 | benchmark_rets = kwargs["benchmark_rets"] 150 | positions = kwargs["positions"] 151 | self.assertListEqual( 152 | positions.reset_index().to_dict(orient="records"), 153 | [ 154 | {'Date': pd.Timestamp('2018-05-07 00:00:00+0000', tz='UTC'), 155 | 'AAPL(265598)': 0.25, 156 | 'AMZN(3691937)': 0.25, 157 | 'cash': 0.5 158 | }, 159 | {'Date': pd.Timestamp('2018-05-08 00:00:00+0000', tz='UTC'), 160 | 'AAPL(265598)': 0.2, 161 | 'AMZN(3691937)': 0.3, 162 | 'cash': 0.5 163 | }, 164 | {'Date': pd.Timestamp('2018-05-09 00:00:00+0000', tz='UTC'), 165 | 'AAPL(265598)': -0.5, 166 | 'AMZN(3691937)': 0.5, 167 | 'cash': 0 168 | } 169 | ] 170 | ) 171 | # benchmark_rets were also padded to len 127 172 | self.assertEqual(benchmark_rets.index.size, 127) 173 | self.assertTrue((benchmark_rets.iloc[:124] == 0).all()) 174 | self.assertDictEqual( 175 | # replace nan with "nan" to allow equality comparisons 176 | benchmark_rets.iloc[124:].to_dict(), 177 | { 178 | pd.Timestamp('2018-05-07 00:00:00+0000', tz='UTC'): 0, 179 | pd.Timestamp('2018-05-08 00:00:00+0000', tz='UTC'): 0.0048067, 180 | pd.Timestamp('2018-05-09 00:00:00+0000', tz='UTC'): 0.0063961 181 | } 182 | ) 183 | 184 | @patch("pyfolio.quantrocket_moonshot.create_full_tear_sheet") 185 | def test_from_moonshot_csv_no_benchmark(self, mock_create_full_tear_sheet): 186 | 187 | f = io.StringIO() 188 | moonshot_results = pd.DataFrame(MOONSHOT_RESULTS) 189 | moonshot_results = moonshot_results[moonshot_results.Field != "Benchmark"] 190 | moonshot_results.to_csv(f,index=False) 191 | f.seek(0) 192 | 193 | pyfolio.from_moonshot_csv(f) 194 | 195 | tear_sheet_call = mock_create_full_tear_sheet.mock_calls[0] 196 | 197 | _, args, kwargs = tear_sheet_call 198 | self.assertEqual(len(args), 1) 199 | returns = args[0] 200 | self.assertEqual(returns.index.tz.tzname(None), "UTC") 201 | # returns were padded to len 127 (more than 6 months=126 days) 202 | self.assertEqual(returns.index.size, 127) 203 | self.assertTrue((returns.iloc[:124] == 0).all()) 204 | self.assertDictEqual( 205 | returns.iloc[124:].to_dict(), 206 | { 207 | pd.Timestamp('2018-05-07 00:00:00+0000', tz='UTC'): 0.0048433041556253, 208 | pd.Timestamp('2018-05-08 00:00:00+0000', tz='UTC'): -9.168126013200028e-06, 209 | pd.Timestamp('2018-05-09 00:00:00+0000', tz='UTC'): 0.0038927545334756 210 | }) 211 | 212 | positions = kwargs["positions"] 213 | self.assertListEqual( 214 | positions.reset_index().to_dict(orient="records"), 215 | [ 216 | {'Date': pd.Timestamp('2018-05-07 00:00:00+0000', tz='UTC'), 217 | 'AAPL(265598)': 0.25, 218 | 'AMZN(3691937)': 0.25, 219 | 'cash': 0.5 220 | }, 221 | {'Date': pd.Timestamp('2018-05-08 00:00:00+0000', tz='UTC'), 222 | 'AAPL(265598)': 0.2, 223 | 'AMZN(3691937)': 0.3, 224 | 'cash': 0.5 225 | }, 226 | {'Date': pd.Timestamp('2018-05-09 00:00:00+0000', tz='UTC'), 227 | 'AAPL(265598)': -0.5, 228 | 'AMZN(3691937)': 0.5, 229 | 'cash': 0 230 | } 231 | ] 232 | ) 
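    # Note for the intraday test below: the fixture has separate Date and
    # Time index levels; from_moonshot_csv collapses them to daily values
    # (via moonchart's intraday_to_daily) before building returns and
    # positions, so the expected daily returns sum the two intraday bars
    # for each day.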
233 | 234 | @patch("pyfolio.quantrocket_moonshot.create_full_tear_sheet") 235 | def test_from_intraday_moonshot_csv(self, mock_create_full_tear_sheet): 236 | 237 | f = io.StringIO() 238 | moonshot_results = pd.DataFrame(MOONSHOT_INTRADAY_RESULTS) 239 | moonshot_results.to_csv(f, index=False) 240 | f.seek(0) 241 | 242 | pyfolio.from_moonshot_csv(f) 243 | 244 | tear_sheet_call = mock_create_full_tear_sheet.mock_calls[0] 245 | 246 | _, args, kwargs = tear_sheet_call 247 | self.assertEqual(len(args), 1) 248 | returns = args[0] 249 | self.assertEqual(returns.index.tz.tzname(None), "UTC") 250 | # returns were padded to len 127 (more than 6 months=126 days) 251 | self.assertEqual(returns.index.size, 127) 252 | self.assertTrue((returns.iloc[:125] == 0).all()) 253 | self.assertDictEqual( 254 | returns.iloc[125:].to_dict(), 255 | { 256 | pd.Timestamp('2018-05-07 00:00:00+0000', tz='UTC'): 0.004834136029612099, 257 | pd.Timestamp('2018-05-08 00:00:00+0000', tz='UTC'): 0.0086074440306706}) 258 | 259 | positions = kwargs["positions"] 260 | self.assertListEqual( 261 | positions.reset_index().to_dict(orient="records"), 262 | [ 263 | {'Date': pd.Timestamp('2018-05-07 00:00:00+0000', tz='UTC'), 264 | 'AAPL(265598)': 0.25, 265 | 'AMZN(3691937)': 0.3, 266 | 'cash': 0.44999999999999996}, 267 | {'Date': pd.Timestamp('2018-05-08 00:00:00+0000', tz='UTC'), 268 | 'AAPL(265598)': -0.5, 269 | 'AMZN(3691937)': 0.5, 270 | 'cash': 0} 271 | ] 272 | ) 273 | -------------------------------------------------------------------------------- /pyfolio/capacity.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | 3 | import empyrical as ep 4 | import numpy as np 5 | import pandas as pd 6 | 7 | from . import pos 8 | 9 | 10 | def daily_txns_with_bar_data(transactions, market_data): 11 | """ 12 | Sums the absolute value of shares traded in each name on each day. 13 | Adds columns containing the closing price and total daily volume for 14 | each day-ticker combination. 15 | 16 | Parameters 17 | ---------- 18 | transactions : pd.DataFrame 19 | Prices and amounts of executed trades. One row per trade. 20 | - See full explanation in tears.create_full_tear_sheet 21 | market_data : pd.DataFrame 22 | Daily market data. 23 | - DataFrame has a MultiIndex: one level is dates, the other is the 24 | market data field ('price' or 'volume'); equities as columns. 25 | 26 | Returns 27 | ------- 28 | txn_daily : pd.DataFrame 29 | Daily totals of transacted shares for each traded name, with 30 | price and volume columns giving the closing price and daily volume 31 | of the corresponding ticker, respectively. 32 | """ 33 | 34 | transactions.index.name = 'date' 35 | txn_daily = pd.DataFrame(transactions.assign( 36 | amount=abs(transactions.amount)).groupby( 37 | ['symbol', pd.Grouper(freq='D')]).sum()['amount']) 38 | txn_daily['price'] = market_data.xs('price', level=1).unstack() 39 | txn_daily['volume'] = market_data.xs('volume', level=1).unstack() 40 | 41 | txn_daily = txn_daily.reset_index().set_index('date') 42 | 43 | return txn_daily 44 | 45 | 46 | def days_to_liquidate_positions(positions, market_data, 47 | max_bar_consumption=0.2, 48 | capital_base=1e6, 49 | mean_volume_window=5): 50 | """ 51 | Compute the number of days that would have been required 52 | to fully liquidate each position on each day based on the 53 | trailing n-day mean daily bar volume and a limit on the proportion 54 | of a daily bar that we are allowed to consume.
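    For each day and symbol, the computation is (mirroring the code below):

        days_to_liquidate = (allocation * capital_base)
                            / (max_bar_consumption * trailing_mean_dollar_volume)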
55 | 56 | This analysis uses portfolio allocations and a provided capital base 57 | rather than the dollar values in the positions DataFrame to remove the 58 | effect of compounding on days to liquidate. In other words, this function 59 | assumes that the net liquidation portfolio value will always remain 60 | constant at capital_base. 61 | 62 | Parameters 63 | ---------- 64 | positions : pd.DataFrame 65 | Contains daily position values including cash 66 | - See full explanation in tears.create_full_tear_sheet 67 | market_data : pd.DataFrame 68 | Daily market data. 69 | - DataFrame has a MultiIndex: one level is dates, the other is the 70 | market data field ('price' or 'volume'); equities as columns. 71 | max_bar_consumption : float 72 | Max proportion of a daily bar that can be consumed in the 73 | process of liquidating a position. 74 | capital_base : integer 75 | Capital base multiplied by portfolio allocation to compute 76 | position value that needs liquidating. 77 | mean_volume_window : int 78 | Trailing window to use in mean volume calculation. 79 | 80 | Returns 81 | ------- 82 | days_to_liquidate : pd.DataFrame 83 | Number of days required to fully liquidate daily positions. 84 | Datetime index, symbols as columns. 85 | """ 86 | 87 | DV = market_data.xs('volume', level=1) * market_data.xs('price', level=1) 88 | roll_mean_dv = DV.rolling(window=mean_volume_window, 89 | center=False).mean().shift() 90 | roll_mean_dv = roll_mean_dv.replace(0, np.nan) 91 | 92 | positions_alloc = pos.get_percent_alloc(positions) 93 | positions_alloc = positions_alloc.drop('cash', axis=1) 94 | 95 | days_to_liquidate = (positions_alloc * capital_base) / \ 96 | (max_bar_consumption * roll_mean_dv) 97 | 98 | return days_to_liquidate.iloc[mean_volume_window:] 99 | 100 | 101 | def get_max_days_to_liquidate_by_ticker(positions, market_data, 102 | max_bar_consumption=0.2, 103 | capital_base=1e6, 104 | mean_volume_window=5, 105 | last_n_days=None): 106 | """ 107 | Finds the longest estimated liquidation time for each traded 108 | name over the course of the backtest (or its last n days). 109 | 110 | Parameters 111 | ---------- 112 | positions : pd.DataFrame 113 | Contains daily position values including cash 114 | - See full explanation in tears.create_full_tear_sheet 115 | market_data : pd.DataFrame 116 | Daily market data. 117 | - DataFrame has a MultiIndex: one level is dates, the other is the 118 | market data field ('price' or 'volume'); equities as columns. 119 | max_bar_consumption : float 120 | Max proportion of a daily bar that can be consumed in the 121 | process of liquidating a position. 122 | capital_base : integer 123 | Capital base multiplied by portfolio allocation to compute 124 | position value that needs liquidating. 125 | mean_volume_window : int 126 | Trailing window to use in mean volume calculation. 127 | last_n_days : integer 128 | Compute for only the last n days of the passed backtest data. 129 | 130 | Returns 131 | ------- 132 | days_to_liquidate : pd.DataFrame 133 | Max number of days required to fully liquidate each traded name. 134 | Index of symbols. Columns for days_to_liquidate and the corresponding 135 | date and position_alloc on that day.
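        - Example usage (hypothetical inputs):
            >>> worst_liq = get_max_days_to_liquidate_by_ticker(
            ...     positions, market_data, last_n_days=30)
            >>> worst_liq['days_to_liquidate'].nlargest(5)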
136 | """ 137 | 138 | dtlp = days_to_liquidate_positions(positions, market_data, 139 | max_bar_consumption=max_bar_consumption, 140 | capital_base=capital_base, 141 | mean_volume_window=mean_volume_window) 142 | 143 | if last_n_days is not None: 144 | dtlp = dtlp.loc[dtlp.index.max() - pd.Timedelta(days=last_n_days):] 145 | 146 | pos_alloc = pos.get_percent_alloc(positions) 147 | pos_alloc = pos_alloc.drop('cash', axis=1) 148 | 149 | liq_desc = pd.DataFrame() 150 | liq_desc['days_to_liquidate'] = dtlp.unstack() 151 | liq_desc['pos_alloc_pct'] = pos_alloc.unstack() * 100 152 | liq_desc.index.set_names(['symbol', 'date'], inplace=True) 153 | 154 | worst_liq = liq_desc.reset_index().sort_values( 155 | 'days_to_liquidate', ascending=False).groupby('symbol').first() 156 | 157 | return worst_liq 158 | 159 | 160 | def get_low_liquidity_transactions(transactions, market_data, 161 | last_n_days=None): 162 | """ 163 | For each traded name, find the daily transaction total that consumed 164 | the greatest proportion of available daily bar volume. 165 | 166 | Parameters 167 | ---------- 168 | transactions : pd.DataFrame 169 | Prices and amounts of executed trades. One row per trade. 170 | - See full explanation in create_full_tear_sheet. 171 | market_data : pd.DataFrame 172 | Daily market_data 173 | - DataFrame has a multi-index index, one level is dates and another is 174 | market_data contains volume & price, equities as columns 175 | last_n_days : integer 176 | Compute for only the last n days of the passed backtest data. 177 | """ 178 | 179 | txn_daily_w_bar = daily_txns_with_bar_data(transactions, market_data) 180 | txn_daily_w_bar.index.name = 'date' 181 | txn_daily_w_bar = txn_daily_w_bar.reset_index() 182 | 183 | if last_n_days is not None: 184 | md = txn_daily_w_bar.date.max() - pd.Timedelta(days=last_n_days) 185 | txn_daily_w_bar = txn_daily_w_bar[txn_daily_w_bar.date > md] 186 | 187 | bar_consumption = txn_daily_w_bar.assign( 188 | max_pct_bar_consumed=( 189 | txn_daily_w_bar.amount/txn_daily_w_bar.volume)*100 190 | ).sort_values('max_pct_bar_consumed', ascending=False) 191 | max_bar_consumption = bar_consumption.groupby('symbol').first() 192 | 193 | return max_bar_consumption[['date', 'max_pct_bar_consumed']] 194 | 195 | 196 | def apply_slippage_penalty(returns, txn_daily, simulate_starting_capital, 197 | backtest_starting_capital, impact=0.1): 198 | """ 199 | Applies quadratic volumeshare slippage model to daily returns based 200 | on the proportion of the observed historical daily bar dollar volume 201 | consumed by the strategy's trades. Scales the size of trades based 202 | on the ratio of the starting capital we wish to test to the starting 203 | capital of the passed backtest data. 204 | 205 | Parameters 206 | ---------- 207 | returns : pd.Series 208 | Time series of daily returns. 209 | txn_daily : pd.Series 210 | Daily transaciton totals, closing price, and daily volume for 211 | each traded name. See price_volume_daily_txns for more details. 212 | simulate_starting_capital : integer 213 | capital at which we want to test 214 | backtest_starting_capital: capital base at which backtest was 215 | origionally run. impact: See Zipline volumeshare slippage model 216 | impact : float 217 | Scales the size of the slippage penalty. 218 | 219 | Returns 220 | ------- 221 | adj_returns : pd.Series 222 | Slippage penalty adjusted daily returns. 
223 | """ 224 | 225 | mult = simulate_starting_capital / backtest_starting_capital 226 | simulate_traded_shares = abs(mult * txn_daily.amount) 227 | simulate_traded_dollars = txn_daily.price * simulate_traded_shares 228 | simulate_pct_volume_used = simulate_traded_shares / txn_daily.volume 229 | 230 | penalties = simulate_pct_volume_used**2 \ 231 | * impact * simulate_traded_dollars 232 | 233 | daily_penalty = penalties.resample('D').sum() 234 | daily_penalty = daily_penalty.reindex(returns.index).fillna(0) 235 | 236 | # Since we are scaling the numerator of the penalties linearly 237 | # by capital base, it makes the most sense to scale the denominator 238 | # similarly. In other words, since we aren't applying compounding to 239 | # simulate_traded_shares, we shouldn't apply compounding to pv. 240 | portfolio_value = ep.cum_returns( 241 | returns, starting_value=backtest_starting_capital) * mult 242 | 243 | adj_returns = returns - (daily_penalty / portfolio_value) 244 | 245 | return adj_returns 246 | -------------------------------------------------------------------------------- /pyfolio/quantrocket_moonshot.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 QuantRocket LLC - All Rights Reserved 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from typing import Union, TextIO 16 | import pandas as pd 17 | import numpy as np 18 | from quantrocket.moonshot import read_moonshot_csv 19 | from moonchart.utils import intraday_to_daily 20 | from .tears import create_full_tear_sheet 21 | from .quantrocket_utils import pad_initial 22 | 23 | __all__ = [ 24 | "from_moonshot_csv", 25 | ] 26 | 27 | def _get_benchmark_returns(benchmark): 28 | """ 29 | Returns a Series of benchmark returns, if any. If more than one column has 30 | benchmark returns, uses the first. 31 | """ 32 | have_benchmarks = (benchmark.fillna(0) != 0).any(axis=0) 33 | have_benchmarks = have_benchmarks[have_benchmarks] 34 | if have_benchmarks.empty: 35 | return None 36 | 37 | col = have_benchmarks.index[0] 38 | if len(have_benchmarks.index) > 1: 39 | import warnings 40 | warnings.warn("Multiple benchmarks found, only using first ({0})".format(col)) 41 | 42 | benchmark_returns = benchmark[col] 43 | benchmark_returns.name = "benchmark" 44 | return benchmark_returns 45 | 46 | def from_moonshot( 47 | results: pd.DataFrame, 48 | slippage: float = None, 49 | live_start_date: pd.Timestamp = None, 50 | sector_mappings: Union[dict[str, str], 'pd.Series[str]'] = None, 51 | round_trips: bool = False, 52 | estimate_intraday: Union[bool, str] = 'infer', 53 | hide_positions: bool = False, 54 | cone_std: Union[float, tuple[float, float, float]] = (1.0, 1.5, 2.0), 55 | bootstrap: bool = False, 56 | unadjusted_returns: 'pd.Series[float]' = None, 57 | turnover_denom: str = 'AGB', 58 | set_context: bool = True, 59 | header_rows: dict[str, str] = None 60 | ) -> None: 61 | """ 62 | Creates a full tear sheet from a moonshot backtest results DataFrame. 
63 | 64 | Parameters 65 | ---------- 66 | results : DataFrame 67 | MultiIndex (Field, Date) DataFrame of backtest results 68 | 69 | slippage : int/float, optional 70 | Basis points of slippage to apply to returns before generating 71 | tearsheet stats and plots. 72 | If a value is provided, slippage parameter sweep 73 | plots will be generated from the unadjusted returns. 74 | (Positions are derived automatically from the results DataFrame.) 75 | 76 | - See txn.adjust_returns_for_slippage for more details. 77 | 78 | live_start_date : datetime, optional 79 | The point in time when the strategy began live trading, 80 | after its backtest period. This datetime should be normalized. 81 | 82 | hide_positions : bool, optional 83 | If True, will not output any symbol names. 84 | 85 | round_trips : boolean, optional 86 | If True, causes the generation of a round trip tear sheet. 87 | 88 | sector_mappings : dict or pd.Series, optional 89 | Security identifier to sector mapping. 90 | Security ids as keys, sectors as values. 91 | 92 | estimate_intraday : boolean or str, optional 93 | Instead of using the end-of-day positions, use the point in the day 94 | where we have the most $ invested. This will adjust positions to 95 | better approximate and represent how an intraday strategy behaves. 96 | By default, this is 'infer', and an attempt will be made to detect 97 | an intraday strategy. Specifying this value will prevent detection. 98 | 99 | cone_std : float, or tuple, optional 100 | If float, the standard deviation to use for the cone plots. 101 | If tuple, the standard deviation values to use for the cone plots. 102 | 103 | - The cone is a normal distribution with this standard deviation 104 | centered around a linear regression. 105 | 106 | bootstrap : boolean, optional 107 | Whether to perform bootstrap analysis for the performance 108 | metrics. Takes a few minutes longer. 109 | 110 | turnover_denom : str 111 | Either AGB or portfolio_value, default AGB. 112 | 113 | - See full explanation in txn.get_turnover. 114 | 115 | header_rows : dict or OrderedDict, optional 116 | Extra rows to display at the top of the perf stats table. 117 | 118 | set_context : boolean, optional 119 | If True, set default plotting style context. 120 | 121 | - See plotting.context().
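    unadjusted_returns : pd.Series, optional
        Original returns without a slippage adjustment, used to generate
        slippage sweep plots (passed through unchanged to
        create_full_tear_sheet; this description is assumed from that
        usage).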
122 | 123 | Returns 124 | ------- 125 | None 126 | """ 127 | if "Time" in results.index.names: 128 | results = intraday_to_daily(results) 129 | 130 | # pandas DatetimeIndexes are serialized with UTC offsets, and pandas 131 | # parses them back to UTC but doesn't set the tz; pyfolio needs tz-aware indexes 132 | if not results.index.get_level_values("Date").tz: 133 | results = results.tz_localize("UTC", level="Date") 134 | 135 | returns = results.loc["Return"].sum(axis=1) 136 | positions = results.loc["NetExposure"] 137 | positions["cash"] = 1 - positions.abs().sum(axis=1) 138 | 139 | returns.name = "returns" 140 | returns = pad_initial(returns) 141 | 142 | fields = results.index.get_level_values("Field").unique() 143 | if "Benchmark" in fields: 144 | benchmark_rets = _get_benchmark_returns( 145 | results.loc["Benchmark"].astype(np.float64)) 146 | if benchmark_rets is not None:  # None if all benchmark columns are zero/NaN 147 | benchmark_rets = pad_initial(benchmark_rets.rename("benchmark_returns")) 148 | else: 149 | benchmark_rets = None 150 | 151 | return create_full_tear_sheet( 152 | returns, 153 | positions=positions, 154 | benchmark_rets=benchmark_rets, 155 | slippage=slippage, 156 | live_start_date=live_start_date, 157 | sector_mappings=sector_mappings, 158 | round_trips=round_trips, 159 | estimate_intraday=estimate_intraday, 160 | hide_positions=hide_positions, 161 | cone_std=cone_std, 162 | bootstrap=bootstrap, 163 | unadjusted_returns=unadjusted_returns, 164 | turnover_denom=turnover_denom, 165 | set_context=set_context, 166 | header_rows=header_rows 167 | ) 168 | 169 | def from_moonshot_csv( 170 | filepath_or_buffer: Union[str, TextIO], 171 | slippage: float = None, 172 | live_start_date: pd.Timestamp = None, 173 | sector_mappings: Union[dict[str, str], 'pd.Series[str]'] = None, 174 | round_trips: bool = False, 175 | estimate_intraday: Union[bool, str] = 'infer', 176 | hide_positions: bool = False, 177 | cone_std: Union[float, tuple[float, float, float]] = (1.0, 1.5, 2.0), 178 | bootstrap: bool = False, 179 | unadjusted_returns: 'pd.Series[float]' = None, 180 | turnover_denom: str = 'AGB', 181 | set_context: bool = True, 182 | header_rows: dict[str, str] = None 183 | ) -> None: 184 | """ 185 | Create a full tear sheet from a moonshot backtest results CSV. 186 | 187 | Parameters 188 | ---------- 189 | filepath_or_buffer : str or file-like object 190 | filepath or file-like object of the CSV 191 | 192 | slippage : int/float, optional 193 | Basis points of slippage to apply to returns before generating 194 | tearsheet stats and plots. 195 | If a value is provided, slippage parameter sweep 196 | plots will be generated from the unadjusted returns. 197 | (Positions are derived automatically from the results CSV.) 198 | 199 | - See txn.adjust_returns_for_slippage for more details. 200 | 201 | live_start_date : datetime, optional 202 | The point in time when the strategy began live trading, 203 | after its backtest period. This datetime should be normalized. 204 | 205 | hide_positions : bool, optional 206 | If True, will not output any symbol names. 207 | 208 | round_trips : boolean, optional 209 | If True, causes the generation of a round trip tear sheet. 210 | 211 | sector_mappings : dict or pd.Series, optional 212 | Security identifier to sector mapping. 213 | Security ids as keys, sectors as values. 214 | 215 | estimate_intraday : boolean or str, optional 216 | Instead of using the end-of-day positions, use the point in the day 217 | where we have the most $ invested.
This will adjust positions to 218 | better approximate and represent how an intraday strategy behaves. 219 | By default, this is 'infer', and an attempt will be made to detect 220 | an intraday strategy. Specifying this value will prevent detection. 221 | 222 | cone_std : float, or tuple, optional 223 | If float, the standard deviation to use for the cone plots. 224 | If tuple, the standard deviation values to use for the cone plots. 225 | 226 | - The cone is a normal distribution with this standard deviation 227 | centered around a linear regression. 228 | 229 | bootstrap : boolean, optional 230 | Whether to perform bootstrap analysis for the performance 231 | metrics. Takes a few minutes longer. 232 | 233 | turnover_denom : str 234 | Either AGB or portfolio_value, default AGB. 235 | 236 | - See full explanation in txn.get_turnover. 237 | 238 | header_rows : dict or OrderedDict, optional 239 | Extra rows to display at the top of the perf stats table. 240 | 241 | set_context : boolean, optional 242 | If True, set default plotting style context. 243 | 244 | - See plotting.context(). 245 | 246 | Returns 247 | ------- 248 | None 249 | 250 | Notes 251 | ----- 252 | Usage Guide: 253 | 254 | * Moonshot backtesting: https://qrok.it/dl/pf/moonshot-backtest 255 | """ 256 | results = read_moonshot_csv(filepath_or_buffer) 257 | return from_moonshot( 258 | results, 259 | slippage=slippage, 260 | live_start_date=live_start_date, 261 | sector_mappings=sector_mappings, 262 | round_trips=round_trips, 263 | estimate_intraday=estimate_intraday, 264 | hide_positions=hide_positions, 265 | cone_std=cone_std, 266 | bootstrap=bootstrap, 267 | unadjusted_returns=unadjusted_returns, 268 | turnover_denom=turnover_denom, 269 | set_context=set_context, 270 | header_rows=header_rows) 271 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types.
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2018 Quantopian, Inc. 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | 203 | -------------------------------------------------------------------------------- /pyfolio/_tests/test_data/residuals.csv: -------------------------------------------------------------------------------- 1 | ,19001,19002 2 | 2016-01-04,0.0,0.0 3 | 2016-01-05,0.0,0.0 4 | 2016-01-06,0.0,0.0 5 | 2016-01-07,0.0,0.0 6 | 2016-01-08,0.0,0.0 7 | 2016-01-11,0.0,0.0 8 | 2016-01-12,0.0,0.0 9 | 2016-01-13,0.0,0.0 10 | 2016-01-14,0.0,0.0 11 | 2016-01-15,0.0,0.0 12 | 2016-01-18,0.0,0.0 13 | 2016-01-19,0.0,0.0 14 | 2016-01-20,0.0,0.0 15 | 2016-01-21,0.0,0.0 16 | 2016-01-22,0.0,0.0 17 | 2016-01-25,0.0,0.0 18 | 2016-01-26,0.0,0.0 19 | 2016-01-27,0.0,0.0 20 | 2016-01-28,0.0,0.0 21 | 2016-01-29,0.0,0.0 22 | 2016-02-01,0.0,0.0 23 | 2016-02-02,0.0,0.0 24 | 2016-02-03,0.0,0.0 25 | 2016-02-04,0.0,0.0 26 | 2016-02-05,0.0,0.0 27 | 2016-02-08,0.0,0.0 28 | 2016-02-09,0.0,0.0 29 | 2016-02-10,0.0,0.0 30 | 2016-02-11,0.0,0.0 31 | 2016-02-12,0.0,0.0 32 | 2016-02-15,0.0,0.0 33 | 2016-02-16,0.0,0.0 34 | 2016-02-17,0.0,0.0 35 | 2016-02-18,0.0,0.0 36 | 2016-02-19,0.0,0.0 37 | 2016-02-22,0.0,0.0 38 | 2016-02-23,0.0,0.0 39 | 2016-02-24,0.0,0.0 40 | 2016-02-25,0.0,0.0 41 | 2016-02-26,0.0,0.0 42 | 2016-02-29,0.0,0.0 43 | 2016-03-01,0.0,0.0 44 | 2016-03-02,0.0,0.0 45 | 2016-03-03,0.0,0.0 46 | 2016-03-04,0.0,0.0 47 | 2016-03-07,0.0,0.0 48 | 2016-03-08,0.0,0.0 49 | 2016-03-09,0.0,0.0 50 | 2016-03-10,0.0,0.0 51 | 2016-03-11,0.0,0.0 52 | 2016-03-14,0.0,0.0 53 | 2016-03-15,0.0,0.0 54 | 2016-03-16,0.0,0.0 55 | 2016-03-17,0.0,0.0 56 | 2016-03-18,0.0,0.0 57 | 2016-03-21,0.0,0.0 58 | 2016-03-22,0.0,0.0 59 | 2016-03-23,0.0,0.0 60 | 2016-03-24,0.0,0.0 61 | 2016-03-25,0.0,0.0 62 | 2016-03-28,0.0,0.0 63 | 2016-03-29,0.0,0.0 64 | 2016-03-30,0.0,0.0 65 | 2016-03-31,0.0,0.0 66 | 2016-04-01,0.0,0.0 67 | 2016-04-04,0.0,0.0 68 | 2016-04-05,0.0,0.0 69 | 2016-04-06,0.0,0.0 70 | 2016-04-07,0.0,0.0 71 | 2016-04-08,0.0,0.0 72 | 2016-04-11,0.0,0.0 73 | 2016-04-12,0.0,0.0 74 | 2016-04-13,0.0,0.0 75 | 2016-04-14,0.0,0.0 76 | 2016-04-15,0.0,0.0 77 | 2016-04-18,0.0,0.0 78 | 2016-04-19,0.0,0.0 79 | 2016-04-20,0.0,0.0 80 | 2016-04-21,0.0,0.0 81 | 2016-04-22,0.0,0.0 82 | 2016-04-25,0.0,0.0 83 | 2016-04-26,0.0,0.0 84 | 2016-04-27,0.0,0.0 85 | 2016-04-28,0.0,0.0 86 | 2016-04-29,0.0,0.0 87 | 2016-05-02,0.0,0.0 88 | 2016-05-03,0.0,0.0 89 | 2016-05-04,0.0,0.0 90 | 2016-05-05,0.0,0.0 91 | 2016-05-06,0.0,0.0 92 | 2016-05-09,0.0,0.0 93 | 2016-05-10,0.0,0.0 94 | 2016-05-11,0.0,0.0 95 | 2016-05-12,0.0,0.0 96 | 2016-05-13,0.0,0.0 97 | 2016-05-16,0.0,0.0 98 | 2016-05-17,0.0,0.0 99 | 2016-05-18,0.0,0.0 100 | 2016-05-19,0.0,0.0 101 | 2016-05-20,0.0,0.0 102 | 2016-05-23,0.0,0.0 103 | 2016-05-24,0.0,0.0 104 | 2016-05-25,0.0,0.0 105 | 2016-05-26,0.0,0.0 106 | 2016-05-27,0.0,0.0 107 | 2016-05-30,0.0,0.0 108 | 2016-05-31,0.0,0.0 109 | 2016-06-01,0.0,0.0 110 | 2016-06-02,0.0,0.0 111 | 2016-06-03,0.0,0.0 112 | 2016-06-06,0.0,0.0 113 | 2016-06-07,0.0,0.0 114 | 2016-06-08,0.0,0.0 115 | 2016-06-09,0.0,0.0 116 | 2016-06-10,0.0,0.0 117 | 2016-06-13,0.0,0.0 118 | 2016-06-14,0.0,0.0 119 | 2016-06-15,0.0,0.0 120 | 2016-06-16,0.0,0.0 121 | 2016-06-17,0.0,0.0 122 | 2016-06-20,0.0,0.0 123 | 2016-06-21,0.0,0.0 124 | 2016-06-22,0.0,0.0 125 | 2016-06-23,0.0,0.0 126 | 2016-06-24,0.0,0.0 127 | 2016-06-27,0.0,0.0 128 | 2016-06-28,0.0,0.0 129 | 2016-06-29,0.0,0.0 130 | 2016-06-30,0.0,0.0 131 | 2016-07-01,0.0,0.0 132 | 2016-07-04,0.0,0.0 133 | 2016-07-05,0.0,0.0 134 | 2016-07-06,0.0,0.0 135 | 2016-07-07,0.0,0.0 136 | 2016-07-08,0.0,0.0 137 | 2016-07-11,0.0,0.0 138 | 2016-07-12,0.0,0.0 139 
| 2016-07-13,0.0,0.0 140 | 2016-07-14,0.0,0.0 141 | 2016-07-15,0.0,0.0 142 | 2016-07-18,0.0,0.0 143 | 2016-07-19,0.0,0.0 144 | 2016-07-20,0.0,0.0 145 | 2016-07-21,0.0,0.0 146 | 2016-07-22,0.0,0.0 147 | 2016-07-25,0.0,0.0 148 | 2016-07-26,0.0,0.0 149 | 2016-07-27,0.0,0.0 150 | 2016-07-28,0.0,0.0 151 | 2016-07-29,0.0,0.0 152 | 2016-08-01,0.0,0.0 153 | 2016-08-02,0.0,0.0 154 | 2016-08-03,0.0,0.0 155 | 2016-08-04,0.0,0.0 156 | 2016-08-05,0.0,0.0 157 | 2016-08-08,0.0,0.0 158 | 2016-08-09,0.0,0.0 159 | 2016-08-10,0.0,0.0 160 | 2016-08-11,0.0,0.0 161 | 2016-08-12,0.0,0.0 162 | 2016-08-15,0.0,0.0 163 | 2016-08-16,0.0,0.0 164 | 2016-08-17,0.0,0.0 165 | 2016-08-18,0.0,0.0 166 | 2016-08-19,0.0,0.0 167 | 2016-08-22,0.0,0.0 168 | 2016-08-23,0.0,0.0 169 | 2016-08-24,0.0,0.0 170 | 2016-08-25,0.0,0.0 171 | 2016-08-26,0.0,0.0 172 | 2016-08-29,0.0,0.0 173 | 2016-08-30,0.0,0.0 174 | 2016-08-31,0.0,0.0 175 | 2016-09-01,0.0,0.0 176 | 2016-09-02,0.0,0.0 177 | 2016-09-05,0.0,0.0 178 | 2016-09-06,0.0,0.0 179 | 2016-09-07,0.0,0.0 180 | 2016-09-08,0.0,0.0 181 | 2016-09-09,0.0,0.0 182 | 2016-09-12,0.0,0.0 183 | 2016-09-13,0.0,0.0 184 | 2016-09-14,0.0,0.0 185 | 2016-09-15,0.0,0.0 186 | 2016-09-16,0.0,0.0 187 | 2016-09-19,0.0,0.0 188 | 2016-09-20,0.0,0.0 189 | 2016-09-21,0.0,0.0 190 | 2016-09-22,0.0,0.0 191 | 2016-09-23,0.0,0.0 192 | 2016-09-26,0.0,0.0 193 | 2016-09-27,0.0,0.0 194 | 2016-09-28,0.0,0.0 195 | 2016-09-29,0.0,0.0 196 | 2016-09-30,0.0,0.0 197 | 2016-10-03,0.0,0.0 198 | 2016-10-04,0.0,0.0 199 | 2016-10-05,0.0,0.0 200 | 2016-10-06,0.0,0.0 201 | 2016-10-07,0.0,0.0 202 | 2016-10-10,0.0,0.0 203 | 2016-10-11,0.0,0.0 204 | 2016-10-12,0.0,0.0 205 | 2016-10-13,0.0,0.0 206 | 2016-10-14,0.0,0.0 207 | 2016-10-17,0.0,0.0 208 | 2016-10-18,0.0,0.0 209 | 2016-10-19,0.0,0.0 210 | 2016-10-20,0.0,0.0 211 | 2016-10-21,0.0,0.0 212 | 2016-10-24,0.0,0.0 213 | 2016-10-25,0.0,0.0 214 | 2016-10-26,0.0,0.0 215 | 2016-10-27,0.0,0.0 216 | 2016-10-28,0.0,0.0 217 | 2016-10-31,0.0,0.0 218 | 2016-11-01,0.0,0.0 219 | 2016-11-02,0.0,0.0 220 | 2016-11-03,0.0,0.0 221 | 2016-11-04,0.0,0.0 222 | 2016-11-07,0.0,0.0 223 | 2016-11-08,0.0,0.0 224 | 2016-11-09,0.0,0.0 225 | 2016-11-10,0.0,0.0 226 | 2016-11-11,0.0,0.0 227 | 2016-11-14,0.0,0.0 228 | 2016-11-15,0.0,0.0 229 | 2016-11-16,0.0,0.0 230 | 2016-11-17,0.0,0.0 231 | 2016-11-18,0.0,0.0 232 | 2016-11-21,0.0,0.0 233 | 2016-11-22,0.0,0.0 234 | 2016-11-23,0.0,0.0 235 | 2016-11-24,0.0,0.0 236 | 2016-11-25,0.0,0.0 237 | 2016-11-28,0.0,0.0 238 | 2016-11-29,0.0,0.0 239 | 2016-11-30,0.0,0.0 240 | 2016-12-01,0.0,0.0 241 | 2016-12-02,0.0,0.0 242 | 2016-12-05,0.0,0.0 243 | 2016-12-06,0.0,0.0 244 | 2016-12-07,0.0,0.0 245 | 2016-12-08,0.0,0.0 246 | 2016-12-09,0.0,0.0 247 | 2016-12-12,0.0,0.0 248 | 2016-12-13,0.0,0.0 249 | 2016-12-14,0.0,0.0 250 | 2016-12-15,0.0,0.0 251 | 2016-12-16,0.0,0.0 252 | 2016-12-19,0.0,0.0 253 | 2016-12-20,0.0,0.0 254 | 2016-12-21,0.0,0.0 255 | 2016-12-22,0.0,0.0 256 | 2016-12-23,0.0,0.0 257 | 2016-12-26,0.0,0.0 258 | 2016-12-27,0.0,0.0 259 | 2016-12-28,0.0,0.0 260 | 2016-12-29,0.0,0.0 261 | 2016-12-30,0.0,0.0 262 | 2017-01-02,0.0,0.0 263 | 2017-01-03,0.0,0.0 264 | 2017-01-04,0.0,0.0 265 | 2017-01-05,0.0,0.0 266 | 2017-01-06,0.0,0.0 267 | 2017-01-09,0.0,0.0 268 | 2017-01-10,0.0,0.0 269 | 2017-01-11,0.0,0.0 270 | 2017-01-12,0.0,0.0 271 | 2017-01-13,0.0,0.0 272 | 2017-01-16,0.0,0.0 273 | 2017-01-17,0.0,0.0 274 | 2017-01-18,0.0,0.0 275 | 2017-01-19,0.0,0.0 276 | 2017-01-20,0.0,0.0 277 | 2017-01-23,0.0,0.0 278 | 2017-01-24,0.0,0.0 279 | 2017-01-25,0.0,0.0 280 | 2017-01-26,0.0,0.0 281 | 
2017-01-27,0.0,0.0 282 | 2017-01-30,0.0,0.0 283 | 2017-01-31,0.0,0.0 284 | 2017-02-01,0.0,0.0 285 | 2017-02-02,0.0,0.0 286 | 2017-02-03,0.0,0.0 287 | 2017-02-06,0.0,0.0 288 | 2017-02-07,0.0,0.0 289 | 2017-02-08,0.0,0.0 290 | 2017-02-09,0.0,0.0 291 | 2017-02-10,0.0,0.0 292 | 2017-02-13,0.0,0.0 293 | 2017-02-14,0.0,0.0 294 | 2017-02-15,0.0,0.0 295 | 2017-02-16,0.0,0.0 296 | 2017-02-17,0.0,0.0 297 | 2017-02-20,0.0,0.0 298 | 2017-02-21,0.0,0.0 299 | 2017-02-22,0.0,0.0 300 | 2017-02-23,0.0,0.0 301 | 2017-02-24,0.0,0.0 302 | 2017-02-27,0.0,0.0 303 | 2017-02-28,0.0,0.0 304 | 2017-03-01,0.0,0.0 305 | 2017-03-02,0.0,0.0 306 | 2017-03-03,0.0,0.0 307 | 2017-03-06,0.0,0.0 308 | 2017-03-07,0.0,0.0 309 | 2017-03-08,0.0,0.0 310 | 2017-03-09,0.0,0.0 311 | 2017-03-10,0.0,0.0 312 | 2017-03-13,0.0,0.0 313 | 2017-03-14,0.0,0.0 314 | 2017-03-15,0.0,0.0 315 | 2017-03-16,0.0,0.0 316 | 2017-03-17,0.0,0.0 317 | 2017-03-20,0.0,0.0 318 | 2017-03-21,0.0,0.0 319 | 2017-03-22,0.0,0.0 320 | 2017-03-23,0.0,0.0 321 | 2017-03-24,0.0,0.0 322 | 2017-03-27,0.0,0.0 323 | 2017-03-28,0.0,0.0 324 | 2017-03-29,0.0,0.0 325 | 2017-03-30,0.0,0.0 326 | 2017-03-31,0.0,0.0 327 | 2017-04-03,0.0,0.0 328 | 2017-04-04,0.0,0.0 329 | 2017-04-05,0.0,0.0 330 | 2017-04-06,0.0,0.0 331 | 2017-04-07,0.0,0.0 332 | 2017-04-10,0.0,0.0 333 | 2017-04-11,0.0,0.0 334 | 2017-04-12,0.0,0.0 335 | 2017-04-13,0.0,0.0 336 | 2017-04-14,0.0,0.0 337 | 2017-04-17,0.0,0.0 338 | 2017-04-18,0.0,0.0 339 | 2017-04-19,0.0,0.0 340 | 2017-04-20,0.0,0.0 341 | 2017-04-21,0.0,0.0 342 | 2017-04-24,0.0,0.0 343 | 2017-04-25,0.0,0.0 344 | 2017-04-26,0.0,0.0 345 | 2017-04-27,0.0,0.0 346 | 2017-04-28,0.0,0.0 347 | 2017-05-01,0.0,0.0 348 | 2017-05-02,0.0,0.0 349 | 2017-05-03,0.0,0.0 350 | 2017-05-04,0.0,0.0 351 | 2017-05-05,0.0,0.0 352 | 2017-05-08,0.0,0.0 353 | 2017-05-09,0.0,0.0 354 | 2017-05-10,0.0,0.0 355 | 2017-05-11,0.0,0.0 356 | 2017-05-12,0.0,0.0 357 | 2017-05-15,0.0,0.0 358 | 2017-05-16,0.0,0.0 359 | 2017-05-17,0.0,0.0 360 | 2017-05-18,0.0,0.0 361 | 2017-05-19,0.0,0.0 362 | 2017-05-22,0.0,0.0 363 | 2017-05-23,0.0,0.0 364 | 2017-05-24,0.0,0.0 365 | 2017-05-25,0.0,0.0 366 | 2017-05-26,0.0,0.0 367 | 2017-05-29,0.0,0.0 368 | 2017-05-30,0.0,0.0 369 | 2017-05-31,0.0,0.0 370 | 2017-06-01,0.0,0.0 371 | 2017-06-02,0.0,0.0 372 | 2017-06-05,0.0,0.0 373 | 2017-06-06,0.0,0.0 374 | 2017-06-07,0.0,0.0 375 | 2017-06-08,0.0,0.0 376 | 2017-06-09,0.0,0.0 377 | 2017-06-12,0.0,0.0 378 | 2017-06-13,0.0,0.0 379 | 2017-06-14,0.0,0.0 380 | 2017-06-15,0.0,0.0 381 | 2017-06-16,0.0,0.0 382 | 2017-06-19,0.0,0.0 383 | 2017-06-20,0.0,0.0 384 | 2017-06-21,0.0,0.0 385 | 2017-06-22,0.0,0.0 386 | 2017-06-23,0.0,0.0 387 | 2017-06-26,0.0,0.0 388 | 2017-06-27,0.0,0.0 389 | 2017-06-28,0.0,0.0 390 | 2017-06-29,0.0,0.0 391 | 2017-06-30,0.0,0.0 392 | 2017-07-03,0.0,0.0 393 | 2017-07-04,0.0,0.0 394 | 2017-07-05,0.0,0.0 395 | 2017-07-06,0.0,0.0 396 | 2017-07-07,0.0,0.0 397 | 2017-07-10,0.0,0.0 398 | 2017-07-11,0.0,0.0 399 | 2017-07-12,0.0,0.0 400 | 2017-07-13,0.0,0.0 401 | 2017-07-14,0.0,0.0 402 | 2017-07-17,0.0,0.0 403 | 2017-07-18,0.0,0.0 404 | 2017-07-19,0.0,0.0 405 | 2017-07-20,0.0,0.0 406 | 2017-07-21,0.0,0.0 407 | 2017-07-24,0.0,0.0 408 | 2017-07-25,0.0,0.0 409 | 2017-07-26,0.0,0.0 410 | 2017-07-27,0.0,0.0 411 | 2017-07-28,0.0,0.0 412 | 2017-07-31,0.0,0.0 413 | 2017-08-01,0.0,0.0 414 | 2017-08-02,0.0,0.0 415 | 2017-08-03,0.0,0.0 416 | 2017-08-04,0.0,0.0 417 | 2017-08-07,0.0,0.0 418 | 2017-08-08,0.0,0.0 419 | 2017-08-09,0.0,0.0 420 | 2017-08-10,0.0,0.0 421 | 2017-08-11,0.0,0.0 422 | 2017-08-14,0.0,0.0 423 | 
2017-08-15,0.0,0.0 424 | 2017-08-16,0.0,0.0 425 | 2017-08-17,0.0,0.0 426 | 2017-08-18,0.0,0.0 427 | 2017-08-21,0.0,0.0 428 | 2017-08-22,0.0,0.0 429 | 2017-08-23,0.0,0.0 430 | 2017-08-24,0.0,0.0 431 | 2017-08-25,0.0,0.0 432 | 2017-08-28,0.0,0.0 433 | 2017-08-29,0.0,0.0 434 | 2017-08-30,0.0,0.0 435 | 2017-08-31,0.0,0.0 436 | 2017-09-01,0.0,0.0 437 | 2017-09-04,0.0,0.0 438 | 2017-09-05,0.0,0.0 439 | 2017-09-06,0.0,0.0 440 | 2017-09-07,0.0,0.0 441 | 2017-09-08,0.0,0.0 442 | 2017-09-11,0.0,0.0 443 | 2017-09-12,0.0,0.0 444 | 2017-09-13,0.0,0.0 445 | 2017-09-14,0.0,0.0 446 | 2017-09-15,0.0,0.0 447 | 2017-09-18,0.0,0.0 448 | 2017-09-19,0.0,0.0 449 | 2017-09-20,0.0,0.0 450 | 2017-09-21,0.0,0.0 451 | 2017-09-22,0.0,0.0 452 | 2017-09-25,0.0,0.0 453 | 2017-09-26,0.0,0.0 454 | 2017-09-27,0.0,0.0 455 | 2017-09-28,0.0,0.0 456 | 2017-09-29,0.0,0.0 457 | 2017-10-02,0.0,0.0 458 | 2017-10-03,0.0,0.0 459 | 2017-10-04,0.0,0.0 460 | 2017-10-05,0.0,0.0 461 | 2017-10-06,0.0,0.0 462 | 2017-10-09,0.0,0.0 463 | 2017-10-10,0.0,0.0 464 | 2017-10-11,0.0,0.0 465 | 2017-10-12,0.0,0.0 466 | 2017-10-13,0.0,0.0 467 | 2017-10-16,0.0,0.0 468 | 2017-10-17,0.0,0.0 469 | 2017-10-18,0.0,0.0 470 | 2017-10-19,0.0,0.0 471 | 2017-10-20,0.0,0.0 472 | 2017-10-23,0.0,0.0 473 | 2017-10-24,0.0,0.0 474 | 2017-10-25,0.0,0.0 475 | 2017-10-26,0.0,0.0 476 | 2017-10-27,0.0,0.0 477 | 2017-10-30,0.0,0.0 478 | 2017-10-31,0.0,0.0 479 | 2017-11-01,0.0,0.0 480 | 2017-11-02,0.0,0.0 481 | 2017-11-03,0.0,0.0 482 | 2017-11-06,0.0,0.0 483 | 2017-11-07,0.0,0.0 484 | 2017-11-08,0.0,0.0 485 | 2017-11-09,0.0,0.0 486 | 2017-11-10,0.0,0.0 487 | 2017-11-13,0.0,0.0 488 | 2017-11-14,0.0,0.0 489 | 2017-11-15,0.0,0.0 490 | 2017-11-16,0.0,0.0 491 | 2017-11-17,0.0,0.0 492 | 2017-11-20,0.0,0.0 493 | 2017-11-21,0.0,0.0 494 | 2017-11-22,0.0,0.0 495 | 2017-11-23,0.0,0.0 496 | 2017-11-24,0.0,0.0 497 | 2017-11-27,0.0,0.0 498 | 2017-11-28,0.0,0.0 499 | 2017-11-29,0.0,0.0 500 | 2017-11-30,0.0,0.0 501 | 2017-12-01,0.0,0.0 502 | 2017-12-04,0.0,0.0 503 | 2017-12-05,0.0,0.0 504 | 2017-12-06,0.0,0.0 505 | 2017-12-07,0.0,0.0 506 | -------------------------------------------------------------------------------- /pyfolio/_tests/test_timeseries.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | 3 | import os 4 | from unittest import TestCase 5 | from parameterized import parameterized 6 | from numpy.testing import assert_allclose, assert_almost_equal 7 | from pandas.testing import assert_series_equal 8 | 9 | import numpy as np 10 | import pandas as pd 11 | 12 | from .. import timeseries 13 | from pyfolio.utils import to_utc, to_series 14 | import gzip 15 | 16 | 17 | DECIMAL_PLACES = 8 18 | 19 | 20 | class TestDrawdown(TestCase): 21 | drawdown_list = np.array( 22 | [100, 90, 75] 23 | ) / 10. 24 | dt = pd.date_range('2000-1-3', periods=3, freq='D') 25 | 26 | drawdown_serie = pd.Series(drawdown_list, index=dt) 27 | 28 | @parameterized.expand([ 29 | (drawdown_serie,) 30 | ]) 31 | def test_get_max_drawdown_begins_first_day(self, px): 32 | rets = px.pct_change() 33 | drawdowns = timeseries.gen_drawdown_table(rets, top=1) 34 | self.assertEqual(drawdowns.loc[0, 'Net drawdown in %'], 25) 35 | 36 | drawdown_list = np.array( 37 | [100, 110, 120, 150, 180, 200, 100, 120, 38 | 160, 180, 200, 300, 400, 500, 600, 800, 39 | 900, 1000, 650, 600] 40 | ) / 10. 
41 | dt = pd.date_range('2000-1-3', periods=20, freq='D') 42 | 43 | drawdown_serie = pd.Series(drawdown_list, index=dt) 44 | 45 | @parameterized.expand([ 46 | (drawdown_serie, 47 | pd.Timestamp('2000-01-08'), 48 | pd.Timestamp('2000-01-09'), 49 | pd.Timestamp('2000-01-13'), 50 | 50, 51 | pd.Timestamp('2000-01-20'), 52 | pd.Timestamp('2000-01-22'), 53 | None, 54 | 40 55 | ) 56 | ]) 57 | def test_gen_drawdown_table_relative( 58 | self, px, 59 | first_expected_peak, first_expected_valley, 60 | first_expected_recovery, first_net_drawdown, 61 | second_expected_peak, second_expected_valley, 62 | second_expected_recovery, second_net_drawdown 63 | ): 64 | 65 | rets = px.pct_change() 66 | 67 | drawdowns = timeseries.gen_drawdown_table(rets, top=2) 68 | 69 | self.assertEqual(np.round(drawdowns.loc[0, 'Net drawdown in %']), 70 | first_net_drawdown) 71 | self.assertEqual(drawdowns.loc[0, 'Peak date'], 72 | first_expected_peak) 73 | self.assertEqual(drawdowns.loc[0, 'Valley date'], 74 | first_expected_valley) 75 | self.assertEqual(drawdowns.loc[0, 'Recovery date'], 76 | first_expected_recovery) 77 | 78 | self.assertEqual(np.round(drawdowns.loc[1, 'Net drawdown in %']), 79 | second_net_drawdown) 80 | self.assertEqual(drawdowns.loc[1, 'Peak date'], 81 | second_expected_peak) 82 | self.assertEqual(drawdowns.loc[1, 'Valley date'], 83 | second_expected_valley) 84 | self.assertTrue(pd.isnull(drawdowns.loc[1, 'Recovery date'])) 85 | 86 | px_list_1 = np.array( 87 | [100, 120, 100, 80, 70, 110, 180, 150]) / 100. # Simple 88 | px_list_2 = np.array( 89 | [100, 120, 100, 80, 70, 80, 90, 90]) / 100. # Ends in drawdown 90 | dt = pd.date_range('2000-1-3', periods=8, freq='D') 91 | 92 | @parameterized.expand([ 93 | (pd.Series(px_list_1, 94 | index=dt), 95 | pd.Timestamp('2000-1-4'), 96 | pd.Timestamp('2000-1-7'), 97 | pd.Timestamp('2000-1-9')), 98 | (pd.Series(px_list_2, 99 | index=dt), 100 | pd.Timestamp('2000-1-4'), 101 | pd.Timestamp('2000-1-7'), 102 | None) 103 | ]) 104 | def test_get_max_drawdown( 105 | self, px, expected_peak, expected_valley, expected_recovery): 106 | rets = px.pct_change().iloc[1:] 107 | 108 | peak, valley, recovery = timeseries.get_max_drawdown(rets) 109 | # Need to use isnull because the result can be NaN, NaT, etc. 
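# Each of the following checks uses a conditional expression for its side
# effects: assert nullness when no value is expected, equality otherwise.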
110 | self.assertTrue( 111 | pd.isnull(peak)) if expected_peak is None else self.assertEqual( 112 | peak, 113 | expected_peak) 114 | self.assertTrue( 115 | pd.isnull(valley)) if expected_valley is None else \ 116 | self.assertEqual( 117 | valley, 118 | expected_valley) 119 | self.assertTrue( 120 | pd.isnull(recovery)) if expected_recovery is None else \ 121 | self.assertEqual( 122 | recovery, 123 | expected_recovery) 124 | 125 | @parameterized.expand([ 126 | (pd.Series(px_list_2, 127 | index=dt), 128 | pd.Timestamp('2000-1-4'), 129 | pd.Timestamp('2000-1-7'), 130 | None, 131 | None), 132 | (pd.Series(px_list_1, 133 | index=dt), 134 | pd.Timestamp('2000-1-4'), 135 | pd.Timestamp('2000-1-7'), 136 | pd.Timestamp('2000-1-9'), 137 | 4) 138 | ]) 139 | def test_gen_drawdown_table(self, px, expected_peak, 140 | expected_valley, expected_recovery, 141 | expected_duration): 142 | rets = px.pct_change().iloc[1:] 143 | 144 | drawdowns = timeseries.gen_drawdown_table(rets, top=1) 145 | self.assertTrue( 146 | pd.isnull( 147 | drawdowns.loc[ 148 | 0, 149 | 'Peak date'])) if expected_peak is None \ 150 | else self.assertEqual(drawdowns.loc[0, 'Peak date'], 151 | expected_peak) 152 | self.assertTrue( 153 | pd.isnull( 154 | drawdowns.loc[0, 'Valley date'])) \ 155 | if expected_valley is None else self.assertEqual( 156 | drawdowns.loc[0, 'Valley date'], 157 | expected_valley) 158 | self.assertTrue( 159 | pd.isnull( 160 | drawdowns.loc[0, 'Recovery date'])) \ 161 | if expected_recovery is None else self.assertEqual( 162 | drawdowns.loc[0, 'Recovery date'], 163 | expected_recovery) 164 | self.assertTrue( 165 | pd.isnull(drawdowns.loc[0, 'Duration'])) \ 166 | if expected_duration is None else self.assertEqual( 167 | drawdowns.loc[0, 'Duration'], expected_duration) 168 | 169 | def test_drawdown_overlaps(self): 170 | rand = np.random.RandomState(1337) 171 | n_samples = 252 * 5 172 | spy_returns = pd.Series( 173 | rand.standard_t(3.1, n_samples), 174 | pd.date_range('2005-01-02', periods=n_samples), 175 | ) 176 | spy_drawdowns = timeseries.gen_drawdown_table( 177 | spy_returns, 178 | top=20).sort_values(by='Peak date') 179 | # Compare the recovery date of each drawdown with the peak of the next 180 | # Last pair might contain a NaT if drawdown didn't finish, so ignore it 181 | pairs = list(zip(spy_drawdowns['Recovery date'], 182 | spy_drawdowns['Peak date'].shift(-1)))[:-1] 183 | self.assertGreater(len(pairs), 0) 184 | for recovery, peak in pairs: 185 | if not pd.isnull(recovery): 186 | self.assertLessEqual(recovery, peak) 187 | 188 | @parameterized.expand([ 189 | (pd.Series(px_list_1, 190 | index=dt), 191 | 1, 192 | [(pd.Timestamp('2000-01-03 00:00:00'), 193 | pd.Timestamp('2000-01-03 00:00:00'), 194 | pd.Timestamp('2000-01-03 00:00:00'))]) 195 | ]) 196 | def test_top_drawdowns(self, returns, top, expected): 197 | self.assertEqual( 198 | timeseries.get_top_drawdowns( 199 | returns, 200 | top=top), 201 | expected) 202 | 203 | 204 | class TestVariance(TestCase): 205 | 206 | @parameterized.expand([ 207 | (1e7, 0.5, 1, 1, -10000000.0) 208 | ]) 209 | def test_var_cov_var_normal(self, P, c, mu, sigma, expected): 210 | self.assertEqual( 211 | timeseries.var_cov_var_normal( 212 | P, 213 | c, 214 | mu, 215 | sigma), 216 | expected) 217 | 218 | 219 | class TestNormalize(TestCase): 220 | dt = pd.date_range('2000-1-3', periods=8, freq='D') 221 | px_list = [1.0, 1.2, 1.0, 0.8, 0.7, 0.8, 0.8, 0.8] 222 | 223 | @parameterized.expand([ 224 | (pd.Series(np.array(px_list) * 100, index=dt), 225 | pd.Series(px_list, index=dt)) 226 | 
]) 227 | def test_normalize(self, returns, expected): 228 | self.assertTrue(timeseries.normalize(returns).equals(expected)) 229 | 230 | 231 | class TestStats(TestCase): 232 | simple_rets = pd.Series( 233 | [0.1] * 3 + [0] * 497, 234 | pd.date_range( 235 | '2000-1-3', 236 | periods=500, 237 | freq='D')) 238 | 239 | simple_week_rets = pd.Series( 240 | [0.1] * 3 + [0] * 497, 241 | pd.date_range( 242 | '2000-1-31', 243 | periods=500, 244 | freq='W')) 245 | 246 | simple_month_rets = pd.Series( 247 | [0.1] * 3 + [0] * 497, 248 | pd.date_range( 249 | '2000-1-31', 250 | periods=500, 251 | freq='M')) 252 | 253 | simple_benchmark = pd.Series( 254 | [0.03] * 4 + [0] * 496, 255 | pd.date_range( 256 | '2000-1-1', 257 | periods=500, 258 | freq='D')) 259 | px_list = np.array( 260 | [10, -10, 10]) / 100. # Ends in drawdown 261 | dt = pd.date_range('2000-1-3', periods=3, freq='D') 262 | 263 | px_list_2 = [1.0, 1.2, 1.0, 0.8, 0.7, 0.8, 0.8, 0.8] 264 | dt_2 = pd.date_range('2000-1-3', periods=8, freq='D') 265 | 266 | @parameterized.expand([ 267 | (simple_rets[:5], 2, [0, np.inf, np.inf, 11.224972160321, 0]) 268 | ]) 269 | def test_sharpe_2(self, returns, rolling_sharpe_window, expected): 270 | np.testing.assert_array_almost_equal( 271 | timeseries.rolling_sharpe(returns, 272 | rolling_sharpe_window).fillna(0).values, 273 | np.asarray(expected)) 274 | 275 | @parameterized.expand([ 276 | (simple_rets[:5], simple_benchmark, 2, 0) 277 | ]) 278 | def test_beta(self, returns, benchmark_rets, rolling_window, expected): 279 | actual = timeseries.rolling_beta( 280 | returns, 281 | benchmark_rets, 282 | rolling_window=rolling_window, 283 | ).values.tolist()[2] 284 | 285 | np.testing.assert_almost_equal(actual, expected) 286 | 287 | 288 | class TestCone(TestCase): 289 | def test_bootstrap_cone_against_linear_cone_normal_returns(self): 290 | random_seed = 100 291 | np.random.seed(random_seed) 292 | days_forward = 200 293 | cone_stdevs = (1., 1.5, 2.) 294 | mu = .005 295 | sigma = .002 296 | rets = pd.Series(np.random.normal(mu, sigma, 10000)) 297 | 298 | midline = np.cumprod(1 + (rets.mean() * np.ones(days_forward))) 299 | stdev = rets.std() * midline * np.sqrt(np.arange(days_forward)+1) 300 | 301 | normal_cone = pd.DataFrame(columns=pd.Index([], dtype='float64')) 302 | for s in cone_stdevs: 303 | normal_cone[s] = midline + s * stdev 304 | normal_cone[-s] = midline - s * stdev 305 | 306 | bootstrap_cone = timeseries.forecast_cone_bootstrap( 307 | rets, days_forward, cone_stdevs, starting_value=1, 308 | random_seed=random_seed, num_samples=10000) 309 | 310 | for col, vals in bootstrap_cone.items(): 311 | expected = normal_cone[col].values 312 | assert_allclose(vals.values, expected, rtol=.005) 313 | 314 | 315 | class TestBootstrap(TestCase): 316 | @parameterized.expand([ 317 | (0., 1., 1000), 318 | (1., 2., 500), 319 | (-1., 0.1, 10), 320 | ]) 321 | def test_calc_bootstrap(self, true_mean, true_sd, n): 322 | """Compare bootstrap distribution of the mean to sampling distribution 323 | of the mean. 
324 | 325 | """ 326 | np.random.seed(123) 327 | func = np.mean 328 | returns = pd.Series((np.random.randn(n) * true_sd) + 329 | true_mean) 330 | 331 | samples = timeseries.calc_bootstrap(func, returns, 332 | n_samples=10000) 333 | 334 | # Calculate statistics of sampling distribution of the mean 335 | mean_of_mean = np.mean(returns) 336 | sd_of_mean = np.std(returns) / np.sqrt(n) 337 | 338 | assert_almost_equal( 339 | np.mean(samples), 340 | mean_of_mean, 341 | 3, 342 | 'Mean of bootstrap does not match theoretical mean of ' 343 | 'sampling distribution') 344 | 345 | assert_almost_equal( 346 | np.std(samples), 347 | sd_of_mean, 348 | 3, 349 | 'SD of bootstrap does not match theoretical SD of ' 350 | 'sampling distribution') 351 | 352 | 353 | class TestGrossLev(TestCase): 354 | __location__ = os.path.realpath( 355 | os.path.join(os.getcwd(), os.path.dirname(__file__))) 356 | 357 | test_pos = to_utc(pd.read_csv( 358 | gzip.open(__location__ + '/test_data/test_pos.csv.gz'), 359 | index_col=0, parse_dates=True)) 360 | test_gross_lev = pd.read_csv( 361 | gzip.open( 362 | __location__ + '/test_data/test_gross_lev.csv.gz'), 363 | index_col=0, parse_dates=True) 364 | test_gross_lev = to_series(to_utc(test_gross_lev)) 365 | 366 | def test_gross_lev_calculation(self): 367 | assert_series_equal( 368 | timeseries.gross_lev(self.test_pos)['2004-02-01':], 369 | self.test_gross_lev['2004-02-01':], check_names=False) 370 | -------------------------------------------------------------------------------- /pyfolio/_tests/test_data/positions.csv: -------------------------------------------------------------------------------- 1 | ,19001,19002,cash 2 | 2016-01-04,1.0,1.0,0.0 3 | 2016-01-05,1.0,1.0,0.0 4 | 2016-01-06,1.0,1.0,0.0 5 | 2016-01-07,1.0,1.0,0.0 6 | 2016-01-08,1.0,1.0,0.0 7 | 2016-01-11,1.0,1.0,0.0 8 | 2016-01-12,1.0,1.0,0.0 9 | 2016-01-13,1.0,1.0,0.0 10 | 2016-01-14,1.0,1.0,0.0 11 | 2016-01-15,1.0,1.0,0.0 12 | 2016-01-18,1.0,1.0,0.0 13 | 2016-01-19,1.0,1.0,0.0 14 | 2016-01-20,1.0,1.0,0.0 15 | 2016-01-21,1.0,1.0,0.0 16 | 2016-01-22,1.0,1.0,0.0 17 | 2016-01-25,1.0,1.0,0.0 18 | 2016-01-26,1.0,1.0,0.0 19 | 2016-01-27,1.0,1.0,0.0 20 | 2016-01-28,1.0,1.0,0.0 21 | 2016-01-29,1.0,1.0,0.0 22 | 2016-02-01,1.0,1.0,0.0 23 | 2016-02-02,1.0,1.0,0.0 24 | 2016-02-03,1.0,1.0,0.0 25 | 2016-02-04,1.0,1.0,0.0 26 | 2016-02-05,1.0,1.0,0.0 27 | 2016-02-08,1.0,1.0,0.0 28 | 2016-02-09,1.0,1.0,0.0 29 | 2016-02-10,1.0,1.0,0.0 30 | 2016-02-11,1.0,1.0,0.0 31 | 2016-02-12,1.0,1.0,0.0 32 | 2016-02-15,1.0,1.0,0.0 33 | 2016-02-16,1.0,1.0,0.0 34 | 2016-02-17,1.0,1.0,0.0 35 | 2016-02-18,1.0,1.0,0.0 36 | 2016-02-19,1.0,1.0,0.0 37 | 2016-02-22,1.0,1.0,0.0 38 | 2016-02-23,1.0,1.0,0.0 39 | 2016-02-24,1.0,1.0,0.0 40 | 2016-02-25,1.0,1.0,0.0 41 | 2016-02-26,1.0,1.0,0.0 42 | 2016-02-29,1.0,1.0,0.0 43 | 2016-03-01,1.0,1.0,0.0 44 | 2016-03-02,1.0,1.0,0.0 45 | 2016-03-03,1.0,1.0,0.0 46 | 2016-03-04,1.0,1.0,0.0 47 | 2016-03-07,1.0,1.0,0.0 48 | 2016-03-08,1.0,1.0,0.0 49 | 2016-03-09,1.0,1.0,0.0 50 | 2016-03-10,1.0,1.0,0.0 51 | 2016-03-11,1.0,1.0,0.0 52 | 2016-03-14,1.0,1.0,0.0 53 | 2016-03-15,1.0,1.0,0.0 54 | 2016-03-16,1.0,1.0,0.0 55 | 2016-03-17,1.0,1.0,0.0 56 | 2016-03-18,1.0,1.0,0.0 57 | 2016-03-21,1.0,1.0,0.0 58 | 2016-03-22,1.0,1.0,0.0 59 | 2016-03-23,1.0,1.0,0.0 60 | 2016-03-24,1.0,1.0,0.0 61 | 2016-03-25,1.0,1.0,0.0 62 | 2016-03-28,1.0,1.0,0.0 63 | 2016-03-29,1.0,1.0,0.0 64 | 2016-03-30,1.0,1.0,0.0 65 | 2016-03-31,1.0,1.0,0.0 66 | 2016-04-01,1.0,1.0,0.0 67 | 2016-04-04,1.0,1.0,0.0 68 | 2016-04-05,1.0,1.0,0.0 69 | 2016-04-06,1.0,1.0,0.0 70 | 
2016-04-07,1.0,1.0,0.0 71 | 2016-04-08,1.0,1.0,0.0 72 | 2016-04-11,1.0,1.0,0.0 73 | 2016-04-12,1.0,1.0,0.0 74 | 2016-04-13,1.0,1.0,0.0 75 | 2016-04-14,1.0,1.0,0.0 76 | 2016-04-15,1.0,1.0,0.0 77 | 2016-04-18,1.0,1.0,0.0 78 | 2016-04-19,1.0,1.0,0.0 79 | 2016-04-20,1.0,1.0,0.0 80 | 2016-04-21,1.0,1.0,0.0 81 | 2016-04-22,1.0,1.0,0.0 82 | 2016-04-25,1.0,1.0,0.0 83 | 2016-04-26,1.0,1.0,0.0 84 | 2016-04-27,1.0,1.0,0.0 85 | 2016-04-28,1.0,1.0,0.0 86 | 2016-04-29,1.0,1.0,0.0 87 | 2016-05-02,1.0,1.0,0.0 88 | 2016-05-03,1.0,1.0,0.0 89 | 2016-05-04,1.0,1.0,0.0 90 | 2016-05-05,1.0,1.0,0.0 91 | 2016-05-06,1.0,1.0,0.0 92 | 2016-05-09,1.0,1.0,0.0 93 | 2016-05-10,1.0,1.0,0.0 94 | 2016-05-11,1.0,1.0,0.0 95 | 2016-05-12,1.0,1.0,0.0 96 | 2016-05-13,1.0,1.0,0.0 97 | 2016-05-16,1.0,1.0,0.0 98 | 2016-05-17,1.0,1.0,0.0 99 | 2016-05-18,1.0,1.0,0.0 100 | 2016-05-19,1.0,1.0,0.0 101 | 2016-05-20,1.0,1.0,0.0 102 | 2016-05-23,1.0,1.0,0.0 103 | 2016-05-24,1.0,1.0,0.0 104 | 2016-05-25,1.0,1.0,0.0 105 | 2016-05-26,1.0,1.0,0.0 106 | 2016-05-27,1.0,1.0,0.0 107 | 2016-05-30,1.0,1.0,0.0 108 | 2016-05-31,1.0,1.0,0.0 109 | 2016-06-01,1.0,1.0,0.0 110 | 2016-06-02,1.0,1.0,0.0 111 | 2016-06-03,1.0,1.0,0.0 112 | 2016-06-06,1.0,1.0,0.0 113 | 2016-06-07,1.0,1.0,0.0 114 | 2016-06-08,1.0,1.0,0.0 115 | 2016-06-09,1.0,1.0,0.0 116 | 2016-06-10,1.0,1.0,0.0 117 | 2016-06-13,1.0,1.0,0.0 118 | 2016-06-14,1.0,1.0,0.0 119 | 2016-06-15,1.0,1.0,0.0 120 | 2016-06-16,1.0,1.0,0.0 121 | 2016-06-17,1.0,1.0,0.0 122 | 2016-06-20,1.0,1.0,0.0 123 | 2016-06-21,1.0,1.0,0.0 124 | 2016-06-22,1.0,1.0,0.0 125 | 2016-06-23,1.0,1.0,0.0 126 | 2016-06-24,1.0,1.0,0.0 127 | 2016-06-27,1.0,1.0,0.0 128 | 2016-06-28,1.0,1.0,0.0 129 | 2016-06-29,1.0,1.0,0.0 130 | 2016-06-30,1.0,1.0,0.0 131 | 2016-07-01,1.0,1.0,0.0 132 | 2016-07-04,1.0,1.0,0.0 133 | 2016-07-05,1.0,1.0,0.0 134 | 2016-07-06,1.0,1.0,0.0 135 | 2016-07-07,1.0,1.0,0.0 136 | 2016-07-08,1.0,1.0,0.0 137 | 2016-07-11,1.0,1.0,0.0 138 | 2016-07-12,1.0,1.0,0.0 139 | 2016-07-13,1.0,1.0,0.0 140 | 2016-07-14,1.0,1.0,0.0 141 | 2016-07-15,1.0,1.0,0.0 142 | 2016-07-18,1.0,1.0,0.0 143 | 2016-07-19,1.0,1.0,0.0 144 | 2016-07-20,1.0,1.0,0.0 145 | 2016-07-21,1.0,1.0,0.0 146 | 2016-07-22,1.0,1.0,0.0 147 | 2016-07-25,1.0,1.0,0.0 148 | 2016-07-26,1.0,1.0,0.0 149 | 2016-07-27,1.0,1.0,0.0 150 | 2016-07-28,1.0,1.0,0.0 151 | 2016-07-29,1.0,1.0,0.0 152 | 2016-08-01,1.0,1.0,0.0 153 | 2016-08-02,1.0,1.0,0.0 154 | 2016-08-03,1.0,1.0,0.0 155 | 2016-08-04,1.0,1.0,0.0 156 | 2016-08-05,1.0,1.0,0.0 157 | 2016-08-08,1.0,1.0,0.0 158 | 2016-08-09,1.0,1.0,0.0 159 | 2016-08-10,1.0,1.0,0.0 160 | 2016-08-11,1.0,1.0,0.0 161 | 2016-08-12,1.0,1.0,0.0 162 | 2016-08-15,1.0,1.0,0.0 163 | 2016-08-16,1.0,1.0,0.0 164 | 2016-08-17,1.0,1.0,0.0 165 | 2016-08-18,1.0,1.0,0.0 166 | 2016-08-19,1.0,1.0,0.0 167 | 2016-08-22,1.0,1.0,0.0 168 | 2016-08-23,1.0,1.0,0.0 169 | 2016-08-24,1.0,1.0,0.0 170 | 2016-08-25,1.0,1.0,0.0 171 | 2016-08-26,1.0,1.0,0.0 172 | 2016-08-29,1.0,1.0,0.0 173 | 2016-08-30,1.0,1.0,0.0 174 | 2016-08-31,1.0,1.0,0.0 175 | 2016-09-01,1.0,1.0,0.0 176 | 2016-09-02,1.0,1.0,0.0 177 | 2016-09-05,1.0,1.0,0.0 178 | 2016-09-06,1.0,1.0,0.0 179 | 2016-09-07,1.0,1.0,0.0 180 | 2016-09-08,1.0,1.0,0.0 181 | 2016-09-09,1.0,1.0,0.0 182 | 2016-09-12,1.0,1.0,0.0 183 | 2016-09-13,1.0,1.0,0.0 184 | 2016-09-14,1.0,1.0,0.0 185 | 2016-09-15,1.0,1.0,0.0 186 | 2016-09-16,1.0,1.0,0.0 187 | 2016-09-19,1.0,1.0,0.0 188 | 2016-09-20,1.0,1.0,0.0 189 | 2016-09-21,1.0,1.0,0.0 190 | 2016-09-22,1.0,1.0,0.0 191 | 2016-09-23,1.0,1.0,0.0 192 | 2016-09-26,1.0,1.0,0.0 193 | 
2016-09-27,1.0,1.0,0.0 194 | 2016-09-28,1.0,1.0,0.0 195 | 2016-09-29,1.0,1.0,0.0 196 | 2016-09-30,1.0,1.0,0.0 197 | 2016-10-03,1.0,1.0,0.0 198 | 2016-10-04,1.0,1.0,0.0 199 | 2016-10-05,1.0,1.0,0.0 200 | 2016-10-06,1.0,1.0,0.0 201 | 2016-10-07,1.0,1.0,0.0 202 | 2016-10-10,1.0,1.0,0.0 203 | 2016-10-11,1.0,1.0,0.0 204 | 2016-10-12,1.0,1.0,0.0 205 | 2016-10-13,1.0,1.0,0.0 206 | 2016-10-14,1.0,1.0,0.0 207 | 2016-10-17,1.0,1.0,0.0 208 | 2016-10-18,1.0,1.0,0.0 209 | 2016-10-19,1.0,1.0,0.0 210 | 2016-10-20,1.0,1.0,0.0 211 | 2016-10-21,1.0,1.0,0.0 212 | 2016-10-24,1.0,1.0,0.0 213 | 2016-10-25,1.0,1.0,0.0 214 | 2016-10-26,1.0,1.0,0.0 215 | 2016-10-27,1.0,1.0,0.0 216 | 2016-10-28,1.0,1.0,0.0 217 | 2016-10-31,1.0,1.0,0.0 218 | 2016-11-01,1.0,1.0,0.0 219 | 2016-11-02,1.0,1.0,0.0 220 | 2016-11-03,1.0,1.0,0.0 221 | 2016-11-04,1.0,1.0,0.0 222 | 2016-11-07,1.0,1.0,0.0 223 | 2016-11-08,1.0,1.0,0.0 224 | 2016-11-09,1.0,1.0,0.0 225 | 2016-11-10,1.0,1.0,0.0 226 | 2016-11-11,1.0,1.0,0.0 227 | 2016-11-14,1.0,1.0,0.0 228 | 2016-11-15,1.0,1.0,0.0 229 | 2016-11-16,1.0,1.0,0.0 230 | 2016-11-17,1.0,1.0,0.0 231 | 2016-11-18,1.0,1.0,0.0 232 | 2016-11-21,1.0,1.0,0.0 233 | 2016-11-22,1.0,1.0,0.0 234 | 2016-11-23,1.0,1.0,0.0 235 | 2016-11-24,1.0,1.0,0.0 236 | 2016-11-25,1.0,1.0,0.0 237 | 2016-11-28,1.0,1.0,0.0 238 | 2016-11-29,1.0,1.0,0.0 239 | 2016-11-30,1.0,1.0,0.0 240 | 2016-12-01,1.0,1.0,0.0 241 | 2016-12-02,1.0,1.0,0.0 242 | 2016-12-05,1.0,1.0,0.0 243 | 2016-12-06,1.0,1.0,0.0 244 | 2016-12-07,1.0,1.0,0.0 245 | 2016-12-08,1.0,1.0,0.0 246 | 2016-12-09,1.0,1.0,0.0 247 | 2016-12-12,1.0,1.0,0.0 248 | 2016-12-13,1.0,1.0,0.0 249 | 2016-12-14,1.0,1.0,0.0 250 | 2016-12-15,1.0,1.0,0.0 251 | 2016-12-16,1.0,1.0,0.0 252 | 2016-12-19,1.0,1.0,0.0 253 | 2016-12-20,1.0,1.0,0.0 254 | 2016-12-21,1.0,1.0,0.0 255 | 2016-12-22,1.0,1.0,0.0 256 | 2016-12-23,1.0,1.0,0.0 257 | 2016-12-26,1.0,1.0,0.0 258 | 2016-12-27,1.0,1.0,0.0 259 | 2016-12-28,1.0,1.0,0.0 260 | 2016-12-29,1.0,1.0,0.0 261 | 2016-12-30,1.0,1.0,0.0 262 | 2017-01-02,1.0,1.0,0.0 263 | 2017-01-03,1.0,1.0,0.0 264 | 2017-01-04,1.0,1.0,0.0 265 | 2017-01-05,1.0,1.0,0.0 266 | 2017-01-06,1.0,1.0,0.0 267 | 2017-01-09,1.0,1.0,0.0 268 | 2017-01-10,1.0,1.0,0.0 269 | 2017-01-11,1.0,1.0,0.0 270 | 2017-01-12,1.0,1.0,0.0 271 | 2017-01-13,1.0,1.0,0.0 272 | 2017-01-16,1.0,1.0,0.0 273 | 2017-01-17,1.0,1.0,0.0 274 | 2017-01-18,1.0,1.0,0.0 275 | 2017-01-19,1.0,1.0,0.0 276 | 2017-01-20,1.0,1.0,0.0 277 | 2017-01-23,1.0,1.0,0.0 278 | 2017-01-24,1.0,1.0,0.0 279 | 2017-01-25,1.0,1.0,0.0 280 | 2017-01-26,1.0,1.0,0.0 281 | 2017-01-27,1.0,1.0,0.0 282 | 2017-01-30,1.0,1.0,0.0 283 | 2017-01-31,1.0,1.0,0.0 284 | 2017-02-01,1.0,1.0,0.0 285 | 2017-02-02,1.0,1.0,0.0 286 | 2017-02-03,1.0,1.0,0.0 287 | 2017-02-06,1.0,1.0,0.0 288 | 2017-02-07,1.0,1.0,0.0 289 | 2017-02-08,1.0,1.0,0.0 290 | 2017-02-09,1.0,1.0,0.0 291 | 2017-02-10,1.0,1.0,0.0 292 | 2017-02-13,1.0,1.0,0.0 293 | 2017-02-14,1.0,1.0,0.0 294 | 2017-02-15,1.0,1.0,0.0 295 | 2017-02-16,1.0,1.0,0.0 296 | 2017-02-17,1.0,1.0,0.0 297 | 2017-02-20,1.0,1.0,0.0 298 | 2017-02-21,1.0,1.0,0.0 299 | 2017-02-22,1.0,1.0,0.0 300 | 2017-02-23,1.0,1.0,0.0 301 | 2017-02-24,1.0,1.0,0.0 302 | 2017-02-27,1.0,1.0,0.0 303 | 2017-02-28,1.0,1.0,0.0 304 | 2017-03-01,1.0,1.0,0.0 305 | 2017-03-02,1.0,1.0,0.0 306 | 2017-03-03,1.0,1.0,0.0 307 | 2017-03-06,1.0,1.0,0.0 308 | 2017-03-07,1.0,1.0,0.0 309 | 2017-03-08,1.0,1.0,0.0 310 | 2017-03-09,1.0,1.0,0.0 311 | 2017-03-10,1.0,1.0,0.0 312 | 2017-03-13,1.0,1.0,0.0 313 | 2017-03-14,1.0,1.0,0.0 314 | 2017-03-15,1.0,1.0,0.0 315 | 
2017-03-16,1.0,1.0,0.0 316 | 2017-03-17,1.0,1.0,0.0 317 | 2017-03-20,1.0,1.0,0.0 318 | 2017-03-21,1.0,1.0,0.0 319 | 2017-03-22,1.0,1.0,0.0 320 | 2017-03-23,1.0,1.0,0.0 321 | 2017-03-24,1.0,1.0,0.0 322 | 2017-03-27,1.0,1.0,0.0 323 | 2017-03-28,1.0,1.0,0.0 324 | 2017-03-29,1.0,1.0,0.0 325 | 2017-03-30,1.0,1.0,0.0 326 | 2017-03-31,1.0,1.0,0.0 327 | 2017-04-03,1.0,1.0,0.0 328 | 2017-04-04,1.0,1.0,0.0 329 | 2017-04-05,1.0,1.0,0.0 330 | 2017-04-06,1.0,1.0,0.0 331 | 2017-04-07,1.0,1.0,0.0 332 | 2017-04-10,1.0,1.0,0.0 333 | 2017-04-11,1.0,1.0,0.0 334 | 2017-04-12,1.0,1.0,0.0 335 | 2017-04-13,1.0,1.0,0.0 336 | 2017-04-14,1.0,1.0,0.0 337 | 2017-04-17,1.0,1.0,0.0 338 | 2017-04-18,1.0,1.0,0.0 339 | 2017-04-19,1.0,1.0,0.0 340 | 2017-04-20,1.0,1.0,0.0 341 | 2017-04-21,1.0,1.0,0.0 342 | 2017-04-24,1.0,1.0,0.0 343 | 2017-04-25,1.0,1.0,0.0 344 | 2017-04-26,1.0,1.0,0.0 345 | 2017-04-27,1.0,1.0,0.0 346 | 2017-04-28,1.0,1.0,0.0 347 | 2017-05-01,1.0,1.0,0.0 348 | 2017-05-02,1.0,1.0,0.0 349 | 2017-05-03,1.0,1.0,0.0 350 | 2017-05-04,1.0,1.0,0.0 351 | 2017-05-05,1.0,1.0,0.0 352 | 2017-05-08,1.0,1.0,0.0 353 | 2017-05-09,1.0,1.0,0.0 354 | 2017-05-10,1.0,1.0,0.0 355 | 2017-05-11,1.0,1.0,0.0 356 | 2017-05-12,1.0,1.0,0.0 357 | 2017-05-15,1.0,1.0,0.0 358 | 2017-05-16,1.0,1.0,0.0 359 | 2017-05-17,1.0,1.0,0.0 360 | 2017-05-18,1.0,1.0,0.0 361 | 2017-05-19,1.0,1.0,0.0 362 | 2017-05-22,1.0,1.0,0.0 363 | 2017-05-23,1.0,1.0,0.0 364 | 2017-05-24,1.0,1.0,0.0 365 | 2017-05-25,1.0,1.0,0.0 366 | 2017-05-26,1.0,1.0,0.0 367 | 2017-05-29,1.0,1.0,0.0 368 | 2017-05-30,1.0,1.0,0.0 369 | 2017-05-31,1.0,1.0,0.0 370 | 2017-06-01,1.0,1.0,0.0 371 | 2017-06-02,1.0,1.0,0.0 372 | 2017-06-05,1.0,1.0,0.0 373 | 2017-06-06,1.0,1.0,0.0 374 | 2017-06-07,1.0,1.0,0.0 375 | 2017-06-08,1.0,1.0,0.0 376 | 2017-06-09,1.0,1.0,0.0 377 | 2017-06-12,1.0,1.0,0.0 378 | 2017-06-13,1.0,1.0,0.0 379 | 2017-06-14,1.0,1.0,0.0 380 | 2017-06-15,1.0,1.0,0.0 381 | 2017-06-16,1.0,1.0,0.0 382 | 2017-06-19,1.0,1.0,0.0 383 | 2017-06-20,1.0,1.0,0.0 384 | 2017-06-21,1.0,1.0,0.0 385 | 2017-06-22,1.0,1.0,0.0 386 | 2017-06-23,1.0,1.0,0.0 387 | 2017-06-26,1.0,1.0,0.0 388 | 2017-06-27,1.0,1.0,0.0 389 | 2017-06-28,1.0,1.0,0.0 390 | 2017-06-29,1.0,1.0,0.0 391 | 2017-06-30,1.0,1.0,0.0 392 | 2017-07-03,1.0,1.0,0.0 393 | 2017-07-04,1.0,1.0,0.0 394 | 2017-07-05,1.0,1.0,0.0 395 | 2017-07-06,1.0,1.0,0.0 396 | 2017-07-07,1.0,1.0,0.0 397 | 2017-07-10,1.0,1.0,0.0 398 | 2017-07-11,1.0,1.0,0.0 399 | 2017-07-12,1.0,1.0,0.0 400 | 2017-07-13,1.0,1.0,0.0 401 | 2017-07-14,1.0,1.0,0.0 402 | 2017-07-17,1.0,1.0,0.0 403 | 2017-07-18,1.0,1.0,0.0 404 | 2017-07-19,1.0,1.0,0.0 405 | 2017-07-20,1.0,1.0,0.0 406 | 2017-07-21,1.0,1.0,0.0 407 | 2017-07-24,1.0,1.0,0.0 408 | 2017-07-25,1.0,1.0,0.0 409 | 2017-07-26,1.0,1.0,0.0 410 | 2017-07-27,1.0,1.0,0.0 411 | 2017-07-28,1.0,1.0,0.0 412 | 2017-07-31,1.0,1.0,0.0 413 | 2017-08-01,1.0,1.0,0.0 414 | 2017-08-02,1.0,1.0,0.0 415 | 2017-08-03,1.0,1.0,0.0 416 | 2017-08-04,1.0,1.0,0.0 417 | 2017-08-07,1.0,1.0,0.0 418 | 2017-08-08,1.0,1.0,0.0 419 | 2017-08-09,1.0,1.0,0.0 420 | 2017-08-10,1.0,1.0,0.0 421 | 2017-08-11,1.0,1.0,0.0 422 | 2017-08-14,1.0,1.0,0.0 423 | 2017-08-15,1.0,1.0,0.0 424 | 2017-08-16,1.0,1.0,0.0 425 | 2017-08-17,1.0,1.0,0.0 426 | 2017-08-18,1.0,1.0,0.0 427 | 2017-08-21,1.0,1.0,0.0 428 | 2017-08-22,1.0,1.0,0.0 429 | 2017-08-23,1.0,1.0,0.0 430 | 2017-08-24,1.0,1.0,0.0 431 | 2017-08-25,1.0,1.0,0.0 432 | 2017-08-28,1.0,1.0,0.0 433 | 2017-08-29,1.0,1.0,0.0 434 | 2017-08-30,1.0,1.0,0.0 435 | 2017-08-31,1.0,1.0,0.0 436 | 2017-09-01,1.0,1.0,0.0 437 | 
2017-09-04,1.0,1.0,0.0 438 | 2017-09-05,1.0,1.0,0.0 439 | 2017-09-06,1.0,1.0,0.0 440 | 2017-09-07,1.0,1.0,0.0 441 | 2017-09-08,1.0,1.0,0.0 442 | 2017-09-11,1.0,1.0,0.0 443 | 2017-09-12,1.0,1.0,0.0 444 | 2017-09-13,1.0,1.0,0.0 445 | 2017-09-14,1.0,1.0,0.0 446 | 2017-09-15,1.0,1.0,0.0 447 | 2017-09-18,1.0,1.0,0.0 448 | 2017-09-19,1.0,1.0,0.0 449 | 2017-09-20,1.0,1.0,0.0 450 | 2017-09-21,1.0,1.0,0.0 451 | 2017-09-22,1.0,1.0,0.0 452 | 2017-09-25,1.0,1.0,0.0 453 | 2017-09-26,1.0,1.0,0.0 454 | 2017-09-27,1.0,1.0,0.0 455 | 2017-09-28,1.0,1.0,0.0 456 | 2017-09-29,1.0,1.0,0.0 457 | 2017-10-02,1.0,1.0,0.0 458 | 2017-10-03,1.0,1.0,0.0 459 | 2017-10-04,1.0,1.0,0.0 460 | 2017-10-05,1.0,1.0,0.0 461 | 2017-10-06,1.0,1.0,0.0 462 | 2017-10-09,1.0,1.0,0.0 463 | 2017-10-10,1.0,1.0,0.0 464 | 2017-10-11,1.0,1.0,0.0 465 | 2017-10-12,1.0,1.0,0.0 466 | 2017-10-13,1.0,1.0,0.0 467 | 2017-10-16,1.0,1.0,0.0 468 | 2017-10-17,1.0,1.0,0.0 469 | 2017-10-18,1.0,1.0,0.0 470 | 2017-10-19,1.0,1.0,0.0 471 | 2017-10-20,1.0,1.0,0.0 472 | 2017-10-23,1.0,1.0,0.0 473 | 2017-10-24,1.0,1.0,0.0 474 | 2017-10-25,1.0,1.0,0.0 475 | 2017-10-26,1.0,1.0,0.0 476 | 2017-10-27,1.0,1.0,0.0 477 | 2017-10-30,1.0,1.0,0.0 478 | 2017-10-31,1.0,1.0,0.0 479 | 2017-11-01,1.0,1.0,0.0 480 | 2017-11-02,1.0,1.0,0.0 481 | 2017-11-03,1.0,1.0,0.0 482 | 2017-11-06,1.0,1.0,0.0 483 | 2017-11-07,1.0,1.0,0.0 484 | 2017-11-08,1.0,1.0,0.0 485 | 2017-11-09,1.0,1.0,0.0 486 | 2017-11-10,1.0,1.0,0.0 487 | 2017-11-13,1.0,1.0,0.0 488 | 2017-11-14,1.0,1.0,0.0 489 | 2017-11-15,1.0,1.0,0.0 490 | 2017-11-16,1.0,1.0,0.0 491 | 2017-11-17,1.0,1.0,0.0 492 | 2017-11-20,1.0,1.0,0.0 493 | 2017-11-21,1.0,1.0,0.0 494 | 2017-11-22,1.0,1.0,0.0 495 | 2017-11-23,1.0,1.0,0.0 496 | 2017-11-24,1.0,1.0,0.0 497 | 2017-11-27,1.0,1.0,0.0 498 | 2017-11-28,1.0,1.0,0.0 499 | 2017-11-29,1.0,1.0,0.0 500 | 2017-11-30,1.0,1.0,0.0 501 | 2017-12-01,1.0,1.0,0.0 502 | 2017-12-04,1.0,1.0,0.0 503 | 2017-12-05,1.0,1.0,0.0 504 | 2017-12-06,1.0,1.0,0.0 505 | 2017-12-07,1.0,1.0,0.0 506 | -------------------------------------------------------------------------------- /WHATSNEW.md: -------------------------------------------------------------------------------- 1 | # What's New 2 | 3 | These are new features and improvements of note in each release. 4 | 5 | ## v0.9.0 (Aug 1st, 2018) 6 | 7 | ### New features 8 | 9 | - Previously, `pyfolio` required a benchmark, usually the U.S. market 10 | returns `SPY`. In order to provide support for international equities and 11 | alternative data sets, `pyfolio` is now completely independent of benchmarks. 12 | If a benchmark is passed, all benchmark-related analyses will be performed; 13 | if not, they will simply be skipped. By [George Ho](https://github.com/eigenfoo) 14 | - Performance attribution tearsheet [PR441](https://github.com/quantopian/pyfolio/pull/441), [PR433](https://github.com/quantopian/pyfolio/pull/433), [PR442](https://github.com/quantopian/pyfolio/pull/442). By [Vikram Narayan](https://github.com/vikram-narayan). 15 | - Improved implementation of `get_turnover` [PR432](https://github.com/quantopian/pyfolio/pull/432). By [Gus Gordon](https://github.com/gusgordon). 16 | - Users can now pass in extra rows (as a dict or OrderedDict) to display in the perf_stats table [PR445](https://github.com/quantopian/pyfolio/pull/445). By [Gus Gordon](https://github.com/gusgordon). 17 | 18 | ### Maintenance 19 | 20 | - Many features have been more extensively troubleshot, maintained and 21 | tested. 
By [Ana Ruelas](https://github.com/ahgnaw) and [Vikram 22 | Narayan](https://github.com/vikram-narayan). 23 | - Various fixes to support pandas versions >= 0.18.1 [PR443](https://github.com/quantopian/pyfolio/pull/443). By [Andrew Daniels](https://github.com/yankees714). 24 | 25 | ## v0.8.0 (Aug 23rd, 2017) 26 | 27 | This is a major release from `0.7.0`, and all users are recommended to upgrade. 28 | 29 | ### New features 30 | 31 | - Risk tear sheet: added a new tear sheet to analyze risk exposures to common 32 | factors (e.g. mean reversion and momentum), sectors (e.g. Morningstar 33 | sectors), market cap and illiquid stocks. By [George 34 | Ho](https://github.com/eigenfoo). 35 | - Simple tear sheet: added a new tear sheet that presents only the most 36 | important plots in the full tear sheet, for a quick general overview of a 37 | portfolio's performance. By [George Ho](https://github.com/eigenfoo). 38 | - Performance attribution: added a new table for performance attribution 39 | analysis, such as the amount of returns attributable to common factors, and 40 | summary statistics such as the multi-factor alpha and multi-factor Sharpe 41 | ratio. By [Vikram Narayan](https://github.com/vikram-narayan). 42 | - Volatility plot: added a rolling annual volatility plot to the returns tear 43 | sheet. By [hkopp](https://github.com/hkopp). 44 | 45 | ### Bugfixes 46 | 47 | - Yahoo and pandas data-reader: fixed bug regarding Yahoo backend for market 48 | data and pandas data-reader. By [Thomas Wiecki](https://github.com/twiecki) 49 | and [Gus Gordon](https://github.com/gusgordon). 50 | - `empyrical` compatibility: removed `information_ratio` to remain compatible 51 | with `empyrical`. By [Thomas Wiecki](https://github.com/twiecki). 52 | - Fama-French rolling multivariate regression: fixed bug where the rolling 53 | Fama-French plot performed separate linear regressions instead of a 54 | multivariate regression. By [George Ho](https://github.com/eigenfoo). 55 | - Other minor bugfixes. By [Scott Sanderson](https://github.com/ssanderson), 56 | [Jonathan Ng](https://github.com/jonathanng), 57 | [SylvainDe](https://github.com/SylvainDe) and 58 | [mckelvin](https://github.com/mckelvin). 59 | 60 | ### Maintenance 61 | 62 | - Documentation: updated and improved `pyfolio` documentation and example 63 | Jupyter notebooks. By [George Ho](https://github.com/eigenfoo). 64 | - Data loader migration: all data loaders have been migrated from `pyfolio` to 65 | `empyrical`. By [James Christopher](https://github.com/jameschristopher). 66 | - Improved plotting style: fixed issues with formatting and presentation of 67 | plots. By [George Ho](https://github.com/eigenfoo). 68 | 69 | ## v0.7.0 (Jan 28th, 2017) 70 | 71 | This is a major release from `0.6.0`, and all users are recommended to upgrade. 72 | 73 | ### New features 74 | 75 | - Adds a transaction timing plot, which gives insight into the strategy's 76 | trade times. 77 | - Adds a plot showing the number of longs and shorts held over time. 78 | - New round trips plot selects a sample of held positions (16 by default) and 79 | shows their round trips. This replaces the old round trip plot, which became 80 | unreadable for strategies that traded many positions. 81 | - Adds basic capability for analyzing intraday strategies. 
If a strategy makes 82 | a large number of transactions relative to its end-of-day positions, then 83 | pyfolio will attempt to reconstruct the intraday positions, take the point of 84 | peak exposure to the market during each day, and plot that data with the 85 | positions tear sheet. By default pyfolio will automatically detect this, but 86 | the behavior can be changed by passing either `estimate_intraday=True` or 87 | `estimate_intraday=False` to the tear sheet functions ([see 88 | here](https://github.com/quantopian/pyfolio/blob/master/pyfolio/tears.py#L131)). 89 | - Now formats [zipline](https://github.com/quantopian/zipline) assets, 90 | displaying their ticker symbol. 91 | - Gross leverage is no longer required to be passed, and will now be calculated 92 | from the passed positions DataFrame. 93 | 94 | ### Bugfixes 95 | 96 | - Cone plotting location is now correct. 97 | - Adjust scaling of beta and Fama-French plots. 98 | - Removed multiple dependencies, some of which were previously unused. 99 | - Various text fixes. 100 | 101 | ## v0.6.0 (Oct 17, 2016) 102 | 103 | This is a major new release from `0.5.1`. All users are recommended to upgrade. 104 | 105 | ### New features 106 | 107 | * Computation of performance and risk measures has been split off into 108 | [`empyrical`](https://github.com/quantopian/empyrical). This allows 109 | [`Zipline`](https://zipline.io) and `pyfolio` to use the same code to 110 | calculate their risk statistics. By [Ana Ruelas](https://github.com/ahgnaw) and 111 | [Abhi Kalyan](https://github.com/abhijeetkalyan). 112 | * New multistrike cone which redraws the cone when it crosses its initial bounds 113 | [PR310](https://github.com/quantopian/pyfolio/pull/310). By [Ana 114 | Ruelas](https://github.com/ahgnaw) and [Abhi 115 | Kalyan](https://github.com/abhijeetkalyan). 116 | 117 | ### Bugfixes 118 | 119 | * Can use most recent PyMC3 now. 120 | * Depends on seaborn 0.7.0 or later now 121 | [PR331](https://github.com/quantopian/pyfolio/pull/331). 122 | * Disable buggy computation of round trips per day and per month 123 | [PR339](https://github.com/quantopian/pyfolio/pull/339). 124 | 125 | ## v0.5.1 (June 10, 2016) 126 | 127 | This is a bugfix release from `0.5.0` with limited new functionality. All users are recommended to upgrade. 128 | 129 | ### New features 130 | 131 | * OOS data is now overlaid on top of box plot 132 | [PR306](https://github.com/quantopian/pyfolio/pull/306) by [Ana 133 | Ruelas](https://github.com/ahgnaw) 134 | * New logo [PR298](https://github.com/quantopian/pyfolio/pull/298) by [Taso 135 | Petridis](https://github.com/tasopetridis) and [Richard 136 | Frank](https://github.com/richafrank) 137 | * Raw returns plot and cumulative log returns plot 138 | [PR294](https://github.com/quantopian/pyfolio/pull/294) by [Thomas 139 | Wiecki](https://github.com/twiecki) 140 | * Net exposure line to the long/short exposure plot 141 | [PR301](https://github.com/quantopian/pyfolio/pull/301) by [Ana 142 | Ruelas](https://github.com/ahgnaw) 143 | 144 | ### Bugfixes 145 | 146 | * Fix drawdown behavior and pandas exception in tear-sheet creation 147 | [PR297](https://github.com/quantopian/pyfolio/pull/297) by [Flavio 148 | Duarte](https://github.com/flaviodrt) 149 | 150 | ## v0.5.0 (April 21, 2016) -- Olympia 151 | 152 | This is a major release from `0.4.0` that includes many new analyses and 153 | features. We recommend that all users upgrade to this new version. 
Also update 154 | your dependencies, specifically, `pandas>=0.18.0`, `seaborn>=0.6.0` and 155 | `zipline>=0.8.4`. 156 | 157 | ### New features 158 | 159 | * New capacity tear-sheet to assess how much capital can be traded on a strategy 160 | [PR284](https://github.com/quantopian/pyfolio/pull/284). [Andrew 161 | Campbell](https://github.com/a-campbell). 162 | * Bootstrap analysis to assess uncertainty in performance metrics 163 | [PR261](https://github.com/quantopian/pyfolio/pull/261). [Thomas 164 | Wiecki](https://github.com/twiecki) 165 | * Refactored round-trip analysis to be more general and have better output. Now 166 | does full portfolio reconstruction to match trades 167 | [PR293](https://github.com/quantopian/pyfolio/pull/293). [Thomas 168 | Wiecki](https://github.com/twiecki), [Andrew 169 | Campbell](https://github.com/a-campbell). See the 170 | [tutorial](http://quantopian.github.io/pyfolio/round_trip_example/) for more 171 | information. 172 | * Prettier printing of tables in notebooks 173 | [PR289](https://github.com/quantopian/pyfolio/pull/289). [Thomas 174 | Wiecki](https://github.com/twiecki) 175 | * Faster max-drawdown calculation 176 | [PR281](https://github.com/quantopian/pyfolio/pull/281). [Devin 177 | Stevenson](https://github.com/devinstevenson) 178 | * New metrics tail-ratio and common sense ratio 179 | [PR276](https://github.com/quantopian/pyfolio/pull/276). [Thomas 180 | Wiecki](https://github.com/twiecki) 181 | * Log-scaled cumulative returns plot and raw returns plot 182 | [PR294](https://github.com/quantopian/pyfolio/pull/294). [Thomas 183 | Wiecki](https://github.com/twiecki) 184 | 185 | ### Bug fixes 186 | * Many deprecation fixes for Pandas 0.18.0, seaborn 0.6.0, and zipline 0.8.4 187 | 188 | 189 | ## v0.4.0 (Dec 10, 2015) 190 | 191 | This is a major release from 0.3.1 that includes new features and quite a few bug fixes. We recommend that all users upgrade to this new version. 192 | 193 | ### New features 194 | 195 | * Round-trip analysis [PR210](https://github.com/quantopian/pyfolio/pull/210) 196 | Andrew, Thomas 197 | * Improved cone to forecast returns that uses a bootstrap instead of linear 198 | forecasting [PR233](https://github.com/quantopian/pyfolio/pull/233) Andrew, 199 | Thomas 200 | * Plot max and median long/short exposures 201 | [PR237](https://github.com/quantopian/pyfolio/pull/237) Andrew 202 | 203 | ### Bug fixes 204 | 205 | * Sharpe ratio was calculated incorrectly 206 | [PR219](https://github.com/quantopian/pyfolio/pull/219) Thomas, Justin 207 | * annual_return() now only computes CAGR in the correct way 208 | [PR234](https://github.com/quantopian/pyfolio/pull/234) Justin 209 | * Cache SPY and Fama-French returns in home-directory instead of 210 | install-directory [PR241](https://github.com/quantopian/pyfolio/pull/241) Joe 211 | * Remove data files from package 212 | [PR241](https://github.com/quantopian/pyfolio/pull/241) Joe 213 | * Cast factor.name to str 214 | [PR223](https://github.com/quantopian/pyfolio/pull/223) Scotty 215 | * Test all `create_*_tear_sheet` functions in all configurations 216 | [PR247](https://github.com/quantopian/pyfolio/pull/247) Thomas 217 | 218 | 219 | ## v0.3.1 (Nov 12, 2015) 220 | 221 | This is a minor release from 0.3 that includes mostly bugfixes but also some new features. We recommend that all users upgrade to this new version. 
222 | 223 | ### New features 224 | 225 | * Add Information Ratio [PR194](https://github.com/quantopian/pyfolio/pull/194) 226 | by @MridulS 227 | * Bayesian tear-sheet now accepts 'Fama-French' option to do Bayesian 228 | multivariate regression against Fama-French risk factors 229 | [PR200](https://github.com/quantopian/pyfolio/pull/200) by Shane Bussman 230 | * Plotting of monthly returns 231 | [PR195](https://github.com/quantopian/pyfolio/pull/195) 232 | 233 | ### Bug fixes 234 | 235 | * `pos.get_percent_alloc` was not handling short allocations correctly 236 | [PR201](https://github.com/quantopian/pyfolio/pull/201) 237 | * UTC bug with cached Fama-French factors 238 | [commit](https://github.com/quantopian/pyfolio/commit/709553a55b5df7c908d17f443cb17b51854a65be) 239 | * Sector map was not being passed from `create_returns_tearsheet` 240 | [commit](https://github.com/quantopian/pyfolio/commit/894b753e365f9cb4861ffca2ef214c5a64b2bef4) 241 | * New sector mapping feature was not Python 3 compatible 242 | [PR201](https://github.com/quantopian/pyfolio/pull/201) 243 | 244 | 245 | ### Maintenance 246 | 247 | * We now depend on pandas-datareader as the yahoo finance loaders from pandas 248 | will be deprecated [PR181](https://github.com/quantopian/pyfolio/pull/181) by 249 | @tswrightsandpointe 250 | 251 | ### Contributors 252 | 253 | Besides the core developers, we have seen an increase in outside contributions 254 | which we greatly appreciate. Specifically, these people contributed to this 255 | release: 256 | 257 | * Shane Bussman 258 | * @MridulS 259 | * @YihaoLu 260 | * @jkrauss82 261 | * @tswrightsandpointe 262 | * @cgdeboer 263 | 264 | 265 | ## v0.3 (Oct 23, 2015) 266 | 267 | This is a major release from 0.2 that includes many exciting new features. We 268 | recommend that all users upgrade to this new version. 269 | 270 | ### New features 271 | 272 | * Sector exposures: sum positions by sector given a dictionary or series of 273 | symbol to sector mappings 274 | [PR166](https://github.com/quantopian/pyfolio/pull/166) 275 | * Ability to make cones with multiple shaded stdev regions 276 | [PR168](https://github.com/quantopian/pyfolio/pull/168) 277 | * Slippage sweep: See how an algorithm performs with various levels of slippage 278 | [PR170](https://github.com/quantopian/pyfolio/pull/170) 279 | * Stochastic volatility model in Bayesian tear sheet 280 | [PR174](https://github.com/quantopian/pyfolio/pull/174) 281 | * Ability to suppress display of position information 282 | [PR177](https://github.com/quantopian/pyfolio/pull/177) 283 | 284 | ### Bug fixes 285 | 286 | * Various fixes to make pyfolio pandas 0.17 compatible 287 | 288 | ## v0.2 (Oct 16, 2015) 289 | 290 | This is a major release from 0.1 that includes mainly bugfixes and refactorings 291 | but also some new features. We recommend that all users upgrade to this new 292 | version. 293 | 294 | ### New features 295 | 296 | * Volatility matched cumulative returns plot 297 | [PR126](https://github.com/quantopian/pyfolio/pull/126). 298 | * Allow for different periodicity (annualization factors) in the annual_() 299 | methods [PR164](https://github.com/quantopian/pyfolio/pull/164). 300 | * Users can supply their own interesting periods 301 | [PR163](https://github.com/quantopian/pyfolio/pull/163); see the sketch after this list. 302 | * Ability to weight a portfolio of holdings by a metric value 303 | [PR161](https://github.com/quantopian/pyfolio/pull/161). 
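A minimal sketch of the interesting-periods feature referenced above. This snippet is illustrative rather than taken from the release: the synthetic returns series and the 'Brexit vote' period are invented for the demo, and it assumes (as on current master) that `interesting_periods.PERIODS` is an ordered mapping of period names to `(start, end)` timestamp pairs consumed by `timeseries.extract_interesting_date_ranges`.

```python
import numpy as np
import pandas as pd
from pyfolio import interesting_periods, timeseries

# Synthetic daily strategy returns, only so the sketch is runnable.
idx = pd.date_range('2016-01-04', '2016-12-30', freq='B')
returns = pd.Series(np.random.normal(0.0, 0.01, len(idx)), index=idx)

# PERIODS maps a label to a (start, end) pair of timestamps; adding an
# entry registers a user-defined period alongside the built-in ones.
interesting_periods.PERIODS['Brexit vote'] = (
    pd.Timestamp('2016-06-23'), pd.Timestamp('2016-07-15'))

# Slice the returns series down to each registered period.
ranges = timeseries.extract_interesting_date_ranges(returns)
print(ranges['Brexit vote'].describe())
```

Note that `PERIODS` is module-level state, so entries added this way affect every subsequent interesting-times analysis in the same session.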
304 | 305 | ### Bug fixes 306 | 307 | * Fix drawdown overlaps [PR150](https://github.com/quantopian/pyfolio/pull/150). 308 | * Monthly returns distribution should not stack by year 309 | [PR162](https://github.com/quantopian/pyfolio/pull/162). 310 | * Fix gross leverage [PR147](https://github.com/quantopian/pyfolio/pull/147) 311 | -------------------------------------------------------------------------------- /pyfolio/_tests/test_data/returns.csv: -------------------------------------------------------------------------------- 1 | 2016-01-04,2.16420955433 2 | 2016-01-05,3.21963118331 3 | 2016-01-06,0.890280110274 4 | 2016-01-07,0.798731209228 5 | 2016-01-08,0.307379650145 6 | 2016-01-11,1.59831707812 7 | 2016-01-12,0.88271274164 8 | 2016-01-13,0.77753756012 9 | 2016-01-14,1.28892080939 10 | 2016-01-15,-0.541028037651 11 | 2016-01-18,-1.89937122039 12 | 2016-01-19,0.122271178453 13 | 2016-01-20,0.815388949389 14 | 2016-01-21,-0.141425332724 15 | 2016-01-22,3.00213798426 16 | 2016-01-25,0.533109945299 17 | 2016-01-26,-2.86858221585 18 | 2016-01-27,-0.191563180222 19 | 2016-01-28,2.43267052951 20 | 2016-01-29,-0.689629567983 21 | 2016-02-01,-2.46857090225 22 | 2016-02-02,0.244505204607 23 | 2016-02-03,-0.947726483363 24 | 2016-02-04,-0.475305004218 25 | 2016-02-05,-1.82663812777 26 | 2016-02-08,-0.508564063334 27 | 2016-02-09,-1.69143732169 28 | 2016-02-10,0.400149642192 29 | 2016-02-11,0.368989120123 30 | 2016-02-12,-0.997063259668 31 | 2016-02-15,-1.03201360932 32 | 2016-02-16,-2.53942888438 33 | 2016-02-17,-0.224354793955 34 | 2016-02-18,-1.16741609144 35 | 2016-02-19,-0.855352968587 36 | 2016-02-22,0.858073472935 37 | 2016-02-23,-0.0954251358104 38 | 2016-02-24,-0.282468449763 39 | 2016-02-25,-1.44964681395 40 | 2016-02-26,-0.255387189898 41 | 2016-02-29,-0.264323353829 42 | 2016-03-01,-1.07058124655 43 | 2016-03-02,3.38414136983 44 | 2016-03-03,0.998854735347 45 | 2016-03-04,-0.0163008945794 46 | 2016-03-07,0.819268123409 47 | 2016-03-08,1.18491401456 48 | 2016-03-09,1.06293956537 49 | 2016-03-10,1.79637051463 50 | 2016-03-11,0.528901456148 51 | 2016-03-14,0.535391635914 52 | 2016-03-15,-0.301088290328 53 | 2016-03-16,0.770497780535 54 | 2016-03-17,-1.1610737922 55 | 2016-03-18,3.40345681791 56 | 2016-03-21,2.7736036187 57 | 2016-03-22,1.04883926804 58 | 2016-03-23,0.534453024845 59 | 2016-03-24,0.792241874683 60 | 2016-03-25,1.53628604191 61 | 2016-03-28,-0.722975259429 62 | 2016-03-29,1.62462407089 63 | 2016-03-30,-0.844202400059 64 | 2016-03-31,1.41411017676 65 | 2016-04-01,1.07975659325 66 | 2016-04-04,-0.230666883153 67 | 2016-04-05,-0.642502102383 68 | 2016-04-06,0.0405872165676 69 | 2016-04-07,0.368292061037 70 | 2016-04-08,-0.697054796069 71 | 2016-04-11,-1.05186589144 72 | 2016-04-12,0.801704932265 73 | 2016-04-13,3.32762426185 74 | 2016-04-14,0.204194062652 75 | 2016-04-15,-1.77749201533 76 | 2016-04-18,1.64510111632 77 | 2016-04-19,-1.57119336071 78 | 2016-04-20,-0.761930810788 79 | 2016-04-21,0.0467044137431 80 | 2016-04-22,-1.58528869716 81 | 2016-04-25,1.43149960312 82 | 2016-04-26,1.03697204831 83 | 2016-04-27,-0.381072542429 84 | 2016-04-28,-2.54498644417 85 | 2016-04-29,1.50497240428 86 | 2016-05-02,1.23958647672 87 | 2016-05-03,0.205805018603 88 | 2016-05-04,-0.352648323503 89 | 2016-05-05,-1.49295944192 90 | 2016-05-06,-0.438053344492 91 | 2016-05-09,-1.72894520467 92 | 2016-05-10,-2.86702155506 93 | 2016-05-11,-0.97682620458 94 | 2016-05-12,-1.05221826017 95 | 2016-05-13,0.803451599015 96 | 2016-05-16,-1.02580604037 97 | 2016-05-17,-1.20737631597 98 | 
2016-05-18,0.35173032931 99 | 2016-05-19,1.59529470518 100 | 2016-05-20,3.49976389872 101 | 2016-05-23,-0.608561015518 102 | 2016-05-24,1.75492332661 103 | 2016-05-25,-0.976824518213 104 | 2016-05-26,-0.762357033605 105 | 2016-05-27,0.1817742094 106 | 2016-05-30,1.22739712328 107 | 2016-05-31,0.319908865373 108 | 2016-06-01,-1.35449594912 109 | 2016-06-02,0.362131321694 110 | 2016-06-03,2.21705179903 111 | 2016-06-06,-1.30192677619 112 | 2016-06-07,0.0178854991274 113 | 2016-06-08,-1.47753502024 114 | 2016-06-09,0.388687574166 115 | 2016-06-10,-0.835237798701 116 | 2016-06-13,-1.91738079234 117 | 2016-06-14,-0.126811429755 118 | 2016-06-15,-0.374984330112 119 | 2016-06-16,-0.575500480522 120 | 2016-06-17,1.10316676581 121 | 2016-06-20,-1.03470883988 122 | 2016-06-21,-0.430671456989 123 | 2016-06-22,-1.98501677538 124 | 2016-06-23,2.23195015682 125 | 2016-06-24,-2.27978858701 126 | 2016-06-27,-0.0547230933603 127 | 2016-06-28,-0.177375253824 128 | 2016-06-29,1.38628789473 129 | 2016-06-30,-2.10896133386 130 | 2016-07-01,-0.972559018228 131 | 2016-07-04,-1.69567561208 132 | 2016-07-05,-0.64888133472 133 | 2016-07-06,-1.74750120905 134 | 2016-07-07,0.612313110879 135 | 2016-07-08,-0.21348600543 136 | 2016-07-11,-2.37354641079 137 | 2016-07-12,2.34600563094 138 | 2016-07-13,-1.04336195757 139 | 2016-07-14,0.377637838315 140 | 2016-07-15,0.0338083935778 141 | 2016-07-18,0.909632054483 142 | 2016-07-19,0.844327206461 143 | 2016-07-20,0.895187523368 144 | 2016-07-21,0.165891923536 145 | 2016-07-22,1.9916643941 146 | 2016-07-25,-1.1091146781 147 | 2016-07-26,1.24390087496 148 | 2016-07-27,1.00094166192 149 | 2016-07-28,0.680678647468 150 | 2016-07-29,-0.0293931414154 151 | 2016-08-01,0.351603827883 152 | 2016-08-02,-0.798342249125 153 | 2016-08-03,0.205663294643 154 | 2016-08-04,-2.6809759772 155 | 2016-08-05,0.534199714544 156 | 2016-08-08,0.944042246308 157 | 2016-08-09,-1.85750356162 158 | 2016-08-10,-0.290528219864 159 | 2016-08-11,-0.32905864368 160 | 2016-08-12,-0.168931678387 161 | 2016-08-15,-1.53259737711 162 | 2016-08-16,-0.616398725272 163 | 2016-08-17,-1.46964751032 164 | 2016-08-18,2.09905648113 165 | 2016-08-19,0.238560449113 166 | 2016-08-22,-0.441756620999 167 | 2016-08-23,-0.410627662791 168 | 2016-08-24,-2.05285271364 169 | 2016-08-25,-1.30495612163 170 | 2016-08-26,0.975539898453 171 | 2016-08-29,0.615123595465 172 | 2016-08-30,-1.90191501412 173 | 2016-08-31,-0.721278127477 174 | 2016-09-01,-0.207989689119 175 | 2016-09-02,0.928175954722 176 | 2016-09-05,-2.20193539771 177 | 2016-09-06,0.675082663553 178 | 2016-09-07,-1.17348291224 179 | 2016-09-08,-2.3210435542 180 | 2016-09-09,0.140702484336 181 | 2016-09-12,0.702228038194 182 | 2016-09-13,1.27181335792 183 | 2016-09-14,0.145246056696 184 | 2016-09-15,-0.585503007615 185 | 2016-09-16,-1.39574486836 186 | 2016-09-19,-0.712681905613 187 | 2016-09-20,0.592172683913 188 | 2016-09-21,0.543331757931 189 | 2016-09-22,-0.927308943571 190 | 2016-09-23,0.673275235917 191 | 2016-09-26,-1.31082534404 192 | 2016-09-27,-3.27807107304 193 | 2016-09-28,-1.61808455048 194 | 2016-09-29,-2.45734574515 195 | 2016-09-30,1.81236268769 196 | 2016-10-03,0.344615177338 197 | 2016-10-04,-1.96990593741 198 | 2016-10-05,-1.05332957456 199 | 2016-10-06,1.99902579095 200 | 2016-10-07,2.31913065504 201 | 2016-10-10,-1.71455092288 202 | 2016-10-11,1.12295599912 203 | 2016-10-12,-1.41305665793 204 | 2016-10-13,0.873445411669 205 | 2016-10-14,-0.992702158626 206 | 2016-10-17,-0.646236750223 207 | 2016-10-18,-0.542581106315 208 | 2016-10-19,2.41722229378 209 | 
2016-10-20,0.512886806468 210 | 2016-10-21,3.23958416818 211 | 2016-10-24,1.51172970288 212 | 2016-10-25,-1.97088115697 213 | 2016-10-26,-0.0361537248081 214 | 2016-10-27,-1.79663107987 215 | 2016-10-28,-0.299407698529 216 | 2016-10-31,-1.88375165918 217 | 2016-11-01,1.14583539274 218 | 2016-11-02,-0.656287365929 219 | 2016-11-03,0.826878358349 220 | 2016-11-04,0.878824978593 221 | 2016-11-07,-1.55464949905 222 | 2016-11-08,0.108362171074 223 | 2016-11-09,0.7607252931 224 | 2016-11-10,-0.507196407513 225 | 2016-11-11,-0.893018454854 226 | 2016-11-14,-0.23438062666 227 | 2016-11-15,0.742226093711 228 | 2016-11-16,2.3599476867 229 | 2016-11-17,-2.67030547347 230 | 2016-11-18,0.148696655935 231 | 2016-11-21,-1.49634890187 232 | 2016-11-22,-0.257851092584 233 | 2016-11-23,1.9096369789 234 | 2016-11-24,-1.75362174434 235 | 2016-11-25,-2.03713562499 236 | 2016-11-28,-2.55586126117 237 | 2016-11-29,-0.985398500407 238 | 2016-11-30,2.73326706877 239 | 2016-12-01,0.436718057752 240 | 2016-12-02,1.62459501086 241 | 2016-12-05,1.80084477746 242 | 2016-12-06,-1.33308086694 243 | 2016-12-07,-1.79302308165 244 | 2016-12-08,2.06646014678 245 | 2016-12-09,0.174803695097 246 | 2016-12-12,-1.3798786479 247 | 2016-12-13,2.39830631055 248 | 2016-12-14,2.62229938628 249 | 2016-12-15,-1.17278693274 250 | 2016-12-16,-1.09589663123 251 | 2016-12-19,0.34849014948 252 | 2016-12-20,0.862131044321 253 | 2016-12-21,-0.928719129359 254 | 2016-12-22,-3.20040225054 255 | 2016-12-23,0.122270141027 256 | 2016-12-26,2.27022433928 257 | 2016-12-27,-3.30083634438 258 | 2016-12-28,-0.484237366838 259 | 2016-12-29,1.54666243088 260 | 2016-12-30,2.02694845146 261 | 2017-01-02,-1.13568489899 262 | 2017-01-03,-2.57018957359 263 | 2017-01-04,-0.646602296369 264 | 2017-01-05,2.34907016957 265 | 2017-01-06,-1.50553460473 266 | 2017-01-09,-1.83810500357 267 | 2017-01-10,1.28972667054 268 | 2017-01-11,-1.86512037748 269 | 2017-01-12,-0.443890229501 270 | 2017-01-13,-0.312779620076 271 | 2017-01-16,-0.995093604823 272 | 2017-01-17,1.27624134049 273 | 2017-01-18,-0.828481516298 274 | 2017-01-19,-1.48098736263 275 | 2017-01-20,0.549474843283 276 | 2017-01-23,0.260249928374 277 | 2017-01-24,0.674873372985 278 | 2017-01-25,0.619820009087 279 | 2017-01-26,-2.34383963544 280 | 2017-01-27,-2.10949881089 281 | 2017-01-30,1.96666125501 282 | 2017-01-31,-1.58649315855 283 | 2017-02-01,-0.532487258066 284 | 2017-02-02,0.971644247506 285 | 2017-02-03,0.535632107372 286 | 2017-02-06,-1.37595849837 287 | 2017-02-07,0.804908129643 288 | 2017-02-08,0.226021010764 289 | 2017-02-09,-1.92393843186 290 | 2017-02-10,1.00202586802 291 | 2017-02-13,-2.61169583121 292 | 2017-02-14,-0.354844934186 293 | 2017-02-15,-1.02494728473 294 | 2017-02-16,0.228443680958 295 | 2017-02-17,-3.43853205295 296 | 2017-02-20,0.98235484906 297 | 2017-02-21,-1.303577649 298 | 2017-02-22,0.731015644217 299 | 2017-02-23,-0.686764353276 300 | 2017-02-24,-1.10874559461 301 | 2017-02-27,-1.13311052405 302 | 2017-02-28,-0.706265342992 303 | 2017-03-01,-1.99602056214 304 | 2017-03-02,-1.77118921694 305 | 2017-03-03,-0.26399968974 306 | 2017-03-06,-3.04559895192 307 | 2017-03-07,1.50067606963 308 | 2017-03-08,0.272853172261 309 | 2017-03-09,0.553466545441 310 | 2017-03-10,-0.221014391134 311 | 2017-03-13,0.294451776784 312 | 2017-03-14,-0.526508664707 313 | 2017-03-15,-1.60134330844 314 | 2017-03-16,1.85428223205 315 | 2017-03-17,-0.0575180631839 316 | 2017-03-20,-0.804773583575 317 | 2017-03-21,0.0959239853297 318 | 2017-03-22,-0.0505395008888 319 | 2017-03-23,-0.665508142742 320 | 
2017-03-24,2.18027033894 321 | 2017-03-27,1.27721523253 322 | 2017-03-28,0.0381972461105 323 | 2017-03-29,-1.52290214945 324 | 2017-03-30,0.956648485035 325 | 2017-03-31,0.951585622391 326 | 2017-04-03,-2.03368978779 327 | 2017-04-04,0.837201240864 328 | 2017-04-05,0.675320754703 329 | 2017-04-06,-1.38567147857 330 | 2017-04-07,-1.31631979878 331 | 2017-04-10,-2.1958092599 332 | 2017-04-11,0.550385238052 333 | 2017-04-12,-1.09750329041 334 | 2017-04-13,1.05577162309 335 | 2017-04-14,-1.62733919465 336 | 2017-04-17,-2.430297819 337 | 2017-04-18,-2.8584865773 338 | 2017-04-19,0.612572489773 339 | 2017-04-20,0.0780394187355 340 | 2017-04-21,1.81907008147 341 | 2017-04-24,0.533016516702 342 | 2017-04-25,1.62280310702 343 | 2017-04-26,-3.49101818025 344 | 2017-04-27,0.505912618034 345 | 2017-04-28,2.34497727936 346 | 2017-05-01,1.27982322983 347 | 2017-05-02,-3.28006352412 348 | 2017-05-03,0.558046942455 349 | 2017-05-04,-1.14088576872 350 | 2017-05-05,1.27990250842 351 | 2017-05-08,-2.6554831932 352 | 2017-05-09,0.305969120203 353 | 2017-05-10,2.36697493652 354 | 2017-05-11,0.901350548961 355 | 2017-05-12,1.47657485082 356 | 2017-05-15,-0.0249465082623 357 | 2017-05-16,-0.986723754665 358 | 2017-05-17,1.22650120974 359 | 2017-05-18,-1.26747907878 360 | 2017-05-19,0.469249912172 361 | 2017-05-22,-0.897163586484 362 | 2017-05-23,-0.201564266035 363 | 2017-05-24,-2.48901699082 364 | 2017-05-25,0.310530342949 365 | 2017-05-26,1.39993342151 366 | 2017-05-29,-1.32114985926 367 | 2017-05-30,-1.55939770421 368 | 2017-05-31,0.251878743216 369 | 2017-06-01,-0.720543762919 370 | 2017-06-02,-1.09234543399 371 | 2017-06-05,-2.31782526342 372 | 2017-06-06,1.62199773143 373 | 2017-06-07,-0.209915230395 374 | 2017-06-08,0.730383073908 375 | 2017-06-09,-1.52065275148 376 | 2017-06-12,-0.888903454012 377 | 2017-06-13,2.14437685725 378 | 2017-06-14,0.80654823367 379 | 2017-06-15,-0.0369352471997 380 | 2017-06-16,-1.52722797628 381 | 2017-06-19,-0.185615062136 382 | 2017-06-20,0.747712618986 383 | 2017-06-21,-0.382922482812 384 | 2017-06-22,-0.0824178900418 385 | 2017-06-23,1.63542459048 386 | 2017-06-26,-0.477665414151 387 | 2017-06-27,-0.726359595805 388 | 2017-06-28,-2.15638276459 389 | 2017-06-29,-0.376129645064 390 | 2017-06-30,-1.69955745668 391 | 2017-07-03,2.01065971035 392 | 2017-07-04,-0.729569532852 393 | 2017-07-05,0.625347950302 394 | 2017-07-06,0.951673860043 395 | 2017-07-07,-1.40118153706 396 | 2017-07-10,-0.80795495471 397 | 2017-07-11,0.415069440239 398 | 2017-07-12,-1.75791454491 399 | 2017-07-13,-1.00251266286 400 | 2017-07-14,-1.25462789997 401 | 2017-07-17,2.19697589072 402 | 2017-07-18,-0.448686570639 403 | 2017-07-19,1.3461216949 404 | 2017-07-20,0.471860167339 405 | 2017-07-21,-1.80069601033 406 | 2017-07-24,0.112565354251 407 | 2017-07-25,0.353891388233 408 | 2017-07-26,2.20426423196 409 | 2017-07-27,1.0142090195 410 | 2017-07-28,-0.829626091563 411 | 2017-07-31,0.000353288028221 412 | 2017-08-01,-1.42886114567 413 | 2017-08-02,-0.340757690955 414 | 2017-08-03,2.55597944625 415 | 2017-08-04,0.861145764153 416 | 2017-08-07,1.32198759659 417 | 2017-08-08,-0.0390397541084 418 | 2017-08-09,0.918851571578 419 | 2017-08-10,-1.17398999163 420 | 2017-08-11,0.781880216401 421 | 2017-08-14,-0.130218406447 422 | 2017-08-15,3.10640403635 423 | 2017-08-16,0.213238792126 424 | 2017-08-17,0.216607652142 425 | 2017-08-18,-0.716881597089 426 | 2017-08-21,-3.73674699662 427 | 2017-08-22,-1.70135071407 428 | 2017-08-23,-1.46939143935 429 | 2017-08-24,-2.04903708979 430 | 2017-08-25,-0.509864956148 431 | 
2017-08-28,1.32668844699 432 | 2017-08-29,0.120516478373 433 | 2017-08-30,-0.789345873489 434 | 2017-08-31,0.193975917066 435 | 2017-09-01,-0.505107059727 436 | 2017-09-04,0.450000046009 437 | 2017-09-05,-1.11952813426 438 | 2017-09-06,-0.361841803858 439 | 2017-09-07,-1.08139691805 440 | 2017-09-08,-1.74327499448 441 | 2017-09-11,0.361855218159 442 | 2017-09-12,-0.152628361654 443 | 2017-09-13,-1.64989464856 444 | 2017-09-14,0.410757950451 445 | 2017-09-15,-0.530326700757 446 | 2017-09-18,-0.17493428176 447 | 2017-09-19,0.755092093784 448 | 2017-09-20,0.57603620811 449 | 2017-09-21,-2.39813670791 450 | 2017-09-22,2.19039229392 451 | 2017-09-25,-2.14517245505 452 | 2017-09-26,0.557856453616 453 | 2017-09-27,0.970994402874 454 | 2017-09-28,-1.7062662684 455 | 2017-09-29,2.289756245 456 | 2017-10-02,-2.21884039066 457 | 2017-10-03,-1.01688534564 458 | 2017-10-04,-0.259175509346 459 | 2017-10-05,-0.319289896615 460 | 2017-10-06,0.200042182949 461 | 2017-10-09,-0.0226113761569 462 | 2017-10-10,1.53034661666 463 | 2017-10-11,2.38475882145 464 | 2017-10-12,-0.53600982685 465 | 2017-10-13,1.83580320538 466 | 2017-10-16,1.33419812274 467 | 2017-10-17,-1.0697522211 468 | 2017-10-18,-1.1522665034 469 | 2017-10-19,0.674744963968 470 | 2017-10-20,-1.32389256982 471 | 2017-10-23,1.66367405489 472 | 2017-10-24,3.24047024041 473 | 2017-10-25,0.184048461979 474 | 2017-10-26,1.71065006077 475 | 2017-10-27,0.391009250722 476 | 2017-10-30,-0.703045138945 477 | 2017-10-31,0.990963037634 478 | 2017-11-01,0.775091407101 479 | 2017-11-02,0.0587659177434 480 | 2017-11-03,1.0674859235 481 | 2017-11-06,0.57254145092 482 | 2017-11-07,1.12671933158 483 | 2017-11-08,-0.570907316663 484 | 2017-11-09,1.58149159817 485 | 2017-11-10,1.48710113275 486 | 2017-11-13,0.310956546026 487 | 2017-11-14,1.61472697925 488 | 2017-11-15,1.70729437889 489 | 2017-11-16,-1.27034812155 490 | 2017-11-17,-0.525604960667 491 | 2017-11-20,0.214937582637 492 | 2017-11-21,0.702985855346 493 | 2017-11-22,-0.504772278 494 | 2017-11-23,0.318426777681 495 | 2017-11-24,1.0821632933 496 | 2017-11-27,0.619825773006 497 | 2017-11-28,-0.558634889801 498 | 2017-11-29,0.701991325725 499 | 2017-11-30,-0.10420659651 500 | 2017-12-01,-1.50572502032 501 | 2017-12-04,1.44843656704 502 | 2017-12-05,-0.317600794692 503 | 2017-12-06,0.429533271829 504 | 2017-12-07,-1.27730404508 505 | -------------------------------------------------------------------------------- /pyfolio/utils.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2018 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from __future__ import division 17 | 18 | import warnings 19 | 20 | from itertools import cycle 21 | from matplotlib.pyplot import cm 22 | import numpy as np 23 | import pandas as pd 24 | from IPython.display import display, HTML 25 | 26 | import empyrical.utils 27 | 28 | from . import pos 29 | from . 
import txn 30 | 31 | APPROX_BDAYS_PER_MONTH = 21 32 | APPROX_BDAYS_PER_YEAR = 252 33 | 34 | MONTHS_PER_YEAR = 12 35 | WEEKS_PER_YEAR = 52 36 | 37 | MM_DISPLAY_UNIT = 1000000. 38 | 39 | DAILY = 'daily' 40 | WEEKLY = 'weekly' 41 | MONTHLY = 'monthly' 42 | YEARLY = 'yearly' 43 | 44 | ANNUALIZATION_FACTORS = { 45 | DAILY: APPROX_BDAYS_PER_YEAR, 46 | WEEKLY: WEEKS_PER_YEAR, 47 | MONTHLY: MONTHS_PER_YEAR 48 | } 49 | 50 | COLORMAP = 'Paired' 51 | COLORS = ['#e6194b', '#3cb44b', '#ffe119', '#0082c8', '#f58231', 52 | '#911eb4', '#46f0f0', '#f032e6', '#d2f53c', '#fabebe', 53 | '#008080', '#e6beff', '#aa6e28', '#800000', '#aaffc3', 54 | '#808000', '#ffd8b1', '#000080', '#808080'] 55 | 56 | 57 | def pnl_format_fn(value, pos): 58 | """ 59 | Formats numbers as follows: 60 | 61 | 1,200,000 -> 1.2M 62 | 1,200 -> 1.2K 63 | 500 -> 500 64 | """ 65 | if abs(value) >= 1_000_000: 66 | return f'{value/1_000_000:.1f}M' 67 | elif abs(value) >= 1_000: 68 | return f'{value/1_000:.1f}K' 69 | else: 70 | return f'{value:.0f}' 71 | 72 | def one_dec_places(x, pos): 73 | """ 74 | Adds 1/10th decimal to plot ticks. 75 | """ 76 | 77 | return '%.1f' % x 78 | 79 | 80 | def two_dec_places(x, pos): 81 | """ 82 | Adds 1/100th decimal to plot ticks. 83 | """ 84 | 85 | return '%.2f' % x 86 | 87 | 88 | def percentage(x, pos): 89 | """ 90 | Adds percentage sign to plot ticks. 91 | """ 92 | 93 | return '%.0f%%' % x 94 | 95 | 96 | def format_asset(asset): 97 | """ 98 | If zipline asset objects are used, we want to print them out prettily 99 | within the tear sheet. This function should only be applied directly 100 | before displaying. 101 | """ 102 | 103 | try: 104 | import zipline.assets 105 | except ImportError: 106 | return asset 107 | 108 | if isinstance(asset, zipline.assets.Asset): 109 | return asset.symbol 110 | else: 111 | return asset 112 | 113 | 114 | def vectorize(func): 115 | """ 116 | Decorator so that functions can be written to work on Series but 117 | may still be called with DataFrames. 118 | """ 119 | 120 | def wrapper(df, *args, **kwargs): 121 | if df.ndim == 1: 122 | return func(df, *args, **kwargs) 123 | elif df.ndim == 2: 124 | return df.apply(func, *args, **kwargs) 125 | 126 | return wrapper 127 | 128 | 129 | def extract_rets_pos_txn_from_zipline(backtest): 130 | """ 131 | Extract returns, positions, transactions and leverage from the 132 | backtest data structure returned by zipline.TradingAlgorithm.run(). 133 | 134 | The returned data structures are in a format compatible with the 135 | rest of pyfolio and can be directly passed to 136 | e.g. tears.create_full_tear_sheet(). 137 | 138 | Parameters 139 | ---------- 140 | backtest : pd.DataFrame 141 | DataFrame returned by zipline.TradingAlgorithm.run() 142 | 143 | Returns 144 | ------- 145 | returns : pd.Series 146 | Daily returns of strategy. 147 | - See full explanation in tears.create_full_tear_sheet. 148 | positions : pd.DataFrame 149 | Daily net position values. 150 | - See full explanation in tears.create_full_tear_sheet. 151 | transactions : pd.DataFrame 152 | Prices and amounts of executed trades. One row per trade. 153 | - See full explanation in tears.create_full_tear_sheet. 
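
    Examples
    --------
    A minimal sketch of the intended workflow; the zipline algorithm
    object `algo` and data bundle `data` are assumed here, not shown:

    >>> import pyfolio as pf  # doctest: +SKIP
    >>> backtest = algo.run(data)  # doctest: +SKIP
    >>> returns, positions, transactions = (
    ...     extract_rets_pos_txn_from_zipline(backtest))  # doctest: +SKIP
    >>> pf.create_full_tear_sheet(returns, positions=positions,
    ...                           transactions=transactions)  # doctest: +SKIP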
154 | """ 155 | 156 | backtest.index = backtest.index.normalize() 157 | if backtest.index.tzinfo is None: 158 | backtest.index = backtest.index.tz_localize('UTC') 159 | returns = backtest.returns 160 | raw_positions = [] 161 | for dt, pos_row in backtest.positions.items(): 162 | df = pd.DataFrame(pos_row) 163 | df.index = [dt] * len(df) 164 | raw_positions.append(df) 165 | if not raw_positions: 166 | raise ValueError("The backtest does not have any positions.") 167 | positions = pd.concat(raw_positions) 168 | positions = pos.extract_pos(positions, backtest.ending_cash) 169 | # Reindex positions to fill in any missing days, so that days with 170 | # no positions are included in the positions DataFrame 171 | positions = positions.reindex(backtest.index).fillna(0) 172 | transactions = txn.make_transaction_frame(backtest.transactions) 173 | if transactions.index.tzinfo is None: 174 | transactions.index = transactions.index.tz_localize('utc') 175 | 176 | return returns, positions, transactions 177 | 178 | 179 | def print_table(table, 180 | name=None, 181 | float_format=None, 182 | formatters=None, 183 | header_rows=None): 184 | """ 185 | Pretty print a pandas DataFrame. 186 | 187 | Uses HTML output if running inside Jupyter Notebook, otherwise 188 | formatted text output. 189 | 190 | Parameters 191 | ---------- 192 | table : pandas.Series or pandas.DataFrame 193 | Table to pretty-print. 194 | name : str, optional 195 | Table name to display in upper left corner. 196 | float_format : function, optional 197 | Formatter to use for displaying table elements, passed as the 198 | `float_format` arg to pd.Dataframe.to_html. 199 | E.g. `'{0:.2%}'.format` for displaying 100 as '100.00%'. 200 | formatters : list or dict, optional 201 | Formatters to use by column, passed as the `formatters` arg to 202 | pd.Dataframe.to_html. 203 | header_rows : dict, optional 204 | Extra rows to display at the top of the table. 205 | """ 206 | 207 | if isinstance(table, pd.Series): 208 | table = pd.DataFrame(table) 209 | 210 | if name is not None: 211 | table.columns.name = name 212 | 213 | html = table.to_html(float_format=float_format, formatters=formatters) 214 | 215 | if header_rows is not None: 216 | # Count the number of columns for the text to span 217 | n_cols = html.split('')[1].split('')[0].count('') 218 | 219 | # Generate the HTML for the extra rows 220 | rows = '' 221 | for name, value in header_rows.items(): 222 | rows += ('\n %s' + 223 | '%s') % (name, n_cols, value) 224 | 225 | # Inject the new HTML 226 | html = html.replace('', '' + rows) 227 | 228 | display(HTML(html)) 229 | 230 | 231 | def standardize_data(x): 232 | """ 233 | Standardize an array with mean and standard deviation. 234 | 235 | Parameters 236 | ---------- 237 | x : np.array 238 | Array to standardize. 239 | 240 | Returns 241 | ------- 242 | np.array 243 | Standardized array. 244 | """ 245 | 246 | return (x - np.mean(x)) / np.std(x) 247 | 248 | 249 | def detect_intraday(positions, transactions, threshold=0.25): 250 | """ 251 | Attempt to detect an intraday strategy. Get the number of 252 | positions held at the end of the day, and divide that by the 253 | number of unique stocks transacted every day. If the average quotient 254 | is below a threshold, then an intraday strategy is detected. 255 | 256 | Parameters 257 | ---------- 258 | positions : pd.DataFrame 259 | Daily net position values. 260 | - See full explanation in create_full_tear_sheet. 261 | transactions : pd.DataFrame 262 | Prices and amounts of executed trades. 
One row per trade. 263 | - See full explanation in create_full_tear_sheet. 264 | 265 | Returns 266 | ------- 267 | boolean 268 | True if an intraday strategy is detected. 269 | """ 270 | 271 | daily_txn = transactions.copy() 272 | daily_txn.index = daily_txn.index.date 273 | txn_count = daily_txn.groupby(level=0).symbol.nunique().sum() 274 | daily_pos = positions.drop('cash', axis=1).replace(0, np.nan) 275 | return daily_pos.count(axis=1).sum() / txn_count < threshold 276 | 277 | 278 | def check_intraday(estimate, returns, positions, transactions): 279 | """ 280 | Logic for checking if a strategy is intraday and processing it. 281 | 282 | Parameters 283 | ---------- 284 | estimate: boolean or str, optional 285 | Approximate returns for intraday strategies. 286 | See description in tears.create_full_tear_sheet. 287 | returns : pd.Series 288 | Daily returns of the strategy, noncumulative. 289 | - See full explanation in create_full_tear_sheet. 290 | positions : pd.DataFrame 291 | Daily net position values. 292 | - See full explanation in create_full_tear_sheet. 293 | transactions : pd.DataFrame 294 | Prices and amounts of executed trades. One row per trade. 295 | - See full explanation in create_full_tear_sheet. 296 | 297 | Returns 298 | ------- 299 | pd.DataFrame 300 | Daily net position values, adjusted for intraday movement. 301 | """ 302 | 303 | if estimate == 'infer': 304 | if positions is not None and transactions is not None: 305 | if detect_intraday(positions, transactions): 306 | warnings.warn('Detected intraday strategy; inferring positi' + 307 | 'ons from transactions. Set estimate_intraday' + 308 | '=False to disable.') 309 | return estimate_intraday(returns, positions, transactions) 310 | else: 311 | return positions 312 | else: 313 | return positions 314 | 315 | elif estimate: 316 | if positions is not None and transactions is not None: 317 | return estimate_intraday(returns, positions, transactions) 318 | else: 319 | raise ValueError('Positions and txns needed to estimate intraday') 320 | else: 321 | return positions 322 | 323 | 324 | def estimate_intraday(returns, positions, transactions, EOD_hour=23): 325 | """ 326 | Intraday strategies will often not hold positions at the day end. 327 | This attempts to find the point in the day that best represents 328 | the activity of the strategy on that day, and effectively resamples 329 | the end-of-day positions with the positions at this point of day. 330 | The point of day is found by detecting when our exposure in the 331 | market is at its maximum point. Note that this is an estimate. 332 | 333 | Parameters 334 | ---------- 335 | returns : pd.Series 336 | Daily returns of the strategy, noncumulative. 337 | - See full explanation in create_full_tear_sheet. 338 | positions : pd.DataFrame 339 | Daily net position values. 340 | - See full explanation in create_full_tear_sheet. 341 | transactions : pd.DataFrame 342 | Prices and amounts of executed trades. One row per trade. 343 | - See full explanation in create_full_tear_sheet. 344 | 345 | Returns 346 | ------- 347 | pd.DataFrame 348 | Daily net position values, resampled for intraday behavior. 
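
    Examples
    --------
    A hedged sketch; `returns`, `positions`, and `transactions` are
    assumed to be the pyfolio-style inputs described above:

    >>> positions = estimate_intraday(returns, positions,
    ...                               transactions)  # doctest: +SKIP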
349 | """ 350 | 351 | # Construct DataFrame of transaction amounts 352 | txn_val = transactions.copy() 353 | txn_val.index.names = ['date'] 354 | txn_val['value'] = txn_val.amount * txn_val.price 355 | txn_val = txn_val.reset_index().pivot_table( 356 | index='date', values='value', 357 | columns='symbol').replace(np.nan, 0) 358 | 359 | # Cumulate transaction amounts each day 360 | txn_val = txn_val.groupby(txn_val.index.date).cumsum() 361 | 362 | # Calculate exposure, then take peak of exposure every day 363 | txn_val['exposure'] = txn_val.abs().sum(axis=1) 364 | condition = (txn_val['exposure'] == txn_val.groupby( 365 | pd.Grouper(freq='24H'))['exposure'].transform('max')) 366 | txn_val = txn_val[condition].drop('exposure', axis=1) 367 | 368 | # Compute cash delta 369 | txn_val['cash'] = -txn_val.sum(axis=1) 370 | 371 | # Shift EOD positions to positions at start of next trading day 372 | positions_shifted = positions.copy().shift(1).fillna(0) 373 | starting_capital = positions.iloc[0].sum() / (1 + returns.iloc[0]) 374 | positions_shifted.cash.iloc[0] = starting_capital 375 | 376 | # Format and add start positions to intraday position changes 377 | txn_val.index = txn_val.index.normalize() 378 | corrected_positions = positions_shifted.add(txn_val, fill_value=0) 379 | corrected_positions.index.name = 'period_close' 380 | corrected_positions.columns.name = 'sid' 381 | 382 | return corrected_positions 383 | 384 | 385 | def clip_returns_to_benchmark(rets, benchmark_rets): 386 | """ 387 | Drop entries from rets so that the start and end dates of rets match those 388 | of benchmark_rets. 389 | 390 | Parameters 391 | ---------- 392 | rets : pd.Series 393 | Daily returns of the strategy, noncumulative. 394 | - See pf.tears.create_full_tear_sheet for more details 395 | 396 | benchmark_rets : pd.Series 397 | Daily returns of the benchmark, noncumulative. 398 | 399 | Returns 400 | ------- 401 | clipped_rets : pd.Series 402 | Daily noncumulative returns with index clipped to match that of 403 | benchmark returns. 404 | """ 405 | 406 | if (rets.index[0] < benchmark_rets.index[0]) \ 407 | or (rets.index[-1] > benchmark_rets.index[-1]): 408 | clipped_rets = rets[benchmark_rets.index] 409 | else: 410 | clipped_rets = rets 411 | 412 | return clipped_rets 413 | 414 | 415 | def to_utc(df): 416 | """ 417 | For use in tests; applied UTC timestamp to DataFrame. 418 | """ 419 | 420 | try: 421 | df.index = df.index.tz_localize('UTC') 422 | except TypeError: 423 | df.index = df.index.tz_convert('UTC') 424 | 425 | return df 426 | 427 | 428 | def to_series(df): 429 | """ 430 | For use in tests; converts DataFrame's first column to Series. 
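
    Examples
    --------
    A quick doctest-style illustration, using default pandas int inference:

    >>> df = pd.DataFrame({'a': [1, 2]})
    >>> to_series(df)
    0    1
    1    2
    Name: a, dtype: int64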
431 | """ 432 | 433 | return df[df.columns[0]] 434 | 435 | 436 | def configure_legend(ax, autofmt_xdate=True, change_colors=False, 437 | rotation=30, ha='right'): 438 | """ 439 | Format legend for perf attribution plots: 440 | - put legend to the right of plot instead of overlapping with it 441 | - make legend order match up with graph lines 442 | - set colors according to colormap 443 | """ 444 | chartBox = ax.get_position() 445 | ax.set_position([chartBox.x0, chartBox.y0, 446 | chartBox.width * 0.75, chartBox.height]) 447 | 448 | # make legend order match graph lines 449 | handles, labels = ax.get_legend_handles_labels() 450 | handles_and_labels_sorted = sorted(zip(handles, labels), 451 | key=lambda x: x[0].get_ydata()[-1], 452 | reverse=True) 453 | 454 | handles_sorted = [h[0] for h in handles_and_labels_sorted] 455 | labels_sorted = [h[1] for h in handles_and_labels_sorted] 456 | 457 | if change_colors: 458 | for handle, color in zip(handles_sorted, 459 | cycle(COLORS)): 460 | 461 | handle.set_color(color) 462 | 463 | ax.legend(handles=handles_sorted, 464 | labels=labels_sorted, 465 | frameon=True, 466 | framealpha=0.5, 467 | loc='upper left', 468 | bbox_to_anchor=(1.05, 1), 469 | fontsize='small') 470 | 471 | # manually rotate xticklabels instead of using matplotlib's autofmt_xdate 472 | # because it disables xticklabels for all but the last plot 473 | if autofmt_xdate: 474 | for label in ax.get_xticklabels(): 475 | label.set_ha(ha) 476 | label.set_rotation(rotation) 477 | 478 | 479 | def sample_colormap(cmap_name, n_samples): 480 | """ 481 | Sample a colormap from matplotlib 482 | """ 483 | colors = [] 484 | colormap = cm.cmap_d[cmap_name] 485 | for i in np.linspace(0, 1, n_samples): 486 | colors.append(colormap(i)) 487 | 488 | return colors 489 | -------------------------------------------------------------------------------- /pyfolio/round_trips.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2016 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | from __future__ import division 16 | from math import copysign 17 | import warnings 18 | from collections import deque, OrderedDict 19 | 20 | import pandas as pd 21 | import numpy as np 22 | 23 | from .utils import print_table, format_asset 24 | 25 | PNL_STATS = OrderedDict( 26 | [('Total profit', lambda x: x.sum()), 27 | ('Gross profit', lambda x: x[x > 0].sum()), 28 | ('Gross loss', lambda x: x[x < 0].sum()), 29 | ('Profit factor', lambda x: x[x > 0].sum() / x[x < 0].abs().sum() 30 | if x[x < 0].abs().sum() != 0 else np.nan), 31 | ('Avg. trade net profit', 'mean'), 32 | ('Avg. winning trade', lambda x: x[x > 0].mean()), 33 | ('Avg. losing trade', lambda x: x[x < 0].mean()), 34 | ('Ratio Avg. Win:Avg. 
Loss', lambda x: x[x > 0].mean() /
35 |       x[x < 0].abs().mean() if x[x < 0].abs().mean() != 0 else np.nan),
36 |      ('Largest winning trade', 'max'),
37 |      ('Largest losing trade', 'min'),
38 |      ])
39 | 
40 | SUMMARY_STATS = OrderedDict(
41 |     [('Total number of round_trips', 'count'),
42 |      ('Percent profitable', lambda x: len(x[x > 0]) / float(len(x))),
43 |      ('Winning round_trips', lambda x: len(x[x > 0])),
44 |      ('Losing round_trips', lambda x: len(x[x < 0])),
45 |      ('Even round_trips', lambda x: len(x[x == 0])),
46 |      ])
47 | 
48 | RETURN_STATS = OrderedDict(
49 |     [('Avg returns all round_trips', lambda x: x.mean()),
50 |      ('Avg returns winning', lambda x: x[x > 0].mean()),
51 |      ('Avg returns losing', lambda x: x[x < 0].mean()),
52 |      ('Median returns all round_trips', lambda x: x.median()),
53 |      ('Median returns winning', lambda x: x[x > 0].median()),
54 |      ('Median returns losing', lambda x: x[x < 0].median()),
55 |      ('Largest winning trade', 'max'),
56 |      ('Largest losing trade', 'min'),
57 |      ])
58 | 
59 | DURATION_STATS = OrderedDict(
60 |     [('Avg duration', lambda x: x.mean()),
61 |      ('Median duration', lambda x: x.median()),
62 |      ('Longest duration', lambda x: x.max()),
63 |      ('Shortest duration', lambda x: x.min())
64 |      # FIXME: Instead of x.max() - x.min() this should be
65 |      # rts.close_dt.max() - rts.open_dt.min() which is not
66 |      # available here. As it would require a new approach here
67 |      # that passes in multiple fields we disable these measures
68 |      # for now.
69 |      # ('Avg # round_trips per day', lambda x: float(len(x)) /
70 |      #  (x.max() - x.min()).days),
71 |      # ('Avg # round_trips per month', lambda x: float(len(x)) /
72 |      #  (((x.max() - x.min()).days) / APPROX_BDAYS_PER_MONTH)),
73 |      ])
74 | 
75 | 
76 | def agg_all_long_short(round_trips, col, stats_dict):
77 |     stats_all = (round_trips
78 |                  .assign(ones=1)
79 |                  .groupby('ones')[col]
80 |                  .agg(**stats_dict)
81 |                  .T
82 |                  .rename(columns={1.0: 'All trades'}))
83 |     stats_long_short = (round_trips
84 |                         .groupby('long')[col]
85 |                         .agg(**stats_dict)
86 |                         .T
87 |                         .rename(columns={False: 'Short trades',
88 |                                          True: 'Long trades'}))
89 | 
90 |     return stats_all.join(stats_long_short)
91 | 
92 | 
93 | def _groupby_consecutive(txn, max_delta=pd.Timedelta('8h')):
94 |     """Merge transactions of the same direction separated by less than
95 |     max_delta time duration.
96 | 
97 |     Parameters
98 |     ----------
99 |     txn : pd.DataFrame
100 |         Prices and amounts of executed trades. One row per trade.
101 |         - See full explanation in tears.create_full_tear_sheet
102 | 
103 |     max_delta : pandas.Timedelta (optional)
104 |         Merge transactions in the same direction separated by less
105 |         than max_delta time duration.
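        For example, with the default ``max_delta`` of 8 hours, two
        buys of 10 shares at $50 and $100 placed within the same
        session would be merged into a single buy of 20 shares at a
        VWAP of $75.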
106 | 
107 | 
108 |     Returns
109 |     -------
110 |     transactions : pd.DataFrame
111 |         Merged transactions. One row per same-direction block of trades.
112 |     """
113 |     def vwap(transaction):
114 |         if transaction.amount.sum() == 0:
115 |             warnings.warn('Zero transacted shares, setting vwap to nan.')
116 |             return np.nan
117 |         return (transaction.amount * transaction.price).sum() / \
118 |             transaction.amount.sum()
119 | 
120 |     out = []
121 |     for _, t in txn.groupby('symbol'):
122 |         t = t.sort_index()
123 |         t.index.name = 'dt'
124 |         t = t.reset_index()
125 | 
126 |         t['order_sign'] = t.amount > 0
127 |         t['block_dir'] = (t.order_sign.shift(
128 |             1) != t.order_sign).astype(int).cumsum()
129 |         t['block_time'] = ((t.dt.sub(t.dt.shift(1))) >
130 |                            max_delta).astype(int).cumsum()
131 |         grouped_price = (t.groupby(['block_dir',
132 |                                     'block_time'])
133 |                          .apply(vwap))
134 |         grouped_price.name = 'price'
135 |         grouped_rest = t.groupby(['block_dir', 'block_time']).agg({
136 |             'amount': 'sum',
137 |             'symbol': 'first',
138 |             'dt': 'first'})
139 | 
140 |         grouped = grouped_rest.join(grouped_price)
141 | 
142 |         out.append(grouped)
143 | 
144 |     out = pd.concat(out)
145 |     out = out.set_index('dt')
146 |     return out
147 | 
148 | 
149 | def extract_round_trips(transactions,
150 |                         portfolio_value=None):
151 |     """Group transactions into "round trips". First, transactions are
152 |     grouped by day and directionality. Then, long and short
153 |     transactions are matched to create round trips for which
154 |     PnL, duration and returns are computed. Crossings where a position
155 |     changes from long to short and vice-versa are handled correctly.
156 | 
157 |     Under the hood, we reconstruct the individual shares in a
158 |     portfolio over time and match round trips in FIFO order.
159 | 
160 |     For example, the following transactions would constitute one round trip:
161 |         index                  amount   price    symbol
162 |         2004-01-09 12:18:01      10       50    'AAPL'
163 |         2004-01-09 15:12:53      10      100    'AAPL'
164 |         2004-01-13 14:41:23     -10      100    'AAPL'
165 |         2004-01-13 15:23:34     -10      200    'AAPL'
166 | 
167 |     First, the first two and last two transactions will be merged into
168 |     two single transactions (computing the price via VWAP). Then, during
169 |     the portfolio reconstruction, the two resulting transactions will
170 |     be merged and result in 1 round-trip trade with a PnL of
171 |     (150 * 20) - (75 * 20) = 1500.
172 | 
173 |     Note, that round trips do not have to close out positions
174 |     completely. For example, we could have removed the last
175 |     transaction in the example above and still generated a round-trip
176 |     over 10 shares with 10 shares left in the portfolio to be matched
177 |     with a later transaction.
178 | 
179 |     Parameters
180 |     ----------
181 |     transactions : pd.DataFrame
182 |         Prices and amounts of executed trades. One row per trade.
183 |         - See full explanation in tears.create_full_tear_sheet
184 | 
185 |     portfolio_value : pd.Series (optional)
186 |         Portfolio value (all net assets including cash) over time.
187 |         Note that portfolio_value needs to be the beginning-of-day value,
188 |         so either use .shift() or positions.sum(axis='columns') / (1+returns).
189 | 
190 |     Returns
191 |     -------
192 |     round_trips : pd.DataFrame
193 |         DataFrame with one row per round trip. The returns column
194 |         contains returns with respect to the portfolio value, while
195 |         rt_returns are the returns with respect to the capital invested
196 |         in that particular round trip.
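
    Examples
    --------
    A sketch using the AAPL table above; ``txns`` is a hypothetical
    transactions DataFrame built from those four rows:

    >>> rts = extract_round_trips(txns)  # doctest: +SKIP
    >>> # One long AAPL round trip: pnl == 1500.0, opened 2004-01-09
    >>> # and closed 2004-01-13, per the worked example above.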
197 | """ 198 | 199 | transactions = _groupby_consecutive(transactions) 200 | roundtrips = [] 201 | 202 | for sym, trans_sym in transactions.groupby('symbol'): 203 | trans_sym = trans_sym.sort_index() 204 | price_stack = deque() 205 | dt_stack = deque() 206 | trans_sym['signed_price'] = trans_sym.price * \ 207 | np.sign(trans_sym.amount) 208 | trans_sym['abs_amount'] = trans_sym.amount.abs().astype(int) 209 | for dt, t in trans_sym.iterrows(): 210 | if t.price < 0: 211 | warnings.warn('Negative price detected, ignoring for' 212 | 'round-trip.') 213 | continue 214 | 215 | indiv_prices = [t.signed_price] * t.abs_amount 216 | if (len(price_stack) == 0) or \ 217 | (copysign(1, price_stack[-1]) == copysign(1, t.amount)): 218 | price_stack.extend(indiv_prices) 219 | dt_stack.extend([dt] * len(indiv_prices)) 220 | else: 221 | # Close round-trip 222 | pnl = 0 223 | invested = 0 224 | cur_open_dts = [] 225 | 226 | for price in indiv_prices: 227 | if len(price_stack) != 0 and \ 228 | (copysign(1, price_stack[-1]) != copysign(1, price)): 229 | # Retrieve first dt, stock-price pair from 230 | # stack 231 | prev_price = price_stack.popleft() 232 | prev_dt = dt_stack.popleft() 233 | 234 | pnl += -(price + prev_price) 235 | cur_open_dts.append(prev_dt) 236 | invested += abs(prev_price) 237 | 238 | else: 239 | # Push additional stock-prices onto stack 240 | price_stack.append(price) 241 | dt_stack.append(dt) 242 | 243 | roundtrips.append({'pnl': pnl, 244 | 'open_dt': cur_open_dts[0], 245 | 'close_dt': dt, 246 | 'long': price < 0, 247 | 'rt_returns': pnl / invested, 248 | 'symbol': sym, 249 | }) 250 | 251 | roundtrips = pd.DataFrame(roundtrips) 252 | 253 | roundtrips['duration'] = roundtrips['close_dt'].sub(roundtrips['open_dt']) 254 | 255 | if portfolio_value is not None: 256 | # Need to normalize so that we can join 257 | pv = pd.DataFrame(portfolio_value, 258 | columns=['portfolio_value'])\ 259 | .assign(date=portfolio_value.index) 260 | 261 | roundtrips['date'] = roundtrips.close_dt.apply(lambda x: 262 | x.replace(hour=0, 263 | minute=0, 264 | second=0)) 265 | 266 | tmp = (roundtrips.set_index('date') 267 | .join(pv.set_index('date'), lsuffix='_') 268 | .reset_index()) 269 | 270 | roundtrips['returns'] = tmp.pnl / tmp.portfolio_value 271 | roundtrips = roundtrips.drop('date', axis='columns') 272 | 273 | return roundtrips 274 | 275 | 276 | def add_closing_transactions(positions, transactions): 277 | """ 278 | Appends transactions that close out all positions at the end of 279 | the timespan covered by positions data. Utilizes pricing information 280 | in the positions DataFrame to determine closing price. 281 | 282 | Parameters 283 | ---------- 284 | positions : pd.DataFrame 285 | The positions that the strategy takes over time. 286 | transactions : pd.DataFrame 287 | Prices and amounts of executed round_trips. One row per trade. 288 | - See full explanation in tears.create_full_tear_sheet 289 | 290 | Returns 291 | ------- 292 | closed_txns : pd.DataFrame 293 | Transactions with closing transactions appended. 294 | """ 295 | 296 | closed_txns = transactions[['symbol', 'amount', 'price']] 297 | 298 | pos_at_end = positions.drop('cash', axis=1).iloc[-1] 299 | open_pos = pos_at_end.replace(0, np.nan).dropna() 300 | # Add closing round_trips one second after the close to be sure 301 | # they don't conflict with other round_trips executed at that time. 
302 | end_dt = open_pos.name + pd.Timedelta(seconds=1) 303 | 304 | for sym, ending_val in open_pos.items(): 305 | txn_sym = transactions[transactions.symbol == sym] 306 | 307 | ending_amount = txn_sym.amount.sum() 308 | 309 | ending_price = ending_val / ending_amount 310 | closing_txn = OrderedDict([ 311 | ('amount', -ending_amount), 312 | ('price', ending_price), 313 | ('symbol', sym), 314 | ]) 315 | 316 | closing_txn = pd.DataFrame(closing_txn, index=[end_dt]) 317 | closed_txns = pd.concat([ 318 | closed_txns, 319 | closing_txn]) 320 | 321 | closed_txns = closed_txns[closed_txns.amount != 0] 322 | 323 | return closed_txns 324 | 325 | 326 | def apply_sector_mappings_to_round_trips(round_trips, sector_mappings): 327 | """ 328 | Translates round trip symbols to sectors. 329 | 330 | Parameters 331 | ---------- 332 | round_trips : pd.DataFrame 333 | DataFrame with one row per round trip trade. 334 | - See full explanation in round_trips.extract_round_trips 335 | sector_mappings : dict or pd.Series, optional 336 | Security identifier to sector mapping. 337 | Security ids as keys, sectors as values. 338 | 339 | Returns 340 | ------- 341 | sector_round_trips : pd.DataFrame 342 | Round trips with symbol names replaced by sector names. 343 | """ 344 | 345 | sector_round_trips = round_trips.copy() 346 | sector_round_trips.symbol = sector_round_trips.symbol.apply( 347 | lambda x: sector_mappings.get(x, 'No Sector Mapping')) 348 | sector_round_trips = sector_round_trips.dropna(axis=0) 349 | 350 | return sector_round_trips 351 | 352 | 353 | def gen_round_trip_stats(round_trips): 354 | """Generate various round-trip statistics. 355 | 356 | Parameters 357 | ---------- 358 | round_trips : pd.DataFrame 359 | DataFrame with one row per round trip trade. 360 | - See full explanation in round_trips.extract_round_trips 361 | 362 | Returns 363 | ------- 364 | stats : dict 365 | A dictionary where each value is a pandas DataFrame containing 366 | various round-trip statistics. 367 | 368 | See also 369 | -------- 370 | round_trips.print_round_trip_stats 371 | """ 372 | 373 | stats = {} 374 | stats['pnl'] = agg_all_long_short(round_trips, 'pnl', PNL_STATS) 375 | stats['summary'] = agg_all_long_short(round_trips, 'pnl', 376 | SUMMARY_STATS) 377 | stats['duration'] = agg_all_long_short(round_trips, 'duration', 378 | DURATION_STATS) 379 | stats['returns'] = agg_all_long_short(round_trips, 'returns', 380 | RETURN_STATS) 381 | 382 | stats['symbols'] = \ 383 | round_trips.groupby('symbol')['returns'].agg(**RETURN_STATS).T 384 | 385 | return stats 386 | 387 | 388 | def print_round_trip_stats(round_trips, hide_pos=False): 389 | """Print various round-trip statistics. Tries to pretty-print tables 390 | with HTML output if run inside IPython NB. 391 | 392 | Parameters 393 | ---------- 394 | round_trips : pd.DataFrame 395 | DataFrame with one row per round trip trade. 
396 | - See full explanation in round_trips.extract_round_trips 397 | 398 | See also 399 | -------- 400 | round_trips.gen_round_trip_stats 401 | """ 402 | 403 | stats = gen_round_trip_stats(round_trips) 404 | 405 | print_table(stats['summary'], float_format='{:.2f}'.format, 406 | name='Summary stats') 407 | print_table(stats['pnl'], float_format='${:.2f}'.format, name='PnL stats') 408 | print_table(stats['duration'], float_format='{:.2f}'.format, 409 | name='Duration stats') 410 | print_table(stats['returns'] * 100, float_format='{:.2f}%'.format, 411 | name='Return stats') 412 | 413 | if not hide_pos: 414 | stats['symbols'].columns = stats['symbols'].columns.map(format_asset) 415 | print_table(stats['symbols'] * 100, 416 | float_format='{:.2f}%'.format, name='Symbol stats') 417 | --------------------------------------------------------------------------------