├── .gitattributes ├── .github └── ISSUE_TEMPLATE.md ├── .gitignore ├── .travis.yml ├── LICENSE ├── MANIFEST.in ├── README.md ├── WHATSNEW.md ├── build_and_deploy_docs.sh ├── conda ├── bld.bat ├── build.sh └── meta.yaml ├── docs ├── convert_nbs_to_md.sh ├── example_tear_0.png ├── example_tear_1.png ├── extra.css ├── index.md ├── notebooks │ ├── fama_french_benchmark.ipynb │ ├── full_tear_sheet_example.ipynb │ ├── round_trip_tear_sheet_example.ipynb │ ├── sector_mappings_example.ipynb │ ├── single_stock_example.ipynb │ ├── slippage_example.ipynb │ └── zipline_algo_example.ipynb ├── simple_tear_0.png ├── simple_tear_1.png └── whatsnew.md ├── mkdocs.yml ├── pyfolio ├── __init__.py ├── _seaborn.py ├── _version.py ├── capacity.py ├── deprecate.py ├── examples │ ├── bayesian.ipynb │ ├── fama_french_benchmark.ipynb │ ├── full_tear_sheet_example.ipynb │ ├── pydata_stack-4-finance.jpg │ ├── pyfolio_talk_slides.ipynb │ ├── round_trip_tear_sheet_example.ipynb │ ├── sector_mappings_example.ipynb │ ├── single_stock_example.ipynb │ ├── slippage_example.ipynb │ └── zipline_algo_example.ipynb ├── interesting_periods.py ├── ipycompat.py ├── perf_attrib.py ├── plotting.py ├── pos.py ├── round_trips.py ├── tears.py ├── tests │ ├── __init__.py │ ├── matplotlibrc │ ├── test_capacity.py │ ├── test_data │ │ ├── factor_loadings.csv │ │ ├── factor_returns.csv │ │ ├── intercepts.csv │ │ ├── positions.csv │ │ ├── residuals.csv │ │ ├── returns.csv │ │ ├── test_LMCAP.csv │ │ ├── test_LT_MOMENTUM.csv │ │ ├── test_MACDSignal.csv │ │ ├── test_VLTY.csv │ │ ├── test_caps.csv │ │ ├── test_gross_lev.csv.gz │ │ ├── test_pos.csv.gz │ │ ├── test_returns.csv.gz │ │ ├── test_sectors.csv │ │ ├── test_shares_held.csv │ │ ├── test_txn.csv.gz │ │ └── test_volumes.csv │ ├── test_nbs.py │ ├── test_perf_attrib.py │ ├── test_pos.py │ ├── test_round_trips.py │ ├── test_tears.py │ ├── test_timeseries.py │ └── test_txn.py ├── timeseries.py ├── txn.py └── utils.py ├── setup.cfg ├── setup.py └── versioneer.py /.gitattributes: -------------------------------------------------------------------------------- 1 | pyfolio/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Problem Description 2 | 3 | **Please provide a minimal, self-contained, and reproducible example:** 4 | ```python 5 | [Paste code here] 6 | ``` 7 | 8 | **Please provide the full traceback:** 9 | ```python 10 | [Paste traceback here] 11 | ``` 12 | 13 | **Please provide any additional information below:** 14 | 15 | 16 | ## Versions 17 | 18 | * Pyfolio version: 19 | * Python version: 20 | * Pandas version: 21 | * Matplotlib version: 22 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | 26 | # PyInstaller 27 | # Usually these files are written by a python script from a template 28 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
29 | *.manifest 30 | *.spec 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | 36 | # Unit test / coverage reports 37 | htmlcov/ 38 | .tox/ 39 | .coverage 40 | .coverage.* 41 | .cache 42 | nosetests.xml 43 | coverage.xml 44 | *,cover 45 | 46 | # Translations 47 | *.mo 48 | *.pot 49 | 50 | # Django stuff: 51 | *.log 52 | 53 | # Sphinx documentation 54 | docs/_build/ 55 | 56 | # PyBuilder 57 | target/ 58 | 59 | # VIM 60 | *.sw? 61 | 62 | # IPython notebook checkpoints 63 | .ipynb_checkpoints/ 64 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | sudo: false 3 | 4 | python: 5 | - 2.7 6 | - 3.7 7 | 8 | env: 9 | - PANDAS_VERSION=0.18.1 10 | - PANDAS_VERSION=0.25.0 11 | 12 | matrix: 13 | exclude: 14 | - python: 3.7 15 | env: PANDAS_VERSION=0.18.1 16 | - python: 2.7 17 | env: PANDAS_VERSION=0.25.0 18 | 19 | before_install: 20 | # We do this conditionally because it saves us some downloading if the 21 | # version is the same. 22 | - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then 23 | wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh; 24 | else 25 | wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; 26 | fi 27 | - bash miniconda.sh -b -p $HOME/miniconda 28 | - export PATH="$HOME/miniconda/bin:$PATH" 29 | # required for using mkl-service 30 | - export MKL_THREADING_LAYER=GNU 31 | - hash -r 32 | - conda config --set always_yes yes --set changeps1 no 33 | - conda update -q conda 34 | # Useful for debugging any issues with conda 35 | - conda info -a 36 | - cp pyfolio/tests/matplotlibrc . 37 | 38 | install: 39 | - conda create -q -n testenv --yes python=$TRAVIS_PYTHON_VERSION ipython numpy scipy nose matplotlib pandas=$PANDAS_VERSION flake8 seaborn scikit-learn runipy pandas-datareader 40 | - source activate testenv 41 | - pip install -e .[all] 42 | 43 | before_script: 44 | - "flake8 pyfolio" 45 | 46 | script: 47 | - nosetests $TESTCMD 48 | 49 | notifications: 50 | email: false 51 | 52 | branches: 53 | only: 54 | - master 55 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2018 Quantopian, Inc. 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | 203 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include versioneer.py 2 | include pyfolio/_version.py 3 | include LICENSE 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |  2 | 3 | # pyfolio 4 | 5 | [](https://gitter.im/quantopian/pyfolio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) 6 | [](https://travis-ci.org/quantopian/pyfolio) 7 | 8 | pyfolio is a Python library for performance and risk analysis of 9 | financial portfolios developed by 10 | [Quantopian Inc](https://www.quantopian.com). It works well with the 11 | [Zipline](https://www.zipline.io/) open source backtesting library. 12 | Quantopian also offers a [fully managed service for professionals](https://factset.quantopian.com) 13 | that includes Zipline, Alphalens, Pyfolio, FactSet data, and more. 14 | 15 | At the core of pyfolio is a so-called tear sheet that consists of 16 | various individual plots that provide a comprehensive image of the 17 | performance of a trading algorithm. Here's an example of a simple tear 18 | sheet analyzing a strategy: 19 | 20 |  21 |  22 | 23 | Also see [slides of a talk about 24 | pyfolio](https://nbviewer.jupyter.org/format/slides/github/quantopian/pyfolio/blob/master/pyfolio/examples/pyfolio_talk_slides.ipynb#/). 25 | 26 | ## Installation 27 | 28 | To install pyfolio, run: 29 | 30 | ```bash 31 | pip install pyfolio 32 | ``` 33 | 34 | #### Development 35 | 36 | For development, you may want to use a [virtual environment](https://docs.python-guide.org/en/latest/dev/virtualenvs/) to avoid dependency conflicts between pyfolio and other Python projects you have. To get set up with a virtual env, run: 37 | ```bash 38 | mkvirtualenv pyfolio 39 | ``` 40 | 41 | Next, clone this git repository and run `python setup.py develop` 42 | and edit the library files directly. 43 | 44 | #### Matplotlib on OSX 45 | 46 | If you are on OSX and using a non-framework build of Python, you may need to set your backend: 47 | ``` bash 48 | echo "backend: TkAgg" > ~/.matplotlib/matplotlibrc 49 | ``` 50 | 51 | ## Usage 52 | 53 | A good way to get started is to run the pyfolio examples in 54 | a [Jupyter notebook](https://jupyter.org/). To do this, you first want to 55 | start a Jupyter notebook server: 56 | 57 | ```bash 58 | jupyter notebook 59 | ``` 60 | 61 | From the notebook list page, navigate to the pyfolio examples directory 62 | and open a notebook. Execute the code in a notebook cell by clicking on it 63 | and hitting Shift+Enter. 64 | 65 | 66 | ## Questions? 67 | 68 | If you find a bug, feel free to [open an issue](https://github.com/quantopian/pyfolio/issues) in this repository. 69 | 70 | You can also join our [mailing list](https://groups.google.com/forum/#!forum/pyfolio) or 71 | our [Gitter channel](https://gitter.im/quantopian/pyfolio). 72 | 73 | ## Support 74 | 75 | Please [open an issue](https://github.com/quantopian/pyfolio/issues/new) for support. 76 | 77 | ## Contributing 78 | 79 | If you'd like to contribute, a great place to look is the [issues marked with help-wanted](https://github.com/quantopian/pyfolio/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22). 
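If you want to see what a minimal run of the library looks like before diving in, the sketch below (illustrative only — the returns series is synthetic, and the optional positions/transactions inputs are omitted) mirrors what the example notebooks do:

```python
import numpy as np
import pandas as pd
import pyfolio as pf

# Synthetic daily returns, purely for illustration; in practice these come
# from a backtest (e.g. Zipline) or from live trading records.
index = pd.date_range("2015-01-02", periods=252, freq="B", tz="UTC")
returns = pd.Series(np.random.default_rng(0).normal(0.0005, 0.01, 252),
                    index=index)

# Renders the full tear sheet; as of v0.9.0 a benchmark is optional too.
pf.create_full_tear_sheet(returns)
```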
80 | 81 | For a list of core developers and outside collaborators, see [the GitHub contributors list](https://github.com/quantopian/pyfolio/graphs/contributors). 82 | -------------------------------------------------------------------------------- /WHATSNEW.md: -------------------------------------------------------------------------------- 1 | # What's New 2 | 3 | These are new features and improvements of note in each release. 4 | 5 | ## v0.9.0 (Aug 1st, 2018) 6 | 7 | ### New features 8 | 9 | - Previously, `pyfolio` required a benchmark, usually the U.S. market 10 | returns `SPY`. In order to provide support for international equities and 11 | alternative data sets, `pyfolio` is now completely independent of benchmarks. 12 | If a benchmark is passed, all benchmark-related analyses will be performed; 13 | if not, they will simply be skipped. By [George Ho](https://github.com/eigenfoo). 14 | - Performance attribution tear sheet [PR441](https://github.com/quantopian/pyfolio/pull/441), [PR433](https://github.com/quantopian/pyfolio/pull/433), [PR442](https://github.com/quantopian/pyfolio/pull/442). By [Vikram Narayan](https://github.com/vikram-narayan). 15 | - Improved implementation of `get_turnover` [PR432](https://github.com/quantopian/pyfolio/pull/432). By [Gus Gordon](https://github.com/gusgordon). 16 | - Users can now pass in extra rows (as a dict or OrderedDict) to display in the perf_stats table [PR445](https://github.com/quantopian/pyfolio/pull/445). By [Gus Gordon](https://github.com/gusgordon). 17 | 18 | ### Maintenance 19 | 20 | - Many features have been more extensively debugged, maintained and 21 | tested. By [Ana Ruelas](https://github.com/ahgnaw) and [Vikram 22 | Narayan](https://github.com/vikram-narayan). 23 | - Various fixes to support pandas versions >= 0.18.1 [PR443](https://github.com/quantopian/pyfolio/pull/443). By [Andrew Daniels](https://github.com/yankees714). 24 | 25 | ## v0.8.0 (Aug 23rd, 2017) 26 | 27 | This is a major release from `0.7.0`, and all users are recommended to upgrade. 28 | 29 | ### New features 30 | 31 | - Risk tear sheet: added a new tear sheet to analyze risk exposures to common 32 | factors (e.g. mean reversion and momentum), sector (e.g. Morningstar 33 | sectors), market cap and illiquid stocks. By [George 34 | Ho](https://github.com/eigenfoo). 35 | - Simple tear sheet: added a new tear sheet that presents only the most 36 | important plots in the full tear sheet, for a quick general overview of a 37 | portfolio's performance. By [George Ho](https://github.com/eigenfoo). 38 | - Performance attribution: added a new table for performance attribution 39 | analysis, such as the amount of returns attributable to common factors, and 40 | summary statistics such as the multi-factor alpha and multi-factor Sharpe 41 | ratio. By [Vikram Narayan](https://github.com/vikram-narayan). 42 | - Volatility plot: added a rolling annual volatility plot to the returns tear 43 | sheet. By [hkopp](https://github.com/hkopp). 44 | 45 | ### Bugfixes 46 | 47 | - Yahoo and pandas data-reader: fixed bug regarding Yahoo backend for market 48 | data and pandas data-reader. By [Thomas Wiecki](https://github.com/twiecki) 49 | and [Gus Gordon](https://github.com/gusgordon). 50 | - `empyrical` compatibility: removed `information_ratio` to remain compatible 51 | with `empyrical`. By [Thomas Wiecki](https://github.com/twiecki).
52 | - Fama-French rolling multivariate regression: fixed bug where the rolling 53 | Fama-French plot performed separate linear regressions instead of a 54 | multivariate regression. By [George Ho](https://github.com/eigenfoo). 55 | - Other minor bugfixes. By [Scott Sanderson](https://github.com/ssanderson), 56 | [Jonathan Ng](https://github.com/jonathanng), 57 | [SylvainDe](https://github.com/SylvainDe) and 58 | [mckelvin](https://github.com/mckelvin). 59 | 60 | ### Maintenance 61 | 62 | - Documentation: updated and improved `pyfolio` documentation and example 63 | Jupyter notebooks. By [George Ho](https://github.com/eigenfoo). 64 | - Data loader migration: all data loaders have been migrated from `pyfolio` to 65 | `empyrical`. By [James Christopher](https://github.com/jameschristopher). 66 | - Improved plotting style: fixed issues with formatting and presentation of 67 | plots. By [George Ho](https://github.com/eigenfoo). 68 | 69 | ## v0.7.0 (Jan 28th, 2017) 70 | 71 | This is a major release from `0.6.0`, and all users are recommended to upgrade. 72 | 73 | ### New features 74 | 75 | - Adds a transaction timing plot, which gives insight into a strategy's 76 | trade times. 77 | - Adds a plot showing the number of longs and shorts held over time. 78 | - New round trips plot selects a sample of held positions (16 by default) and 79 | shows their round trips. This replaces the old round trip plot, which became 80 | unreadable for strategies that traded many positions. 81 | - Adds basic capability for analyzing intraday strategies. If a strategy makes 82 | a large number of transactions relative to its end-of-day positions, then 83 | pyfolio will attempt to reconstruct the intraday positions, take the point of 84 | peak exposure to the market during each day, and plot that data with the 85 | positions tear sheet. By default pyfolio will automatically detect this, but 86 | the behavior can be changed by passing either `estimate_intraday=True` or 87 | `estimate_intraday=False` to the tear sheet functions ([see 88 | here](https://github.com/quantopian/pyfolio/blob/master/pyfolio/tears.py#L131)). 89 | - Now formats [zipline](https://github.com/quantopian/zipline) assets, 90 | displaying their ticker symbol. 91 | - Gross leverage is no longer required to be passed, and will now be calculated 92 | from the passed positions DataFrame. 93 | 94 | ### Bugfixes 95 | 96 | - Cone plotting location is now correct. 97 | - Adjust scaling of beta and Fama-French plots. 98 | - Removed multiple dependencies, some of which were previously unused. 99 | - Various text fixes. 100 | 101 | ## v0.6.0 (Oct 17, 2016) 102 | 103 | This is a major new release from `0.5.1`. All users are recommended to upgrade. 104 | 105 | ### New features 106 | 107 | * Computation of performance and risk measures has been split off into 108 | [`empyrical`](https://github.com/quantopian/empyrical). This allows 109 | [`Zipline`](https://zipline.io) and `pyfolio` to use the same code to 110 | calculate their risk statistics. By [Ana Ruelas](https://github.com/ahgnaw) and 111 | [Abhi Kalyan](https://github.com/abhijeetkalyan). 112 | * New multistrike cone which redraws the cone when it crosses its initial bounds 113 | [PR310](https://github.com/quantopian/pyfolio/pull/310). By [Ana 114 | Ruelas](https://github.com/ahgnaw) and [Abhi 115 | Kalyan](https://github.com/abhijeetkalyan). 116 | 117 | ### Bugfixes 118 | 119 | * Can use most recent PyMC3 now.
120 | * Depends on seaborn 0.7.0 or later now 121 | [PR331](https://github.com/quantopian/pyfolio/pull/331). 122 | * Disable buggy computation of round trips per day and per month 123 | [PR339](https://github.com/quantopian/pyfolio/pull/339). 124 | 125 | ## v0.5.1 (June 10, 2016) 126 | 127 | This is a bugfix release from `0.5.0` with limited new functionality. All users are recommended to upgrade. 128 | 129 | ### New features 130 | 131 | * OOS data is now overlaid on top of the box plot 132 | [PR306](https://github.com/quantopian/pyfolio/pull/306) by [Ana 133 | Ruelas](https://github.com/ahgnaw) 134 | * New logo [PR298](https://github.com/quantopian/pyfolio/pull/298) by [Taso 135 | Petridis](https://github.com/tasopetridis) and [Richard 136 | Frank](https://github.com/richafrank) 137 | * Raw returns plot and cumulative log returns plot 138 | [PR294](https://github.com/quantopian/pyfolio/pull/294) by [Thomas 139 | Wiecki](https://github.com/twiecki) 140 | * Net exposure line to the long/short exposure plot 141 | [PR301](https://github.com/quantopian/pyfolio/pull/301) by [Ana 142 | Ruelas](https://github.com/ahgnaw) 143 | 144 | ### Bugfixes 145 | 146 | * Fix drawdown behavior and pandas exception in tear-sheet creation 147 | [PR297](https://github.com/quantopian/pyfolio/pull/297) by [Flavio 148 | Duarte](https://github.com/flaviodrt) 149 | 150 | ## v0.5.0 (April 21, 2016) -- Olympia 151 | 152 | This is a major release from `0.4.0` that includes many new analyses and 153 | features. We recommend that all users upgrade to this new version. Also update 154 | your dependencies, specifically, `pandas>=0.18.0`, `seaborn>=0.6.0` and 155 | `zipline>=0.8.4`. 156 | 157 | ### New features 158 | 159 | * New capacity tear-sheet to assess how much capital can be traded on a strategy 160 | [PR284](https://github.com/quantopian/pyfolio/pull/284). [Andrew 161 | Campbell](https://github.com/a-campbell). 162 | * Bootstrap analysis to assess uncertainty in performance metrics 163 | [PR261](https://github.com/quantopian/pyfolio/pull/261). [Thomas 164 | Wiecki](https://github.com/twiecki) 165 | * Refactored round-trip analysis to be more general and have better output. Now 166 | does full portfolio reconstruction to match trades 167 | [PR293](https://github.com/quantopian/pyfolio/pull/293). [Thomas 168 | Wiecki](https://github.com/twiecki), [Andrew 169 | Campbell](https://github.com/a-campbell). See the 170 | [tutorial](http://quantopian.github.io/pyfolio/round_trip_example/) for more 171 | information. 172 | * Prettier printing of tables in notebooks 173 | [PR289](https://github.com/quantopian/pyfolio/pull/289). [Thomas 174 | Wiecki](https://github.com/twiecki) 175 | * Faster max-drawdown calculation 176 | [PR281](https://github.com/quantopian/pyfolio/pull/281). [Devin 177 | Stevenson](https://github.com/devinstevenson) 178 | * New metrics tail-ratio and common sense ratio 179 | [PR276](https://github.com/quantopian/pyfolio/pull/276). [Thomas 180 | Wiecki](https://github.com/twiecki) 181 | * Log-scaled cumulative returns plot and raw returns plot 182 | [PR294](https://github.com/quantopian/pyfolio/pull/294). [Thomas 183 | Wiecki](https://github.com/twiecki) 184 | 185 | ### Bug fixes 186 | * Many deprecation fixes for Pandas 0.18.0, seaborn 0.6.0, and zipline 0.8.4 187 | 188 | 189 | ## v0.4.0 (Dec 10, 2015) 190 | 191 | This is a major release from 0.3.1 that includes new features and quite a few bug fixes. We recommend that all users upgrade to this new version.
192 | 193 | ### New features 194 | 195 | * Round-trip analysis [PR210](https://github.com/quantopian/pyfolio/pull/210) 196 | Andrew, Thomas 197 | * Improved cone to forecast returns that uses a bootstrap instead of linear 198 | forecasting [PR233](https://github.com/quantopian/pyfolio/pull/233) Andrew, 199 | Thomas 200 | * Plot max and median long/short exposures 201 | [PR237](https://github.com/quantopian/pyfolio/pull/237) Andrew 202 | 203 | ### Bug fixes 204 | 205 | * Sharpe ratio was calculated incorrectly 206 | [PR219](https://github.com/quantopian/pyfolio/pull/219) Thomas, Justin 207 | * annual_return() now only computes CAGR in the correct way 208 | [PR234](https://github.com/quantopian/pyfolio/pull/234) Justin 209 | * Cache SPY and Fama-French returns in home-directory instead of 210 | install-directory [PR241](https://github.com/quantopian/pyfolio/pull/241) Joe 211 | * Remove data files from package 212 | [PR241](https://github.com/quantopian/pyfolio/pull/241) Joe 213 | * Cast factor.name to str 214 | [PR223](https://github.com/quantopian/pyfolio/pull/223) Scotty 215 | * Test all `create_*_tear_sheet` functions in all configurations 216 | [PR247](https://github.com/quantopian/pyfolio/pull/247) Thomas 217 | 218 | 219 | ## v0.3.1 (Nov 12, 2015) 220 | 221 | This is a minor release from 0.3 that includes mostly bugfixes but also some new features. We recommend that all users upgrade to this new version. 222 | 223 | ### New features 224 | 225 | * Add Information Ratio [PR194](https://github.com/quantopian/pyfolio/pull/194) 226 | by @MridulS 227 | * Bayesian tear-sheet now accepts 'Fama-French' option to do Bayesian 228 | multivariate regression against Fama-French risk factors 229 | [PR200](https://github.com/quantopian/pyfolio/pull/200) by Shane Bussman 230 | * Plotting of monthly returns 231 | [PR195](https://github.com/quantopian/pyfolio/pull/195) 232 | 233 | ### Bug fixes 234 | 235 | * `pos.get_percent_alloc` was not handling short allocations correctly 236 | [PR201](https://github.com/quantopian/pyfolio/pull/201) 237 | * UTC bug with cached Fama-French factors 238 | [commit](https://github.com/quantopian/pyfolio/commit/709553a55b5df7c908d17f443cb17b51854a65be) 239 | * Sector map was not being passed from `create_returns_tearsheet` 240 | [commit](https://github.com/quantopian/pyfolio/commit/894b753e365f9cb4861ffca2ef214c5a64b2bef4) 241 | * New sector mapping feature was not Python 3 compatible 242 | [PR201](https://github.com/quantopian/pyfolio/pull/201) 243 | 244 | 245 | ### Maintenance 246 | 247 | * We now depend on pandas-datareader as the yahoo finance loaders from pandas 248 | will be deprecated [PR181](https://github.com/quantopian/pyfolio/pull/181) by 249 | @tswrightsandpointe 250 | 251 | ### Contributors 252 | 253 | Besides the core developers, we have seen an increase in outside contributions 254 | which we greatly appreciate. Specifically, these people contributed to this 255 | release: 256 | 257 | * Shane Bussman 258 | * @MridulS 259 | * @YihaoLu 260 | * @jkrauss82 261 | * @tswrightsandpointe 262 | * @cgdeboer 263 | 264 | 265 | ## v0.3 (Oct 23, 2015) 266 | 267 | This is a major release from 0.2 that includes many exciting new features. We 268 | recommend that all users upgrade to this new version.
269 | 270 | ### New features 271 | 272 | * Sector exposures: sum positions by sector given a dictionary or series of 273 | symbol to sector mappings 274 | [PR166](https://github.com/quantopian/pyfolio/pull/166) 275 | * Ability to make cones with multiple shaded stdev regions 276 | [PR168](https://github.com/quantopian/pyfolio/pull/168) 277 | * Slippage sweep: See how an algorithm performs with various levels of slippage 278 | [PR170](https://github.com/quantopian/pyfolio/pull/170) 279 | * Stochastic volatility model in Bayesian tear sheet 280 | [PR174](https://github.com/quantopian/pyfolio/pull/174) 281 | * Ability to suppress display of position information 282 | [PR177](https://github.com/quantopian/pyfolio/pull/177) 283 | 284 | ### Bug fixes 285 | 286 | * Various fixes to make pyfolio pandas 0.17 compatible 287 | 288 | ## v0.2 (Oct 16, 2015) 289 | 290 | This is a major release from 0.1 that includes mainly bugfixes and refactorings 291 | but also some new features. We recommend that all users upgrade to this new 292 | version. 293 | 294 | ### New features 295 | 296 | * Volatility matched cumulative returns plot 297 | [PR126](https://github.com/quantopian/pyfolio/pull/126). 298 | * Allow for different periodicity (annualization factors) in the annual_() 299 | methods [PR164](https://github.com/quantopian/pyfolio/pull/164). 300 | * Users can supply their own interesting periods 301 | [PR163](https://github.com/quantopian/pyfolio/pull/163). 302 | * Ability to weight a portfolio of holdings by a metric value 303 | [PR161](https://github.com/quantopian/pyfolio/pull/161). 304 | 305 | ### Bug fixes 306 | 307 | * Fix drawdown overlaps [PR150](https://github.com/quantopian/pyfolio/pull/150). 308 | * Monthly returns distribution should not stack by year 309 | [PR162](https://github.com/quantopian/pyfolio/pull/162). 310 | * Fix gross leverage [PR147](https://github.com/quantopian/pyfolio/pull/147) 311 | -------------------------------------------------------------------------------- /build_and_deploy_docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | pushd docs 4 | bash convert_nbs_to_md.sh 5 | popd 6 | mkdocs build --clean 7 | mkdocs gh-deploy 8 | -------------------------------------------------------------------------------- /conda/bld.bat: -------------------------------------------------------------------------------- 1 | "%PYTHON%" setup.py install 2 | if errorlevel 1 exit 1 3 | 4 | :: Add more build steps here, if they are necessary. 5 | 6 | :: See 7 | :: http://docs.continuum.io/conda/build.html 8 | :: for a list of environment variables that are set during the build process. 9 | -------------------------------------------------------------------------------- /conda/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | $PYTHON setup.py install --single-version-externally-managed --record=record.txt 4 | 5 | # Add more build steps here, if they are necessary. 6 | 7 | # See 8 | # http://docs.continuum.io/conda/build.html 9 | # for a list of environment variables that are set during the build process.
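# For reference (illustrative, not part of the original recipe): conda-build
# executes this script inside the build environment; building the recipe
# locally would typically be done with `conda build conda/` from the
# repository root.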
10 | -------------------------------------------------------------------------------- /conda/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: pyfolio 3 | version: "0.6.0" 4 | 5 | source: 6 | fn: pyfolio-0.6.0.tar.gz 7 | url: https://pypi.python.org/packages/74/b6/bd9064f071ab71312256dc0dcf792440f2e41a66f6736bd2aa90ba965fb6/pyfolio-0.6.0.tar.gz#md5=f3f02df1c1b77209eb33c64e34f00031 8 | md5: f3f02df1c1b77209eb33c64e34f00031 9 | 10 | build: 11 | noarch_python: True 12 | 13 | requirements: 14 | build: 15 | - python 16 | - setuptools 17 | 18 | run: 19 | - python 20 | - matplotlib >=1.4.0 21 | - numpy >=1.9.1 22 | - pandas >=0.18.0 23 | - pytz >=2014.10 24 | - scipy >=0.14.0 25 | - seaborn >=0.6.0 26 | - pandas-datareader >=0.2 27 | - ipython 28 | - empyrical >=0.2.1 29 | 30 | test: 31 | # Python imports 32 | imports: 33 | - pyfolio 34 | - pyfolio.tests 35 | 36 | #commands: 37 | # - nosetests # You can put test commands to be run here. Use this to test that the 38 | # # entry points work. 39 | 40 | about: 41 | home: http://quantopian.github.io/pyfolio/ 42 | license: Apache Software License 43 | summary: 'pyfolio is a Python library for performance and risk analysis of financial portfolios' 44 | -------------------------------------------------------------------------------- /docs/convert_nbs_to_md.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | NBDIR=notebooks 3 | 4 | for fullfile in $NBDIR/*.ipynb; do 5 | echo "Processing $fullfile file.."; 6 | filename=$(basename "$fullfile") 7 | extension="${filename##*.}" 8 | filename="${filename%.*}" 9 | jupyter nbconvert $fullfile --to markdown --output $filename 10 | done 11 | -------------------------------------------------------------------------------- /docs/example_tear_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/quantopian/pyfolio/4b901f6d73aa02ceb6d04b7d83502e5c6f2e81aa/docs/example_tear_0.png -------------------------------------------------------------------------------- /docs/example_tear_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/quantopian/pyfolio/4b901f6d73aa02ceb6d04b7d83502e5c6f2e81aa/docs/example_tear_1.png -------------------------------------------------------------------------------- /docs/extra.css: -------------------------------------------------------------------------------- 1 | img[title="pyfolio"] { 2 | background-color: transparent !important; 3 | border: 0 !important; 4 | } 5 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | ../README.md -------------------------------------------------------------------------------- /docs/notebooks/fama_french_benchmark.ipynb: -------------------------------------------------------------------------------- 1 | ../../pyfolio/examples/fama_french_benchmark.ipynb -------------------------------------------------------------------------------- /docs/notebooks/full_tear_sheet_example.ipynb: -------------------------------------------------------------------------------- 1 | ../../pyfolio/examples/full_tear_sheet_example.ipynb -------------------------------------------------------------------------------- /docs/notebooks/round_trip_tear_sheet_example.ipynb: 
-------------------------------------------------------------------------------- 1 | ../../pyfolio/examples/round_trip_tear_sheet_example.ipynb -------------------------------------------------------------------------------- /docs/notebooks/sector_mappings_example.ipynb: -------------------------------------------------------------------------------- 1 | ../../pyfolio/examples/sector_mappings_example.ipynb -------------------------------------------------------------------------------- /docs/notebooks/single_stock_example.ipynb: -------------------------------------------------------------------------------- 1 | ../../pyfolio/examples/single_stock_example.ipynb -------------------------------------------------------------------------------- /docs/notebooks/slippage_example.ipynb: -------------------------------------------------------------------------------- 1 | ../../pyfolio/examples/slippage_example.ipynb -------------------------------------------------------------------------------- /docs/notebooks/zipline_algo_example.ipynb: -------------------------------------------------------------------------------- 1 | ../../pyfolio/examples/zipline_algo_example.ipynb -------------------------------------------------------------------------------- /docs/simple_tear_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/quantopian/pyfolio/4b901f6d73aa02ceb6d04b7d83502e5c6f2e81aa/docs/simple_tear_0.png -------------------------------------------------------------------------------- /docs/simple_tear_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/quantopian/pyfolio/4b901f6d73aa02ceb6d04b7d83502e5c6f2e81aa/docs/simple_tear_1.png -------------------------------------------------------------------------------- /docs/whatsnew.md: -------------------------------------------------------------------------------- 1 | ../WHATSNEW.md -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: pyfolio 2 | repo_url: https://github.com/quantopian/pyfolio 3 | site_author: Quantopian Inc. 4 | 5 | pages: 6 | - Overview: 'index.md' 7 | - Releases: 'whatsnew.md' 8 | - Tutorial: 9 | - 'Single stock': 'notebooks/single_stock_example.md' 10 | - 'Zipline algorithm': 'notebooks/zipline_algo_example.md' 11 | - 'Sector analysis': 'notebooks/sector_mappings_example.md' 12 | - 'Round trip analysis': 'notebooks/round_trip_tear_sheet_example.md' 13 | - 'Slippage analysis': 'notebooks/slippage_example.md' 14 | 15 | extra_css: [extra.css] 16 | -------------------------------------------------------------------------------- /pyfolio/__init__.py: -------------------------------------------------------------------------------- 1 | from . import utils 2 | from . import timeseries 3 | from . import pos 4 | from . import txn 5 | from . import interesting_periods 6 | from . import capacity 7 | from . import round_trips 8 | from . 
import perf_attrib 9 | 10 | from .tears import * # noqa 11 | from .plotting import * # noqa 12 | from ._version import get_versions 13 | 14 | __version__ = get_versions()['version'] 15 | del get_versions 16 | 17 | __all__ = ['utils', 'timeseries', 'pos', 'txn', 18 | 'interesting_periods', 'capacity', 'round_trips', 19 | 'perf_attrib'] 20 | -------------------------------------------------------------------------------- /pyfolio/_seaborn.py: -------------------------------------------------------------------------------- 1 | """Wrapper module around seaborn to suppress warnings on import. 2 | 3 | This should be removed when seaborn stops raising: 4 | 5 | UserWarning: axes.color_cycle is deprecated and replaced with axes.prop_cycle; 6 | please use the latter. 7 | """ 8 | import warnings 9 | 10 | 11 | with warnings.catch_warnings(): 12 | warnings.filterwarnings( 13 | 'ignore', 14 | 'axes.color_cycle is deprecated', 15 | UserWarning, 16 | 'matplotlib', 17 | ) 18 | from seaborn import * # noqa 19 | -------------------------------------------------------------------------------- /pyfolio/_version.py: -------------------------------------------------------------------------------- 1 | 2 | # This file helps to compute a version number in source trees obtained from 3 | # git-archive tarball (such as those provided by githubs download-from-tag 4 | # feature). Distribution tarballs (built by setup.py sdist) and build 5 | # directories (produced by setup.py build) will contain a much shorter file 6 | # that just contains the computed version number. 7 | 8 | # This file is released into the public domain. Generated by 9 | # versioneer-0.15 (https://github.com/warner/python-versioneer) 10 | 11 | import errno 12 | import os 13 | import re 14 | import subprocess 15 | import sys 16 | 17 | 18 | def get_keywords(): 19 | # these strings will be replaced by git during git-archive. 20 | # setup.py/versioneer.py will grep for the variable names, so they must 21 | # each be defined on a line of their own. _version.py will just call 22 | # get_keywords(). 
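# For illustration (an added note, not in the original source): git's
# export-subst attribute, enabled for this file in .gitattributes, expands
# the two assignments below when a git-archive tarball is built — which is
# why refnames reads " (HEAD -> master)" here. In a plain checkout they
# would still contain unexpanded "$Format" placeholders, which
# git_versions_from_keywords below detects and rejects.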
23 | git_refnames = " (HEAD -> master)" 24 | git_full = "4b901f6d73aa02ceb6d04b7d83502e5c6f2e81aa" 25 | keywords = {"refnames": git_refnames, "full": git_full} 26 | return keywords 27 | 28 | 29 | class VersioneerConfig: 30 | pass 31 | 32 | 33 | def get_config(): 34 | # these strings are filled in when 'setup.py versioneer' creates 35 | # _version.py 36 | cfg = VersioneerConfig() 37 | cfg.VCS = "git" 38 | cfg.style = "pep440" 39 | cfg.tag_prefix = "" 40 | cfg.parentdir_prefix = "pyfolio-" 41 | cfg.versionfile_source = "pyfolio/_version.py" 42 | cfg.verbose = False 43 | return cfg 44 | 45 | 46 | class NotThisMethod(Exception): 47 | pass 48 | 49 | 50 | LONG_VERSION_PY = {} 51 | HANDLERS = {} 52 | 53 | 54 | def register_vcs_handler(vcs, method): # decorator 55 | def decorate(f): 56 | if vcs not in HANDLERS: 57 | HANDLERS[vcs] = {} 58 | HANDLERS[vcs][method] = f 59 | return f 60 | return decorate 61 | 62 | 63 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): 64 | assert isinstance(commands, list) 65 | p = None 66 | for c in commands: 67 | try: 68 | dispcmd = str([c] + args) 69 | # remember shell=False, so use git.cmd on windows, not just git 70 | p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, 71 | stderr=(subprocess.PIPE if hide_stderr 72 | else None)) 73 | break 74 | except EnvironmentError: 75 | e = sys.exc_info()[1] 76 | if e.errno == errno.ENOENT: 77 | continue 78 | if verbose: 79 | print("unable to run %s" % dispcmd) 80 | print(e) 81 | return None 82 | else: 83 | if verbose: 84 | print("unable to find command, tried %s" % (commands,)) 85 | return None 86 | stdout = p.communicate()[0].strip() 87 | if sys.version_info[0] >= 3: 88 | stdout = stdout.decode() 89 | if p.returncode != 0: 90 | if verbose: 91 | print("unable to run %s (error)" % dispcmd) 92 | return None 93 | return stdout 94 | 95 | 96 | def versions_from_parentdir(parentdir_prefix, root, verbose): 97 | # Source tarballs conventionally unpack into a directory that includes 98 | # both the project name and a version string. 99 | dirname = os.path.basename(root) 100 | if not dirname.startswith(parentdir_prefix): 101 | if verbose: 102 | print("guessing rootdir is '%s', but '%s' doesn't start with " 103 | "prefix '%s'" % (root, dirname, parentdir_prefix)) 104 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 105 | return {"version": dirname[len(parentdir_prefix):], 106 | "full-revisionid": None, 107 | "dirty": False, "error": None} 108 | 109 | 110 | @register_vcs_handler("git", "get_keywords") 111 | def git_get_keywords(versionfile_abs): 112 | # the code embedded in _version.py can just fetch the value of these 113 | # keywords. When used from setup.py, we don't want to import _version.py, 114 | # so we do it with a regexp instead. This function is not used from 115 | # _version.py. 
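# Illustrative sketch of the result this scan would produce for the
# checkout shown above:
#   {'refnames': ' (HEAD -> master)',
#    'full': '4b901f6d73aa02ceb6d04b7d83502e5c6f2e81aa'}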
116 | keywords = {} 117 | try: 118 | f = open(versionfile_abs, "r") 119 | for line in f.readlines(): 120 | if line.strip().startswith("git_refnames ="): 121 | mo = re.search(r'=\s*"(.*)"', line) 122 | if mo: 123 | keywords["refnames"] = mo.group(1) 124 | if line.strip().startswith("git_full ="): 125 | mo = re.search(r'=\s*"(.*)"', line) 126 | if mo: 127 | keywords["full"] = mo.group(1) 128 | f.close() 129 | except EnvironmentError: 130 | pass 131 | return keywords 132 | 133 | 134 | @register_vcs_handler("git", "keywords") 135 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 136 | if not keywords: 137 | raise NotThisMethod("no keywords at all, weird") 138 | refnames = keywords["refnames"].strip() 139 | if refnames.startswith("$Format"): 140 | if verbose: 141 | print("keywords are unexpanded, not using") 142 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 143 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 144 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 145 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 146 | TAG = "tag: " 147 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 148 | if not tags: 149 | # Either we're using git < 1.8.3, or there really are no tags. We use 150 | # a heuristic: assume all version tags have a digit. The old git %d 151 | # expansion behaves like git log --decorate=short and strips out the 152 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 153 | # between branches and tags. By ignoring refnames without digits, we 154 | # filter out many common branch names like "release" and 155 | # "stabilization", as well as "HEAD" and "master". 156 | tags = set([r for r in refs if re.search(r'\d', r)]) 157 | if verbose: 158 | print("discarding '%s', no digits" % ",".join(refs-tags)) 159 | if verbose: 160 | print("likely tags: %s" % ",".join(sorted(tags))) 161 | for ref in sorted(tags): 162 | # sorting will prefer e.g. "2.0" over "2.0rc1" 163 | if ref.startswith(tag_prefix): 164 | r = ref[len(tag_prefix):] 165 | if verbose: 166 | print("picking %s" % r) 167 | return {"version": r, 168 | "full-revisionid": keywords["full"].strip(), 169 | "dirty": False, "error": None 170 | } 171 | # no suitable tags, so version is "0+unknown", but full hex is still there 172 | if verbose: 173 | print("no suitable tags, using unknown + full revision id") 174 | return {"version": "0+unknown", 175 | "full-revisionid": keywords["full"].strip(), 176 | "dirty": False, "error": "no suitable tags"} 177 | 178 | 179 | @register_vcs_handler("git", "pieces_from_vcs") 180 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 181 | # this runs 'git' from the root of the source tree. This only gets called 182 | # if the git-archive 'subst' keywords were *not* expanded, and 183 | # _version.py hasn't already been rewritten with a short version string, 184 | # meaning we're inside a checked out source tree. 
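# For orientation (illustrative outputs): the 'git describe --tags --dirty
# --always --long' call below returns strings shaped like
# '0.8.0-14-g6f2e81a' or '0.8.0-14-g6f2e81a-dirty' when a tag exists, and a
# bare short hash such as '4b901f6' when none does; the parsing further
# down handles each of these shapes.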
185 | 186 | if not os.path.exists(os.path.join(root, ".git")): 187 | if verbose: 188 | print("no .git in %s" % root) 189 | raise NotThisMethod("no .git directory") 190 | 191 | GITS = ["git"] 192 | if sys.platform == "win32": 193 | GITS = ["git.cmd", "git.exe"] 194 | # if there is a tag, this yields TAG-NUM-gHEX[-dirty] 195 | # if there are no tags, this yields HEX[-dirty] (no NUM) 196 | describe_out = run_command(GITS, ["describe", "--tags", "--dirty", 197 | "--always", "--long"], 198 | cwd=root) 199 | # --long was added in git-1.5.5 200 | if describe_out is None: 201 | raise NotThisMethod("'git describe' failed") 202 | describe_out = describe_out.strip() 203 | full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 204 | if full_out is None: 205 | raise NotThisMethod("'git rev-parse' failed") 206 | full_out = full_out.strip() 207 | 208 | pieces = {} 209 | pieces["long"] = full_out 210 | pieces["short"] = full_out[:7] # maybe improved later 211 | pieces["error"] = None 212 | 213 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 214 | # TAG might have hyphens. 215 | git_describe = describe_out 216 | 217 | # look for -dirty suffix 218 | dirty = git_describe.endswith("-dirty") 219 | pieces["dirty"] = dirty 220 | if dirty: 221 | git_describe = git_describe[:git_describe.rindex("-dirty")] 222 | 223 | # now we have TAG-NUM-gHEX or HEX 224 | 225 | if "-" in git_describe: 226 | # TAG-NUM-gHEX 227 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 228 | if not mo: 229 | # unparseable. Maybe git-describe is misbehaving? 230 | pieces["error"] = ("unable to parse git-describe output: '%s'" 231 | % describe_out) 232 | return pieces 233 | 234 | # tag 235 | full_tag = mo.group(1) 236 | if not full_tag.startswith(tag_prefix): 237 | if verbose: 238 | fmt = "tag '%s' doesn't start with prefix '%s'" 239 | print(fmt % (full_tag, tag_prefix)) 240 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 241 | % (full_tag, tag_prefix)) 242 | return pieces 243 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 244 | 245 | # distance: number of commits since tag 246 | pieces["distance"] = int(mo.group(2)) 247 | 248 | # commit: short hex revision ID 249 | pieces["short"] = mo.group(3) 250 | 251 | else: 252 | # HEX: no tags 253 | pieces["closest-tag"] = None 254 | count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], 255 | cwd=root) 256 | pieces["distance"] = int(count_out) # total number of commits 257 | 258 | return pieces 259 | 260 | 261 | def plus_or_dot(pieces): 262 | if "+" in pieces.get("closest-tag", ""): 263 | return "." 264 | return "+" 265 | 266 | 267 | def render_pep440(pieces): 268 | # now build up version string, with post-release "local version 269 | # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 270 | # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 271 | 272 | # exceptions: 273 | # 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 274 | 275 | if pieces["closest-tag"]: 276 | rendered = pieces["closest-tag"] 277 | if pieces["distance"] or pieces["dirty"]: 278 | rendered += plus_or_dot(pieces) 279 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 280 | if pieces["dirty"]: 281 | rendered += ".dirty" 282 | else: 283 | # exception #1 284 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 285 | pieces["short"]) 286 | if pieces["dirty"]: 287 | rendered += ".dirty" 288 | return rendered 289 | 290 | 291 | def render_pep440_pre(pieces): 292 | # TAG[.post.devDISTANCE] . 
No -dirty 293 | 294 | # exceptions: 295 | # 1: no tags. 0.post.devDISTANCE 296 | 297 | if pieces["closest-tag"]: 298 | rendered = pieces["closest-tag"] 299 | if pieces["distance"]: 300 | rendered += ".post.dev%d" % pieces["distance"] 301 | else: 302 | # exception #1 303 | rendered = "0.post.dev%d" % pieces["distance"] 304 | return rendered 305 | 306 | 307 | def render_pep440_post(pieces): 308 | # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that 309 | # .dev0 sorts backwards (a dirty tree will appear "older" than the 310 | # corresponding clean one), but you shouldn't be releasing software with 311 | # -dirty anyways. 312 | 313 | # exceptions: 314 | # 1: no tags. 0.postDISTANCE[.dev0] 315 | 316 | if pieces["closest-tag"]: 317 | rendered = pieces["closest-tag"] 318 | if pieces["distance"] or pieces["dirty"]: 319 | rendered += ".post%d" % pieces["distance"] 320 | if pieces["dirty"]: 321 | rendered += ".dev0" 322 | rendered += plus_or_dot(pieces) 323 | rendered += "g%s" % pieces["short"] 324 | else: 325 | # exception #1 326 | rendered = "0.post%d" % pieces["distance"] 327 | if pieces["dirty"]: 328 | rendered += ".dev0" 329 | rendered += "+g%s" % pieces["short"] 330 | return rendered 331 | 332 | 333 | def render_pep440_old(pieces): 334 | # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. 335 | 336 | # exceptions: 337 | # 1: no tags. 0.postDISTANCE[.dev0] 338 | 339 | if pieces["closest-tag"]: 340 | rendered = pieces["closest-tag"] 341 | if pieces["distance"] or pieces["dirty"]: 342 | rendered += ".post%d" % pieces["distance"] 343 | if pieces["dirty"]: 344 | rendered += ".dev0" 345 | else: 346 | # exception #1 347 | rendered = "0.post%d" % pieces["distance"] 348 | if pieces["dirty"]: 349 | rendered += ".dev0" 350 | return rendered 351 | 352 | 353 | def render_git_describe(pieces): 354 | # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty 355 | # --always' 356 | 357 | # exceptions: 358 | # 1: no tags. HEX[-dirty] (note: no 'g' prefix) 359 | 360 | if pieces["closest-tag"]: 361 | rendered = pieces["closest-tag"] 362 | if pieces["distance"]: 363 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 364 | else: 365 | # exception #1 366 | rendered = pieces["short"] 367 | if pieces["dirty"]: 368 | rendered += "-dirty" 369 | return rendered 370 | 371 | 372 | def render_git_describe_long(pieces): 373 | # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty 374 | # --always -long'. The distance/hash is unconditional. 375 | 376 | # exceptions: 377 | # 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 378 | 379 | if pieces["closest-tag"]: 380 | rendered = pieces["closest-tag"] 381 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 382 | else: 383 | # exception #1 384 | rendered = pieces["short"] 385 | if pieces["dirty"]: 386 | rendered += "-dirty" 387 | return rendered 388 | 389 | 390 | def render(pieces, style): 391 | if pieces["error"]: 392 | return {"version": "unknown", 393 | "full-revisionid": pieces.get("long"), 394 | "dirty": None, 395 | "error": pieces["error"]} 396 | 397 | if not style or style == "default": 398 | style = "pep440" # the default 399 | 400 | if style == "pep440": 401 | rendered = render_pep440(pieces) 402 | elif style == "pep440-pre": 403 | rendered = render_pep440_pre(pieces) 404 | elif style == "pep440-post": 405 | rendered = render_pep440_post(pieces) 406 | elif style == "pep440-old": 407 | rendered = render_pep440_old(pieces) 408 | elif style == "git-describe": 409 | rendered = render_git_describe(pieces) 410 | elif style == "git-describe-long": 411 | rendered = render_git_describe_long(pieces) 412 | else: 413 | raise ValueError("unknown style '%s'" % style) 414 | 415 | return {"version": rendered, "full-revisionid": pieces["long"], 416 | "dirty": pieces["dirty"], "error": None} 417 | 418 | 419 | def get_versions(): 420 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 421 | # __file__, we can work backwards from there to the root. Some 422 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 423 | # case we can only use expanded keywords. 424 | 425 | cfg = get_config() 426 | verbose = cfg.verbose 427 | 428 | try: 429 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 430 | verbose) 431 | except NotThisMethod: 432 | pass 433 | 434 | try: 435 | root = os.path.realpath(__file__) 436 | # versionfile_source is the relative path from the top of the source 437 | # tree (where the .git directory might live) to this file. Invert 438 | # this to find the root from __file__. 439 | for i in cfg.versionfile_source.split('/'): 440 | root = os.path.dirname(root) 441 | except NameError: 442 | return {"version": "0+unknown", "full-revisionid": None, 443 | "dirty": None, 444 | "error": "unable to find root of source tree"} 445 | 446 | try: 447 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 448 | return render(pieces, cfg.style) 449 | except NotThisMethod: 450 | pass 451 | 452 | try: 453 | if cfg.parentdir_prefix: 454 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 455 | except NotThisMethod: 456 | pass 457 | 458 | return {"version": "0+unknown", "full-revisionid": None, 459 | "dirty": None, 460 | "error": "unable to compute version"} 461 | -------------------------------------------------------------------------------- /pyfolio/capacity.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | 3 | import empyrical as ep 4 | import numpy as np 5 | import pandas as pd 6 | 7 | from . import pos 8 | 9 | 10 | def daily_txns_with_bar_data(transactions, market_data): 11 | """ 12 | Sums the absolute value of shares traded in each name on each day. 13 | Adds columns containing the closing price and total daily volume for 14 | each day-ticker combination. 15 | 16 | Parameters 17 | ---------- 18 | transactions : pd.DataFrame 19 | Prices and amounts of executed trades. One row per trade. 
20 | - See full explanation in tears.create_full_tear_sheet 21 | market_data : pd.DataFrame 22 | Daily market_data 23 | - DataFrame has a multi-index index, one level is dates and another is 24 | market_data contains volume & price, equities as columns 25 | 26 | Returns 27 | ------- 28 | txn_daily : pd.DataFrame 29 | Daily totals for transacted shares in each traded name. 30 | price and volume columns for close price and daily volume for 31 | the corresponding ticker, respectively. 32 | """ 33 | 34 | transactions.index.name = 'date' 35 | txn_daily = pd.DataFrame(transactions.assign( 36 | amount=abs(transactions.amount)).groupby( 37 | ['symbol', pd.Grouper(freq='D')]).sum()['amount']) 38 | txn_daily['price'] = market_data.xs('price', level=1).unstack() 39 | txn_daily['volume'] = market_data.xs('volume', level=1).unstack() 40 | 41 | txn_daily = txn_daily.reset_index().set_index('date') 42 | 43 | return txn_daily 44 | 45 | 46 | def days_to_liquidate_positions(positions, market_data, 47 | max_bar_consumption=0.2, 48 | capital_base=1e6, 49 | mean_volume_window=5): 50 | """ 51 | Compute the number of days that would have been required 52 | to fully liquidate each position on each day based on the 53 | trailing n day mean daily bar volume and a limit on the proportion 54 | of a daily bar that we are allowed to consume. 55 | 56 | This analysis uses portfolio allocations and a provided capital base 57 | rather than the dollar values in the positions DataFrame to remove the 58 | effect of compounding on days to liquidate. In other words, this function 59 | assumes that the net liquidation portfolio value will always remain 60 | constant at capital_base. 61 | 62 | Parameters 63 | ---------- 64 | positions: pd.DataFrame 65 | Contains daily position values including cash 66 | - See full explanation in tears.create_full_tear_sheet 67 | market_data : pd.DataFrame 68 | Daily market_data 69 | - DataFrame has a multi-index index, one level is dates and another is 70 | market_data contains volume & price, equities as columns 71 | max_bar_consumption : float 72 | Max proportion of a daily bar that can be consumed in the 73 | process of liquidating a position. 74 | capital_base : integer 75 | Capital base multiplied by portfolio allocation to compute 76 | position value that needs liquidating. 77 | mean_volume_window : float 78 | Trailing window to use in mean volume calculation. 79 | 80 | Returns 81 | ------- 82 | days_to_liquidate : pd.DataFrame 83 | Number of days required to fully liquidate daily positions. 84 | Datetime index, symbols as columns. 85 | """ 86 | 87 | DV = market_data.xs('volume', level=1) * market_data.xs('price', level=1) 88 | roll_mean_dv = DV.rolling(window=mean_volume_window, 89 | center=False).mean().shift() 90 | roll_mean_dv = roll_mean_dv.replace(0, np.nan) 91 | 92 | positions_alloc = pos.get_percent_alloc(positions) 93 | positions_alloc = positions_alloc.drop('cash', axis=1) 94 | 95 | days_to_liquidate = (positions_alloc * capital_base) / \ 96 | (max_bar_consumption * roll_mean_dv) 97 | 98 | return days_to_liquidate.iloc[mean_volume_window:] 99 | 100 | 101 | def get_max_days_to_liquidate_by_ticker(positions, market_data, 102 | max_bar_consumption=0.2, 103 | capital_base=1e6, 104 | mean_volume_window=5, 105 | last_n_days=None): 106 | """ 107 | Finds the longest estimated liquidation time for each traded 108 | name over the course of backtest (or last n days of the backtest). 
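Under the hood this simply takes the per-name maximum of days_to_liquidate_positions, which estimates, roughly, days = (allocation * capital_base) / (max_bar_consumption * trailing mean daily dollar volume). As a hypothetical worked example, a 5% allocation of a $1,000,000 capital base traded against $1,000,000 of mean daily dollar volume with max_bar_consumption=0.2 would take 50,000 / (0.2 * 1,000,000) = 0.25 days to liquidate.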
109 | 110 | Parameters 111 | ---------- 112 | positions: pd.DataFrame 113 | Contains daily position values including cash 114 | - See full explanation in tears.create_full_tear_sheet 115 | market_data : pd.DataFrame 116 | Daily market_data 117 | - DataFrame has a multi-index index, one level is dates and another is 118 | market_data contains volume & price, equities as columns 119 | max_bar_consumption : float 120 | Max proportion of a daily bar that can be consumed in the 121 | process of liquidating a position. 122 | capital_base : integer 123 | Capital base multiplied by portfolio allocation to compute 124 | position value that needs liquidating. 125 | mean_volume_window : float 126 | Trailing window to use in mean volume calculation. 127 | last_n_days : integer 128 | Compute for only the last n days of the passed backtest data. 129 | 130 | Returns 131 | ------- 132 | days_to_liquidate : pd.DataFrame 133 | Max Number of days required to fully liquidate each traded name. 134 | Index of symbols. Columns for days_to_liquidate and the corresponding 135 | date and position_alloc on that day. 136 | """ 137 | 138 | dtlp = days_to_liquidate_positions(positions, market_data, 139 | max_bar_consumption=max_bar_consumption, 140 | capital_base=capital_base, 141 | mean_volume_window=mean_volume_window) 142 | 143 | if last_n_days is not None: 144 | dtlp = dtlp.loc[dtlp.index.max() - pd.Timedelta(days=last_n_days):] 145 | 146 | pos_alloc = pos.get_percent_alloc(positions) 147 | pos_alloc = pos_alloc.drop('cash', axis=1) 148 | 149 | liq_desc = pd.DataFrame() 150 | liq_desc['days_to_liquidate'] = dtlp.unstack() 151 | liq_desc['pos_alloc_pct'] = pos_alloc.unstack() * 100 152 | liq_desc.index.levels[0].name = 'symbol' 153 | liq_desc.index.levels[1].name = 'date' 154 | 155 | worst_liq = liq_desc.reset_index().sort_values( 156 | 'days_to_liquidate', ascending=False).groupby('symbol').first() 157 | 158 | return worst_liq 159 | 160 | 161 | def get_low_liquidity_transactions(transactions, market_data, 162 | last_n_days=None): 163 | """ 164 | For each traded name, find the daily transaction total that consumed 165 | the greatest proportion of available daily bar volume. 166 | 167 | Parameters 168 | ---------- 169 | transactions : pd.DataFrame 170 | Prices and amounts of executed trades. One row per trade. 171 | - See full explanation in create_full_tear_sheet. 172 | market_data : pd.DataFrame 173 | Daily market_data 174 | - DataFrame has a multi-index index, one level is dates and another is 175 | market_data contains volume & price, equities as columns 176 | last_n_days : integer 177 | Compute for only the last n days of the passed backtest data. 
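Returns
-------
max_bar_consumption : pd.DataFrame
    Indexed by symbol, with columns for the date of the worst day and the
    max_pct_bar_consumed on that day. As a hypothetical worked example,
    transacting 1,000 shares of a name on a day when its total volume was
    2,000 shares would be recorded as
    max_pct_bar_consumed = (1000 / 2000) * 100 = 50%.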
178 | """ 179 | 180 | txn_daily_w_bar = daily_txns_with_bar_data(transactions, market_data) 181 | txn_daily_w_bar.index.name = 'date' 182 | txn_daily_w_bar = txn_daily_w_bar.reset_index() 183 | 184 | if last_n_days is not None: 185 | md = txn_daily_w_bar.date.max() - pd.Timedelta(days=last_n_days) 186 | txn_daily_w_bar = txn_daily_w_bar[txn_daily_w_bar.date > md] 187 | 188 | bar_consumption = txn_daily_w_bar.assign( 189 | max_pct_bar_consumed=( 190 | txn_daily_w_bar.amount/txn_daily_w_bar.volume)*100 191 | ).sort_values('max_pct_bar_consumed', ascending=False) 192 | max_bar_consumption = bar_consumption.groupby('symbol').first() 193 | 194 | return max_bar_consumption[['date', 'max_pct_bar_consumed']] 195 | 196 | 197 | def apply_slippage_penalty(returns, txn_daily, simulate_starting_capital, 198 | backtest_starting_capital, impact=0.1): 199 | """ 200 | Applies quadratic volumeshare slippage model to daily returns based 201 | on the proportion of the observed historical daily bar dollar volume 202 | consumed by the strategy's trades. Scales the size of trades based 203 | on the ratio of the starting capital we wish to test to the starting 204 | capital of the passed backtest data. 205 | 206 | Parameters 207 | ---------- 208 | returns : pd.Series 209 | Time series of daily returns. 210 | txn_daily : pd.Series 211 | Daily transaciton totals, closing price, and daily volume for 212 | each traded name. See price_volume_daily_txns for more details. 213 | simulate_starting_capital : integer 214 | capital at which we want to test 215 | backtest_starting_capital: capital base at which backtest was 216 | origionally run. impact: See Zipline volumeshare slippage model 217 | impact : float 218 | Scales the size of the slippage penalty. 219 | 220 | Returns 221 | ------- 222 | adj_returns : pd.Series 223 | Slippage penalty adjusted daily returns. 224 | """ 225 | 226 | mult = simulate_starting_capital / backtest_starting_capital 227 | simulate_traded_shares = abs(mult * txn_daily.amount) 228 | simulate_traded_dollars = txn_daily.price * simulate_traded_shares 229 | simulate_pct_volume_used = simulate_traded_shares / txn_daily.volume 230 | 231 | penalties = simulate_pct_volume_used**2 \ 232 | * impact * simulate_traded_dollars 233 | 234 | daily_penalty = penalties.resample('D').sum() 235 | daily_penalty = daily_penalty.reindex(returns.index).fillna(0) 236 | 237 | # Since we are scaling the numerator of the penalties linearly 238 | # by capital base, it makes the most sense to scale the denominator 239 | # similarly. In other words, since we aren't applying compounding to 240 | # simulate_traded_shares, we shouldn't apply compounding to pv. 241 | portfolio_value = ep.cum_returns( 242 | returns, starting_value=backtest_starting_capital) * mult 243 | 244 | adj_returns = returns - (daily_penalty / portfolio_value) 245 | 246 | return adj_returns 247 | -------------------------------------------------------------------------------- /pyfolio/deprecate.py: -------------------------------------------------------------------------------- 1 | """Utilities for marking deprecated functions.""" 2 | # Copyright 2018 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | import warnings 17 | from functools import wraps 18 | 19 | 20 | def deprecated(msg=None, stacklevel=2): 21 | """ 22 | Used to mark a function as deprecated. 23 | Parameters 24 | ---------- 25 | msg : str 26 | The message to display in the deprecation warning. 27 | stacklevel : int 28 | How far up the stack the warning needs to go, before 29 | showing the relevant calling lines. 30 | Usage 31 | ----- 32 | @deprecated(msg='function_a is deprecated! Use function_b instead.') 33 | def function_a(*args, **kwargs): 34 | """ 35 | def deprecated_dec(fn): 36 | @wraps(fn) 37 | def wrapper(*args, **kwargs): 38 | warnings.warn( 39 | msg or "Function %s is deprecated." % fn.__name__, 40 | category=DeprecationWarning, 41 | stacklevel=stacklevel 42 | ) 43 | return fn(*args, **kwargs) 44 | return wrapper 45 | return deprecated_dec 46 | -------------------------------------------------------------------------------- /pyfolio/examples/pydata_stack-4-finance.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/quantopian/pyfolio/4b901f6d73aa02ceb6d04b7d83502e5c6f2e81aa/pyfolio/examples/pydata_stack-4-finance.jpg -------------------------------------------------------------------------------- /pyfolio/interesting_periods.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2016 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | """Generates a list of historical event dates that may have had 17 | significant impact on markets. 
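Each entry in the PERIODS dictionary below maps a human-readable label to a (start, end) pair of pd.Timestamp objects.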
See extract_interesting_date_ranges.""" 18 | 19 | import pandas as pd 20 | 21 | from collections import OrderedDict 22 | 23 | PERIODS = OrderedDict() 24 | # Dotcom bubble 25 | PERIODS['Dotcom'] = (pd.Timestamp('20000310'), pd.Timestamp('20000910')) 26 | 27 | # Lehman Brothers 28 | PERIODS['Lehman'] = (pd.Timestamp('20080801'), pd.Timestamp('20081001')) 29 | 30 | # 9/11 31 | PERIODS['9/11'] = (pd.Timestamp('20010911'), pd.Timestamp('20011011')) 32 | 33 | # 05/08/11 US downgrade and European Debt Crisis 2011 34 | PERIODS[ 35 | 'US downgrade/European Debt Crisis'] = (pd.Timestamp('20110805'), 36 | pd.Timestamp('20110905')) 37 | 38 | # 16/03/11 Fukushima meltdown 2011 39 | PERIODS['Fukushima'] = (pd.Timestamp('20110316'), pd.Timestamp('20110416')) 40 | 41 | # 01/08/03 US Housing Bubble 2003 42 | PERIODS['US Housing'] = ( 43 | pd.Timestamp('20030108'), pd.Timestamp('20030208')) 44 | 45 | # 06/09/12 EZB IR Event 2012 46 | PERIODS['EZB IR Event'] = ( 47 | pd.Timestamp('20120910'), pd.Timestamp('20121010')) 48 | 49 | # August 2007, March and September of 2008, Q1 & Q2 2009 50 | PERIODS['Aug07'] = (pd.Timestamp('20070801'), pd.Timestamp('20070901')) 51 | PERIODS['Mar08'] = (pd.Timestamp('20080301'), pd.Timestamp('20080401')) 52 | PERIODS['Sept08'] = (pd.Timestamp('20080901'), pd.Timestamp('20081001')) 53 | PERIODS['2009Q1'] = (pd.Timestamp('20090101'), pd.Timestamp('20090301')) 54 | PERIODS['2009Q2'] = (pd.Timestamp('20090301'), pd.Timestamp('20090601')) 55 | 56 | # Flash Crash (May 6, 2010 + 1 week post) 57 | PERIODS['Flash Crash'] = ( 58 | pd.Timestamp('20100505'), pd.Timestamp('20100510')) 59 | 60 | # April and October 2014 61 | PERIODS['Apr14'] = (pd.Timestamp('20140401'), pd.Timestamp('20140501')) 62 | PERIODS['Oct14'] = (pd.Timestamp('20141001'), pd.Timestamp('20141101')) 63 | 64 | # Market downturn in August/Sept 2015 65 | PERIODS['Fall2015'] = (pd.Timestamp('20150815'), pd.Timestamp('20150930')) 66 | 67 | # Market regimes 68 | PERIODS['Low Volatility Bull Market'] = (pd.Timestamp('20050101'), 69 | pd.Timestamp('20070801')) 70 | 71 | PERIODS['GFC Crash'] = (pd.Timestamp('20070801'), 72 | pd.Timestamp('20090401')) 73 | 74 | PERIODS['Recovery'] = (pd.Timestamp('20090401'), 75 | pd.Timestamp('20130101')) 76 | 77 | PERIODS['New Normal'] = (pd.Timestamp('20130101'), 78 | pd.Timestamp('today')) 79 | -------------------------------------------------------------------------------- /pyfolio/ipycompat.py: -------------------------------------------------------------------------------- 1 | import IPython 2 | 3 | IPY_MAJOR = IPython.version_info[0] 4 | if IPY_MAJOR < 3: 5 | raise ImportError("IPython version %d is not supported." % IPY_MAJOR) 6 | 7 | IPY3 = (IPY_MAJOR == 3) 8 | 9 | # IPython underwent a major refactor between versions 3 and 4. Many of the 10 | # imports in version 4 have aliases to their old locations in 3, but they raise 11 | # noisy deprecation warnings. By conditionally importing here, we can support 12 | # older versions without triggering warnings for users on new versions. 13 | if IPY3: 14 | from IPython.nbformat import read 15 | else: 16 | from nbformat import read 17 | 18 | 19 | __all__ = ['read'] 20 | -------------------------------------------------------------------------------- /pyfolio/pos.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2016 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | from __future__ import division 16 | 17 | import pandas as pd 18 | import numpy as np 19 | import warnings 20 | 21 | try: 22 | from zipline.assets import Equity, Future 23 | ZIPLINE = True 24 | except ImportError: 25 | ZIPLINE = False 26 | warnings.warn( 27 | 'Module "zipline.assets" not found; multipliers will not be applied' 28 | ' to position notionals.' 29 | ) 30 | 31 | 32 | def get_percent_alloc(values): 33 | """ 34 | Determines a portfolio's allocations. 35 | 36 | Parameters 37 | ---------- 38 | values : pd.DataFrame 39 | Contains position values or amounts. 40 | 41 | Returns 42 | ------- 43 | allocations : pd.DataFrame 44 | Positions and their allocations. 45 | """ 46 | 47 | return values.divide( 48 | values.sum(axis='columns'), 49 | axis='rows' 50 | ) 51 | 52 | 53 | def get_top_long_short_abs(positions, top=10): 54 | """ 55 | Finds the top long, short, and absolute positions. 56 | 57 | Parameters 58 | ---------- 59 | positions : pd.DataFrame 60 | The positions that the strategy takes over time. 61 | top : int, optional 62 | How many of each to find (default 10). 63 | 64 | Returns 65 | ------- 66 | df_top_long : pd.DataFrame 67 | Top long positions. 68 | df_top_short : pd.DataFrame 69 | Top short positions. 70 | df_top_abs : pd.DataFrame 71 | Top absolute positions. 72 | """ 73 | 74 | positions = positions.drop('cash', axis='columns') 75 | df_max = positions.max() 76 | df_min = positions.min() 77 | df_abs_max = positions.abs().max() 78 | df_top_long = df_max[df_max > 0].nlargest(top) 79 | df_top_short = df_min[df_min < 0].nsmallest(top) 80 | df_top_abs = df_abs_max.nlargest(top) 81 | return df_top_long, df_top_short, df_top_abs 82 | 83 | 84 | def get_max_median_position_concentration(positions): 85 | """ 86 | Finds the max and median long and short position concentrations 87 | in each time period specified by the index of positions. 88 | 89 | Parameters 90 | ---------- 91 | positions : pd.DataFrame 92 | The positions that the strategy takes over time. 93 | 94 | Returns 95 | ------- 96 | pd.DataFrame 97 | Columns are max long, max short, median long, and median short 98 | position concentrations. Rows are timeperiods. 99 | """ 100 | 101 | expos = get_percent_alloc(positions) 102 | expos = expos.drop('cash', axis=1) 103 | 104 | longs = expos.where(expos.applymap(lambda x: x > 0)) 105 | shorts = expos.where(expos.applymap(lambda x: x < 0)) 106 | 107 | alloc_summary = pd.DataFrame() 108 | alloc_summary['max_long'] = longs.max(axis=1) 109 | alloc_summary['median_long'] = longs.median(axis=1) 110 | alloc_summary['median_short'] = shorts.median(axis=1) 111 | alloc_summary['max_short'] = shorts.min(axis=1) 112 | 113 | return alloc_summary 114 | 115 | 116 | def extract_pos(positions, cash): 117 | """ 118 | Extract position values from backtest object as returned by 119 | get_backtest() on the Quantopian research platform. 120 | 121 | Parameters 122 | ---------- 123 | positions : pd.DataFrame 124 | timeseries containing one row per symbol (and potentially 125 | duplicate datetime indices) and columns for amount and 126 | last_sale_price. 
127 | cash : pd.Series 128 | timeseries containing cash in the portfolio. 129 | 130 | Returns 131 | ------- 132 | pd.DataFrame 133 | Daily net position values. 134 | - See full explanation in tears.create_full_tear_sheet. 135 | """ 136 | 137 | positions = positions.copy() 138 | positions['values'] = positions.amount * positions.last_sale_price 139 | 140 | cash.name = 'cash' 141 | 142 | values = positions.reset_index().pivot_table(index='index', 143 | columns='sid', 144 | values='values') 145 | 146 | if ZIPLINE: 147 | for asset in values.columns: 148 | if type(asset) in [Equity, Future]: 149 | values[asset] = values[asset] * asset.price_multiplier 150 | 151 | values = values.join(cash).fillna(0) 152 | 153 | # NOTE: Set name of DataFrame.columns to sid, to match the behavior 154 | # of DataFrame.join in earlier versions of pandas. 155 | values.columns.name = 'sid' 156 | 157 | return values 158 | 159 | 160 | def get_sector_exposures(positions, symbol_sector_map): 161 | """ 162 | Sum position exposures by sector. 163 | 164 | Parameters 165 | ---------- 166 | positions : pd.DataFrame 167 | Contains position values or amounts. 168 | - Example 169 | index 'AAPL' 'MSFT' 'CHK' cash 170 | 2004-01-09 13939.380 -15012.993 -403.870 1477.483 171 | 2004-01-12 14492.630 -18624.870 142.630 3989.610 172 | 2004-01-13 -13853.280 13653.640 -100.980 100.000 173 | symbol_sector_map : dict or pd.Series 174 | Security identifier to sector mapping. 175 | Security ids as keys/index, sectors as values. 176 | - Example: 177 | {'AAPL' : 'Technology' 178 | 'MSFT' : 'Technology' 179 | 'CHK' : 'Natural Resources'} 180 | 181 | Returns 182 | ------- 183 | sector_exp : pd.DataFrame 184 | Sectors and their allocations. 185 | - Example: 186 | index 'Technology' 'Natural Resources' cash 187 | 2004-01-09 -1073.613 -403.870 1477.4830 188 | 2004-01-12 -4132.240 142.630 3989.6100 189 | 2004-01-13 -199.640 -100.980 100.0000 190 | """ 191 | 192 | cash = positions['cash'] 193 | positions = positions.drop('cash', axis=1) 194 | 195 | unmapped_pos = np.setdiff1d(positions.columns.values, 196 | list(symbol_sector_map.keys())) 197 | if len(unmapped_pos) > 0: 198 | warn_message = """Warning: Symbols {} have no sector mapping. 199 | They will not be included in sector allocations""".format( 200 | ", ".join(map(str, unmapped_pos))) 201 | warnings.warn(warn_message, UserWarning) 202 | 203 | sector_exp = positions.groupby( 204 | by=symbol_sector_map, axis=1).sum() 205 | 206 | sector_exp['cash'] = cash 207 | 208 | return sector_exp 209 | 210 | 211 | def get_long_short_pos(positions): 212 | """ 213 | Determines the long and short allocations in a portfolio. 214 | 215 | Parameters 216 | ---------- 217 | positions : pd.DataFrame 218 | The positions that the strategy takes over time. 
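As a hypothetical worked example, a day holding $150 of longs, -$50 of shorts and $0 in cash has a net liquidation value of 150 - 50 + 0 = 100, so that row would show long = 1.5, short = -0.5 and net exposure = 1.0.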
219 | 220 | Returns 221 | ------- 222 | df_long_short : pd.DataFrame 223 | Long and short allocations as a decimal 224 | percentage of the total net liquidation 225 | """ 226 | 227 | pos_wo_cash = positions.drop('cash', axis=1) 228 | longs = pos_wo_cash[pos_wo_cash > 0].sum(axis=1).fillna(0) 229 | shorts = pos_wo_cash[pos_wo_cash < 0].sum(axis=1).fillna(0) 230 | cash = positions.cash 231 | net_liquidation = longs + shorts + cash 232 | df_pos = pd.DataFrame({'long': longs.divide(net_liquidation, axis='index'), 233 | 'short': shorts.divide(net_liquidation, 234 | axis='index')}) 235 | df_pos['net exposure'] = df_pos['long'] + df_pos['short'] 236 | return df_pos 237 | -------------------------------------------------------------------------------- /pyfolio/round_trips.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2016 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | from __future__ import division 16 | from math import copysign 17 | import warnings 18 | from collections import deque, OrderedDict 19 | 20 | import pandas as pd 21 | import numpy as np 22 | 23 | from .utils import print_table, format_asset 24 | 25 | PNL_STATS = OrderedDict( 26 | [('Total profit', lambda x: x.sum()), 27 | ('Gross profit', lambda x: x[x > 0].sum()), 28 | ('Gross loss', lambda x: x[x < 0].sum()), 29 | ('Profit factor', lambda x: x[x > 0].sum() / x[x < 0].abs().sum() 30 | if x[x < 0].abs().sum() != 0 else np.nan), 31 | ('Avg. trade net profit', 'mean'), 32 | ('Avg. winning trade', lambda x: x[x > 0].mean()), 33 | ('Avg. losing trade', lambda x: x[x < 0].mean()), 34 | ('Ratio Avg. Win:Avg. 
Loss', lambda x: x[x > 0].mean() / 35 | x[x < 0].abs().mean() if x[x < 0].abs().mean() != 0 else np.nan), 36 | ('Largest winning trade', 'max'), 37 | ('Largest losing trade', 'min'), 38 | ]) 39 | 40 | SUMMARY_STATS = OrderedDict( 41 | [('Total number of round_trips', 'count'), 42 | ('Percent profitable', lambda x: len(x[x > 0]) / float(len(x))), 43 | ('Winning round_trips', lambda x: len(x[x > 0])), 44 | ('Losing round_trips', lambda x: len(x[x < 0])), 45 | ('Even round_trips', lambda x: len(x[x == 0])), 46 | ]) 47 | 48 | RETURN_STATS = OrderedDict( 49 | [('Avg returns all round_trips', lambda x: x.mean()), 50 | ('Avg returns winning', lambda x: x[x > 0].mean()), 51 | ('Avg returns losing', lambda x: x[x < 0].mean()), 52 | ('Median returns all round_trips', lambda x: x.median()), 53 | ('Median returns winning', lambda x: x[x > 0].median()), 54 | ('Median returns losing', lambda x: x[x < 0].median()), 55 | ('Largest winning trade', 'max'), 56 | ('Largest losing trade', 'min'), 57 | ]) 58 | 59 | DURATION_STATS = OrderedDict( 60 | [('Avg duration', lambda x: x.mean()), 61 | ('Median duration', lambda x: x.median()), 62 | ('Longest duration', lambda x: x.max()), 63 | ('Shortest duration', lambda x: x.min()) 64 | # FIXME: Instead of x.max() - x.min() this should be 65 | # rts.close_dt.max() - rts.open_dt.min() which is not 66 | # available here. As it would require a new approach here 67 | # that passes in multiple fields we disable these measures 68 | # for now. 69 | # ('Avg # round_trips per day', lambda x: float(len(x)) / 70 | # (x.max() - x.min()).days), 71 | # ('Avg # round_trips per month', lambda x: float(len(x)) / 72 | # (((x.max() - x.min()).days) / APPROX_BDAYS_PER_MONTH)), 73 | ]) 74 | 75 | 76 | def agg_all_long_short(round_trips, col, stats_dict): 77 | stats_all = (round_trips 78 | .assign(ones=1) 79 | .groupby('ones')[col] 80 | .agg(stats_dict) 81 | .T 82 | .rename(columns={1.0: 'All trades'})) 83 | stats_long_short = (round_trips 84 | .groupby('long')[col] 85 | .agg(stats_dict) 86 | .T 87 | .rename(columns={False: 'Short trades', 88 | True: 'Long trades'})) 89 | 90 | return stats_all.join(stats_long_short) 91 | 92 | 93 | def _groupby_consecutive(txn, max_delta=pd.Timedelta('8h')): 94 | """Merge transactions of the same direction separated by less than 95 | max_delta time duration. 96 | 97 | Parameters 98 | ---------- 99 | transactions : pd.DataFrame 100 | Prices and amounts of executed round_trips. One row per trade. 101 | - See full explanation in tears.create_full_tear_sheet 102 | 103 | max_delta : pandas.Timedelta (optional) 104 | Merge transactions in the same direction separated by less 105 | than max_delta time duration. 
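As a hypothetical example, with the default max_delta of 8 hours, two buys of the same symbol at 10:00 and 11:00 on the same day are merged into one transaction at their volume-weighted average price, while a buy on the following morning starts a new block.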
106 | 107 | 108 | Returns 109 | ------- 110 | transactions : pd.DataFrame 111 | Transactions merged by direction and time proximity. 112 | """ 113 | def vwap(transaction): 114 | if transaction.amount.sum() == 0: 115 | warnings.warn('Zero transacted shares, setting vwap to nan.') 116 | return np.nan 117 | return (transaction.amount * transaction.price).sum() / \ 118 | transaction.amount.sum() 119 | 120 | out = [] 121 | for _, t in txn.groupby('symbol'): 122 | t = t.sort_index() 123 | t.index.name = 'dt' 124 | t = t.reset_index() 125 | 126 | t['order_sign'] = t.amount > 0 127 | t['block_dir'] = (t.order_sign.shift( 128 | 1) != t.order_sign).astype(int).cumsum() 129 | t['block_time'] = ((t.dt.sub(t.dt.shift(1))) > 130 | max_delta).astype(int).cumsum() 131 | grouped_price = (t.groupby(['block_dir', 132 | 'block_time']) 133 | .apply(vwap)) 134 | grouped_price.name = 'price' 135 | grouped_rest = t.groupby(['block_dir', 'block_time']).agg({ 136 | 'amount': 'sum', 137 | 'symbol': 'first', 138 | 'dt': 'first'}) 139 | 140 | grouped = grouped_rest.join(grouped_price) 141 | 142 | out.append(grouped) 143 | 144 | out = pd.concat(out) 145 | out = out.set_index('dt') 146 | return out 147 | 148 | 149 | def extract_round_trips(transactions, 150 | portfolio_value=None): 151 | """Group transactions into "round trips". First, transactions are 152 | grouped by day and directionality. Then, long and short 153 | transactions are matched to create round trips for which 154 | PnL, duration and returns are computed. Crossings where a position 155 | changes from long to short and vice-versa are handled correctly. 156 | 157 | Under the hood, we reconstruct the individual shares in a 158 | portfolio over time and match trades in FIFO order. 159 | 160 | For example, the following transactions would constitute one round trip: 161 | index amount price symbol 162 | 2004-01-09 12:18:01 10 50 'AAPL' 163 | 2004-01-09 15:12:53 10 100 'AAPL' 164 | 2004-01-13 14:41:23 -10 100 'AAPL' 165 | 2004-01-13 15:23:34 -10 200 'AAPL' 166 | 167 | First, the first two and the last two transactions will each be merged 168 | into a single transaction (computing the price via VWAP). Then, during 169 | the portfolio reconstruction, the two resulting transactions will 170 | be matched and result in one round-trip trade with a PnL of 171 | (150 * 20) - (75 * 20) = 1500. 172 | 173 | Note that round trips do not have to close out positions 174 | completely. For example, we could have removed the last 175 | transaction in the example above and still generated a round trip 176 | over 10 shares with 10 shares left in the portfolio to be matched 177 | with a later transaction. 178 | 179 | Parameters 180 | ---------- 181 | transactions : pd.DataFrame 182 | Prices and amounts of executed trades. One row per trade. 183 | - See full explanation in tears.create_full_tear_sheet 184 | 185 | portfolio_value : pd.Series (optional) 186 | Portfolio value (all net assets including cash) over time. 187 | Note that portfolio_value needs to be beginning-of-day, so either 188 | use .shift() or positions.sum(axis='columns') / (1+returns). 189 | 190 | Returns 191 | ------- 192 | round_trips : pd.DataFrame 193 | DataFrame with one row per round trip. The returns column 194 | contains returns with respect to the portfolio value, while 195 | rt_returns are the returns with respect to the capital invested 196 | in that particular round trip.
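A minimal usage sketch (mirroring how the round-trip tear sheet calls this function; positions, transactions and returns are assumed to be standard pyfolio inputs):

    transactions_closed = add_closing_transactions(positions,
                                                   transactions)
    rts = extract_round_trips(
        transactions_closed,
        portfolio_value=positions.sum(axis='columns') / (1 + returns))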
197 | """ 198 | 199 | transactions = _groupby_consecutive(transactions) 200 | roundtrips = [] 201 | 202 | for sym, trans_sym in transactions.groupby('symbol'): 203 | trans_sym = trans_sym.sort_index() 204 | price_stack = deque() 205 | dt_stack = deque() 206 | trans_sym['signed_price'] = trans_sym.price * \ 207 | np.sign(trans_sym.amount) 208 | trans_sym['abs_amount'] = trans_sym.amount.abs().astype(int) 209 | for dt, t in trans_sym.iterrows(): 210 | if t.price < 0: 211 | warnings.warn('Negative price detected, ignoring for' 212 | 'round-trip.') 213 | continue 214 | 215 | indiv_prices = [t.signed_price] * t.abs_amount 216 | if (len(price_stack) == 0) or \ 217 | (copysign(1, price_stack[-1]) == copysign(1, t.amount)): 218 | price_stack.extend(indiv_prices) 219 | dt_stack.extend([dt] * len(indiv_prices)) 220 | else: 221 | # Close round-trip 222 | pnl = 0 223 | invested = 0 224 | cur_open_dts = [] 225 | 226 | for price in indiv_prices: 227 | if len(price_stack) != 0 and \ 228 | (copysign(1, price_stack[-1]) != copysign(1, price)): 229 | # Retrieve first dt, stock-price pair from 230 | # stack 231 | prev_price = price_stack.popleft() 232 | prev_dt = dt_stack.popleft() 233 | 234 | pnl += -(price + prev_price) 235 | cur_open_dts.append(prev_dt) 236 | invested += abs(prev_price) 237 | 238 | else: 239 | # Push additional stock-prices onto stack 240 | price_stack.append(price) 241 | dt_stack.append(dt) 242 | 243 | roundtrips.append({'pnl': pnl, 244 | 'open_dt': cur_open_dts[0], 245 | 'close_dt': dt, 246 | 'long': price < 0, 247 | 'rt_returns': pnl / invested, 248 | 'symbol': sym, 249 | }) 250 | 251 | roundtrips = pd.DataFrame(roundtrips) 252 | 253 | roundtrips['duration'] = roundtrips['close_dt'].sub(roundtrips['open_dt']) 254 | 255 | if portfolio_value is not None: 256 | # Need to normalize so that we can join 257 | pv = pd.DataFrame(portfolio_value, 258 | columns=['portfolio_value'])\ 259 | .assign(date=portfolio_value.index) 260 | 261 | roundtrips['date'] = roundtrips.close_dt.apply(lambda x: 262 | x.replace(hour=0, 263 | minute=0, 264 | second=0)) 265 | 266 | tmp = (roundtrips.set_index('date') 267 | .join(pv.set_index('date'), lsuffix='_') 268 | .reset_index()) 269 | 270 | roundtrips['returns'] = tmp.pnl / tmp.portfolio_value 271 | roundtrips = roundtrips.drop('date', axis='columns') 272 | 273 | return roundtrips 274 | 275 | 276 | def add_closing_transactions(positions, transactions): 277 | """ 278 | Appends transactions that close out all positions at the end of 279 | the timespan covered by positions data. Utilizes pricing information 280 | in the positions DataFrame to determine closing price. 281 | 282 | Parameters 283 | ---------- 284 | positions : pd.DataFrame 285 | The positions that the strategy takes over time. 286 | transactions : pd.DataFrame 287 | Prices and amounts of executed round_trips. One row per trade. 288 | - See full explanation in tears.create_full_tear_sheet 289 | 290 | Returns 291 | ------- 292 | closed_txns : pd.DataFrame 293 | Transactions with closing transactions appended. 294 | """ 295 | 296 | closed_txns = transactions[['symbol', 'amount', 'price']] 297 | 298 | pos_at_end = positions.drop('cash', axis=1).iloc[-1] 299 | open_pos = pos_at_end.replace(0, np.nan).dropna() 300 | # Add closing round_trips one second after the close to be sure 301 | # they don't conflict with other round_trips executed at that time. 
302 | end_dt = open_pos.name + pd.Timedelta(seconds=1) 303 | 304 | for sym, ending_val in open_pos.iteritems(): 305 | txn_sym = transactions[transactions.symbol == sym] 306 | 307 | ending_amount = txn_sym.amount.sum() 308 | 309 | ending_price = ending_val / ending_amount 310 | closing_txn = OrderedDict([ 311 | ('amount', -ending_amount), 312 | ('price', ending_price), 313 | ('symbol', sym), 314 | ]) 315 | 316 | closing_txn = pd.DataFrame(closing_txn, index=[end_dt]) 317 | closed_txns = closed_txns.append(closing_txn) 318 | 319 | closed_txns = closed_txns[closed_txns.amount != 0] 320 | 321 | return closed_txns 322 | 323 | 324 | def apply_sector_mappings_to_round_trips(round_trips, sector_mappings): 325 | """ 326 | Translates round trip symbols to sectors. 327 | 328 | Parameters 329 | ---------- 330 | round_trips : pd.DataFrame 331 | DataFrame with one row per round trip trade. 332 | - See full explanation in round_trips.extract_round_trips 333 | sector_mappings : dict or pd.Series, optional 334 | Security identifier to sector mapping. 335 | Security ids as keys, sectors as values. 336 | 337 | Returns 338 | ------- 339 | sector_round_trips : pd.DataFrame 340 | Round trips with symbol names replaced by sector names. 341 | """ 342 | 343 | sector_round_trips = round_trips.copy() 344 | sector_round_trips.symbol = sector_round_trips.symbol.apply( 345 | lambda x: sector_mappings.get(x, 'No Sector Mapping')) 346 | sector_round_trips = sector_round_trips.dropna(axis=0) 347 | 348 | return sector_round_trips 349 | 350 | 351 | def gen_round_trip_stats(round_trips): 352 | """Generate various round-trip statistics. 353 | 354 | Parameters 355 | ---------- 356 | round_trips : pd.DataFrame 357 | DataFrame with one row per round trip trade. 358 | - See full explanation in round_trips.extract_round_trips 359 | 360 | Returns 361 | ------- 362 | stats : dict 363 | A dictionary where each value is a pandas DataFrame containing 364 | various round-trip statistics. 365 | 366 | See also 367 | -------- 368 | round_trips.print_round_trip_stats 369 | """ 370 | 371 | stats = {} 372 | stats['pnl'] = agg_all_long_short(round_trips, 'pnl', PNL_STATS) 373 | stats['summary'] = agg_all_long_short(round_trips, 'pnl', 374 | SUMMARY_STATS) 375 | stats['duration'] = agg_all_long_short(round_trips, 'duration', 376 | DURATION_STATS) 377 | stats['returns'] = agg_all_long_short(round_trips, 'returns', 378 | RETURN_STATS) 379 | 380 | stats['symbols'] = \ 381 | round_trips.groupby('symbol')['returns'].agg(RETURN_STATS).T 382 | 383 | return stats 384 | 385 | 386 | def print_round_trip_stats(round_trips, hide_pos=False): 387 | """Print various round-trip statistics. Tries to pretty-print tables 388 | with HTML output if run inside IPython NB. 389 | 390 | Parameters 391 | ---------- 392 | round_trips : pd.DataFrame 393 | DataFrame with one row per round trip trade. 
394 | - See full explanation in round_trips.extract_round_trips 395 | 396 | See also 397 | -------- 398 | round_trips.gen_round_trip_stats 399 | """ 400 | 401 | stats = gen_round_trip_stats(round_trips) 402 | 403 | print_table(stats['summary'], float_format='{:.2f}'.format, 404 | name='Summary stats') 405 | print_table(stats['pnl'], float_format='${:.2f}'.format, name='PnL stats') 406 | print_table(stats['duration'], float_format='{:.2f}'.format, 407 | name='Duration stats') 408 | print_table(stats['returns'] * 100, float_format='{:.2f}%'.format, 409 | name='Return stats') 410 | 411 | if not hide_pos: 412 | stats['symbols'].columns = stats['symbols'].columns.map(format_asset) 413 | print_table(stats['symbols'] * 100, 414 | float_format='{:.2f}%'.format, name='Symbol stats') 415 | -------------------------------------------------------------------------------- /pyfolio/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/quantopian/pyfolio/4b901f6d73aa02ceb6d04b7d83502e5c6f2e81aa/pyfolio/tests/__init__.py -------------------------------------------------------------------------------- /pyfolio/tests/matplotlibrc: -------------------------------------------------------------------------------- 1 | backend : Agg -------------------------------------------------------------------------------- /pyfolio/tests/test_capacity.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | from unittest import TestCase 3 | from nose_parameterized import parameterized 4 | 5 | from pandas import ( 6 | Series, 7 | DataFrame, 8 | date_range, 9 | datetime, 10 | concat 11 | ) 12 | from pandas.util.testing import (assert_frame_equal, 13 | assert_series_equal) 14 | 15 | from pyfolio.capacity import (days_to_liquidate_positions, 16 | get_max_days_to_liquidate_by_ticker, 17 | get_low_liquidity_transactions, 18 | daily_txns_with_bar_data, 19 | apply_slippage_penalty) 20 | 21 | 22 | class CapacityTestCase(TestCase): 23 | dates = date_range(start='2015-01-01', freq='D', periods=3) 24 | 25 | positions = DataFrame([[1.0, 3.0, 0.0], 26 | [0.0, 1.0, 1.0], 27 | [3.0, 0.0, 1.0]], 28 | columns=['A', 'B', 'cash'], index=dates) 29 | 30 | transactions = DataFrame(data=[[1, 100000, 10, 'A']] * len(dates), 31 | columns=['sid', 'amount', 'price', 'symbol'], 32 | index=dates) 33 | 34 | volume = DataFrame([[1.0, 3.0], 35 | [2.0, 2.0], 36 | [3.0, 1.0]], 37 | columns=['A', 'B'], index=dates) 38 | volume.index.name = 'dt' 39 | volume = volume * 1000000 40 | volume['market_data'] = 'volume' 41 | price = DataFrame([[1.0, 1.0]] * len(dates), 42 | columns=['A', 'B'], index=dates) 43 | price.index.name = 'dt' 44 | price['market_data'] = 'price' 45 | market_data = concat([volume, price]).reset_index().set_index( 46 | ['dt', 'market_data']) 47 | 48 | def test_days_to_liquidate_positions(self): 49 | dtlp = days_to_liquidate_positions(self.positions, 50 | self.market_data, 51 | max_bar_consumption=1, 52 | capital_base=1e6, 53 | mean_volume_window=1) 54 | 55 | expected = DataFrame([[0.0, .5/3], 56 | [0.75/2, 0.0]], 57 | columns=['A', 'B'], 58 | index=self.dates[1:]) 59 | assert_frame_equal(dtlp, expected) 60 | 61 | def test_get_max_days_to_liquidate_by_ticker(self): 62 | 63 | mdtl = get_max_days_to_liquidate_by_ticker(self.positions, 64 | self.market_data, 65 | max_bar_consumption=1, 66 | capital_base=1e6, 67 | mean_volume_window=1) 68 | 69 | expected = DataFrame([[datetime(2015, 1, 3), .75/2, 75.], 
70 | [datetime(2015, 1, 2), .5/3, 50.]], 71 | columns=[ 72 | 'date', 'days_to_liquidate', 'pos_alloc_pct'], 73 | index=['A', 'B']) 74 | expected.index.name = 'symbol' 75 | 76 | assert_frame_equal(mdtl, expected) 77 | 78 | @parameterized.expand([(DataFrame([[datetime(2015, 1, 1), 100.], 79 | [datetime(2015, 1, 2), 100]], 80 | columns=['date', 'max_pct_bar_consumed'], 81 | index=['A', 'B']), None), 82 | (DataFrame([[datetime(2015, 1, 3), (1/3)*100.]], 83 | columns=['date', 'max_pct_bar_consumed'], 84 | index=['A']), 1)]) 85 | def test_get_low_liquidity_transactions(self, expected, last_n_days): 86 | txn_daily = DataFrame(data=[[1, 1000000, 1, 'A'], 87 | [2, 2000000, 1, 'B'], 88 | [1, 1000000, 1, 'A']], 89 | columns=['sid', 'amount', 'price', 'symbol'], 90 | index=self.dates) 91 | 92 | llt = get_low_liquidity_transactions(txn_daily, self.market_data, 93 | last_n_days=last_n_days) 94 | expected.index.name = 'symbol' 95 | assert_frame_equal(llt, expected) 96 | 97 | def test_daily_txns_with_bar_data(self): 98 | daily_txn = daily_txns_with_bar_data( 99 | self.transactions, self.market_data) 100 | expected = DataFrame(data=[['A', 100000, 1.0, 1000000.], 101 | ['A', 100000, 1.0, 2000000.], 102 | ['A', 100000, 1.0, 3000000.]], 103 | columns=['symbol', 'amount', 'price', 'volume'], 104 | index=self.dates) 105 | 106 | assert_frame_equal(daily_txn, expected, check_less_precise=True) 107 | 108 | @parameterized.expand([(1000000, 1, [0.9995, 0.9999375, 0.99998611]), 109 | (10000000, 1, [0.95, 0.99375, 0.998611]), 110 | (100000, 1, [0.999995, 0.999999375, 0.9999998611]), 111 | (1000000, .1, [0.99995, 0.99999375, 0.999998611])]) 112 | def test_apply_slippage_penalty(self, starting_base, impact, 113 | expected_adj_returns): 114 | returns = Series([1., 1., 1.], index=self.dates) 115 | daily_txn = daily_txns_with_bar_data( 116 | self.transactions, self.market_data) 117 | 118 | adj_returns = apply_slippage_penalty( 119 | returns, daily_txn, starting_base, 1000000, impact=impact) 120 | expected_adj_returns = Series(expected_adj_returns, index=self.dates) 121 | 122 | assert_series_equal(adj_returns, expected_adj_returns) 123 | -------------------------------------------------------------------------------- /pyfolio/tests/test_data/intercepts.csv: -------------------------------------------------------------------------------- 1 | 19001,0.0 2 | 19002,0.0 3 | -------------------------------------------------------------------------------- /pyfolio/tests/test_data/positions.csv: -------------------------------------------------------------------------------- 1 | ,19001,19002,cash 2 | 2016-01-04,1.0,1.0,0.0 3 | 2016-01-05,1.0,1.0,0.0 4 | 2016-01-06,1.0,1.0,0.0 5 | 2016-01-07,1.0,1.0,0.0 6 | 2016-01-08,1.0,1.0,0.0 7 | 2016-01-11,1.0,1.0,0.0 8 | 2016-01-12,1.0,1.0,0.0 9 | 2016-01-13,1.0,1.0,0.0 10 | 2016-01-14,1.0,1.0,0.0 11 | 2016-01-15,1.0,1.0,0.0 12 | 2016-01-18,1.0,1.0,0.0 13 | 2016-01-19,1.0,1.0,0.0 14 | 2016-01-20,1.0,1.0,0.0 15 | 2016-01-21,1.0,1.0,0.0 16 | 2016-01-22,1.0,1.0,0.0 17 | 2016-01-25,1.0,1.0,0.0 18 | 2016-01-26,1.0,1.0,0.0 19 | 2016-01-27,1.0,1.0,0.0 20 | 2016-01-28,1.0,1.0,0.0 21 | 2016-01-29,1.0,1.0,0.0 22 | 2016-02-01,1.0,1.0,0.0 23 | 2016-02-02,1.0,1.0,0.0 24 | 2016-02-03,1.0,1.0,0.0 25 | 2016-02-04,1.0,1.0,0.0 26 | 2016-02-05,1.0,1.0,0.0 27 | 2016-02-08,1.0,1.0,0.0 28 | 2016-02-09,1.0,1.0,0.0 29 | 2016-02-10,1.0,1.0,0.0 30 | 2016-02-11,1.0,1.0,0.0 31 | 2016-02-12,1.0,1.0,0.0 32 | 2016-02-15,1.0,1.0,0.0 33 | 2016-02-16,1.0,1.0,0.0 34 | 2016-02-17,1.0,1.0,0.0 35 | 2016-02-18,1.0,1.0,0.0 36 | 
2016-02-19,1.0,1.0,0.0 37 | 2016-02-22,1.0,1.0,0.0 38 | 2016-02-23,1.0,1.0,0.0 39 | 2016-02-24,1.0,1.0,0.0 40 | 2016-02-25,1.0,1.0,0.0 41 | 2016-02-26,1.0,1.0,0.0 42 | 2016-02-29,1.0,1.0,0.0 43 | 2016-03-01,1.0,1.0,0.0 44 | 2016-03-02,1.0,1.0,0.0 45 | 2016-03-03,1.0,1.0,0.0 46 | 2016-03-04,1.0,1.0,0.0 47 | 2016-03-07,1.0,1.0,0.0 48 | 2016-03-08,1.0,1.0,0.0 49 | 2016-03-09,1.0,1.0,0.0 50 | 2016-03-10,1.0,1.0,0.0 51 | 2016-03-11,1.0,1.0,0.0 52 | 2016-03-14,1.0,1.0,0.0 53 | 2016-03-15,1.0,1.0,0.0 54 | 2016-03-16,1.0,1.0,0.0 55 | 2016-03-17,1.0,1.0,0.0 56 | 2016-03-18,1.0,1.0,0.0 57 | 2016-03-21,1.0,1.0,0.0 58 | 2016-03-22,1.0,1.0,0.0 59 | 2016-03-23,1.0,1.0,0.0 60 | 2016-03-24,1.0,1.0,0.0 61 | 2016-03-25,1.0,1.0,0.0 62 | 2016-03-28,1.0,1.0,0.0 63 | 2016-03-29,1.0,1.0,0.0 64 | 2016-03-30,1.0,1.0,0.0 65 | 2016-03-31,1.0,1.0,0.0 66 | 2016-04-01,1.0,1.0,0.0 67 | 2016-04-04,1.0,1.0,0.0 68 | 2016-04-05,1.0,1.0,0.0 69 | 2016-04-06,1.0,1.0,0.0 70 | 2016-04-07,1.0,1.0,0.0 71 | 2016-04-08,1.0,1.0,0.0 72 | 2016-04-11,1.0,1.0,0.0 73 | 2016-04-12,1.0,1.0,0.0 74 | 2016-04-13,1.0,1.0,0.0 75 | 2016-04-14,1.0,1.0,0.0 76 | 2016-04-15,1.0,1.0,0.0 77 | 2016-04-18,1.0,1.0,0.0 78 | 2016-04-19,1.0,1.0,0.0 79 | 2016-04-20,1.0,1.0,0.0 80 | 2016-04-21,1.0,1.0,0.0 81 | 2016-04-22,1.0,1.0,0.0 82 | 2016-04-25,1.0,1.0,0.0 83 | 2016-04-26,1.0,1.0,0.0 84 | 2016-04-27,1.0,1.0,0.0 85 | 2016-04-28,1.0,1.0,0.0 86 | 2016-04-29,1.0,1.0,0.0 87 | 2016-05-02,1.0,1.0,0.0 88 | 2016-05-03,1.0,1.0,0.0 89 | 2016-05-04,1.0,1.0,0.0 90 | 2016-05-05,1.0,1.0,0.0 91 | 2016-05-06,1.0,1.0,0.0 92 | 2016-05-09,1.0,1.0,0.0 93 | 2016-05-10,1.0,1.0,0.0 94 | 2016-05-11,1.0,1.0,0.0 95 | 2016-05-12,1.0,1.0,0.0 96 | 2016-05-13,1.0,1.0,0.0 97 | 2016-05-16,1.0,1.0,0.0 98 | 2016-05-17,1.0,1.0,0.0 99 | 2016-05-18,1.0,1.0,0.0 100 | 2016-05-19,1.0,1.0,0.0 101 | 2016-05-20,1.0,1.0,0.0 102 | 2016-05-23,1.0,1.0,0.0 103 | 2016-05-24,1.0,1.0,0.0 104 | 2016-05-25,1.0,1.0,0.0 105 | 2016-05-26,1.0,1.0,0.0 106 | 2016-05-27,1.0,1.0,0.0 107 | 2016-05-30,1.0,1.0,0.0 108 | 2016-05-31,1.0,1.0,0.0 109 | 2016-06-01,1.0,1.0,0.0 110 | 2016-06-02,1.0,1.0,0.0 111 | 2016-06-03,1.0,1.0,0.0 112 | 2016-06-06,1.0,1.0,0.0 113 | 2016-06-07,1.0,1.0,0.0 114 | 2016-06-08,1.0,1.0,0.0 115 | 2016-06-09,1.0,1.0,0.0 116 | 2016-06-10,1.0,1.0,0.0 117 | 2016-06-13,1.0,1.0,0.0 118 | 2016-06-14,1.0,1.0,0.0 119 | 2016-06-15,1.0,1.0,0.0 120 | 2016-06-16,1.0,1.0,0.0 121 | 2016-06-17,1.0,1.0,0.0 122 | 2016-06-20,1.0,1.0,0.0 123 | 2016-06-21,1.0,1.0,0.0 124 | 2016-06-22,1.0,1.0,0.0 125 | 2016-06-23,1.0,1.0,0.0 126 | 2016-06-24,1.0,1.0,0.0 127 | 2016-06-27,1.0,1.0,0.0 128 | 2016-06-28,1.0,1.0,0.0 129 | 2016-06-29,1.0,1.0,0.0 130 | 2016-06-30,1.0,1.0,0.0 131 | 2016-07-01,1.0,1.0,0.0 132 | 2016-07-04,1.0,1.0,0.0 133 | 2016-07-05,1.0,1.0,0.0 134 | 2016-07-06,1.0,1.0,0.0 135 | 2016-07-07,1.0,1.0,0.0 136 | 2016-07-08,1.0,1.0,0.0 137 | 2016-07-11,1.0,1.0,0.0 138 | 2016-07-12,1.0,1.0,0.0 139 | 2016-07-13,1.0,1.0,0.0 140 | 2016-07-14,1.0,1.0,0.0 141 | 2016-07-15,1.0,1.0,0.0 142 | 2016-07-18,1.0,1.0,0.0 143 | 2016-07-19,1.0,1.0,0.0 144 | 2016-07-20,1.0,1.0,0.0 145 | 2016-07-21,1.0,1.0,0.0 146 | 2016-07-22,1.0,1.0,0.0 147 | 2016-07-25,1.0,1.0,0.0 148 | 2016-07-26,1.0,1.0,0.0 149 | 2016-07-27,1.0,1.0,0.0 150 | 2016-07-28,1.0,1.0,0.0 151 | 2016-07-29,1.0,1.0,0.0 152 | 2016-08-01,1.0,1.0,0.0 153 | 2016-08-02,1.0,1.0,0.0 154 | 2016-08-03,1.0,1.0,0.0 155 | 2016-08-04,1.0,1.0,0.0 156 | 2016-08-05,1.0,1.0,0.0 157 | 2016-08-08,1.0,1.0,0.0 158 | 2016-08-09,1.0,1.0,0.0 159 | 2016-08-10,1.0,1.0,0.0 160 | 
2016-08-11,1.0,1.0,0.0 161 | 2016-08-12,1.0,1.0,0.0 162 | 2016-08-15,1.0,1.0,0.0 163 | 2016-08-16,1.0,1.0,0.0 164 | 2016-08-17,1.0,1.0,0.0 165 | 2016-08-18,1.0,1.0,0.0 166 | 2016-08-19,1.0,1.0,0.0 167 | 2016-08-22,1.0,1.0,0.0 168 | 2016-08-23,1.0,1.0,0.0 169 | 2016-08-24,1.0,1.0,0.0 170 | 2016-08-25,1.0,1.0,0.0 171 | 2016-08-26,1.0,1.0,0.0 172 | 2016-08-29,1.0,1.0,0.0 173 | 2016-08-30,1.0,1.0,0.0 174 | 2016-08-31,1.0,1.0,0.0 175 | 2016-09-01,1.0,1.0,0.0 176 | 2016-09-02,1.0,1.0,0.0 177 | 2016-09-05,1.0,1.0,0.0 178 | 2016-09-06,1.0,1.0,0.0 179 | 2016-09-07,1.0,1.0,0.0 180 | 2016-09-08,1.0,1.0,0.0 181 | 2016-09-09,1.0,1.0,0.0 182 | 2016-09-12,1.0,1.0,0.0 183 | 2016-09-13,1.0,1.0,0.0 184 | 2016-09-14,1.0,1.0,0.0 185 | 2016-09-15,1.0,1.0,0.0 186 | 2016-09-16,1.0,1.0,0.0 187 | 2016-09-19,1.0,1.0,0.0 188 | 2016-09-20,1.0,1.0,0.0 189 | 2016-09-21,1.0,1.0,0.0 190 | 2016-09-22,1.0,1.0,0.0 191 | 2016-09-23,1.0,1.0,0.0 192 | 2016-09-26,1.0,1.0,0.0 193 | 2016-09-27,1.0,1.0,0.0 194 | 2016-09-28,1.0,1.0,0.0 195 | 2016-09-29,1.0,1.0,0.0 196 | 2016-09-30,1.0,1.0,0.0 197 | 2016-10-03,1.0,1.0,0.0 198 | 2016-10-04,1.0,1.0,0.0 199 | 2016-10-05,1.0,1.0,0.0 200 | 2016-10-06,1.0,1.0,0.0 201 | 2016-10-07,1.0,1.0,0.0 202 | 2016-10-10,1.0,1.0,0.0 203 | 2016-10-11,1.0,1.0,0.0 204 | 2016-10-12,1.0,1.0,0.0 205 | 2016-10-13,1.0,1.0,0.0 206 | 2016-10-14,1.0,1.0,0.0 207 | 2016-10-17,1.0,1.0,0.0 208 | 2016-10-18,1.0,1.0,0.0 209 | 2016-10-19,1.0,1.0,0.0 210 | 2016-10-20,1.0,1.0,0.0 211 | 2016-10-21,1.0,1.0,0.0 212 | 2016-10-24,1.0,1.0,0.0 213 | 2016-10-25,1.0,1.0,0.0 214 | 2016-10-26,1.0,1.0,0.0 215 | 2016-10-27,1.0,1.0,0.0 216 | 2016-10-28,1.0,1.0,0.0 217 | 2016-10-31,1.0,1.0,0.0 218 | 2016-11-01,1.0,1.0,0.0 219 | 2016-11-02,1.0,1.0,0.0 220 | 2016-11-03,1.0,1.0,0.0 221 | 2016-11-04,1.0,1.0,0.0 222 | 2016-11-07,1.0,1.0,0.0 223 | 2016-11-08,1.0,1.0,0.0 224 | 2016-11-09,1.0,1.0,0.0 225 | 2016-11-10,1.0,1.0,0.0 226 | 2016-11-11,1.0,1.0,0.0 227 | 2016-11-14,1.0,1.0,0.0 228 | 2016-11-15,1.0,1.0,0.0 229 | 2016-11-16,1.0,1.0,0.0 230 | 2016-11-17,1.0,1.0,0.0 231 | 2016-11-18,1.0,1.0,0.0 232 | 2016-11-21,1.0,1.0,0.0 233 | 2016-11-22,1.0,1.0,0.0 234 | 2016-11-23,1.0,1.0,0.0 235 | 2016-11-24,1.0,1.0,0.0 236 | 2016-11-25,1.0,1.0,0.0 237 | 2016-11-28,1.0,1.0,0.0 238 | 2016-11-29,1.0,1.0,0.0 239 | 2016-11-30,1.0,1.0,0.0 240 | 2016-12-01,1.0,1.0,0.0 241 | 2016-12-02,1.0,1.0,0.0 242 | 2016-12-05,1.0,1.0,0.0 243 | 2016-12-06,1.0,1.0,0.0 244 | 2016-12-07,1.0,1.0,0.0 245 | 2016-12-08,1.0,1.0,0.0 246 | 2016-12-09,1.0,1.0,0.0 247 | 2016-12-12,1.0,1.0,0.0 248 | 2016-12-13,1.0,1.0,0.0 249 | 2016-12-14,1.0,1.0,0.0 250 | 2016-12-15,1.0,1.0,0.0 251 | 2016-12-16,1.0,1.0,0.0 252 | 2016-12-19,1.0,1.0,0.0 253 | 2016-12-20,1.0,1.0,0.0 254 | 2016-12-21,1.0,1.0,0.0 255 | 2016-12-22,1.0,1.0,0.0 256 | 2016-12-23,1.0,1.0,0.0 257 | 2016-12-26,1.0,1.0,0.0 258 | 2016-12-27,1.0,1.0,0.0 259 | 2016-12-28,1.0,1.0,0.0 260 | 2016-12-29,1.0,1.0,0.0 261 | 2016-12-30,1.0,1.0,0.0 262 | 2017-01-02,1.0,1.0,0.0 263 | 2017-01-03,1.0,1.0,0.0 264 | 2017-01-04,1.0,1.0,0.0 265 | 2017-01-05,1.0,1.0,0.0 266 | 2017-01-06,1.0,1.0,0.0 267 | 2017-01-09,1.0,1.0,0.0 268 | 2017-01-10,1.0,1.0,0.0 269 | 2017-01-11,1.0,1.0,0.0 270 | 2017-01-12,1.0,1.0,0.0 271 | 2017-01-13,1.0,1.0,0.0 272 | 2017-01-16,1.0,1.0,0.0 273 | 2017-01-17,1.0,1.0,0.0 274 | 2017-01-18,1.0,1.0,0.0 275 | 2017-01-19,1.0,1.0,0.0 276 | 2017-01-20,1.0,1.0,0.0 277 | 2017-01-23,1.0,1.0,0.0 278 | 2017-01-24,1.0,1.0,0.0 279 | 2017-01-25,1.0,1.0,0.0 280 | 2017-01-26,1.0,1.0,0.0 281 | 2017-01-27,1.0,1.0,0.0 282 | 
2017-01-30,1.0,1.0,0.0 283 | 2017-01-31,1.0,1.0,0.0 284 | 2017-02-01,1.0,1.0,0.0 285 | 2017-02-02,1.0,1.0,0.0 286 | 2017-02-03,1.0,1.0,0.0 287 | 2017-02-06,1.0,1.0,0.0 288 | 2017-02-07,1.0,1.0,0.0 289 | 2017-02-08,1.0,1.0,0.0 290 | 2017-02-09,1.0,1.0,0.0 291 | 2017-02-10,1.0,1.0,0.0 292 | 2017-02-13,1.0,1.0,0.0 293 | 2017-02-14,1.0,1.0,0.0 294 | 2017-02-15,1.0,1.0,0.0 295 | 2017-02-16,1.0,1.0,0.0 296 | 2017-02-17,1.0,1.0,0.0 297 | 2017-02-20,1.0,1.0,0.0 298 | 2017-02-21,1.0,1.0,0.0 299 | 2017-02-22,1.0,1.0,0.0 300 | 2017-02-23,1.0,1.0,0.0 301 | 2017-02-24,1.0,1.0,0.0 302 | 2017-02-27,1.0,1.0,0.0 303 | 2017-02-28,1.0,1.0,0.0 304 | 2017-03-01,1.0,1.0,0.0 305 | 2017-03-02,1.0,1.0,0.0 306 | 2017-03-03,1.0,1.0,0.0 307 | 2017-03-06,1.0,1.0,0.0 308 | 2017-03-07,1.0,1.0,0.0 309 | 2017-03-08,1.0,1.0,0.0 310 | 2017-03-09,1.0,1.0,0.0 311 | 2017-03-10,1.0,1.0,0.0 312 | 2017-03-13,1.0,1.0,0.0 313 | 2017-03-14,1.0,1.0,0.0 314 | 2017-03-15,1.0,1.0,0.0 315 | 2017-03-16,1.0,1.0,0.0 316 | 2017-03-17,1.0,1.0,0.0 317 | 2017-03-20,1.0,1.0,0.0 318 | 2017-03-21,1.0,1.0,0.0 319 | 2017-03-22,1.0,1.0,0.0 320 | 2017-03-23,1.0,1.0,0.0 321 | 2017-03-24,1.0,1.0,0.0 322 | 2017-03-27,1.0,1.0,0.0 323 | 2017-03-28,1.0,1.0,0.0 324 | 2017-03-29,1.0,1.0,0.0 325 | 2017-03-30,1.0,1.0,0.0 326 | 2017-03-31,1.0,1.0,0.0 327 | 2017-04-03,1.0,1.0,0.0 328 | 2017-04-04,1.0,1.0,0.0 329 | 2017-04-05,1.0,1.0,0.0 330 | 2017-04-06,1.0,1.0,0.0 331 | 2017-04-07,1.0,1.0,0.0 332 | 2017-04-10,1.0,1.0,0.0 333 | 2017-04-11,1.0,1.0,0.0 334 | 2017-04-12,1.0,1.0,0.0 335 | 2017-04-13,1.0,1.0,0.0 336 | 2017-04-14,1.0,1.0,0.0 337 | 2017-04-17,1.0,1.0,0.0 338 | 2017-04-18,1.0,1.0,0.0 339 | 2017-04-19,1.0,1.0,0.0 340 | 2017-04-20,1.0,1.0,0.0 341 | 2017-04-21,1.0,1.0,0.0 342 | 2017-04-24,1.0,1.0,0.0 343 | 2017-04-25,1.0,1.0,0.0 344 | 2017-04-26,1.0,1.0,0.0 345 | 2017-04-27,1.0,1.0,0.0 346 | 2017-04-28,1.0,1.0,0.0 347 | 2017-05-01,1.0,1.0,0.0 348 | 2017-05-02,1.0,1.0,0.0 349 | 2017-05-03,1.0,1.0,0.0 350 | 2017-05-04,1.0,1.0,0.0 351 | 2017-05-05,1.0,1.0,0.0 352 | 2017-05-08,1.0,1.0,0.0 353 | 2017-05-09,1.0,1.0,0.0 354 | 2017-05-10,1.0,1.0,0.0 355 | 2017-05-11,1.0,1.0,0.0 356 | 2017-05-12,1.0,1.0,0.0 357 | 2017-05-15,1.0,1.0,0.0 358 | 2017-05-16,1.0,1.0,0.0 359 | 2017-05-17,1.0,1.0,0.0 360 | 2017-05-18,1.0,1.0,0.0 361 | 2017-05-19,1.0,1.0,0.0 362 | 2017-05-22,1.0,1.0,0.0 363 | 2017-05-23,1.0,1.0,0.0 364 | 2017-05-24,1.0,1.0,0.0 365 | 2017-05-25,1.0,1.0,0.0 366 | 2017-05-26,1.0,1.0,0.0 367 | 2017-05-29,1.0,1.0,0.0 368 | 2017-05-30,1.0,1.0,0.0 369 | 2017-05-31,1.0,1.0,0.0 370 | 2017-06-01,1.0,1.0,0.0 371 | 2017-06-02,1.0,1.0,0.0 372 | 2017-06-05,1.0,1.0,0.0 373 | 2017-06-06,1.0,1.0,0.0 374 | 2017-06-07,1.0,1.0,0.0 375 | 2017-06-08,1.0,1.0,0.0 376 | 2017-06-09,1.0,1.0,0.0 377 | 2017-06-12,1.0,1.0,0.0 378 | 2017-06-13,1.0,1.0,0.0 379 | 2017-06-14,1.0,1.0,0.0 380 | 2017-06-15,1.0,1.0,0.0 381 | 2017-06-16,1.0,1.0,0.0 382 | 2017-06-19,1.0,1.0,0.0 383 | 2017-06-20,1.0,1.0,0.0 384 | 2017-06-21,1.0,1.0,0.0 385 | 2017-06-22,1.0,1.0,0.0 386 | 2017-06-23,1.0,1.0,0.0 387 | 2017-06-26,1.0,1.0,0.0 388 | 2017-06-27,1.0,1.0,0.0 389 | 2017-06-28,1.0,1.0,0.0 390 | 2017-06-29,1.0,1.0,0.0 391 | 2017-06-30,1.0,1.0,0.0 392 | 2017-07-03,1.0,1.0,0.0 393 | 2017-07-04,1.0,1.0,0.0 394 | 2017-07-05,1.0,1.0,0.0 395 | 2017-07-06,1.0,1.0,0.0 396 | 2017-07-07,1.0,1.0,0.0 397 | 2017-07-10,1.0,1.0,0.0 398 | 2017-07-11,1.0,1.0,0.0 399 | 2017-07-12,1.0,1.0,0.0 400 | 2017-07-13,1.0,1.0,0.0 401 | 2017-07-14,1.0,1.0,0.0 402 | 2017-07-17,1.0,1.0,0.0 403 | 2017-07-18,1.0,1.0,0.0 404 | 
2017-07-19,1.0,1.0,0.0 405 | 2017-07-20,1.0,1.0,0.0 406 | 2017-07-21,1.0,1.0,0.0 407 | 2017-07-24,1.0,1.0,0.0 408 | 2017-07-25,1.0,1.0,0.0 409 | 2017-07-26,1.0,1.0,0.0 410 | 2017-07-27,1.0,1.0,0.0 411 | 2017-07-28,1.0,1.0,0.0 412 | 2017-07-31,1.0,1.0,0.0 413 | 2017-08-01,1.0,1.0,0.0 414 | 2017-08-02,1.0,1.0,0.0 415 | 2017-08-03,1.0,1.0,0.0 416 | 2017-08-04,1.0,1.0,0.0 417 | 2017-08-07,1.0,1.0,0.0 418 | 2017-08-08,1.0,1.0,0.0 419 | 2017-08-09,1.0,1.0,0.0 420 | 2017-08-10,1.0,1.0,0.0 421 | 2017-08-11,1.0,1.0,0.0 422 | 2017-08-14,1.0,1.0,0.0 423 | 2017-08-15,1.0,1.0,0.0 424 | 2017-08-16,1.0,1.0,0.0 425 | 2017-08-17,1.0,1.0,0.0 426 | 2017-08-18,1.0,1.0,0.0 427 | 2017-08-21,1.0,1.0,0.0 428 | 2017-08-22,1.0,1.0,0.0 429 | 2017-08-23,1.0,1.0,0.0 430 | 2017-08-24,1.0,1.0,0.0 431 | 2017-08-25,1.0,1.0,0.0 432 | 2017-08-28,1.0,1.0,0.0 433 | 2017-08-29,1.0,1.0,0.0 434 | 2017-08-30,1.0,1.0,0.0 435 | 2017-08-31,1.0,1.0,0.0 436 | 2017-09-01,1.0,1.0,0.0 437 | 2017-09-04,1.0,1.0,0.0 438 | 2017-09-05,1.0,1.0,0.0 439 | 2017-09-06,1.0,1.0,0.0 440 | 2017-09-07,1.0,1.0,0.0 441 | 2017-09-08,1.0,1.0,0.0 442 | 2017-09-11,1.0,1.0,0.0 443 | 2017-09-12,1.0,1.0,0.0 444 | 2017-09-13,1.0,1.0,0.0 445 | 2017-09-14,1.0,1.0,0.0 446 | 2017-09-15,1.0,1.0,0.0 447 | 2017-09-18,1.0,1.0,0.0 448 | 2017-09-19,1.0,1.0,0.0 449 | 2017-09-20,1.0,1.0,0.0 450 | 2017-09-21,1.0,1.0,0.0 451 | 2017-09-22,1.0,1.0,0.0 452 | 2017-09-25,1.0,1.0,0.0 453 | 2017-09-26,1.0,1.0,0.0 454 | 2017-09-27,1.0,1.0,0.0 455 | 2017-09-28,1.0,1.0,0.0 456 | 2017-09-29,1.0,1.0,0.0 457 | 2017-10-02,1.0,1.0,0.0 458 | 2017-10-03,1.0,1.0,0.0 459 | 2017-10-04,1.0,1.0,0.0 460 | 2017-10-05,1.0,1.0,0.0 461 | 2017-10-06,1.0,1.0,0.0 462 | 2017-10-09,1.0,1.0,0.0 463 | 2017-10-10,1.0,1.0,0.0 464 | 2017-10-11,1.0,1.0,0.0 465 | 2017-10-12,1.0,1.0,0.0 466 | 2017-10-13,1.0,1.0,0.0 467 | 2017-10-16,1.0,1.0,0.0 468 | 2017-10-17,1.0,1.0,0.0 469 | 2017-10-18,1.0,1.0,0.0 470 | 2017-10-19,1.0,1.0,0.0 471 | 2017-10-20,1.0,1.0,0.0 472 | 2017-10-23,1.0,1.0,0.0 473 | 2017-10-24,1.0,1.0,0.0 474 | 2017-10-25,1.0,1.0,0.0 475 | 2017-10-26,1.0,1.0,0.0 476 | 2017-10-27,1.0,1.0,0.0 477 | 2017-10-30,1.0,1.0,0.0 478 | 2017-10-31,1.0,1.0,0.0 479 | 2017-11-01,1.0,1.0,0.0 480 | 2017-11-02,1.0,1.0,0.0 481 | 2017-11-03,1.0,1.0,0.0 482 | 2017-11-06,1.0,1.0,0.0 483 | 2017-11-07,1.0,1.0,0.0 484 | 2017-11-08,1.0,1.0,0.0 485 | 2017-11-09,1.0,1.0,0.0 486 | 2017-11-10,1.0,1.0,0.0 487 | 2017-11-13,1.0,1.0,0.0 488 | 2017-11-14,1.0,1.0,0.0 489 | 2017-11-15,1.0,1.0,0.0 490 | 2017-11-16,1.0,1.0,0.0 491 | 2017-11-17,1.0,1.0,0.0 492 | 2017-11-20,1.0,1.0,0.0 493 | 2017-11-21,1.0,1.0,0.0 494 | 2017-11-22,1.0,1.0,0.0 495 | 2017-11-23,1.0,1.0,0.0 496 | 2017-11-24,1.0,1.0,0.0 497 | 2017-11-27,1.0,1.0,0.0 498 | 2017-11-28,1.0,1.0,0.0 499 | 2017-11-29,1.0,1.0,0.0 500 | 2017-11-30,1.0,1.0,0.0 501 | 2017-12-01,1.0,1.0,0.0 502 | 2017-12-04,1.0,1.0,0.0 503 | 2017-12-05,1.0,1.0,0.0 504 | 2017-12-06,1.0,1.0,0.0 505 | 2017-12-07,1.0,1.0,0.0 506 | -------------------------------------------------------------------------------- /pyfolio/tests/test_data/residuals.csv: -------------------------------------------------------------------------------- 1 | ,19001,19002 2 | 2016-01-04,0.0,0.0 3 | 2016-01-05,0.0,0.0 4 | 2016-01-06,0.0,0.0 5 | 2016-01-07,0.0,0.0 6 | 2016-01-08,0.0,0.0 7 | 2016-01-11,0.0,0.0 8 | 2016-01-12,0.0,0.0 9 | 2016-01-13,0.0,0.0 10 | 2016-01-14,0.0,0.0 11 | 2016-01-15,0.0,0.0 12 | 2016-01-18,0.0,0.0 13 | 2016-01-19,0.0,0.0 14 | 2016-01-20,0.0,0.0 15 | 2016-01-21,0.0,0.0 16 | 2016-01-22,0.0,0.0 17 | 2016-01-25,0.0,0.0 
18 | 2016-01-26,0.0,0.0 19 | 2016-01-27,0.0,0.0 20 | 2016-01-28,0.0,0.0 21 | 2016-01-29,0.0,0.0 22 | 2016-02-01,0.0,0.0 23 | 2016-02-02,0.0,0.0 24 | 2016-02-03,0.0,0.0 25 | 2016-02-04,0.0,0.0 26 | 2016-02-05,0.0,0.0 27 | 2016-02-08,0.0,0.0 28 | 2016-02-09,0.0,0.0 29 | 2016-02-10,0.0,0.0 30 | 2016-02-11,0.0,0.0 31 | 2016-02-12,0.0,0.0 32 | 2016-02-15,0.0,0.0 33 | 2016-02-16,0.0,0.0 34 | 2016-02-17,0.0,0.0 35 | 2016-02-18,0.0,0.0 36 | 2016-02-19,0.0,0.0 37 | 2016-02-22,0.0,0.0 38 | 2016-02-23,0.0,0.0 39 | 2016-02-24,0.0,0.0 40 | 2016-02-25,0.0,0.0 41 | 2016-02-26,0.0,0.0 42 | 2016-02-29,0.0,0.0 43 | 2016-03-01,0.0,0.0 44 | 2016-03-02,0.0,0.0 45 | 2016-03-03,0.0,0.0 46 | 2016-03-04,0.0,0.0 47 | 2016-03-07,0.0,0.0 48 | 2016-03-08,0.0,0.0 49 | 2016-03-09,0.0,0.0 50 | 2016-03-10,0.0,0.0 51 | 2016-03-11,0.0,0.0 52 | 2016-03-14,0.0,0.0 53 | 2016-03-15,0.0,0.0 54 | 2016-03-16,0.0,0.0 55 | 2016-03-17,0.0,0.0 56 | 2016-03-18,0.0,0.0 57 | 2016-03-21,0.0,0.0 58 | 2016-03-22,0.0,0.0 59 | 2016-03-23,0.0,0.0 60 | 2016-03-24,0.0,0.0 61 | 2016-03-25,0.0,0.0 62 | 2016-03-28,0.0,0.0 63 | 2016-03-29,0.0,0.0 64 | 2016-03-30,0.0,0.0 65 | 2016-03-31,0.0,0.0 66 | 2016-04-01,0.0,0.0 67 | 2016-04-04,0.0,0.0 68 | 2016-04-05,0.0,0.0 69 | 2016-04-06,0.0,0.0 70 | 2016-04-07,0.0,0.0 71 | 2016-04-08,0.0,0.0 72 | 2016-04-11,0.0,0.0 73 | 2016-04-12,0.0,0.0 74 | 2016-04-13,0.0,0.0 75 | 2016-04-14,0.0,0.0 76 | 2016-04-15,0.0,0.0 77 | 2016-04-18,0.0,0.0 78 | 2016-04-19,0.0,0.0 79 | 2016-04-20,0.0,0.0 80 | 2016-04-21,0.0,0.0 81 | 2016-04-22,0.0,0.0 82 | 2016-04-25,0.0,0.0 83 | 2016-04-26,0.0,0.0 84 | 2016-04-27,0.0,0.0 85 | 2016-04-28,0.0,0.0 86 | 2016-04-29,0.0,0.0 87 | 2016-05-02,0.0,0.0 88 | 2016-05-03,0.0,0.0 89 | 2016-05-04,0.0,0.0 90 | 2016-05-05,0.0,0.0 91 | 2016-05-06,0.0,0.0 92 | 2016-05-09,0.0,0.0 93 | 2016-05-10,0.0,0.0 94 | 2016-05-11,0.0,0.0 95 | 2016-05-12,0.0,0.0 96 | 2016-05-13,0.0,0.0 97 | 2016-05-16,0.0,0.0 98 | 2016-05-17,0.0,0.0 99 | 2016-05-18,0.0,0.0 100 | 2016-05-19,0.0,0.0 101 | 2016-05-20,0.0,0.0 102 | 2016-05-23,0.0,0.0 103 | 2016-05-24,0.0,0.0 104 | 2016-05-25,0.0,0.0 105 | 2016-05-26,0.0,0.0 106 | 2016-05-27,0.0,0.0 107 | 2016-05-30,0.0,0.0 108 | 2016-05-31,0.0,0.0 109 | 2016-06-01,0.0,0.0 110 | 2016-06-02,0.0,0.0 111 | 2016-06-03,0.0,0.0 112 | 2016-06-06,0.0,0.0 113 | 2016-06-07,0.0,0.0 114 | 2016-06-08,0.0,0.0 115 | 2016-06-09,0.0,0.0 116 | 2016-06-10,0.0,0.0 117 | 2016-06-13,0.0,0.0 118 | 2016-06-14,0.0,0.0 119 | 2016-06-15,0.0,0.0 120 | 2016-06-16,0.0,0.0 121 | 2016-06-17,0.0,0.0 122 | 2016-06-20,0.0,0.0 123 | 2016-06-21,0.0,0.0 124 | 2016-06-22,0.0,0.0 125 | 2016-06-23,0.0,0.0 126 | 2016-06-24,0.0,0.0 127 | 2016-06-27,0.0,0.0 128 | 2016-06-28,0.0,0.0 129 | 2016-06-29,0.0,0.0 130 | 2016-06-30,0.0,0.0 131 | 2016-07-01,0.0,0.0 132 | 2016-07-04,0.0,0.0 133 | 2016-07-05,0.0,0.0 134 | 2016-07-06,0.0,0.0 135 | 2016-07-07,0.0,0.0 136 | 2016-07-08,0.0,0.0 137 | 2016-07-11,0.0,0.0 138 | 2016-07-12,0.0,0.0 139 | 2016-07-13,0.0,0.0 140 | 2016-07-14,0.0,0.0 141 | 2016-07-15,0.0,0.0 142 | 2016-07-18,0.0,0.0 143 | 2016-07-19,0.0,0.0 144 | 2016-07-20,0.0,0.0 145 | 2016-07-21,0.0,0.0 146 | 2016-07-22,0.0,0.0 147 | 2016-07-25,0.0,0.0 148 | 2016-07-26,0.0,0.0 149 | 2016-07-27,0.0,0.0 150 | 2016-07-28,0.0,0.0 151 | 2016-07-29,0.0,0.0 152 | 2016-08-01,0.0,0.0 153 | 2016-08-02,0.0,0.0 154 | 2016-08-03,0.0,0.0 155 | 2016-08-04,0.0,0.0 156 | 2016-08-05,0.0,0.0 157 | 2016-08-08,0.0,0.0 158 | 2016-08-09,0.0,0.0 159 | 2016-08-10,0.0,0.0 160 | 2016-08-11,0.0,0.0 161 | 2016-08-12,0.0,0.0 162 | 2016-08-15,0.0,0.0 163 | 
2016-08-16,0.0,0.0 164 | 2016-08-17,0.0,0.0 165 | 2016-08-18,0.0,0.0 166 | 2016-08-19,0.0,0.0 167 | 2016-08-22,0.0,0.0 168 | 2016-08-23,0.0,0.0 169 | 2016-08-24,0.0,0.0 170 | 2016-08-25,0.0,0.0 171 | 2016-08-26,0.0,0.0 172 | 2016-08-29,0.0,0.0 173 | 2016-08-30,0.0,0.0 174 | 2016-08-31,0.0,0.0 175 | 2016-09-01,0.0,0.0 176 | 2016-09-02,0.0,0.0 177 | 2016-09-05,0.0,0.0 178 | 2016-09-06,0.0,0.0 179 | 2016-09-07,0.0,0.0 180 | 2016-09-08,0.0,0.0 181 | 2016-09-09,0.0,0.0 182 | 2016-09-12,0.0,0.0 183 | 2016-09-13,0.0,0.0 184 | 2016-09-14,0.0,0.0 185 | 2016-09-15,0.0,0.0 186 | 2016-09-16,0.0,0.0 187 | 2016-09-19,0.0,0.0 188 | 2016-09-20,0.0,0.0 189 | 2016-09-21,0.0,0.0 190 | 2016-09-22,0.0,0.0 191 | 2016-09-23,0.0,0.0 192 | 2016-09-26,0.0,0.0 193 | 2016-09-27,0.0,0.0 194 | 2016-09-28,0.0,0.0 195 | 2016-09-29,0.0,0.0 196 | 2016-09-30,0.0,0.0 197 | 2016-10-03,0.0,0.0 198 | 2016-10-04,0.0,0.0 199 | 2016-10-05,0.0,0.0 200 | 2016-10-06,0.0,0.0 201 | 2016-10-07,0.0,0.0 202 | 2016-10-10,0.0,0.0 203 | 2016-10-11,0.0,0.0 204 | 2016-10-12,0.0,0.0 205 | 2016-10-13,0.0,0.0 206 | 2016-10-14,0.0,0.0 207 | 2016-10-17,0.0,0.0 208 | 2016-10-18,0.0,0.0 209 | 2016-10-19,0.0,0.0 210 | 2016-10-20,0.0,0.0 211 | 2016-10-21,0.0,0.0 212 | 2016-10-24,0.0,0.0 213 | 2016-10-25,0.0,0.0 214 | 2016-10-26,0.0,0.0 215 | 2016-10-27,0.0,0.0 216 | 2016-10-28,0.0,0.0 217 | 2016-10-31,0.0,0.0 218 | 2016-11-01,0.0,0.0 219 | 2016-11-02,0.0,0.0 220 | 2016-11-03,0.0,0.0 221 | 2016-11-04,0.0,0.0 222 | 2016-11-07,0.0,0.0 223 | 2016-11-08,0.0,0.0 224 | 2016-11-09,0.0,0.0 225 | 2016-11-10,0.0,0.0 226 | 2016-11-11,0.0,0.0 227 | 2016-11-14,0.0,0.0 228 | 2016-11-15,0.0,0.0 229 | 2016-11-16,0.0,0.0 230 | 2016-11-17,0.0,0.0 231 | 2016-11-18,0.0,0.0 232 | 2016-11-21,0.0,0.0 233 | 2016-11-22,0.0,0.0 234 | 2016-11-23,0.0,0.0 235 | 2016-11-24,0.0,0.0 236 | 2016-11-25,0.0,0.0 237 | 2016-11-28,0.0,0.0 238 | 2016-11-29,0.0,0.0 239 | 2016-11-30,0.0,0.0 240 | 2016-12-01,0.0,0.0 241 | 2016-12-02,0.0,0.0 242 | 2016-12-05,0.0,0.0 243 | 2016-12-06,0.0,0.0 244 | 2016-12-07,0.0,0.0 245 | 2016-12-08,0.0,0.0 246 | 2016-12-09,0.0,0.0 247 | 2016-12-12,0.0,0.0 248 | 2016-12-13,0.0,0.0 249 | 2016-12-14,0.0,0.0 250 | 2016-12-15,0.0,0.0 251 | 2016-12-16,0.0,0.0 252 | 2016-12-19,0.0,0.0 253 | 2016-12-20,0.0,0.0 254 | 2016-12-21,0.0,0.0 255 | 2016-12-22,0.0,0.0 256 | 2016-12-23,0.0,0.0 257 | 2016-12-26,0.0,0.0 258 | 2016-12-27,0.0,0.0 259 | 2016-12-28,0.0,0.0 260 | 2016-12-29,0.0,0.0 261 | 2016-12-30,0.0,0.0 262 | 2017-01-02,0.0,0.0 263 | 2017-01-03,0.0,0.0 264 | 2017-01-04,0.0,0.0 265 | 2017-01-05,0.0,0.0 266 | 2017-01-06,0.0,0.0 267 | 2017-01-09,0.0,0.0 268 | 2017-01-10,0.0,0.0 269 | 2017-01-11,0.0,0.0 270 | 2017-01-12,0.0,0.0 271 | 2017-01-13,0.0,0.0 272 | 2017-01-16,0.0,0.0 273 | 2017-01-17,0.0,0.0 274 | 2017-01-18,0.0,0.0 275 | 2017-01-19,0.0,0.0 276 | 2017-01-20,0.0,0.0 277 | 2017-01-23,0.0,0.0 278 | 2017-01-24,0.0,0.0 279 | 2017-01-25,0.0,0.0 280 | 2017-01-26,0.0,0.0 281 | 2017-01-27,0.0,0.0 282 | 2017-01-30,0.0,0.0 283 | 2017-01-31,0.0,0.0 284 | 2017-02-01,0.0,0.0 285 | 2017-02-02,0.0,0.0 286 | 2017-02-03,0.0,0.0 287 | 2017-02-06,0.0,0.0 288 | 2017-02-07,0.0,0.0 289 | 2017-02-08,0.0,0.0 290 | 2017-02-09,0.0,0.0 291 | 2017-02-10,0.0,0.0 292 | 2017-02-13,0.0,0.0 293 | 2017-02-14,0.0,0.0 294 | 2017-02-15,0.0,0.0 295 | 2017-02-16,0.0,0.0 296 | 2017-02-17,0.0,0.0 297 | 2017-02-20,0.0,0.0 298 | 2017-02-21,0.0,0.0 299 | 2017-02-22,0.0,0.0 300 | 2017-02-23,0.0,0.0 301 | 2017-02-24,0.0,0.0 302 | 2017-02-27,0.0,0.0 303 | 2017-02-28,0.0,0.0 304 | 2017-03-01,0.0,0.0 305 | 
2017-03-02,0.0,0.0 306 | 2017-03-03,0.0,0.0 307 | 2017-03-06,0.0,0.0 308 | 2017-03-07,0.0,0.0 309 | 2017-03-08,0.0,0.0 310 | 2017-03-09,0.0,0.0 311 | 2017-03-10,0.0,0.0 312 | 2017-03-13,0.0,0.0 313 | 2017-03-14,0.0,0.0 314 | 2017-03-15,0.0,0.0 315 | 2017-03-16,0.0,0.0 316 | 2017-03-17,0.0,0.0 317 | 2017-03-20,0.0,0.0 318 | 2017-03-21,0.0,0.0 319 | 2017-03-22,0.0,0.0 320 | 2017-03-23,0.0,0.0 321 | 2017-03-24,0.0,0.0 322 | 2017-03-27,0.0,0.0 323 | 2017-03-28,0.0,0.0 324 | 2017-03-29,0.0,0.0 325 | 2017-03-30,0.0,0.0 326 | 2017-03-31,0.0,0.0 327 | 2017-04-03,0.0,0.0 328 | 2017-04-04,0.0,0.0 329 | 2017-04-05,0.0,0.0 330 | 2017-04-06,0.0,0.0 331 | 2017-04-07,0.0,0.0 332 | 2017-04-10,0.0,0.0 333 | 2017-04-11,0.0,0.0 334 | 2017-04-12,0.0,0.0 335 | 2017-04-13,0.0,0.0 336 | 2017-04-14,0.0,0.0 337 | 2017-04-17,0.0,0.0 338 | 2017-04-18,0.0,0.0 339 | 2017-04-19,0.0,0.0 340 | 2017-04-20,0.0,0.0 341 | 2017-04-21,0.0,0.0 342 | 2017-04-24,0.0,0.0 343 | 2017-04-25,0.0,0.0 344 | 2017-04-26,0.0,0.0 345 | 2017-04-27,0.0,0.0 346 | 2017-04-28,0.0,0.0 347 | 2017-05-01,0.0,0.0 348 | 2017-05-02,0.0,0.0 349 | 2017-05-03,0.0,0.0 350 | 2017-05-04,0.0,0.0 351 | 2017-05-05,0.0,0.0 352 | 2017-05-08,0.0,0.0 353 | 2017-05-09,0.0,0.0 354 | 2017-05-10,0.0,0.0 355 | 2017-05-11,0.0,0.0 356 | 2017-05-12,0.0,0.0 357 | 2017-05-15,0.0,0.0 358 | 2017-05-16,0.0,0.0 359 | 2017-05-17,0.0,0.0 360 | 2017-05-18,0.0,0.0 361 | 2017-05-19,0.0,0.0 362 | 2017-05-22,0.0,0.0 363 | 2017-05-23,0.0,0.0 364 | 2017-05-24,0.0,0.0 365 | 2017-05-25,0.0,0.0 366 | 2017-05-26,0.0,0.0 367 | 2017-05-29,0.0,0.0 368 | 2017-05-30,0.0,0.0 369 | 2017-05-31,0.0,0.0 370 | 2017-06-01,0.0,0.0 371 | 2017-06-02,0.0,0.0 372 | 2017-06-05,0.0,0.0 373 | 2017-06-06,0.0,0.0 374 | 2017-06-07,0.0,0.0 375 | 2017-06-08,0.0,0.0 376 | 2017-06-09,0.0,0.0 377 | 2017-06-12,0.0,0.0 378 | 2017-06-13,0.0,0.0 379 | 2017-06-14,0.0,0.0 380 | 2017-06-15,0.0,0.0 381 | 2017-06-16,0.0,0.0 382 | 2017-06-19,0.0,0.0 383 | 2017-06-20,0.0,0.0 384 | 2017-06-21,0.0,0.0 385 | 2017-06-22,0.0,0.0 386 | 2017-06-23,0.0,0.0 387 | 2017-06-26,0.0,0.0 388 | 2017-06-27,0.0,0.0 389 | 2017-06-28,0.0,0.0 390 | 2017-06-29,0.0,0.0 391 | 2017-06-30,0.0,0.0 392 | 2017-07-03,0.0,0.0 393 | 2017-07-04,0.0,0.0 394 | 2017-07-05,0.0,0.0 395 | 2017-07-06,0.0,0.0 396 | 2017-07-07,0.0,0.0 397 | 2017-07-10,0.0,0.0 398 | 2017-07-11,0.0,0.0 399 | 2017-07-12,0.0,0.0 400 | 2017-07-13,0.0,0.0 401 | 2017-07-14,0.0,0.0 402 | 2017-07-17,0.0,0.0 403 | 2017-07-18,0.0,0.0 404 | 2017-07-19,0.0,0.0 405 | 2017-07-20,0.0,0.0 406 | 2017-07-21,0.0,0.0 407 | 2017-07-24,0.0,0.0 408 | 2017-07-25,0.0,0.0 409 | 2017-07-26,0.0,0.0 410 | 2017-07-27,0.0,0.0 411 | 2017-07-28,0.0,0.0 412 | 2017-07-31,0.0,0.0 413 | 2017-08-01,0.0,0.0 414 | 2017-08-02,0.0,0.0 415 | 2017-08-03,0.0,0.0 416 | 2017-08-04,0.0,0.0 417 | 2017-08-07,0.0,0.0 418 | 2017-08-08,0.0,0.0 419 | 2017-08-09,0.0,0.0 420 | 2017-08-10,0.0,0.0 421 | 2017-08-11,0.0,0.0 422 | 2017-08-14,0.0,0.0 423 | 2017-08-15,0.0,0.0 424 | 2017-08-16,0.0,0.0 425 | 2017-08-17,0.0,0.0 426 | 2017-08-18,0.0,0.0 427 | 2017-08-21,0.0,0.0 428 | 2017-08-22,0.0,0.0 429 | 2017-08-23,0.0,0.0 430 | 2017-08-24,0.0,0.0 431 | 2017-08-25,0.0,0.0 432 | 2017-08-28,0.0,0.0 433 | 2017-08-29,0.0,0.0 434 | 2017-08-30,0.0,0.0 435 | 2017-08-31,0.0,0.0 436 | 2017-09-01,0.0,0.0 437 | 2017-09-04,0.0,0.0 438 | 2017-09-05,0.0,0.0 439 | 2017-09-06,0.0,0.0 440 | 2017-09-07,0.0,0.0 441 | 2017-09-08,0.0,0.0 442 | 2017-09-11,0.0,0.0 443 | 2017-09-12,0.0,0.0 444 | 2017-09-13,0.0,0.0 445 | 2017-09-14,0.0,0.0 446 | 2017-09-15,0.0,0.0 447 | 
2017-09-18,0.0,0.0 448 | 2017-09-19,0.0,0.0 449 | 2017-09-20,0.0,0.0 450 | 2017-09-21,0.0,0.0 451 | 2017-09-22,0.0,0.0 452 | 2017-09-25,0.0,0.0 453 | 2017-09-26,0.0,0.0 454 | 2017-09-27,0.0,0.0 455 | 2017-09-28,0.0,0.0 456 | 2017-09-29,0.0,0.0 457 | 2017-10-02,0.0,0.0 458 | 2017-10-03,0.0,0.0 459 | 2017-10-04,0.0,0.0 460 | 2017-10-05,0.0,0.0 461 | 2017-10-06,0.0,0.0 462 | 2017-10-09,0.0,0.0 463 | 2017-10-10,0.0,0.0 464 | 2017-10-11,0.0,0.0 465 | 2017-10-12,0.0,0.0 466 | 2017-10-13,0.0,0.0 467 | 2017-10-16,0.0,0.0 468 | 2017-10-17,0.0,0.0 469 | 2017-10-18,0.0,0.0 470 | 2017-10-19,0.0,0.0 471 | 2017-10-20,0.0,0.0 472 | 2017-10-23,0.0,0.0 473 | 2017-10-24,0.0,0.0 474 | 2017-10-25,0.0,0.0 475 | 2017-10-26,0.0,0.0 476 | 2017-10-27,0.0,0.0 477 | 2017-10-30,0.0,0.0 478 | 2017-10-31,0.0,0.0 479 | 2017-11-01,0.0,0.0 480 | 2017-11-02,0.0,0.0 481 | 2017-11-03,0.0,0.0 482 | 2017-11-06,0.0,0.0 483 | 2017-11-07,0.0,0.0 484 | 2017-11-08,0.0,0.0 485 | 2017-11-09,0.0,0.0 486 | 2017-11-10,0.0,0.0 487 | 2017-11-13,0.0,0.0 488 | 2017-11-14,0.0,0.0 489 | 2017-11-15,0.0,0.0 490 | 2017-11-16,0.0,0.0 491 | 2017-11-17,0.0,0.0 492 | 2017-11-20,0.0,0.0 493 | 2017-11-21,0.0,0.0 494 | 2017-11-22,0.0,0.0 495 | 2017-11-23,0.0,0.0 496 | 2017-11-24,0.0,0.0 497 | 2017-11-27,0.0,0.0 498 | 2017-11-28,0.0,0.0 499 | 2017-11-29,0.0,0.0 500 | 2017-11-30,0.0,0.0 501 | 2017-12-01,0.0,0.0 502 | 2017-12-04,0.0,0.0 503 | 2017-12-05,0.0,0.0 504 | 2017-12-06,0.0,0.0 505 | 2017-12-07,0.0,0.0 506 | -------------------------------------------------------------------------------- /pyfolio/tests/test_data/returns.csv: -------------------------------------------------------------------------------- 1 | 2016-01-04,2.16420955433 2 | 2016-01-05,3.21963118331 3 | 2016-01-06,0.890280110274 4 | 2016-01-07,0.798731209228 5 | 2016-01-08,0.307379650145 6 | 2016-01-11,1.59831707812 7 | 2016-01-12,0.88271274164 8 | 2016-01-13,0.77753756012 9 | 2016-01-14,1.28892080939 10 | 2016-01-15,-0.541028037651 11 | 2016-01-18,-1.89937122039 12 | 2016-01-19,0.122271178453 13 | 2016-01-20,0.815388949389 14 | 2016-01-21,-0.141425332724 15 | 2016-01-22,3.00213798426 16 | 2016-01-25,0.533109945299 17 | 2016-01-26,-2.86858221585 18 | 2016-01-27,-0.191563180222 19 | 2016-01-28,2.43267052951 20 | 2016-01-29,-0.689629567983 21 | 2016-02-01,-2.46857090225 22 | 2016-02-02,0.244505204607 23 | 2016-02-03,-0.947726483363 24 | 2016-02-04,-0.475305004218 25 | 2016-02-05,-1.82663812777 26 | 2016-02-08,-0.508564063334 27 | 2016-02-09,-1.69143732169 28 | 2016-02-10,0.400149642192 29 | 2016-02-11,0.368989120123 30 | 2016-02-12,-0.997063259668 31 | 2016-02-15,-1.03201360932 32 | 2016-02-16,-2.53942888438 33 | 2016-02-17,-0.224354793955 34 | 2016-02-18,-1.16741609144 35 | 2016-02-19,-0.855352968587 36 | 2016-02-22,0.858073472935 37 | 2016-02-23,-0.0954251358104 38 | 2016-02-24,-0.282468449763 39 | 2016-02-25,-1.44964681395 40 | 2016-02-26,-0.255387189898 41 | 2016-02-29,-0.264323353829 42 | 2016-03-01,-1.07058124655 43 | 2016-03-02,3.38414136983 44 | 2016-03-03,0.998854735347 45 | 2016-03-04,-0.0163008945794 46 | 2016-03-07,0.819268123409 47 | 2016-03-08,1.18491401456 48 | 2016-03-09,1.06293956537 49 | 2016-03-10,1.79637051463 50 | 2016-03-11,0.528901456148 51 | 2016-03-14,0.535391635914 52 | 2016-03-15,-0.301088290328 53 | 2016-03-16,0.770497780535 54 | 2016-03-17,-1.1610737922 55 | 2016-03-18,3.40345681791 56 | 2016-03-21,2.7736036187 57 | 2016-03-22,1.04883926804 58 | 2016-03-23,0.534453024845 59 | 2016-03-24,0.792241874683 60 | 2016-03-25,1.53628604191 61 | 
2016-03-28,-0.722975259429 62 | 2016-03-29,1.62462407089 63 | 2016-03-30,-0.844202400059 64 | 2016-03-31,1.41411017676 65 | 2016-04-01,1.07975659325 66 | 2016-04-04,-0.230666883153 67 | 2016-04-05,-0.642502102383 68 | 2016-04-06,0.0405872165676 69 | 2016-04-07,0.368292061037 70 | 2016-04-08,-0.697054796069 71 | 2016-04-11,-1.05186589144 72 | 2016-04-12,0.801704932265 73 | 2016-04-13,3.32762426185 74 | 2016-04-14,0.204194062652 75 | 2016-04-15,-1.77749201533 76 | 2016-04-18,1.64510111632 77 | 2016-04-19,-1.57119336071 78 | 2016-04-20,-0.761930810788 79 | 2016-04-21,0.0467044137431 80 | 2016-04-22,-1.58528869716 81 | 2016-04-25,1.43149960312 82 | 2016-04-26,1.03697204831 83 | 2016-04-27,-0.381072542429 84 | 2016-04-28,-2.54498644417 85 | 2016-04-29,1.50497240428 86 | 2016-05-02,1.23958647672 87 | 2016-05-03,0.205805018603 88 | 2016-05-04,-0.352648323503 89 | 2016-05-05,-1.49295944192 90 | 2016-05-06,-0.438053344492 91 | 2016-05-09,-1.72894520467 92 | 2016-05-10,-2.86702155506 93 | 2016-05-11,-0.97682620458 94 | 2016-05-12,-1.05221826017 95 | 2016-05-13,0.803451599015 96 | 2016-05-16,-1.02580604037 97 | 2016-05-17,-1.20737631597 98 | 2016-05-18,0.35173032931 99 | 2016-05-19,1.59529470518 100 | 2016-05-20,3.49976389872 101 | 2016-05-23,-0.608561015518 102 | 2016-05-24,1.75492332661 103 | 2016-05-25,-0.976824518213 104 | 2016-05-26,-0.762357033605 105 | 2016-05-27,0.1817742094 106 | 2016-05-30,1.22739712328 107 | 2016-05-31,0.319908865373 108 | 2016-06-01,-1.35449594912 109 | 2016-06-02,0.362131321694 110 | 2016-06-03,2.21705179903 111 | 2016-06-06,-1.30192677619 112 | 2016-06-07,0.0178854991274 113 | 2016-06-08,-1.47753502024 114 | 2016-06-09,0.388687574166 115 | 2016-06-10,-0.835237798701 116 | 2016-06-13,-1.91738079234 117 | 2016-06-14,-0.126811429755 118 | 2016-06-15,-0.374984330112 119 | 2016-06-16,-0.575500480522 120 | 2016-06-17,1.10316676581 121 | 2016-06-20,-1.03470883988 122 | 2016-06-21,-0.430671456989 123 | 2016-06-22,-1.98501677538 124 | 2016-06-23,2.23195015682 125 | 2016-06-24,-2.27978858701 126 | 2016-06-27,-0.0547230933603 127 | 2016-06-28,-0.177375253824 128 | 2016-06-29,1.38628789473 129 | 2016-06-30,-2.10896133386 130 | 2016-07-01,-0.972559018228 131 | 2016-07-04,-1.69567561208 132 | 2016-07-05,-0.64888133472 133 | 2016-07-06,-1.74750120905 134 | 2016-07-07,0.612313110879 135 | 2016-07-08,-0.21348600543 136 | 2016-07-11,-2.37354641079 137 | 2016-07-12,2.34600563094 138 | 2016-07-13,-1.04336195757 139 | 2016-07-14,0.377637838315 140 | 2016-07-15,0.0338083935778 141 | 2016-07-18,0.909632054483 142 | 2016-07-19,0.844327206461 143 | 2016-07-20,0.895187523368 144 | 2016-07-21,0.165891923536 145 | 2016-07-22,1.9916643941 146 | 2016-07-25,-1.1091146781 147 | 2016-07-26,1.24390087496 148 | 2016-07-27,1.00094166192 149 | 2016-07-28,0.680678647468 150 | 2016-07-29,-0.0293931414154 151 | 2016-08-01,0.351603827883 152 | 2016-08-02,-0.798342249125 153 | 2016-08-03,0.205663294643 154 | 2016-08-04,-2.6809759772 155 | 2016-08-05,0.534199714544 156 | 2016-08-08,0.944042246308 157 | 2016-08-09,-1.85750356162 158 | 2016-08-10,-0.290528219864 159 | 2016-08-11,-0.32905864368 160 | 2016-08-12,-0.168931678387 161 | 2016-08-15,-1.53259737711 162 | 2016-08-16,-0.616398725272 163 | 2016-08-17,-1.46964751032 164 | 2016-08-18,2.09905648113 165 | 2016-08-19,0.238560449113 166 | 2016-08-22,-0.441756620999 167 | 2016-08-23,-0.410627662791 168 | 2016-08-24,-2.05285271364 169 | 2016-08-25,-1.30495612163 170 | 2016-08-26,0.975539898453 171 | 2016-08-29,0.615123595465 172 | 2016-08-30,-1.90191501412 173 | 
2016-08-31,-0.721278127477 174 | 2016-09-01,-0.207989689119 175 | 2016-09-02,0.928175954722 176 | 2016-09-05,-2.20193539771 177 | 2016-09-06,0.675082663553 178 | 2016-09-07,-1.17348291224 179 | 2016-09-08,-2.3210435542 180 | 2016-09-09,0.140702484336 181 | 2016-09-12,0.702228038194 182 | 2016-09-13,1.27181335792 183 | 2016-09-14,0.145246056696 184 | 2016-09-15,-0.585503007615 185 | 2016-09-16,-1.39574486836 186 | 2016-09-19,-0.712681905613 187 | 2016-09-20,0.592172683913 188 | 2016-09-21,0.543331757931 189 | 2016-09-22,-0.927308943571 190 | 2016-09-23,0.673275235917 191 | 2016-09-26,-1.31082534404 192 | 2016-09-27,-3.27807107304 193 | 2016-09-28,-1.61808455048 194 | 2016-09-29,-2.45734574515 195 | 2016-09-30,1.81236268769 196 | 2016-10-03,0.344615177338 197 | 2016-10-04,-1.96990593741 198 | 2016-10-05,-1.05332957456 199 | 2016-10-06,1.99902579095 200 | 2016-10-07,2.31913065504 201 | 2016-10-10,-1.71455092288 202 | 2016-10-11,1.12295599912 203 | 2016-10-12,-1.41305665793 204 | 2016-10-13,0.873445411669 205 | 2016-10-14,-0.992702158626 206 | 2016-10-17,-0.646236750223 207 | 2016-10-18,-0.542581106315 208 | 2016-10-19,2.41722229378 209 | 2016-10-20,0.512886806468 210 | 2016-10-21,3.23958416818 211 | 2016-10-24,1.51172970288 212 | 2016-10-25,-1.97088115697 213 | 2016-10-26,-0.0361537248081 214 | 2016-10-27,-1.79663107987 215 | 2016-10-28,-0.299407698529 216 | 2016-10-31,-1.88375165918 217 | 2016-11-01,1.14583539274 218 | 2016-11-02,-0.656287365929 219 | 2016-11-03,0.826878358349 220 | 2016-11-04,0.878824978593 221 | 2016-11-07,-1.55464949905 222 | 2016-11-08,0.108362171074 223 | 2016-11-09,0.7607252931 224 | 2016-11-10,-0.507196407513 225 | 2016-11-11,-0.893018454854 226 | 2016-11-14,-0.23438062666 227 | 2016-11-15,0.742226093711 228 | 2016-11-16,2.3599476867 229 | 2016-11-17,-2.67030547347 230 | 2016-11-18,0.148696655935 231 | 2016-11-21,-1.49634890187 232 | 2016-11-22,-0.257851092584 233 | 2016-11-23,1.9096369789 234 | 2016-11-24,-1.75362174434 235 | 2016-11-25,-2.03713562499 236 | 2016-11-28,-2.55586126117 237 | 2016-11-29,-0.985398500407 238 | 2016-11-30,2.73326706877 239 | 2016-12-01,0.436718057752 240 | 2016-12-02,1.62459501086 241 | 2016-12-05,1.80084477746 242 | 2016-12-06,-1.33308086694 243 | 2016-12-07,-1.79302308165 244 | 2016-12-08,2.06646014678 245 | 2016-12-09,0.174803695097 246 | 2016-12-12,-1.3798786479 247 | 2016-12-13,2.39830631055 248 | 2016-12-14,2.62229938628 249 | 2016-12-15,-1.17278693274 250 | 2016-12-16,-1.09589663123 251 | 2016-12-19,0.34849014948 252 | 2016-12-20,0.862131044321 253 | 2016-12-21,-0.928719129359 254 | 2016-12-22,-3.20040225054 255 | 2016-12-23,0.122270141027 256 | 2016-12-26,2.27022433928 257 | 2016-12-27,-3.30083634438 258 | 2016-12-28,-0.484237366838 259 | 2016-12-29,1.54666243088 260 | 2016-12-30,2.02694845146 261 | 2017-01-02,-1.13568489899 262 | 2017-01-03,-2.57018957359 263 | 2017-01-04,-0.646602296369 264 | 2017-01-05,2.34907016957 265 | 2017-01-06,-1.50553460473 266 | 2017-01-09,-1.83810500357 267 | 2017-01-10,1.28972667054 268 | 2017-01-11,-1.86512037748 269 | 2017-01-12,-0.443890229501 270 | 2017-01-13,-0.312779620076 271 | 2017-01-16,-0.995093604823 272 | 2017-01-17,1.27624134049 273 | 2017-01-18,-0.828481516298 274 | 2017-01-19,-1.48098736263 275 | 2017-01-20,0.549474843283 276 | 2017-01-23,0.260249928374 277 | 2017-01-24,0.674873372985 278 | 2017-01-25,0.619820009087 279 | 2017-01-26,-2.34383963544 280 | 2017-01-27,-2.10949881089 281 | 2017-01-30,1.96666125501 282 | 2017-01-31,-1.58649315855 283 | 2017-02-01,-0.532487258066 284 | 
2017-02-02,0.971644247506 285 | 2017-02-03,0.535632107372 286 | 2017-02-06,-1.37595849837 287 | 2017-02-07,0.804908129643 288 | 2017-02-08,0.226021010764 289 | 2017-02-09,-1.92393843186 290 | 2017-02-10,1.00202586802 291 | 2017-02-13,-2.61169583121 292 | 2017-02-14,-0.354844934186 293 | 2017-02-15,-1.02494728473 294 | 2017-02-16,0.228443680958 295 | 2017-02-17,-3.43853205295 296 | 2017-02-20,0.98235484906 297 | 2017-02-21,-1.303577649 298 | 2017-02-22,0.731015644217 299 | 2017-02-23,-0.686764353276 300 | 2017-02-24,-1.10874559461 301 | 2017-02-27,-1.13311052405 302 | 2017-02-28,-0.706265342992 303 | 2017-03-01,-1.99602056214 304 | 2017-03-02,-1.77118921694 305 | 2017-03-03,-0.26399968974 306 | 2017-03-06,-3.04559895192 307 | 2017-03-07,1.50067606963 308 | 2017-03-08,0.272853172261 309 | 2017-03-09,0.553466545441 310 | 2017-03-10,-0.221014391134 311 | 2017-03-13,0.294451776784 312 | 2017-03-14,-0.526508664707 313 | 2017-03-15,-1.60134330844 314 | 2017-03-16,1.85428223205 315 | 2017-03-17,-0.0575180631839 316 | 2017-03-20,-0.804773583575 317 | 2017-03-21,0.0959239853297 318 | 2017-03-22,-0.0505395008888 319 | 2017-03-23,-0.665508142742 320 | 2017-03-24,2.18027033894 321 | 2017-03-27,1.27721523253 322 | 2017-03-28,0.0381972461105 323 | 2017-03-29,-1.52290214945 324 | 2017-03-30,0.956648485035 325 | 2017-03-31,0.951585622391 326 | 2017-04-03,-2.03368978779 327 | 2017-04-04,0.837201240864 328 | 2017-04-05,0.675320754703 329 | 2017-04-06,-1.38567147857 330 | 2017-04-07,-1.31631979878 331 | 2017-04-10,-2.1958092599 332 | 2017-04-11,0.550385238052 333 | 2017-04-12,-1.09750329041 334 | 2017-04-13,1.05577162309 335 | 2017-04-14,-1.62733919465 336 | 2017-04-17,-2.430297819 337 | 2017-04-18,-2.8584865773 338 | 2017-04-19,0.612572489773 339 | 2017-04-20,0.0780394187355 340 | 2017-04-21,1.81907008147 341 | 2017-04-24,0.533016516702 342 | 2017-04-25,1.62280310702 343 | 2017-04-26,-3.49101818025 344 | 2017-04-27,0.505912618034 345 | 2017-04-28,2.34497727936 346 | 2017-05-01,1.27982322983 347 | 2017-05-02,-3.28006352412 348 | 2017-05-03,0.558046942455 349 | 2017-05-04,-1.14088576872 350 | 2017-05-05,1.27990250842 351 | 2017-05-08,-2.6554831932 352 | 2017-05-09,0.305969120203 353 | 2017-05-10,2.36697493652 354 | 2017-05-11,0.901350548961 355 | 2017-05-12,1.47657485082 356 | 2017-05-15,-0.0249465082623 357 | 2017-05-16,-0.986723754665 358 | 2017-05-17,1.22650120974 359 | 2017-05-18,-1.26747907878 360 | 2017-05-19,0.469249912172 361 | 2017-05-22,-0.897163586484 362 | 2017-05-23,-0.201564266035 363 | 2017-05-24,-2.48901699082 364 | 2017-05-25,0.310530342949 365 | 2017-05-26,1.39993342151 366 | 2017-05-29,-1.32114985926 367 | 2017-05-30,-1.55939770421 368 | 2017-05-31,0.251878743216 369 | 2017-06-01,-0.720543762919 370 | 2017-06-02,-1.09234543399 371 | 2017-06-05,-2.31782526342 372 | 2017-06-06,1.62199773143 373 | 2017-06-07,-0.209915230395 374 | 2017-06-08,0.730383073908 375 | 2017-06-09,-1.52065275148 376 | 2017-06-12,-0.888903454012 377 | 2017-06-13,2.14437685725 378 | 2017-06-14,0.80654823367 379 | 2017-06-15,-0.0369352471997 380 | 2017-06-16,-1.52722797628 381 | 2017-06-19,-0.185615062136 382 | 2017-06-20,0.747712618986 383 | 2017-06-21,-0.382922482812 384 | 2017-06-22,-0.0824178900418 385 | 2017-06-23,1.63542459048 386 | 2017-06-26,-0.477665414151 387 | 2017-06-27,-0.726359595805 388 | 2017-06-28,-2.15638276459 389 | 2017-06-29,-0.376129645064 390 | 2017-06-30,-1.69955745668 391 | 2017-07-03,2.01065971035 392 | 2017-07-04,-0.729569532852 393 | 2017-07-05,0.625347950302 394 | 2017-07-06,0.951673860043 395 
| 2017-07-07,-1.40118153706 396 | 2017-07-10,-0.80795495471 397 | 2017-07-11,0.415069440239 398 | 2017-07-12,-1.75791454491 399 | 2017-07-13,-1.00251266286 400 | 2017-07-14,-1.25462789997 401 | 2017-07-17,2.19697589072 402 | 2017-07-18,-0.448686570639 403 | 2017-07-19,1.3461216949 404 | 2017-07-20,0.471860167339 405 | 2017-07-21,-1.80069601033 406 | 2017-07-24,0.112565354251 407 | 2017-07-25,0.353891388233 408 | 2017-07-26,2.20426423196 409 | 2017-07-27,1.0142090195 410 | 2017-07-28,-0.829626091563 411 | 2017-07-31,0.000353288028221 412 | 2017-08-01,-1.42886114567 413 | 2017-08-02,-0.340757690955 414 | 2017-08-03,2.55597944625 415 | 2017-08-04,0.861145764153 416 | 2017-08-07,1.32198759659 417 | 2017-08-08,-0.0390397541084 418 | 2017-08-09,0.918851571578 419 | 2017-08-10,-1.17398999163 420 | 2017-08-11,0.781880216401 421 | 2017-08-14,-0.130218406447 422 | 2017-08-15,3.10640403635 423 | 2017-08-16,0.213238792126 424 | 2017-08-17,0.216607652142 425 | 2017-08-18,-0.716881597089 426 | 2017-08-21,-3.73674699662 427 | 2017-08-22,-1.70135071407 428 | 2017-08-23,-1.46939143935 429 | 2017-08-24,-2.04903708979 430 | 2017-08-25,-0.509864956148 431 | 2017-08-28,1.32668844699 432 | 2017-08-29,0.120516478373 433 | 2017-08-30,-0.789345873489 434 | 2017-08-31,0.193975917066 435 | 2017-09-01,-0.505107059727 436 | 2017-09-04,0.450000046009 437 | 2017-09-05,-1.11952813426 438 | 2017-09-06,-0.361841803858 439 | 2017-09-07,-1.08139691805 440 | 2017-09-08,-1.74327499448 441 | 2017-09-11,0.361855218159 442 | 2017-09-12,-0.152628361654 443 | 2017-09-13,-1.64989464856 444 | 2017-09-14,0.410757950451 445 | 2017-09-15,-0.530326700757 446 | 2017-09-18,-0.17493428176 447 | 2017-09-19,0.755092093784 448 | 2017-09-20,0.57603620811 449 | 2017-09-21,-2.39813670791 450 | 2017-09-22,2.19039229392 451 | 2017-09-25,-2.14517245505 452 | 2017-09-26,0.557856453616 453 | 2017-09-27,0.970994402874 454 | 2017-09-28,-1.7062662684 455 | 2017-09-29,2.289756245 456 | 2017-10-02,-2.21884039066 457 | 2017-10-03,-1.01688534564 458 | 2017-10-04,-0.259175509346 459 | 2017-10-05,-0.319289896615 460 | 2017-10-06,0.200042182949 461 | 2017-10-09,-0.0226113761569 462 | 2017-10-10,1.53034661666 463 | 2017-10-11,2.38475882145 464 | 2017-10-12,-0.53600982685 465 | 2017-10-13,1.83580320538 466 | 2017-10-16,1.33419812274 467 | 2017-10-17,-1.0697522211 468 | 2017-10-18,-1.1522665034 469 | 2017-10-19,0.674744963968 470 | 2017-10-20,-1.32389256982 471 | 2017-10-23,1.66367405489 472 | 2017-10-24,3.24047024041 473 | 2017-10-25,0.184048461979 474 | 2017-10-26,1.71065006077 475 | 2017-10-27,0.391009250722 476 | 2017-10-30,-0.703045138945 477 | 2017-10-31,0.990963037634 478 | 2017-11-01,0.775091407101 479 | 2017-11-02,0.0587659177434 480 | 2017-11-03,1.0674859235 481 | 2017-11-06,0.57254145092 482 | 2017-11-07,1.12671933158 483 | 2017-11-08,-0.570907316663 484 | 2017-11-09,1.58149159817 485 | 2017-11-10,1.48710113275 486 | 2017-11-13,0.310956546026 487 | 2017-11-14,1.61472697925 488 | 2017-11-15,1.70729437889 489 | 2017-11-16,-1.27034812155 490 | 2017-11-17,-0.525604960667 491 | 2017-11-20,0.214937582637 492 | 2017-11-21,0.702985855346 493 | 2017-11-22,-0.504772278 494 | 2017-11-23,0.318426777681 495 | 2017-11-24,1.0821632933 496 | 2017-11-27,0.619825773006 497 | 2017-11-28,-0.558634889801 498 | 2017-11-29,0.701991325725 499 | 2017-11-30,-0.10420659651 500 | 2017-12-01,-1.50572502032 501 | 2017-12-04,1.44843656704 502 | 2017-12-05,-0.317600794692 503 | 2017-12-06,0.429533271829 504 | 2017-12-07,-1.27730404508 505 | 
-------------------------------------------------------------------------------- /pyfolio/tests/test_data/test_gross_lev.csv.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/quantopian/pyfolio/4b901f6d73aa02ceb6d04b7d83502e5c6f2e81aa/pyfolio/tests/test_data/test_gross_lev.csv.gz -------------------------------------------------------------------------------- /pyfolio/tests/test_data/test_pos.csv.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/quantopian/pyfolio/4b901f6d73aa02ceb6d04b7d83502e5c6f2e81aa/pyfolio/tests/test_data/test_pos.csv.gz -------------------------------------------------------------------------------- /pyfolio/tests/test_data/test_returns.csv.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/quantopian/pyfolio/4b901f6d73aa02ceb6d04b7d83502e5c6f2e81aa/pyfolio/tests/test_data/test_returns.csv.gz -------------------------------------------------------------------------------- /pyfolio/tests/test_data/test_txn.csv.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/quantopian/pyfolio/4b901f6d73aa02ceb6d04b7d83502e5c6f2e81aa/pyfolio/tests/test_data/test_txn.csv.gz -------------------------------------------------------------------------------- /pyfolio/tests/test_nbs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | simple example script for running notebooks and reporting exceptions. 4 | Usage: `checkipnb.py foo.ipynb [bar.ipynb [...]]` 5 | Each cell is submitted to the kernel, and checked for errors. 6 | """ 7 | 8 | import os 9 | import glob 10 | from runipy.notebook_runner import NotebookRunner 11 | 12 | from pyfolio.utils import pyfolio_root 13 | from pyfolio.ipycompat import read as read_notebook 14 | 15 | 16 | def test_nbs(): 17 | path = os.path.join(pyfolio_root(), 'examples', '*.ipynb') 18 | for ipynb in glob.glob(path): 19 | with open(ipynb) as f: 20 | nb = read_notebook(f, 'json') 21 | nb_runner = NotebookRunner(nb) 22 | nb_runner.run_notebook(skip_exceptions=False) 23 | -------------------------------------------------------------------------------- /pyfolio/tests/test_pos.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from nose_parameterized import parameterized 3 | from collections import OrderedDict 4 | import os 5 | import gzip 6 | 7 | from pandas import ( 8 | Series, 9 | DataFrame, 10 | date_range, 11 | Timestamp, 12 | read_csv 13 | ) 14 | from pandas.util.testing import assert_frame_equal 15 | 16 | from numpy import ( 17 | arange, 18 | zeros_like, 19 | nan, 20 | ) 21 | 22 | import warnings 23 | 24 | from pyfolio.utils import (to_utc, to_series, check_intraday, 25 | detect_intraday, estimate_intraday) 26 | from pyfolio.pos import (get_percent_alloc, 27 | extract_pos, 28 | get_sector_exposures, 29 | get_max_median_position_concentration) 30 | 31 | 32 | class PositionsTestCase(TestCase): 33 | dates = date_range(start='2015-01-01', freq='D', periods=20) 34 | 35 | def test_get_percent_alloc(self): 36 | raw_data = arange(15, dtype=float).reshape(5, 3) 37 | # Make the first column negative to test absolute magnitudes. 
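# (The expected frame built below applies the same normalization that
# get_percent_alloc is asserted to produce: each row divided by its
# signed row sum.)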
38 | raw_data[:, 0] *= -1 39 | 40 | frame = DataFrame( 41 | raw_data, 42 | index=date_range('01-01-2015', freq='D', periods=5), 43 | columns=['A', 'B', 'C'] 44 | ) 45 | 46 | result = get_percent_alloc(frame) 47 | expected_raw = zeros_like(raw_data) 48 | for idx, row in enumerate(raw_data): 49 | expected_raw[idx] = row / row.sum() 50 | 51 | expected = DataFrame( 52 | expected_raw, 53 | index=frame.index, 54 | columns=frame.columns, 55 | ) 56 | 57 | assert_frame_equal(result, expected) 58 | 59 | def test_extract_pos(self): 60 | index_dup = [Timestamp('2015-06-08', tz='UTC'), 61 | Timestamp('2015-06-08', tz='UTC'), 62 | Timestamp('2015-06-09', tz='UTC'), 63 | Timestamp('2015-06-09', tz='UTC')] 64 | index = [Timestamp('2015-06-08', tz='UTC'), 65 | Timestamp('2015-06-09', tz='UTC')] 66 | 67 | positions = DataFrame( 68 | {'amount': [100., 200., 300., 400.], 69 | 'last_sale_price': [10., 20., 30., 40.], 70 | 'sid': [1, 2, 1, 2]}, 71 | index=index_dup 72 | ) 73 | cash = Series([100., 200.], index=index) 74 | 75 | result = extract_pos(positions, cash) 76 | 77 | expected = DataFrame(OrderedDict([ 78 | (1, [100.*10., 300.*30.]), 79 | (2, [200.*20., 400.*40.]), 80 | ('cash', [100., 200.])]), 81 | index=index 82 | ) 83 | expected.index.name = 'index' 84 | expected.columns.name = 'sid' 85 | 86 | assert_frame_equal(result, expected) 87 | 88 | @parameterized.expand([ 89 | (DataFrame([[1.0, 2.0, 3.0, 10.0]]*len(dates), 90 | columns=[0, 1, 2, 'cash'], index=dates), 91 | {0: 'A', 1: 'B', 2: 'A'}, 92 | DataFrame([[4.0, 2.0, 10.0]]*len(dates), 93 | columns=['A', 'B', 'cash'], index=dates), 94 | False), 95 | (DataFrame([[1.0, 2.0, 3.0, 10.0]]*len(dates), 96 | columns=[0, 1, 2, 'cash'], index=dates), 97 | Series(index=[0, 1, 2], data=['A', 'B', 'A']), 98 | DataFrame([[4.0, 2.0, 10.0]]*len(dates), 99 | columns=['A', 'B', 'cash'], index=dates), 100 | False), 101 | (DataFrame([[1.0, 2.0, 3.0, 10.0]]*len(dates), 102 | columns=[0, 1, 2, 'cash'], index=dates), 103 | {0: 'A', 1: 'B'}, 104 | DataFrame([[1.0, 2.0, 10.0]]*len(dates), 105 | columns=['A', 'B', 'cash'], index=dates), 106 | True) 107 | ]) 108 | def test_sector_exposure(self, positions, mapping, 109 | expected_sector_exposure, 110 | warning_expected): 111 | """ 112 | Tests sector exposure mapping and rollup. 
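Sids that share a sector label are summed into a single column (sids
0 and 2 both map to 'A', so 'A' rolls up to 1.0 + 3.0 = 4.0), and a
warning is expected whenever the mapping omits a sid that appears in
positions, as in the third parametrized case.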
113 | 114 | """ 115 | with warnings.catch_warnings(record=True) as w: 116 | result_sector_exposure = get_sector_exposures(positions, 117 | mapping) 118 | 119 | assert_frame_equal(result_sector_exposure, 120 | expected_sector_exposure) 121 | if warning_expected: 122 | self.assertEqual(len(w), 1) 123 | else: 124 | self.assertEqual(len(w), 0) 125 | 126 | @parameterized.expand([ 127 | (DataFrame([[1.0, 2.0, 3.0, 14.0]]*len(dates), 128 | columns=[0, 1, 2, 'cash'], index=dates), 129 | DataFrame([[0.15, 0.1, nan, nan]]*len(dates), 130 | columns=['max_long', 'median_long', 131 | 'median_short', 'max_short'], index=dates)), 132 | (DataFrame([[1.0, -2.0, -13.0, 15.0]]*len(dates), 133 | columns=[0, 1, 2, 'cash'], index=dates), 134 | DataFrame([[1.0, 1.0, -7.5, -13.0]]*len(dates), 135 | columns=['max_long', 'median_long', 136 | 'median_short', 'max_short'], index=dates)), 137 | (DataFrame([[nan, 2.0, nan, 8.0]]*len(dates), 138 | columns=[0, 1, 2, 'cash'], index=dates), 139 | DataFrame([[0.2, 0.2, nan, nan]]*len(dates), 140 | columns=['max_long', 'median_long', 141 | 'median_short', 'max_short'], index=dates)) 142 | ]) 143 | def test_max_median_exposure(self, positions, expected): 144 | alloc_summary = get_max_median_position_concentration(positions) 145 | assert_frame_equal(expected, alloc_summary) 146 | 147 | __location__ = os.path.realpath( 148 | os.path.join(os.getcwd(), os.path.dirname(__file__))) 149 | 150 | test_returns = read_csv( 151 | gzip.open( 152 | __location__ + '/test_data/test_returns.csv.gz'), 153 | index_col=0, parse_dates=True) 154 | test_returns = to_series(to_utc(test_returns)) 155 | test_txn = to_utc(read_csv( 156 | gzip.open( 157 | __location__ + '/test_data/test_txn.csv.gz'), 158 | index_col=0, parse_dates=True)) 159 | test_pos = to_utc(read_csv( 160 | gzip.open(__location__ + '/test_data/test_pos.csv.gz'), 161 | index_col=0, parse_dates=True)) 162 | 163 | @parameterized.expand([ 164 | (test_pos, test_txn, False), 165 | (test_pos.resample('1W').last(), test_txn, True) 166 | ]) 167 | def test_detect_intraday(self, positions, transactions, expected): 168 | detected = detect_intraday(positions, transactions, threshold=0.25) 169 | assert detected == expected 170 | 171 | @parameterized.expand([ 172 | ('infer', test_returns, test_pos, test_txn, test_pos), 173 | (False, test_returns, test_pos, test_txn, test_pos) 174 | ]) 175 | def test_check_intraday(self, estimate, returns, 176 | positions, transactions, expected): 177 | detected = check_intraday(estimate, returns, positions, transactions) 178 | assert_frame_equal(detected, expected) 179 | 180 | @parameterized.expand([ 181 | (test_returns, test_pos, test_txn, (1506, 8)), 182 | (test_returns, test_pos.resample('1W').last(), test_txn, (1819, 8)) 183 | ]) 184 | def test_estimate_intraday(self, returns, positions, 185 | transactions, expected): 186 | intraday_pos = estimate_intraday(returns, positions, transactions) 187 | assert intraday_pos.shape == expected 188 | -------------------------------------------------------------------------------- /pyfolio/tests/test_round_trips.py: -------------------------------------------------------------------------------- 1 | from nose_parameterized import parameterized 2 | 3 | from unittest import TestCase 4 | 5 | from pandas import ( 6 | Series, 7 | DataFrame, 8 | DatetimeIndex, 9 | date_range, 10 | Timedelta, 11 | read_csv 12 | ) 13 | from pandas.util.testing import (assert_frame_equal) 14 | 15 | import os 16 | import gzip 17 | 18 | from pyfolio.round_trips import (extract_round_trips, 19 | 
add_closing_transactions, 20 | _groupby_consecutive, 21 | ) 22 | 23 | 24 | class RoundTripTestCase(TestCase): 25 | dates = date_range(start='2015-01-01', freq='D', periods=20) 26 | dates_intraday = date_range(start='2015-01-01', 27 | freq='2BH', periods=8) 28 | 29 | @parameterized.expand([ 30 | (DataFrame(data=[[2, 10., 'A'], 31 | [2, 20., 'A'], 32 | [-2, 20., 'A'], 33 | [-2, 10., 'A'], 34 | ], 35 | columns=['amount', 'price', 'symbol'], 36 | index=dates_intraday[:4]), 37 | DataFrame(data=[[4, 15., 'A'], 38 | [-4, 15., 'A'], 39 | ], 40 | columns=['amount', 'price', 'symbol'], 41 | index=dates_intraday[[0, 2]]) 42 | .rename_axis('dt', axis='index') 43 | ), 44 | (DataFrame(data=[[2, 10., 'A'], 45 | [2, 20., 'A'], 46 | [2, 20., 'A'], 47 | [2, 10., 'A'], 48 | ], 49 | columns=['amount', 'price', 'symbol'], 50 | index=dates_intraday[[0, 1, 4, 5]]), 51 | DataFrame(data=[[4, 15., 'A'], 52 | [4, 15., 'A'], 53 | ], 54 | columns=['amount', 'price', 'symbol'], 55 | index=dates_intraday[[0, 4]]) 56 | .rename_axis('dt', axis='index') 57 | ), 58 | ]) 59 | def test_groupby_consecutive(self, transactions, expected): 60 | grouped_txn = _groupby_consecutive(transactions) 61 | assert_frame_equal(grouped_txn.sort_index(axis='columns'), 62 | expected.sort_index(axis='columns')) 63 | 64 | @parameterized.expand([ 65 | # Simple round-trip 66 | (DataFrame(data=[[2, 10., 'A'], 67 | [-2, 15., 'A']], 68 | columns=['amount', 'price', 'symbol'], 69 | index=dates[:2]), 70 | DataFrame(data=[[dates[0], dates[1], 71 | Timedelta(days=1), 10., .5, 72 | True, 'A']], 73 | columns=['open_dt', 'close_dt', 74 | 'duration', 'pnl', 'rt_returns', 75 | 'long', 'symbol'], 76 | index=[0]) 77 | ), 78 | # Round-trip with left-over txn that shouldn't be counted 79 | (DataFrame(data=[[2, 10., 'A'], 80 | [2, 15., 'A'], 81 | [-9, 10., 'A']], 82 | columns=['amount', 'price', 'symbol'], 83 | index=dates[:3]), 84 | DataFrame(data=[[dates[0], dates[2], 85 | Timedelta(days=2), -10., -.2, 86 | True, 'A']], 87 | columns=['open_dt', 'close_dt', 88 | 'duration', 'pnl', 'rt_returns', 89 | 'long', 'symbol'], 90 | index=[0]) 91 | ), 92 | # Round-trip with sell that crosses 0 and should be split 93 | (DataFrame(data=[[2, 10., 'A'], 94 | [-4, 15., 'A'], 95 | [3, 20., 'A']], 96 | columns=['amount', 'price', 'symbol'], 97 | index=dates[:3]), 98 | DataFrame(data=[[dates[0], dates[1], 99 | Timedelta(days=1), 10., .5, 100 | True, 'A'], 101 | [dates[1], dates[2], 102 | Timedelta(days=1), 103 | -10, (-1. 
/ 3), 104 | False, 'A']], 105 | columns=['open_dt', 'close_dt', 106 | 'duration', 'pnl', 'rt_returns', 107 | 'long', 'symbol'], 108 | index=[0, 1]) 109 | ), 110 | # Round-trip that does not cross 0 111 | (DataFrame(data=[[4, 10., 'A'], 112 | [-2, 15., 'A'], 113 | [2, 20., 'A']], 114 | columns=['amount', 'price', 'symbol'], 115 | index=dates[:3]), 116 | DataFrame(data=[[dates[0], dates[1], 117 | Timedelta(days=1), 10., .5, 118 | True, 'A']], 119 | columns=['open_dt', 'close_dt', 120 | 'duration', 'pnl', 'rt_returns', 121 | 'long', 'symbol'], 122 | index=[0]) 123 | ), 124 | # Round-trip that does not cross 0 and has portfolio value 125 | (DataFrame(data=[[4, 10., 'A'], 126 | [-2, 15., 'A'], 127 | [2, 20., 'A']], 128 | columns=['amount', 'price', 'symbol'], 129 | index=dates[:3]), 130 | DataFrame(data=[[dates[0], dates[1], 131 | Timedelta(days=1), 10., .5, 132 | True, 'A', 0.1]], 133 | columns=['open_dt', 'close_dt', 134 | 'duration', 'pnl', 'rt_returns', 135 | 'long', 'symbol', 'returns'], 136 | index=[0]), 137 | Series([100., 100., 100.], index=dates[:3]), 138 | ), 139 | 140 | ]) 141 | def test_extract_round_trips(self, transactions, expected, 142 | portfolio_value=None): 143 | round_trips = extract_round_trips(transactions, 144 | portfolio_value=portfolio_value) 145 | 146 | assert_frame_equal(round_trips.sort_index(axis='columns'), 147 | expected.sort_index(axis='columns')) 148 | 149 | def test_add_closing_trades(self): 150 | dates = date_range(start='2015-01-01', periods=20) 151 | transactions = DataFrame(data=[[2, 10, 'A'], 152 | [-5, 10, 'A'], 153 | [-1, 10, 'B']], 154 | columns=['amount', 'price', 'symbol'], 155 | index=dates[:3]) 156 | positions = DataFrame(data=[[20, 10, 0], 157 | [-30, 10, 30], 158 | [-60, 0, 30]], 159 | columns=['A', 'B', 'cash'], 160 | index=dates[:3]) 161 | 162 | expected_ix = dates[:3].append(DatetimeIndex([dates[2] + 163 | Timedelta(seconds=1)])) 164 | expected = DataFrame(data=[[2, 10, 'A'], 165 | [-5, 10, 'A'], 166 | [-1, 10., 'B'], 167 | [3, 20., 'A']], 168 | columns=['amount', 'price', 'symbol'], 169 | index=expected_ix) 170 | 171 | transactions_closed = add_closing_transactions(positions, transactions) 172 | assert_frame_equal(transactions_closed, expected) 173 | 174 | def test_txn_pnl_matches_round_trip_pnl(self): 175 | __location__ = os.path.realpath( 176 | os.path.join(os.getcwd(), os.path.dirname(__file__))) 177 | 178 | test_txn = read_csv(gzip.open( 179 | __location__ + '/test_data/test_txn.csv.gz'), 180 | index_col=0, parse_dates=True) 181 | test_pos = read_csv(gzip.open( 182 | __location__ + '/test_data/test_pos.csv.gz'), 183 | index_col=0, parse_dates=True) 184 | 185 | transactions_closed = add_closing_transactions(test_pos, test_txn) 186 | transactions_closed['txn_dollars'] = transactions_closed.amount * \ 187 | -1. 
* transactions_closed.price 188 | round_trips = extract_round_trips(transactions_closed) 189 | 190 | self.assertAlmostEqual(round_trips.pnl.sum(), 191 | transactions_closed.txn_dollars.sum()) 192 | -------------------------------------------------------------------------------- /pyfolio/tests/test_tears.py: -------------------------------------------------------------------------------- 1 | from matplotlib.testing.decorators import cleanup 2 | 3 | from unittest import TestCase 4 | from nose_parameterized import parameterized 5 | 6 | import os 7 | import gzip 8 | 9 | from pandas import read_csv 10 | 11 | from pyfolio.utils import (to_utc, to_series) 12 | from pyfolio.tears import (create_full_tear_sheet, 13 | create_simple_tear_sheet, 14 | create_returns_tear_sheet, 15 | create_position_tear_sheet, 16 | create_txn_tear_sheet, 17 | create_round_trip_tear_sheet, 18 | create_interesting_times_tear_sheet,) 19 | 20 | 21 | class PositionsTestCase(TestCase): 22 | __location__ = os.path.realpath( 23 | os.path.join(os.getcwd(), os.path.dirname(__file__))) 24 | 25 | test_returns = read_csv( 26 | gzip.open( 27 | __location__ + '/test_data/test_returns.csv.gz'), 28 | index_col=0, parse_dates=True) 29 | test_returns = to_series(to_utc(test_returns)) 30 | test_txn = to_utc(read_csv( 31 | gzip.open( 32 | __location__ + '/test_data/test_txn.csv.gz'), 33 | index_col=0, parse_dates=True)) 34 | test_pos = to_utc(read_csv( 35 | gzip.open(__location__ + '/test_data/test_pos.csv.gz'), 36 | index_col=0, parse_dates=True)) 37 | 38 | @parameterized.expand([({},), 39 | ({'slippage': 1},), 40 | ({'live_start_date': test_returns.index[-20]},), 41 | ({'round_trips': True},), 42 | ({'hide_positions': True},), 43 | ({'cone_std': 1},), 44 | ({'bootstrap': True},), 45 | ]) 46 | @cleanup 47 | def test_create_full_tear_sheet_breakdown(self, kwargs): 48 | create_full_tear_sheet(self.test_returns, 49 | positions=self.test_pos, 50 | transactions=self.test_txn, 51 | benchmark_rets=self.test_returns, 52 | **kwargs 53 | ) 54 | 55 | @parameterized.expand([({},), 56 | ({'slippage': 1},), 57 | ({'live_start_date': test_returns.index[-20]},), 58 | ]) 59 | @cleanup 60 | def test_create_simple_tear_sheet_breakdown(self, kwargs): 61 | create_simple_tear_sheet(self.test_returns, 62 | positions=self.test_pos, 63 | transactions=self.test_txn, 64 | **kwargs 65 | ) 66 | 67 | @parameterized.expand([({},), 68 | ({'live_start_date': 69 | test_returns.index[-20]},), 70 | ({'cone_std': 1},), 71 | ({'bootstrap': True},), 72 | ]) 73 | @cleanup 74 | def test_create_returns_tear_sheet_breakdown(self, kwargs): 75 | create_returns_tear_sheet(self.test_returns, 76 | benchmark_rets=self.test_returns, 77 | **kwargs 78 | ) 79 | 80 | @parameterized.expand([({},), 81 | ({'hide_positions': True},), 82 | ({'show_and_plot_top_pos': 0},), 83 | ({'show_and_plot_top_pos': 1},), 84 | ]) 85 | @cleanup 86 | def test_create_position_tear_sheet_breakdown(self, kwargs): 87 | create_position_tear_sheet(self.test_returns, 88 | self.test_pos, 89 | **kwargs 90 | ) 91 | 92 | @parameterized.expand([({},), 93 | ({'unadjusted_returns': test_returns},), 94 | ]) 95 | @cleanup 96 | def test_create_txn_tear_sheet_breakdown(self, kwargs): 97 | create_txn_tear_sheet(self.test_returns, 98 | self.test_pos, 99 | self.test_txn, 100 | **kwargs 101 | ) 102 | 103 | @parameterized.expand([({},), 104 | ({'sector_mappings': {}},), 105 | ]) 106 | @cleanup 107 | def test_create_round_trip_tear_sheet_breakdown(self, kwargs): 108 | create_round_trip_tear_sheet(self.test_returns, 109 | 
self.test_pos, 110 | self.test_txn, 111 | **kwargs 112 | ) 113 | 114 | @parameterized.expand([({},), 115 | ({'legend_loc': 1},), 116 | ]) 117 | @cleanup 118 | def test_create_interesting_times_tear_sheet_breakdown(self, 119 | kwargs): 120 | create_interesting_times_tear_sheet(self.test_returns, 121 | self.test_returns, 122 | **kwargs 123 | ) 124 | -------------------------------------------------------------------------------- /pyfolio/tests/test_timeseries.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | 3 | import os 4 | from unittest import TestCase 5 | from nose_parameterized import parameterized 6 | from numpy.testing import assert_allclose, assert_almost_equal 7 | from pandas.util.testing import assert_series_equal 8 | 9 | import numpy as np 10 | import pandas as pd 11 | 12 | from .. import timeseries 13 | from pyfolio.utils import to_utc, to_series 14 | import gzip 15 | 16 | 17 | DECIMAL_PLACES = 8 18 | 19 | 20 | class TestDrawdown(TestCase): 21 | drawdown_list = np.array( 22 | [100, 90, 75] 23 | ) / 10. 24 | dt = pd.date_range('2000-1-3', periods=3, freq='D') 25 | 26 | drawdown_serie = pd.Series(drawdown_list, index=dt) 27 | 28 | @parameterized.expand([ 29 | (drawdown_serie,) 30 | ]) 31 | def test_get_max_drawdown_begins_first_day(self, px): 32 | rets = px.pct_change() 33 | drawdowns = timeseries.gen_drawdown_table(rets, top=1) 34 | self.assertEqual(drawdowns.loc[0, 'Net drawdown in %'], 25) 35 | 36 | drawdown_list = np.array( 37 | [100, 110, 120, 150, 180, 200, 100, 120, 38 | 160, 180, 200, 300, 400, 500, 600, 800, 39 | 900, 1000, 650, 600] 40 | ) / 10. 41 | dt = pd.date_range('2000-1-3', periods=20, freq='D') 42 | 43 | drawdown_serie = pd.Series(drawdown_list, index=dt) 44 | 45 | @parameterized.expand([ 46 | (drawdown_serie, 47 | pd.Timestamp('2000-01-08'), 48 | pd.Timestamp('2000-01-09'), 49 | pd.Timestamp('2000-01-13'), 50 | 50, 51 | pd.Timestamp('2000-01-20'), 52 | pd.Timestamp('2000-01-22'), 53 | None, 54 | 40 55 | ) 56 | ]) 57 | def test_gen_drawdown_table_relative( 58 | self, px, 59 | first_expected_peak, first_expected_valley, 60 | first_expected_recovery, first_net_drawdown, 61 | second_expected_peak, second_expected_valley, 62 | second_expected_recovery, second_net_drawdown 63 | ): 64 | 65 | rets = px.pct_change() 66 | 67 | drawdowns = timeseries.gen_drawdown_table(rets, top=2) 68 | 69 | self.assertEqual(np.round(drawdowns.loc[0, 'Net drawdown in %']), 70 | first_net_drawdown) 71 | self.assertEqual(drawdowns.loc[0, 'Peak date'], 72 | first_expected_peak) 73 | self.assertEqual(drawdowns.loc[0, 'Valley date'], 74 | first_expected_valley) 75 | self.assertEqual(drawdowns.loc[0, 'Recovery date'], 76 | first_expected_recovery) 77 | 78 | self.assertEqual(np.round(drawdowns.loc[1, 'Net drawdown in %']), 79 | second_net_drawdown) 80 | self.assertEqual(drawdowns.loc[1, 'Peak date'], 81 | second_expected_peak) 82 | self.assertEqual(drawdowns.loc[1, 'Valley date'], 83 | second_expected_valley) 84 | self.assertTrue(pd.isnull(drawdowns.loc[1, 'Recovery date'])) 85 | 86 | px_list_1 = np.array( 87 | [100, 120, 100, 80, 70, 110, 180, 150]) / 100. # Simple 88 | px_list_2 = np.array( 89 | [100, 120, 100, 80, 70, 80, 90, 90]) / 100. 
# Ends in drawdown 90 | dt = pd.date_range('2000-1-3', periods=8, freq='D') 91 | 92 | @parameterized.expand([ 93 | (pd.Series(px_list_1, 94 | index=dt), 95 | pd.Timestamp('2000-1-4'), 96 | pd.Timestamp('2000-1-7'), 97 | pd.Timestamp('2000-1-9')), 98 | (pd.Series(px_list_2, 99 | index=dt), 100 | pd.Timestamp('2000-1-4'), 101 | pd.Timestamp('2000-1-7'), 102 | None) 103 | ]) 104 | def test_get_max_drawdown( 105 | self, px, expected_peak, expected_valley, expected_recovery): 106 | rets = px.pct_change().iloc[1:] 107 | 108 | peak, valley, recovery = timeseries.get_max_drawdown(rets) 109 | # Need to use isnull because the result can be NaN, NaT, etc. 110 | self.assertTrue( 111 | pd.isnull(peak)) if expected_peak is None else self.assertEqual( 112 | peak, 113 | expected_peak) 114 | self.assertTrue( 115 | pd.isnull(valley)) if expected_valley is None else \ 116 | self.assertEqual( 117 | valley, 118 | expected_valley) 119 | self.assertTrue( 120 | pd.isnull(recovery)) if expected_recovery is None else \ 121 | self.assertEqual( 122 | recovery, 123 | expected_recovery) 124 | 125 | @parameterized.expand([ 126 | (pd.Series(px_list_2, 127 | index=dt), 128 | pd.Timestamp('2000-1-4'), 129 | pd.Timestamp('2000-1-7'), 130 | None, 131 | None), 132 | (pd.Series(px_list_1, 133 | index=dt), 134 | pd.Timestamp('2000-1-4'), 135 | pd.Timestamp('2000-1-7'), 136 | pd.Timestamp('2000-1-9'), 137 | 4) 138 | ]) 139 | def test_gen_drawdown_table(self, px, expected_peak, 140 | expected_valley, expected_recovery, 141 | expected_duration): 142 | rets = px.pct_change().iloc[1:] 143 | 144 | drawdowns = timeseries.gen_drawdown_table(rets, top=1) 145 | self.assertTrue( 146 | pd.isnull( 147 | drawdowns.loc[ 148 | 0, 149 | 'Peak date'])) if expected_peak is None \ 150 | else self.assertEqual(drawdowns.loc[0, 'Peak date'], 151 | expected_peak) 152 | self.assertTrue( 153 | pd.isnull( 154 | drawdowns.loc[0, 'Valley date'])) \ 155 | if expected_valley is None else self.assertEqual( 156 | drawdowns.loc[0, 'Valley date'], 157 | expected_valley) 158 | self.assertTrue( 159 | pd.isnull( 160 | drawdowns.loc[0, 'Recovery date'])) \ 161 | if expected_recovery is None else self.assertEqual( 162 | drawdowns.loc[0, 'Recovery date'], 163 | expected_recovery) 164 | self.assertTrue( 165 | pd.isnull(drawdowns.loc[0, 'Duration'])) \ 166 | if expected_duration is None else self.assertEqual( 167 | drawdowns.loc[0, 'Duration'], expected_duration) 168 | 169 | def test_drawdown_overlaps(self): 170 | rand = np.random.RandomState(1337) 171 | n_samples = 252 * 5 172 | spy_returns = pd.Series( 173 | rand.standard_t(3.1, n_samples), 174 | pd.date_range('2005-01-02', periods=n_samples), 175 | ) 176 | spy_drawdowns = timeseries.gen_drawdown_table( 177 | spy_returns, 178 | top=20).sort_values(by='Peak date') 179 | # Compare the recovery date of each drawdown with the peak of the next 180 | # Last pair might contain a NaT if drawdown didn't finish, so ignore it 181 | pairs = list(zip(spy_drawdowns['Recovery date'], 182 | spy_drawdowns['Peak date'].shift(-1)))[:-1] 183 | self.assertGreater(len(pairs), 0) 184 | for recovery, peak in pairs: 185 | if not pd.isnull(recovery): 186 | self.assertLessEqual(recovery, peak) 187 | 188 | @parameterized.expand([ 189 | (pd.Series(px_list_1, 190 | index=dt), 191 | 1, 192 | [(pd.Timestamp('2000-01-03 00:00:00'), 193 | pd.Timestamp('2000-01-03 00:00:00'), 194 | pd.Timestamp('2000-01-03 00:00:00'))]) 195 | ]) 196 | def test_top_drawdowns(self, returns, top, expected): 197 | self.assertEqual( 198 | timeseries.get_top_drawdowns( 199 | 
returns, 200 | top=top), 201 | expected) 202 | 203 | 204 | class TestVariance(TestCase): 205 | 206 | @parameterized.expand([ 207 | (1e7, 0.5, 1, 1, -10000000.0) 208 | ]) 209 | def test_var_cov_var_normal(self, P, c, mu, sigma, expected): 210 | self.assertEqual( 211 | timeseries.var_cov_var_normal( 212 | P, 213 | c, 214 | mu, 215 | sigma), 216 | expected) 217 | 218 | 219 | class TestNormalize(TestCase): 220 | dt = pd.date_range('2000-1-3', periods=8, freq='D') 221 | px_list = [1.0, 1.2, 1.0, 0.8, 0.7, 0.8, 0.8, 0.8] 222 | 223 | @parameterized.expand([ 224 | (pd.Series(np.array(px_list) * 100, index=dt), 225 | pd.Series(px_list, index=dt)) 226 | ]) 227 | def test_normalize(self, returns, expected): 228 | self.assertTrue(timeseries.normalize(returns).equals(expected)) 229 | 230 | 231 | class TestStats(TestCase): 232 | simple_rets = pd.Series( 233 | [0.1] * 3 + [0] * 497, 234 | pd.date_range( 235 | '2000-1-3', 236 | periods=500, 237 | freq='D')) 238 | 239 | simple_week_rets = pd.Series( 240 | [0.1] * 3 + [0] * 497, 241 | pd.date_range( 242 | '2000-1-31', 243 | periods=500, 244 | freq='W')) 245 | 246 | simple_month_rets = pd.Series( 247 | [0.1] * 3 + [0] * 497, 248 | pd.date_range( 249 | '2000-1-31', 250 | periods=500, 251 | freq='M')) 252 | 253 | simple_benchmark = pd.Series( 254 | [0.03] * 4 + [0] * 496, 255 | pd.date_range( 256 | '2000-1-1', 257 | periods=500, 258 | freq='D')) 259 | px_list = np.array( 260 | [10, -10, 10]) / 100. # Ends in drawdown 261 | dt = pd.date_range('2000-1-3', periods=3, freq='D') 262 | 263 | px_list_2 = [1.0, 1.2, 1.0, 0.8, 0.7, 0.8, 0.8, 0.8] 264 | dt_2 = pd.date_range('2000-1-3', periods=8, freq='D') 265 | 266 | @parameterized.expand([ 267 | (simple_rets[:5], 2, [np.nan, np.inf, np.inf, 11.224972160321, np.inf]) 268 | ]) 269 | def test_sharpe_2(self, returns, rolling_sharpe_window, expected): 270 | np.testing.assert_array_almost_equal( 271 | timeseries.rolling_sharpe(returns, 272 | rolling_sharpe_window).values, 273 | np.asarray(expected)) 274 | 275 | @parameterized.expand([ 276 | (simple_rets[:5], simple_benchmark, 2, 0) 277 | ]) 278 | def test_beta(self, returns, benchmark_rets, rolling_window, expected): 279 | actual = timeseries.rolling_beta( 280 | returns, 281 | benchmark_rets, 282 | rolling_window=rolling_window, 283 | ).values.tolist()[2] 284 | 285 | np.testing.assert_almost_equal(actual, expected) 286 | 287 | 288 | class TestCone(TestCase): 289 | def test_bootstrap_cone_against_linear_cone_normal_returns(self): 290 | random_seed = 100 291 | np.random.seed(random_seed) 292 | days_forward = 200 293 | cone_stdevs = (1., 1.5, 2.) 
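# The closed-form cone constructed below assumes i.i.d. normal daily
# returns: the midline compounds the sample mean, (1 + mean)**t, and each
# band lies s sample standard deviations away, scaled by the midline and
# sqrt(t); the bootstrapped cone is then required to agree within rtol=.005.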
294 | mu = .005 295 | sigma = .002 296 | rets = pd.Series(np.random.normal(mu, sigma, 10000)) 297 | 298 | midline = np.cumprod(1 + (rets.mean() * np.ones(days_forward))) 299 | stdev = rets.std() * midline * np.sqrt(np.arange(days_forward)+1) 300 | 301 | normal_cone = pd.DataFrame(columns=pd.Float64Index([])) 302 | for s in cone_stdevs: 303 | normal_cone[s] = midline + s * stdev 304 | normal_cone[-s] = midline - s * stdev 305 | 306 | bootstrap_cone = timeseries.forecast_cone_bootstrap( 307 | rets, days_forward, cone_stdevs, starting_value=1, 308 | random_seed=random_seed, num_samples=10000) 309 | 310 | for col, vals in bootstrap_cone.iteritems(): 311 | expected = normal_cone[col].values 312 | assert_allclose(vals.values, expected, rtol=.005) 313 | 314 | 315 | class TestBootstrap(TestCase): 316 | @parameterized.expand([ 317 | (0., 1., 1000), 318 | (1., 2., 500), 319 | (-1., 0.1, 10), 320 | ]) 321 | def test_calc_bootstrap(self, true_mean, true_sd, n): 322 | """Compare bootstrap distribution of the mean to sampling distribution 323 | of the mean. 324 | 325 | """ 326 | np.random.seed(123) 327 | func = np.mean 328 | returns = pd.Series((np.random.randn(n) * true_sd) + 329 | true_mean) 330 | 331 | samples = timeseries.calc_bootstrap(func, returns, 332 | n_samples=10000) 333 | 334 | # Calculate statistics of sampling distribution of the mean 335 | mean_of_mean = np.mean(returns) 336 | sd_of_mean = np.std(returns) / np.sqrt(n) 337 | 338 | assert_almost_equal( 339 | np.mean(samples), 340 | mean_of_mean, 341 | 3, 342 | 'Mean of bootstrap does not match theoretical mean of ' 343 | 'sampling distribution') 344 | 345 | assert_almost_equal( 346 | np.std(samples), 347 | sd_of_mean, 348 | 3, 349 | 'SD of bootstrap does not match theoretical SD of ' 350 | 'sampling distribution') 351 | 352 | 353 | class TestGrossLev(TestCase): 354 | __location__ = os.path.realpath( 355 | os.path.join(os.getcwd(), os.path.dirname(__file__))) 356 | 357 | test_pos = to_utc(pd.read_csv( 358 | gzip.open(__location__ + '/test_data/test_pos.csv.gz'), 359 | index_col=0, parse_dates=True)) 360 | test_gross_lev = pd.read_csv( 361 | gzip.open( 362 | __location__ + '/test_data/test_gross_lev.csv.gz'), 363 | index_col=0, parse_dates=True) 364 | test_gross_lev = to_series(to_utc(test_gross_lev)) 365 | 366 | def test_gross_lev_calculation(self): 367 | assert_series_equal( 368 | timeseries.gross_lev(self.test_pos)['2004-02-01':], 369 | self.test_gross_lev['2004-02-01':], check_names=False) 370 | -------------------------------------------------------------------------------- /pyfolio/tests/test_txn.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from pandas import ( 4 | Series, 5 | DataFrame, 6 | date_range 7 | ) 8 | from pandas.util.testing import (assert_series_equal) 9 | 10 | from pyfolio.txn import (get_turnover, 11 | adjust_returns_for_slippage) 12 | 13 | 14 | class TransactionsTestCase(TestCase): 15 | 16 | def test_get_turnover(self): 17 | """ 18 | Tests turnover using a 20-day period. 19 | 20 | With no transactions, the turnover should be 0. 21 | 22 | With 200% of the AGB traded each day, the daily 23 | turnover rate should be 2.0.
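        (get_turnover averages the previous and current AGB in the denominator, so trading $20 a day against positions that alternate between $10 and $40 gives 20 / mean(10, 40) = 0.8 on most days.)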
24 | """ 25 | dates = date_range(start='2015-01-01', freq='D', periods=20) 26 | 27 | # In this test, there is one sid (0) and a cash column 28 | positions = DataFrame([[10.0, 10.0]]*len(dates), 29 | columns=[0, 'cash'], index=dates) 30 | 31 | # Set every other non-cash position to 40 32 | positions[0][::2] = 40 33 | 34 | transactions = DataFrame(data=[], 35 | columns=['sid', 'amount', 'price', 'symbol'], 36 | index=dates) 37 | 38 | # Test with no transactions 39 | expected = Series([0.0]*len(dates), index=dates) 40 | result = get_turnover(positions, transactions) 41 | assert_series_equal(result, expected) 42 | 43 | transactions = DataFrame(data=[[1, 1, 10, 0]]*len(dates) + 44 | [[2, -1, 10, 0]]*len(dates), 45 | columns=['sid', 'amount', 'price', 'symbol'], 46 | index=dates.append(dates)).sort_index() 47 | 48 | # Turnover is higher on day 1, because the day 0 AGB is set to zero 49 | # in get_turnover. On most days, we get 0.8 because we have 20 50 | # transacted and mean(10, 40) = 25, so 20/25. 51 | expected = Series([1.0] + [0.8] * (len(dates) - 1), index=dates) 52 | result = get_turnover(positions, transactions) 53 | 54 | assert_series_equal(result, expected) 55 | 56 | # Test with denominator = 'portfolio_value' 57 | result = get_turnover(positions, transactions, 58 | denominator='portfolio_value') 59 | 60 | # Our portfolio value alternates between $20 and $50 so turnover 61 | # should alternate between 20/20 = 1.0 and 20/50 = 0.4. 62 | expected = Series([0.4, 1.0] * (int((len(dates) - 1) / 2) + 1), 63 | index=dates) 64 | 65 | assert_series_equal(result, expected) 66 | 67 | def test_adjust_returns_for_slippage(self): 68 | dates = date_range(start='2015-01-01', freq='D', periods=20) 69 | 70 | positions = DataFrame([[0.0, 10.0]]*len(dates), 71 | columns=[0, 'cash'], index=dates) 72 | 73 | # 100% total, 50% average daily turnover 74 | transactions = DataFrame(data=[[1, 1, 10, 'A']]*len(dates), 75 | columns=['sid', 'amount', 'price', 'symbol'], 76 | index=dates) 77 | 78 | returns = Series([0.05]*len(dates), index=dates) 79 | # 10 bps = 0.1% slippage per dollar traded 80 | slippage_bps = 10 81 | expected = Series([0.049]*len(dates), index=dates) 82 | 83 | result = adjust_returns_for_slippage(returns, positions, 84 | transactions, slippage_bps) 85 | 86 | assert_series_equal(result, expected) 87 | -------------------------------------------------------------------------------- /pyfolio/txn.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2016 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | from __future__ import division 16 | 17 | import pandas as pd 18 | 19 | 20 | def map_transaction(txn): 21 | """ 22 | Maps a single transaction row to a dictionary. 23 | 24 | Parameters 25 | ---------- 26 | txn : pd.DataFrame 27 | A single transaction object to convert to a dictionary. 28 | 29 | Returns 30 | ------- 31 | dict 32 | Mapped transaction.
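        Handles both zipline-style rows, where txn['sid'] is a dict with 'sid' and 'symbol' keys, and flat rows, where txn['sid'] is a plain identifier that is reused as the symbol. As a hypothetical example, a row like {'sid': {'sid': 24, 'symbol': 'AAPL'}, ...} maps to {'sid': 24, 'symbol': 'AAPL', ...}.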
33 | """ 34 | 35 | if isinstance(txn['sid'], dict): 36 | sid = txn['sid']['sid'] 37 | symbol = txn['sid']['symbol'] 38 | else: 39 | sid = txn['sid'] 40 | symbol = txn['sid'] 41 | 42 | return {'sid': sid, 43 | 'symbol': symbol, 44 | 'price': txn['price'], 45 | 'order_id': txn['order_id'], 46 | 'amount': txn['amount'], 47 | 'commission': txn['commission'], 48 | 'dt': txn['dt']} 49 | 50 | 51 | def make_transaction_frame(transactions): 52 | """ 53 | Formats a transaction DataFrame. 54 | 55 | Parameters 56 | ---------- 57 | transactions : pd.DataFrame 58 | Contains improperly formatted transactional data. 59 | 60 | Returns 61 | ------- 62 | df : pd.DataFrame 63 | Daily transaction volume and dollar amount. 64 | - See full explanation in tears.create_full_tear_sheet. 65 | """ 66 | 67 | transaction_list = [] 68 | for dt in transactions.index: 69 | txns = transactions.loc[dt] 70 | if len(txns) == 0: 71 | continue 72 | 73 | for txn in txns: 74 | txn = map_transaction(txn) 75 | transaction_list.append(txn) 76 | df = pd.DataFrame(sorted(transaction_list, key=lambda x: x['dt'])) 77 | df['txn_dollars'] = -df['amount'] * df['price'] 78 | 79 | df.index = list(map(pd.Timestamp, df.dt.values)) 80 | return df 81 | 82 | 83 | def get_txn_vol(transactions): 84 | """ 85 | Extract daily transaction data from a set of transaction objects. 86 | 87 | Parameters 88 | ---------- 89 | transactions : pd.DataFrame 90 | Time series containing one row per symbol (and potentially 91 | duplicate datetime indices) and columns for amount and 92 | price. 93 | 94 | Returns 95 | ------- 96 | pd.DataFrame 97 | Daily transaction volume and number of shares. 98 | - See full explanation in tears.create_full_tear_sheet. 99 | """ 100 | 101 | txn_norm = transactions.copy() 102 | txn_norm.index = txn_norm.index.normalize() 103 | amounts = txn_norm.amount.abs() 104 | prices = txn_norm.price 105 | values = amounts * prices 106 | daily_amounts = amounts.groupby(amounts.index).sum() 107 | daily_values = values.groupby(values.index).sum() 108 | daily_amounts.name = "txn_shares" 109 | daily_values.name = "txn_volume" 110 | return pd.concat([daily_values, daily_amounts], axis=1) 111 | 112 | 113 | def adjust_returns_for_slippage(returns, positions, transactions, 114 | slippage_bps): 115 | """ 116 | Apply a slippage penalty for every dollar traded. 117 | 118 | Parameters 119 | ---------- 120 | returns : pd.Series 121 | Daily returns of the strategy, noncumulative. 122 | - See full explanation in create_full_tear_sheet. 123 | positions : pd.DataFrame 124 | Daily net position values. 125 | - See full explanation in create_full_tear_sheet. 126 | transactions : pd.DataFrame 127 | Prices and amounts of executed trades. One row per trade. 128 | - See full explanation in create_full_tear_sheet. 129 | slippage_bps: int/float 130 | Basis points of slippage to apply. 131 | 132 | Returns 133 | ------- 134 | pd.Series 135 | Time series of daily returns, adjusted for slippage.
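    Notes
    -----
    One basis point is 0.0001, so the daily dollar penalty is traded_value * 0.0001 * slippage_bps, and returns are rescaled by the ratio of penalized PnL to raw PnL. For example (mirroring test_adjust_returns_for_slippage above), slippage_bps=10 on a $10 portfolio earning 5% while trading $10 costs 10 * 0.001 = $0.01 of the $0.50 PnL, leaving a 4.9% adjusted return.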
136 | """ 137 | 138 | slippage = 0.0001 * slippage_bps 139 | portfolio_value = positions.sum(axis=1) 140 | pnl = portfolio_value * returns 141 | traded_value = get_txn_vol(transactions).txn_volume 142 | slippage_dollars = traded_value * slippage 143 | adjusted_pnl = pnl.add(-slippage_dollars, fill_value=0) 144 | adjusted_returns = returns * adjusted_pnl / pnl 145 | 146 | return adjusted_returns 147 | 148 | 149 | def get_turnover(positions, transactions, denominator='AGB'): 150 | """ 151 | - Value of purchases and sales divided 152 | by either the actual gross book or the portfolio value 153 | for the time step. 154 | 155 | Parameters 156 | ---------- 157 | positions : pd.DataFrame 158 | Contains daily position values including cash. 159 | - See full explanation in tears.create_full_tear_sheet 160 | transactions : pd.DataFrame 161 | Prices and amounts of executed trades. One row per trade. 162 | - See full explanation in tears.create_full_tear_sheet 163 | denominator : str, optional 164 | Either 'AGB' or 'portfolio_value', default AGB. 165 | - AGB (Actual gross book) is the gross market 166 | value (GMV) of the specific algo being analyzed. 167 | Swapping out an entire portfolio of stocks for 168 | another will yield 200% turnover, not 100%, since 169 | transactions are being made for both sides. 170 | - We use average of the previous and the current end-of-period 171 | AGB to avoid singularities when trading only into or 172 | out of an entire book in one trading period. 173 | - portfolio_value is the total value of the algo's 174 | positions end-of-period, including cash. 175 | 176 | Returns 177 | ------- 178 | turnover_rate : pd.Series 179 | timeseries of portfolio turnover rates. 180 | """ 181 | 182 | txn_vol = get_txn_vol(transactions) 183 | traded_value = txn_vol.txn_volume 184 | 185 | if denominator == 'AGB': 186 | # Actual gross book is the same thing as the algo's GMV 187 | # We want our denom to be avg(AGB previous, AGB current) 188 | AGB = positions.drop('cash', axis=1).abs().sum(axis=1) 189 | denom = AGB.rolling(2).mean() 190 | 191 | # Since the first value of pd.rolling returns NaN, we 192 | # set our "day 0" AGB to 0. 193 | denom.iloc[0] = AGB.iloc[0] / 2 194 | elif denominator == 'portfolio_value': 195 | denom = positions.sum(axis=1) 196 | else: 197 | raise ValueError( 198 | "Unexpected value for denominator '{}'. The " 199 | "denominator parameter must be either 'AGB'" 200 | " or 'portfolio_value'.".format(denominator) 201 | ) 202 | 203 | denom.index = denom.index.normalize() 204 | turnover = traded_value.div(denom, axis='index') 205 | turnover = turnover.fillna(0) 206 | return turnover 207 | -------------------------------------------------------------------------------- /pyfolio/utils.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2018 Quantopian, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | from __future__ import division 17 | 18 | import warnings 19 | 20 | from itertools import cycle 21 | from matplotlib.pyplot import cm 22 | import numpy as np 23 | import pandas as pd 24 | from IPython.display import display, HTML 25 | 26 | import empyrical.utils 27 | 28 | from . import pos 29 | from . import txn 30 | 31 | APPROX_BDAYS_PER_MONTH = 21 32 | APPROX_BDAYS_PER_YEAR = 252 33 | 34 | MONTHS_PER_YEAR = 12 35 | WEEKS_PER_YEAR = 52 36 | 37 | MM_DISPLAY_UNIT = 1000000. 38 | 39 | DAILY = 'daily' 40 | WEEKLY = 'weekly' 41 | MONTHLY = 'monthly' 42 | YEARLY = 'yearly' 43 | 44 | ANNUALIZATION_FACTORS = { 45 | DAILY: APPROX_BDAYS_PER_YEAR, 46 | WEEKLY: WEEKS_PER_YEAR, 47 | MONTHLY: MONTHS_PER_YEAR 48 | } 49 | 50 | COLORMAP = 'Paired' 51 | COLORS = ['#e6194b', '#3cb44b', '#ffe119', '#0082c8', '#f58231', 52 | '#911eb4', '#46f0f0', '#f032e6', '#d2f53c', '#fabebe', 53 | '#008080', '#e6beff', '#aa6e28', '#800000', '#aaffc3', 54 | '#808000', '#ffd8b1', '#000080', '#808080'] 55 | 56 | 57 | def one_dec_places(x, pos): 58 | """ 59 | Adds 1/10th decimal to plot ticks. 60 | """ 61 | 62 | return '%.1f' % x 63 | 64 | 65 | def two_dec_places(x, pos): 66 | """ 67 | Adds 1/100th decimal to plot ticks. 68 | """ 69 | 70 | return '%.2f' % x 71 | 72 | 73 | def percentage(x, pos): 74 | """ 75 | Adds percentage sign to plot ticks. 76 | """ 77 | 78 | return '%.0f%%' % x 79 | 80 | 81 | def format_asset(asset): 82 | """ 83 | If zipline asset objects are used, we want to print them out prettily 84 | within the tear sheet. This function should only be applied directly 85 | before displaying. 86 | """ 87 | 88 | try: 89 | import zipline.assets 90 | except ImportError: 91 | return asset 92 | 93 | if isinstance(asset, zipline.assets.Asset): 94 | return asset.symbol 95 | else: 96 | return asset 97 | 98 | 99 | def vectorize(func): 100 | """ 101 | Decorator so that functions can be written to work on Series but 102 | may still be called with DataFrames. 103 | """ 104 | 105 | def wrapper(df, *args, **kwargs): 106 | if df.ndim == 1: 107 | return func(df, *args, **kwargs) 108 | elif df.ndim == 2: 109 | return df.apply(func, *args, **kwargs) 110 | 111 | return wrapper 112 | 113 | 114 | def extract_rets_pos_txn_from_zipline(backtest): 115 | """ 116 | Extract returns, positions, transactions and leverage from the 117 | backtest data structure returned by zipline.TradingAlgorithm.run(). 118 | 119 | The returned data structures are in a format compatible with the 120 | rest of pyfolio and can be directly passed to 121 | e.g. tears.create_full_tear_sheet(). 122 | 123 | Parameters 124 | ---------- 125 | backtest : pd.DataFrame 126 | DataFrame returned by zipline.TradingAlgorithm.run() 127 | 128 | Returns 129 | ------- 130 | returns : pd.Series 131 | Daily returns of strategy. 132 | - See full explanation in tears.create_full_tear_sheet. 133 | positions : pd.DataFrame 134 | Daily net position values. 135 | - See full explanation in tears.create_full_tear_sheet. 136 | transactions : pd.DataFrame 137 | Prices and amounts of executed trades. One row per trade. 138 | - See full explanation in tears.create_full_tear_sheet. 
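        Gross leverage is not among the return values; if needed, it can be derived from the returned positions, e.g. with pyfolio.timeseries.gross_lev(positions).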
139 | 140 | 141 | Example (on the Quantopian research platform) 142 | --------------------------------------------- 143 | >>> backtest = my_algo.run() 144 | >>> returns, positions, transactions = \ 145 | ... pyfolio.utils.extract_rets_pos_txn_from_zipline(backtest) 146 | >>> pyfolio.tears.create_full_tear_sheet(returns, 147 | ... positions, transactions) 148 | """ 149 | 150 | backtest.index = backtest.index.normalize() 151 | if backtest.index.tzinfo is None: 152 | backtest.index = backtest.index.tz_localize('UTC') 153 | returns = backtest.returns 154 | raw_positions = [] 155 | for dt, pos_row in backtest.positions.iteritems(): 156 | df = pd.DataFrame(pos_row) 157 | df.index = [dt] * len(df) 158 | raw_positions.append(df) 159 | if not raw_positions: 160 | raise ValueError("The backtest does not have any positions.") 161 | positions = pd.concat(raw_positions) 162 | positions = pos.extract_pos(positions, backtest.ending_cash) 163 | transactions = txn.make_transaction_frame(backtest.transactions) 164 | if transactions.index.tzinfo is None: 165 | transactions.index = transactions.index.tz_localize('utc') 166 | 167 | return returns, positions, transactions 168 | 169 | 170 | def print_table(table, 171 | name=None, 172 | float_format=None, 173 | formatters=None, 174 | header_rows=None): 175 | """ 176 | Pretty print a pandas DataFrame. 177 | 178 | Uses HTML output if running inside Jupyter Notebook, otherwise 179 | formatted text output. 180 | 181 | Parameters 182 | ---------- 183 | table : pandas.Series or pandas.DataFrame 184 | Table to pretty-print. 185 | name : str, optional 186 | Table name to display in upper left corner. 187 | float_format : function, optional 188 | Formatter to use for displaying table elements, passed as the 189 | `float_format` arg to pd.DataFrame.to_html. 190 | E.g. `'{0:.2%}'.format` for displaying 1 as '100.00%'. 191 | formatters : list or dict, optional 192 | Formatters to use by column, passed as the `formatters` arg to 193 | pd.DataFrame.to_html. 194 | header_rows : dict, optional 195 | Extra rows to display at the top of the table. 196 | """ 197 | 198 | if isinstance(table, pd.Series): 199 | table = pd.DataFrame(table) 200 | 201 | if name is not None: 202 | table.columns.name = name 203 | 204 | html = table.to_html(float_format=float_format, formatters=formatters) 205 | 206 | if header_rows is not None: 207 | # Count the number of columns for the text to span 208 | n_cols = html.split('<thead>')[1].split('</thead>')[0].count('