├── .flake8
├── .gitattributes
├── .github
│   ├── ISSUE_TEMPLATE.md
│   ├── dependabot.yml
│   └── workflows
│       ├── build_wheels.yml
│       ├── conda_package.yml
│       ├── test_wheels.yml
│       └── unit_tests.yml
├── .gitignore
├── .pre-commit-config.yaml
├── LICENSE
├── MANIFEST.in
├── README.md
├── WHATSNEW.md
├── build_and_deploy_docs.sh
├── conda
│   └── recipe
│       └── meta.yaml
├── docs
│   ├── Makefile
│   ├── deploy.py
│   ├── make.bat
│   └── source
│       ├── api-reference.rst
│       ├── conf.py
│       ├── index.rst
│       └── notebooks
│           ├── results.pickle
│           ├── round_trip_tear_sheet_example.ipynb
│           ├── sector_mappings_example.ipynb
│           ├── single_stock_example.ipynb
│           ├── slippage_example.ipynb
│           └── zipline_algo_example.ipynb
├── mkdocs.yml
├── pyproject.toml
├── src
│   └── pyfolio
│       ├── __init__.py
│       ├── capacity.py
│       ├── deprecate.py
│       ├── examples
│       │   ├── results.pickle
│       │   ├── round_trip_tear_sheet_example.ipynb
│       │   ├── sector_mappings_example.ipynb
│       │   ├── single_stock_example.ipynb
│       │   ├── slippage_example.ipynb
│       │   └── zipline_algo_example.ipynb
│       ├── interesting_periods.py
│       ├── ipycompat.py
│       ├── perf_attrib.py
│       ├── plotting.py
│       ├── pos.py
│       ├── round_trips.py
│       ├── tears.py
│       ├── timeseries.py
│       ├── txn.py
│       └── utils.py
└── tests
    ├── __init__.py
    ├── matplotlibrc
    ├── test_capacity.py
    ├── test_data
    │   ├── factor_loadings.csv
    │   ├── factor_returns.csv
    │   ├── intercepts.csv
    │   ├── positions.csv
    │   ├── residuals.csv
    │   ├── returns.csv
    │   ├── test_LMCAP.csv
    │   ├── test_LT_MOMENTUM.csv
    │   ├── test_MACDSignal.csv
    │   ├── test_VLTY.csv
    │   ├── test_caps.csv
    │   ├── test_gross_lev.csv.gz
    │   ├── test_pos.csv.gz
    │   ├── test_returns.csv.gz
    │   ├── test_sectors.csv
    │   ├── test_shares_held.csv
    │   ├── test_txn.csv.gz
    │   └── test_volumes.csv
    ├── test_perf_attrib.py
    ├── test_pos.py
    ├── test_round_trips.py
    ├── test_tears.py
    ├── test_timeseries.py
    └── test_txn.py
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | exclude =
3 | .git,
4 | .pytest_cache,
5 | conda,
6 | _sources,
7 | __pycache__,
8 | docs/source/conf.py,
9 | src/pyfolio/_version.py
10 | max-line-length = 88
11 | max-complexity = 18
12 | select = B,C,E,F,W,T4,B9
13 | ignore = E203, E266, E501, W503, F403, F401, E231
14 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | src/pyfolio/_version.py export-subst
2 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## Problem Description
2 |
3 | **Please provide a minimal, self-contained, and reproducible example:**
4 | ```python
5 | [Paste code here]
6 | ```
7 |
8 | **Please provide the full traceback:**
9 | ```python
10 | [Paste traceback here]
11 | ```
12 |
13 | **Please provide any additional information below:**
14 |
15 |
16 | ## Versions
17 |
18 | * Pyfolio version:
19 | * Python version:
20 | * Pandas version:
21 | * Matplotlib version:
22 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
5 |
6 | version: 2
7 | updates:
8 | # Maintain dependencies for GitHub Actions
9 | - package-ecosystem: "github-actions"
10 | # Workflow files stored in the default location of `.github/workflows`
11 | directory: "/"
12 | schedule:
13 | interval: "daily"
14 | open-pull-requests-limit: 10
15 |
--------------------------------------------------------------------------------
/.github/workflows/build_wheels.yml:
--------------------------------------------------------------------------------
1 | name: PyPI
2 |
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | target:
7 | type: choice
8 | description: 'Package Index'
9 | required: true
10 | default: 'PYPI'
11 | options: [ 'TESTPYPI', 'PYPI' ]
12 | version:
13 | type: string
14 | description: 'Version tag'
15 | required: true
16 | default: '0.9.9'
17 |
18 | jobs:
19 | dist:
20 | runs-on: ${{ matrix.os }}
21 | strategy:
22 | fail-fast: false
23 | matrix:
24 | os: [ ubuntu-latest ]
25 | python-version: [ "3.12" ]
26 |
27 | steps:
28 | - name: Checkout pyfolio
29 | uses: actions/checkout@v4
30 | with:
31 | fetch-depth: 0
32 | ref: ${{ github.event.inputs.version }}
33 |
34 | - name: Set up Python ${{ matrix.python-version }}
35 | uses: actions/setup-python@v5
36 | with:
37 | python-version: ${{ matrix.python-version }}
38 |
39 | - name: Build wheels
40 | run: pipx run build
41 |
42 | - name: Store artifacts
43 | uses: actions/upload-artifact@v4
44 | with:
45 | path: dist/*
46 |
47 | - name: Check metadata
48 | run: pipx run twine check dist/*
49 |
50 | upload_pypi:
51 | needs: [ dist ]
52 | runs-on: ubuntu-latest
53 | steps:
54 | - uses: actions/download-artifact@v4
55 | with:
56 | name: artifact
57 | path: dist
58 |
59 | - name: publish to testpypi
60 | if: ${{ github.event.inputs.target == 'TESTPYPI' }}
61 | uses: pypa/gh-action-pypi-publish@release/v1
62 | with:
63 | user: __token__
64 | password: ${{ secrets.TESTPYPI_TOKEN }}
65 | repository-url: https://test.pypi.org/legacy/
66 |
67 | - name: publish to pypi
68 | if: ${{ github.event.inputs.target == 'PYPI' }}
69 | uses: pypa/gh-action-pypi-publish@release/v1
70 | with:
71 | user: __token__
72 | password: ${{ secrets.PYPI_TOKEN }}
73 |
--------------------------------------------------------------------------------
/.github/workflows/conda_package.yml:
--------------------------------------------------------------------------------
1 | name: Anaconda
2 |
3 | on: workflow_dispatch
4 |
5 | jobs:
6 | build_wheels:
7 | name: py${{ matrix.python }} on ${{ matrix.os }}
8 | runs-on: ${{ matrix.os }}
9 | env:
10 | ANACONDA_API_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
11 | defaults:
12 | run:
13 | shell: bash -l {0}
14 |
15 | strategy:
16 | fail-fast: false
17 | matrix:
18 | os: [ macos-latest, windows-latest, ubuntu-latest ]
19 | python: [ '3.7', '3.8', '3.9']
20 |
21 | steps:
22 | - name: Checkout pyfolio-reloaded
23 | uses: actions/checkout@v4
24 |
25 | - name: Setup miniconda3
26 | uses: conda-incubator/setup-miniconda@v3
27 | with:
28 | miniconda-version: latest
29 | auto-update-conda: true
30 | channel-priority: true
31 | mamba-version: "*"
32 | python-version: ${{ matrix.python }}
33 | activate-environment: recipe
34 | channels: ml4t, conda-forge, defaults, anaconda, ranaroussi
35 |
36 | - name: create uploader
37 | # address broken client under py3.9
38 | if: ${{ matrix.python == '3.9' }}
39 | run: conda create -n up python=3.7 anaconda-client
40 |
41 | - name: conda build for ${{ matrix.os }}
42 | run: |
43 | conda activate recipe
44 | mamba install -n recipe boa conda-verify anaconda-client
45 | conda mambabuild --output-folder . --python ${{ matrix.python }} conda/recipe
46 |
47 | - name: activate uploader
48 | # address broken client under py3.9
49 | if: ${{ matrix.python == '3.9' }}
50 | run: conda activate up
51 |
52 | - name: store windows result
53 | uses: actions/upload-artifact@v4
54 | if: ${{ matrix.os == 'windows-latest' }}
55 | with:
56 | path: win-64/*.tar.bz2
57 |
58 | - name: upload windows
59 | if: ${{ matrix.os == 'windows-latest' }}
60 | run: anaconda upload -l main -u ml4t win-64/*.tar.bz2
61 |
62 | - name: store linux result
63 | uses: actions/upload-artifact@v4
64 | if: ${{ matrix.os == 'ubuntu-latest' }}
65 | with:
66 | path: linux-64/*.tar.bz2
67 |
68 | - name: upload linux
69 | if: ${{ matrix.os == 'ubuntu-latest' }}
70 | run: anaconda upload -l main -u ml4t linux-64/*.tar.bz2
71 |
72 | - name: store macos result
73 | uses: actions/upload-artifact@v4
74 | if: ${{ matrix.os == 'macos-latest' }}
75 | with:
76 | path: osx-64/*.tar.bz2
77 |
78 | - name: upload macos
79 | if: ${{ matrix.os == 'macos-latest' }}
80 | run: anaconda upload -l main -u ml4t osx-64/*.tar.bz2
81 |
--------------------------------------------------------------------------------
/.github/workflows/test_wheels.yml:
--------------------------------------------------------------------------------
1 | name: Test Wheels
2 |
3 | on: workflow_dispatch
4 |
5 | jobs:
6 | test_wheels:
7 | runs-on: ${{ matrix.os }}
8 | strategy:
9 | fail-fast: false
10 | matrix:
11 | os: [ ubuntu-latest, windows-latest, macos-latest ]
12 | python-version: [ "3.9", "3.10", "3.11", "3.12" ]
13 | steps:
14 | - name: Set up Python ${{ matrix.python-version }}
15 | uses: actions/setup-python@v5
16 | with:
17 | python-version: ${{ matrix.python-version }}
18 |
19 | - name: Checkout pyfolio
20 | uses: actions/checkout@v4
21 |
22 | - name: Unittests with tox & pytest
23 | uses: nick-fields/retry@v3
24 | with:
25 | timeout_minutes: 90
26 | max_attempts: 3
27 | retry_on: error
28 | new_command_on_retry: |
29 | python -m pip install -U pip wheel tox tox-gh-actions
30 | python -m pip install -i https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple pyfolio-reloaded[test]
31 | tox
32 | command: tox -p auto -q
33 |
--------------------------------------------------------------------------------
/.github/workflows/unit_tests.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on:
4 | workflow_dispatch:
5 | schedule:
6 | - cron: "0 9 * * 6"
7 | push:
8 | branches:
9 | - main
10 | pull_request:
11 | branches:
12 | - main
13 |
14 | jobs:
15 | build:
16 | name: Unit Tests for ${{ matrix.python-version }} on ${{ matrix.os }}
17 | runs-on: ${{ matrix.os }}
18 | strategy:
19 | fail-fast: false
20 | matrix:
21 | os: [ ubuntu-latest , windows-latest, macos-latest ]
22 | python-version: [ '3.10', '3.11', '3.12', '3.13']
23 | steps:
24 | - name: Checkout pyfolio
25 | uses: actions/checkout@v4
26 |
27 | - name: Set up Python ${{ matrix.python-version }}
28 | uses: actions/setup-python@v5
29 | with:
30 | python-version: ${{ matrix.python-version }}
31 |
32 | - name: Install pyfolio
33 | run: |
34 | python -m pip install --upgrade pip
35 | pip install tox tox-gh-actions
36 | pip install -e .[test]
37 |
38 | - name: Lint with flake8
39 | run: |
40 | flake8
41 |
42 | - name: Tests with tox & pytest
43 | run: |
44 | tox
45 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # virtualenv
2 | .venv
3 |
4 | # Byte-compiled / optimized / DLL files
5 | __pycache__/
6 | *.py[cod]
7 |
8 | # C extensions
9 | *.so
10 |
11 | # Distribution / packaging
12 | .Python
13 | env/
14 | build/
15 | develop-eggs/
16 | dist/
17 | downloads/
18 | eggs/
19 | .eggs/
20 | lib/
21 | lib64/
22 | parts/
23 | sdist/
24 | var/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .coverage
43 | .coverage.*
44 | .cache
45 | nosetests.xml
46 | coverage.xml
47 | *.cover
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 |
56 | # Sphinx documentation
57 | docs/_build/
58 |
59 | # PyBuilder
60 | target/
61 |
62 | # VIM
63 | *.sw?
64 |
65 | # IPython notebook checkpoints
66 | .ipynb_checkpoints/
67 |
68 | .idea
69 | src/pyfolio/_version.py
70 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v3.4.0
4 | hooks:
5 | - id: check-yaml
6 | - id: check-merge-conflict
7 | - id: end-of-file-fixer
8 | - id: trailing-whitespace
9 | - id: check-added-large-files
10 |
11 | - repo: https://github.com/PyCQA/flake8
12 | rev: 7.0.0
13 | hooks:
14 | - id: flake8
15 |
16 | - repo: https://github.com/psf/black
17 | rev: 24.4.2
18 | hooks:
19 | - id: black
20 |
21 | exclude: '^conda/recipe/meta.yaml$'
22 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2018 Quantopian, Inc.
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include versioneer.py
2 | include src/pyfolio/_version.py
3 | include LICENSE
4 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | 
8 | [](https://github.com/stefan-jansen/pyfolio-reloaded/actions/workflows/unit_tests.yml)
9 | [](https://github.com/stefan-jansen/pyfolio-reloaded/actions/workflows/conda_package.yml)
10 | [](https://github.com/stefan-jansen/pyfolio-reloaded/actions/workflows/build_wheels.yml)
11 | [](https://coveralls.io/github/stefan-jansen/pyfolio-reloaded?branch=main)
12 | 
13 | 
14 | 
15 |
16 | pyfolio is a Python library for performance and risk analysis of financial portfolios that works well with the [Zipline](https://zipline.ml4trading.io/) open source backtesting library.
17 |
18 | ## Trading Strategy Analysis with pyfolio
19 |
20 | At the core of pyfolio are various tear sheets that combine individual plots and summary statistics to
21 | provide a comprehensive view of a trading algorithm's performance.
22 |
23 | Here's an example of a simple tear sheet analyzing a strategy executed with the Zipline backtesting engine:
24 |
25 | ### Performance Metrics
26 |
27 | The tear sheet presents performance and risk metrics separately for the strategy's backtest and out-of-sample periods:
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 | ### Performance Plots
36 |
37 | In addition, it visualizes how several risk and return metrics behave over time:
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 | ## Installation
46 |
47 | To install pyfolio, run:
48 |
49 | ```bash
50 | pip install pyfolio-reloaded
51 | ```
52 | or
53 |
54 | ```bash
55 | conda install -c ml4t pyfolio-reloaded
56 | ```
57 |
58 | ### Development
59 |
60 | For development, you may want to use a [virtual environment](https://docs.python-guide.org/en/latest/dev/virtualenvs/) to avoid dependency conflicts between pyfolio and other Python projects you have.
61 |
62 | To get set up with a virtual env, run:
63 | ```bash
64 | mkvirtualenv pyfolio
65 | ```
66 |
67 | Next, clone this git repository and run `python -m pip install -e .[all]` so that you can edit the library files directly.
68 |
69 | ## Usage
70 |
71 | A good way to get started is to run the pyfolio examples in a
72 | [Jupyter notebook](https://jupyter.org/). To do this, you first want to
73 | start a Jupyter notebook server:
74 |
75 | ```bash
76 | jupyter notebook
77 | ```
78 |
79 | From the notebook list page, navigate to the pyfolio examples directory
80 | and open a notebook. Execute the code in a notebook cell by clicking on it
81 | and hitting Shift+Enter.
82 |
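For a quick programmatic start outside the example notebooks, here is a minimal sketch; the toy returns series is purely illustrative, and `create_returns_tear_sheet` is used as the entry point for returns-only analysis:

```python
import numpy as np
import pandas as pd
import pyfolio as pf

# Toy daily returns series, for illustration only
idx = pd.date_range("2020-01-01", periods=504, freq="B", tz="UTC")
rng = np.random.default_rng(42)
returns = pd.Series(rng.normal(0.0005, 0.01, len(idx)), index=idx)

# Render the plots and summary statistics for this returns stream
pf.create_returns_tear_sheet(returns)
```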
83 |
84 | ## Questions?
85 |
86 | If you find a bug, feel free to [open an issue](https://github.com/stefan-jansen/pyfolio-reloaded/issues) in this repository.
87 |
88 | You can also join our [community](https://exchange.ml4trading.io).
89 |
90 | ## Support
91 |
92 | Please [open an issue](https://github.com/stefan-jansen/pyfolio-reloaded/issues/new) for support.
93 |
94 | ## Contributing
95 |
96 | If you'd like to contribute, a great place to look is the [issues marked with help-wanted](https://github.com/stefan-jansen/pyfolio-reloaded/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22).
97 |
98 | For a list of core developers and outside collaborators, see [the GitHub contributors list](https://github.com/stefan-jansen/pyfolio-reloaded/graphs/contributors).
99 |
--------------------------------------------------------------------------------
/WHATSNEW.md:
--------------------------------------------------------------------------------
1 | # What's New
2 |
3 | These are new features and improvements of note in each release.
4 |
5 | ## v0.9.3 (April 19, 2021)
6 |
7 | - Update to Python 3.7+, Pandas 1.0+
8 | - Drop support for Python 2
9 | - Updated docs
10 | - CI and packaging via GitHub Actions
11 | - conda packages
12 |
13 | ## v0.9.0 (Aug 1st, 2018)
14 |
15 | ### New features
16 |
17 | - Previously, `pyfolio` required a benchmark, usually the U.S. market returns `SPY`.
18 | To support international equities and alternative data sets, `pyfolio` is now completely independent of benchmarks.
19 | If a benchmark is passed, all benchmark-related analyses will be performed; if not, they will simply be skipped (a usage sketch follows this list). By [George Ho](https://github.com/eigenfoo)
20 | - Performance attribution tearsheet [PR441](https://github.com/quantopian/pyfolio/pull/441), [PR433](https://github.com/quantopian/pyfolio/pull/433), [PR442](https://github.com/quantopian/pyfolio/pull/442).
21 | By [Vikram Narayan](https://github.com/vikram-narayan).
22 | - Improved implementation of `get_turnover` [PR432](https://github.com/quantopian/pyfolio/pull/432).
23 | By [Gus Gordon](https://github.com/gusgordon).
24 | - Users can now pass in extra rows (as a dict or OrderedDict) to display in the perf_stats table [PR445](https://github.com/quantopian/pyfolio/pull/445). By [Gus Gordon](https://github.com/gusgordon).
25 |
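A minimal sketch of the benchmark-optional behavior described above, using toy return series (the `benchmark_rets` keyword is the relevant switch):

```python
import numpy as np
import pandas as pd
import pyfolio as pf

# Toy strategy and benchmark return series, for illustration only
idx = pd.date_range("2020-01-01", periods=252, freq="B", tz="UTC")
rng = np.random.default_rng(1)
returns = pd.Series(rng.normal(0.0005, 0.010, len(idx)), index=idx)
benchmark_rets = pd.Series(rng.normal(0.0003, 0.008, len(idx)), index=idx)

# Benchmark passed: benchmark-relative analyses (e.g. rolling beta) are included
pf.create_returns_tear_sheet(returns, benchmark_rets=benchmark_rets)

# No benchmark: the benchmark-dependent analyses are simply skipped
pf.create_returns_tear_sheet(returns)
```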
26 | ### Maintenance
27 |
28 | - Many features have been more extensively debugged, maintained, and tested. By [Ana Ruelas](https://github.com/ahgnaw) and [Vikram Narayan](https://github.com/vikram-narayan).
29 | - Various fixes to support pandas versions >= 0.18.1 [PR443](https://github.com/quantopian/pyfolio/pull/443). By [Andrew Daniels](https://github.com/yankees714).
30 |
31 | ## v0.8.0 (Aug 23rd, 2017)
32 |
33 | This is a major release from `0.7.0`, and all users are recommended to upgrade.
34 |
35 | ### New features
36 |
37 | - Risk tear sheet: added a new tear sheet to analyze risk exposures to common factors (e.g. mean reversion and momentum), sector (e.g. Morningstar sectors), market cap and illiquid stocks. By [George Ho](https://github.com/eigenfoo).
38 | - Simple tear sheet: added a new tear sheet that presents only the most important plots in the full tear sheet, for a quick general overview of a portfolio's performance (a usage sketch follows this list). By [George Ho](https://github.com/eigenfoo).
39 | - Performance attribution: added new table to do performance attribution analysis, such as the amount of returns attributable to common factors, and summary statistics such as the multi-factor alpha and multi-factor Sharpe ratio. By [Vikram Narayan](https://github.com/vikram-narayan).
40 | - Volatility plot: added a rolling annual volatility plot to the returns tear sheet. By [hkopp](https://github.com/hkopp).
41 |
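A sketch of the simple tear sheet named above, again with a toy returns series (positions and transactions are optional inputs):

```python
import numpy as np
import pandas as pd
import pyfolio as pf

# Toy daily returns, for illustration only
idx = pd.date_range("2017-01-02", periods=252, freq="B", tz="UTC")
returns = pd.Series(np.random.default_rng(2).normal(0.0004, 0.01, len(idx)), index=idx)

# Condensed overview: only the most important plots of the full tear sheet
pf.create_simple_tear_sheet(returns)
```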
42 | ### Bugfixes
43 |
44 | - Yahoo and pandas data-reader: fixed bug regarding Yahoo backend for market
45 | data and pandas data-reader. By [Thomas Wiecki](https://github.com/twiecki)
46 | and [Gus Gordon](https://github.com/gusgordon).
47 | - `empyrical` compatibility: removed `information_ratio` to remain compatible
48 | with `empyrical`. By [Thomas Wiecki](https://github.com/twiecki).
49 | - Fama-French rolling multivariate regression: fixed bug where the rolling
50 | Fama-French plot performed separate linear regressions instead of a
51 | multivariate regression. By [George Ho](https://github.com/eigenfoo).
52 | - Other minor bugfixes. By [Scott Sanderson](https://github.com/ssanderson),
53 | [Jonathan Ng](https://github.com/jonathanng),
54 | [SylvainDe](https://github.com/SylvainDe) and
55 | [mckelvin](https://github.com/mckelvin).
56 |
57 | ### Maintenance
58 |
59 | - Documentation: updated and improved `pyfolio` documentation and example
60 | Jupyter notebooks. By [George Ho](https://github.com/eigenfoo).
61 | - Data loader migration: all data loaders have been migrated from `pyfolio` to
62 | `empyrical`. By [James Christopher](https://github.com/jameschristopher).
63 | - Improved plotting style: fixed issues with formatting and presentation of
64 | plots. By [George Ho](https://github.com/eigenfoo).
65 |
66 | ## v0.7.0 (Jan 28th, 2017)
67 |
68 | This is a major release from `0.6.0`, and all users are recommended to upgrade.
69 |
70 | ### New features
71 |
72 | - Adds a transaction timing plot, which gives insight into the strategy's
73 |   trade times.
74 | - Adds a plot showing the number of longs and shorts held over time.
75 | - New round trips plot selects a sample of held positions (16 by default) and
76 | shows their round trips. This replaces the old round trip plot, which became
77 | unreadable for strategies that traded many positions.
78 | - Adds basic capability for analyzing intraday strategies. If a strategy makes
79 | a large amount of transactions relative to its end-of-day positions, then
80 | pyfolio will attempt to reconstruct the intraday positions, take the point of
81 | peak exposure to the market during each day, and plot that data with the
82 | positions tear sheet. By default pyfolio will automatically detect this, but
83 | the behavior can be changed by passing either `estimate_intraday=True` or
84 | `estimate_intraday=False` to the tear sheet functions ([see
85 |   here](https://github.com/quantopian/pyfolio/blob/master/pyfolio/tears.py#L131)). A usage sketch follows this list.
86 | - Now formats [zipline](https://github.com/quantopian/zipline) assets,
87 | displaying their ticker symbol.
88 | - Gross leverage is no longer required to be passed, and will now be calculated
89 | from the passed positions DataFrame.
90 |
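A sketch of overriding the intraday auto-detection described above; the wrapper is hypothetical, and its inputs stand in for the returns, positions, and transactions a backtest would produce:

```python
import pandas as pd
import pyfolio as pf

def intraday_tear_sheet(returns: pd.Series,
                        positions: pd.DataFrame,
                        transactions: pd.DataFrame) -> None:
    """Build the full tear sheet, forcing intraday position estimation."""
    pf.create_full_tear_sheet(
        returns,
        positions=positions,
        transactions=transactions,
        estimate_intraday=True,  # pass False to disable the estimation instead
    )
```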
91 | ### Bugfixes
92 |
93 | - Cone plotting location is now correct.
94 | - Adjust scaling of beta and Fama-French plots.
95 | - Removed multiple dependencies, some of which were previously unused.
96 | - Various text fixes.
97 |
98 | ## v0.6.0 (Oct 17, 2016)
99 |
100 | This is a major new release from `0.5.1`. All users are recommended to upgrade.
101 |
102 | ### New features
103 |
104 | * Computation of performance and risk measures has been split off into
105 | [`empyrical`](https://github.com/quantopian/empyrical). This allows
106 | [`Zipline`](https://zipline.io) and `pyfolio` to use the same code to
107 |   calculate their risk statistics. By [Ana Ruelas](https://github.com/ahgnaw) and
108 | [Abhi Kalyan](https://github.com/abhijeetkalyan).
109 | * New multistrike cone which redraws the cone when it crosses its initial bounds
110 | [PR310](https://github.com/quantopian/pyfolio/pull/310). By [Ana
111 | Ruelas](https://github.com/ahgnaw) and [Abhi
112 | Kalyan](https://github.com/abhijeetkalyan).
113 |
114 | ### Bugfixes
115 |
116 | * Can use most recent PyMC3 now.
117 | * Depends on seaborn 0.7.0 or later now
118 | [PR331](https://github.com/quantopian/pyfolio/pull/331).
119 | * Disable buggy computation of round trips per day and per month
120 | [PR339](https://github.com/quantopian/pyfolio/pull/339).
121 |
122 | ## v0.5.1 (June 10, 2016)
123 |
124 | This is a bugfix release from `0.5.0` with limited new functionality. All users are recommended to upgrade.
125 |
126 | ### New features
127 |
128 | * OOS data is now overlaid on top of box plot
129 | [PR306](https://github.com/quantopian/pyfolio/pull/306) by [Ana
130 | Ruelas](https://github.com/ahgnaw)
131 | * New logo [PR298](https://github.com/quantopian/pyfolio/pull/298) by [Taso
132 | Petridis](https://github.com/tasopetridis) and [Richard
133 | Frank](https://github.com/richafrank)
134 | * Raw returns plot and cumulative log returns plot
135 | [PR294](https://github.com/quantopian/pyfolio/pull/294) by [Thomas
136 | Wiecki](https://github.com/twiecki)
137 | * Net exposure line to the long/short exposure plot
138 | [PR301](https://github.com/quantopian/pyfolio/pull/301) by [Ana
139 | Ruelas](https://github.com/ahgnaw)
140 |
141 | ### Bugfixes
142 |
143 | * Fix drawdown behavior and pandas exception in tear-sheet creation
144 | [PR297](https://github.com/quantopian/pyfolio/pull/297) by [Flavio
145 | Duarte](https://github.com/flaviodrt)
146 |
147 | ## v0.5.0 (April 21, 2016) -- Olympia
148 |
149 | This is a major release from `0.4.0` that includes many new analyses and
150 | features. We recommend that all users upgrade to this new version. Also update
151 | your dependencies, specifically, `pandas>=0.18.0`, `seaborn>=0.6.0` and
152 | `zipline>=0.8.4`.
153 |
154 | ### New features
155 |
156 | * New capacity tear-sheet to assess how much capital can be traded on a strategy
157 | [PR284](https://github.com/quantopian/pyfolio/pull/284). [Andrew
158 | Campbell](https://github.com/a-campbell).
159 | * Bootstrap analysis to assess uncertainty in performance metrics
160 | [PR261](https://github.com/quantopian/pyfolio/pull/261). [Thomas
161 | Wiecki](https://github.com/twiecki)
162 | * Refactored round-trip analysis to be more general and have better output. Now
163 | does full portfolio reconstruction to match trades
164 | [PR293](https://github.com/quantopian/pyfolio/pull/293). [Thomas
165 | Wiecki](https://github.com/twiecki), [Andrew
166 | Campbell](https://github.com/a-campbell). See the
167 | [tutorial](http://quantopian.github.io/pyfolio/round_trip_example/) for more
168 | information.
169 | * Prettier printing of tables in notebooks
170 | [PR289](https://github.com/quantopian/pyfolio/pull/289). [Thomas
171 | Wiecki](https://github.com/twiecki)
172 | * Faster max-drawdown calculation
173 | [PR281](https://github.com/quantopian/pyfolio/pull/281). [Devin
174 | Stevenson](https://github.com/devinstevenson)
175 | * New metrics tail-ratio and common sense ratio
176 | [PR276](https://github.com/quantopian/pyfolio/pull/276). [Thomas
177 | Wiecki](https://github.com/twiecki)
178 | * Log-scaled cumulative returns plot and raw returns plot
179 | [PR294](https://github.com/quantopian/pyfolio/pull/294). [Thomas
180 | Wiecki](https://github.com/twiecki)
181 |
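A sketch of invoking the refactored round-trip analysis; the wrapper is hypothetical, and the three inputs are assumed to be standard backtest outputs:

```python
import pandas as pd
import pyfolio as pf

def round_trip_report(returns: pd.Series,
                      positions: pd.DataFrame,
                      transactions: pd.DataFrame) -> None:
    """Reconstruct round trips from transactions and summarize them."""
    pf.create_round_trip_tear_sheet(returns, positions, transactions)
```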
182 | ### Bug fixes
183 | * Many deprecation fixes for Pandas 0.18.0, seaborn 0.6.0, and zipline 0.8.4
184 |
185 |
186 | ## v0.4.0 (Dec 10, 2015)
187 |
188 | This is a major release from 0.3.1 that includes new features and quite a few bug fixes. We recommend that all users upgrade to this new version.
189 |
190 | ### New features
191 |
192 | * Round-trip analysis [PR210](https://github.com/quantopian/pyfolio/pull/210)
193 | Andrew, Thomas
194 | * Improved cone to forecast returns that uses a bootstrap instead of linear
195 | forecasting [PR233](https://github.com/quantopian/pyfolio/pull/233) Andrew,
196 | Thomas
197 | * Plot max and median long/short exposures
198 | [PR237](https://github.com/quantopian/pyfolio/pull/237) Andrew
199 |
200 | ### Bug fixes
201 |
202 | * Sharpe ratio was calculated incorrectly
203 | [PR219](https://github.com/quantopian/pyfolio/pull/219) Thomas, Justin
204 | * annual_return() now computes only CAGR, and does so correctly
205 | [PR234](https://github.com/quantopian/pyfolio/pull/234) Justin
206 | * Cache SPY and Fama-French returns in home-directory instead of
207 | install-directory [PR241](https://github.com/quantopian/pyfolio/pull/241) Joe
208 | * Remove data files from package
209 | [PR241](https://github.com/quantopian/pyfolio/pull/241) Joe
210 | * Cast factor.name to str
211 | [PR223](https://github.com/quantopian/pyfolio/pull/223) Scotty
212 | * Test all `create_*_tear_sheet` functions in all configurations
213 | [PR247](https://github.com/quantopian/pyfolio/pull/247) Thomas
214 |
215 |
216 | ## v0.3.1 (Nov 12, 2015)
217 |
218 | This is a minor release from 0.3 that includes mostly bugfixes but also some new features. We recommend that all users upgrade to this new version.
219 |
220 | ### New features
221 |
222 | * Add Information Ratio [PR194](https://github.com/quantopian/pyfolio/pull/194)
223 | by @MridulS
224 | * Bayesian tear-sheet now accepts 'Fama-French' option to do Bayesian
225 | multivariate regression against Fama-French risk factors
226 | [PR200](https://github.com/quantopian/pyfolio/pull/200) by Shane Bussman
227 | * Plotting of monthly returns
228 | [PR195](https://github.com/quantopian/pyfolio/pull/195)
229 |
230 | ### Bug fixes
231 |
232 | * `pos.get_percent_alloc` was not handling short allocations correctly
233 | [PR201](https://github.com/quantopian/pyfolio/pull/201)
234 | * UTC bug with cached Fama-French factors
235 | [commit](https://github.com/quantopian/pyfolio/commit/709553a55b5df7c908d17f443cb17b51854a65be)
236 | * Sector map was not being passed from `create_returns_tearsheet`
237 | [commit](https://github.com/quantopian/pyfolio/commit/894b753e365f9cb4861ffca2ef214c5a64b2bef4)
238 | * New sector mapping feature was not Python 3 compatible
239 | [PR201](https://github.com/quantopian/pyfolio/pull/201)
240 |
241 |
242 | ### Maintenance
243 |
244 | * We now depend on pandas-datareader as the yahoo finance loaders from pandas
245 | will be deprecated [PR181](https://github.com/quantopian/pyfolio/pull/181) by
246 | @tswrightsandpointe
247 |
248 | ### Contributors
249 |
250 | Besides the core developers, we have seen an increase in outside contributions
251 | which we greatly appreciate. Specifically, these people contributed to this
252 | release:
253 |
254 | * Shane Bussman
255 | * @MridulS
256 | * @YihaoLu
257 | * @jkrauss82
258 | * @tswrightsandpointe
259 | * @cgdeboer
260 |
261 |
262 | ## v0.3 (Oct 23, 2015)
263 |
264 | This is a major release from 0.2 that includes many exciting new features. We
265 | recommend that all users upgrade to this new version.
266 |
267 | ### New features
268 |
269 | * Sector exposures: sum positions by sector given a dictionary or series of
270 |   symbol-to-sector mappings; a usage sketch follows this list
271 |   [PR166](https://github.com/quantopian/pyfolio/pull/166)
272 | * Ability to make cones with multiple shades stdev regions
273 | [PR168](https://github.com/quantopian/pyfolio/pull/168)
274 | * Slippage sweep: See how an algorithm performs with various levels of slippage
275 | [PR170](https://github.com/quantopian/pyfolio/pull/170)
276 | * Stochastic volatility model in Bayesian tear sheet
277 | [PR174](https://github.com/quantopian/pyfolio/pull/174)
278 | * Ability to suppress display of position information
279 | [PR177](https://github.com/quantopian/pyfolio/pull/177)
280 |
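A sketch of the symbol-to-sector aggregation described above; the mapping and tickers are hypothetical, and `pf.pos.get_sector_exposures` plus the `sector_mappings` keyword are the relevant hooks:

```python
import pandas as pd
import pyfolio as pf

# Hypothetical symbol -> sector mapping
sector_map = {"AAPL": "Technology", "XOM": "Energy", "JPM": "Financials"}

def sector_view(returns: pd.Series, positions: pd.DataFrame) -> None:
    # Sum position values by sector, then plot the position tear sheet
    by_sector = pf.pos.get_sector_exposures(positions, sector_map)
    print(by_sector.tail())
    pf.create_position_tear_sheet(returns, positions, sector_mappings=sector_map)
```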
281 | ### Bug fixes
282 |
283 | * Various fixes to make pyfolio pandas 0.17 compatible
284 |
285 | ## v0.2 (Oct 16, 2015)
286 |
287 | This is a major release from 0.1 that includes mainly bugfixes and refactorings
288 | but also some new features. We recommend that all users upgrade to this new
289 | version.
290 |
291 | ### New features
292 |
293 | * Volatility matched cumulative returns plot
294 | [PR126](https://github.com/quantopian/pyfolio/pull/126).
295 | * Allow for different periodicity (annualization factors) in the annual_()
296 | methods [PR164](https://github.com/quantopian/pyfolio/pull/164).
297 | * Users can supply their own interesting periods
298 | [PR163](https://github.com/quantopian/pyfolio/pull/163).
299 | * Ability to weight a portfolio of holdings by a metric value
300 | [PR161](https://github.com/quantopian/pyfolio/pull/161).
301 |
302 | ### Bug fixes
303 |
304 | * Fix drawdown overlaps [PR150](https://github.com/quantopian/pyfolio/pull/150).
305 | * Monthly returns distribution should not stack by year
306 | [PR162](https://github.com/quantopian/pyfolio/pull/162).
307 | * Fix gross leverage [PR147](https://github.com/quantopian/pyfolio/pull/147)
308 |
--------------------------------------------------------------------------------
/build_and_deploy_docs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | pushd docs
4 | bash convert_nbs_to_md.sh
5 | popd
6 | mkdocs build --clean
7 | mkdocs gh-deploy
8 |
--------------------------------------------------------------------------------
/conda/recipe/meta.yaml:
--------------------------------------------------------------------------------
1 | {% set name = "pyfolio-reloaded" %}
2 | {% set version = "0.9.4" %}
3 |
4 | package:
5 | name: {{ name|lower }}
6 | version: {{ version }}
7 |
8 | source:
9 | url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz
10 | md5: 0b8c38f40b57230623e1f32b53d16712
11 |
12 | build:
13 | number: 0
14 | skip: true # [py<37 or not x86_64]
15 | script: {{ PYTHON }} -m pip install . -vv
16 |
17 | requirements:
18 | build:
19 | - python
20 | - setuptools
21 |
22 | run:
23 | - python
24 | - ipython >=3.2.3
25 | - matplotlib >=1.4.0
26 | - numpy >=1.11.1
27 | - pandas >=1.0
28 | - pytz >=2014.10
29 | - scipy >=0.14.0
30 | - scikit-learn >=0.16.1
31 | - seaborn >=0.7.1
32 | - empyrical-reloaded >=0.5.8 # pending update
33 |
34 | test:
35 | imports:
36 | - pyfolio
37 |
38 | about:
39 | home: https://pyfolio.ml4trading.io
40 | summary: pyfolio is a Python library for performance and risk analysis of financial portfolios
41 | license: Apache 2.0
42 | license_file: LICENSE
43 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = build
9 |
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
13 | endif
14 |
15 | # Internal variables.
16 | PAPEROPT_a4 = -D latex_paper_size=a4
17 | PAPEROPT_letter = -D latex_paper_size=letter
18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
19 | # the i18n builder cannot share the environment and doctrees with the others
20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
21 |
22 | .PHONY: help
23 | help:
24 | @echo "Please use \`make ' where is one of"
25 | @echo " html to make standalone HTML files"
26 | @echo " dirhtml to make HTML files named index.html in directories"
27 | @echo " singlehtml to make a single large HTML file"
28 | @echo " pickle to make pickle files"
29 | @echo " json to make JSON files"
30 | @echo " htmlhelp to make HTML files and a HTML help project"
31 | @echo " qthelp to make HTML files and a qthelp project"
32 | @echo " applehelp to make an Apple Help Book"
33 | @echo " devhelp to make HTML files and a Devhelp project"
34 | @echo " epub to make an epub"
35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
36 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
38 | @echo " text to make text files"
39 | @echo " man to make manual pages"
40 | @echo " texinfo to make Texinfo files"
41 | @echo " info to make Texinfo files and run them through makeinfo"
42 | @echo " gettext to make PO message catalogs"
43 | @echo " changes to make an overview of all changed/added/deprecated items"
44 | @echo " xml to make Docutils-native XML files"
45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
46 | @echo " linkcheck to check all external links for integrity"
47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
48 | @echo " coverage to run coverage check of the documentation (if enabled)"
49 |
50 | .PHONY: clean
51 | clean:
52 | rm -rf $(BUILDDIR)/*
53 |
54 | .PHONY: html
55 | html:
56 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
57 | @echo
58 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
59 |
60 | .PHONY: dirhtml
61 | dirhtml:
62 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
63 | @echo
64 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
65 |
66 | .PHONY: singlehtml
67 | singlehtml:
68 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
69 | @echo
70 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
71 |
72 | .PHONY: pickle
73 | pickle:
74 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
75 | @echo
76 | @echo "Build finished; now you can process the pickle files."
77 |
78 | .PHONY: json
79 | json:
80 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
81 | @echo
82 | @echo "Build finished; now you can process the JSON files."
83 |
84 | .PHONY: htmlhelp
85 | htmlhelp:
86 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
87 | @echo
88 | @echo "Build finished; now you can run HTML Help Workshop with the" \
89 | ".hhp project file in $(BUILDDIR)/htmlhelp."
90 |
91 | .PHONY: qthelp
92 | qthelp:
93 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
94 | @echo
95 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
96 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
97 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Qfactor.qhcp"
98 | @echo "To view the help file:"
99 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Qfactor.qhc"
100 |
101 | .PHONY: applehelp
102 | applehelp:
103 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
104 | @echo
105 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
106 | @echo "N.B. You won't be able to view it unless you put it in" \
107 | "~/Library/Documentation/Help or install it in your application" \
108 | "bundle."
109 |
110 | .PHONY: devhelp
111 | devhelp:
112 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
113 | @echo
114 | @echo "Build finished."
115 | @echo "To view the help file:"
116 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Qfactor"
117 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Qfactor"
118 | @echo "# devhelp"
119 |
120 | .PHONY: epub
121 | epub:
122 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
123 | @echo
124 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
125 |
126 | .PHONY: latex
127 | latex:
128 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
129 | @echo
130 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
131 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
132 | "(use \`make latexpdf' here to do that automatically)."
133 |
134 | .PHONY: latexpdf
135 | latexpdf:
136 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
137 | @echo "Running LaTeX files through pdflatex..."
138 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
139 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
140 |
141 | .PHONY: latexpdfja
142 | latexpdfja:
143 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
144 | @echo "Running LaTeX files through platex and dvipdfmx..."
145 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
146 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
147 |
148 | .PHONY: text
149 | text:
150 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
151 | @echo
152 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
153 |
154 | .PHONY: man
155 | man:
156 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
157 | @echo
158 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
159 |
160 | .PHONY: texinfo
161 | texinfo:
162 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
163 | @echo
164 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
165 | @echo "Run \`make' in that directory to run these through makeinfo" \
166 | "(use \`make info' here to do that automatically)."
167 |
168 | .PHONY: info
169 | info:
170 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
171 | @echo "Running Texinfo files through makeinfo..."
172 | make -C $(BUILDDIR)/texinfo info
173 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
174 |
175 | .PHONY: gettext
176 | gettext:
177 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
178 | @echo
179 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
180 |
181 | .PHONY: changes
182 | changes:
183 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
184 | @echo
185 | @echo "The overview file is in $(BUILDDIR)/changes."
186 |
187 | .PHONY: linkcheck
188 | linkcheck:
189 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
190 | @echo
191 | @echo "Link check complete; look for any errors in the above output " \
192 | "or in $(BUILDDIR)/linkcheck/output.txt."
193 |
194 | .PHONY: doctest
195 | doctest:
196 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
197 | @echo "Testing of doctests in the sources finished, look at the " \
198 | "results in $(BUILDDIR)/doctest/output.txt."
199 |
200 | .PHONY: coverage
201 | coverage:
202 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
203 | @echo "Testing of coverage in the sources finished, look at the " \
204 | "results in $(BUILDDIR)/coverage/python.txt."
205 |
206 | .PHONY: xml
207 | xml:
208 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
209 | @echo
210 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
211 |
212 | .PHONY: pseudoxml
213 | pseudoxml:
214 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
215 | @echo
216 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
217 |
--------------------------------------------------------------------------------
/docs/deploy.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from __future__ import print_function
3 | from contextlib import contextmanager
4 | from glob import glob
5 | import os
6 | from os.path import basename, exists, isfile
7 | from pathlib import Path
8 | from shutil import move, rmtree
9 | from subprocess import check_call
10 |
11 | HERE = Path(__file__).resolve(strict=True).parent
12 | PYFOLIO_ROOT = HERE.parent
13 | TEMP_LOCATION = "/tmp/pyfolio-doc"
14 | TEMP_LOCATION_GLOB = TEMP_LOCATION + "/*"
15 |
16 |
17 | @contextmanager
18 | def removing(path):
19 | try:
20 | yield
21 | finally:
22 | rmtree(path)
23 |
24 |
25 | def ensure_not_exists(path):
26 | if not exists(path):
27 | return
28 | if isfile(path):
29 | os.unlink(path)
30 | else:
31 | rmtree(path)
32 |
33 |
34 | def main():
35 | old_dir = Path.cwd()
36 | print("Moving to %s." % HERE)
37 | os.chdir(HERE)
38 |
39 | try:
40 | print("Cleaning docs with 'make clean'")
41 | check_call(["make", "clean"])
42 | print("Building docs with 'make html'")
43 | check_call(["make", "html"])
44 |
45 | print("Clearing temp location '%s'" % TEMP_LOCATION)
46 | rmtree(TEMP_LOCATION, ignore_errors=True)
47 |
48 | with removing(TEMP_LOCATION):
49 | print("Copying built files to temp location.")
50 | move("build/html", TEMP_LOCATION)
51 |
52 | print("Moving to '%s'" % PYFOLIO_ROOT)
53 | os.chdir(PYFOLIO_ROOT)
54 |
55 | print("Checking out gh-pages branch.")
56 | check_call(
57 | [
58 | "git",
59 | "branch",
60 | "-f",
61 | "--track",
62 | "gh-pages",
63 | "origin/gh-pages",
64 | ]
65 | )
66 | check_call(["git", "checkout", "gh-pages"])
67 | check_call(["git", "reset", "--hard", "origin/gh-pages"])
68 |
69 | print("Copying built files:")
70 | for file_ in glob(TEMP_LOCATION_GLOB):
71 | base = basename(file_)
72 |
73 | print("%s -> %s" % (file_, base))
74 | ensure_not_exists(base)
75 | move(file_, ".")
76 | finally:
77 | os.chdir(old_dir)
78 |
79 | print()
80 | print("Updated documentation branch in directory %s" % PYFOLIO_ROOT)
81 | print("If you are happy with these changes, commit and push to gh-pages.")
82 |
83 |
84 | if __name__ == "__main__":
85 | main()
86 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | REM Command file for Sphinx documentation
4 |
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
10 | set I18NSPHINXOPTS=%SPHINXOPTS% source
11 | if NOT "%PAPER%" == "" (
12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
14 | )
15 |
16 | if "%1" == "" goto help
17 |
18 | if "%1" == "help" (
19 | :help
20 | echo.Please use `make ^<target^>` where ^<target^> is one of
21 | echo. html to make standalone HTML files
22 | echo. dirhtml to make HTML files named index.html in directories
23 | echo. singlehtml to make a single large HTML file
24 | echo. pickle to make pickle files
25 | echo. json to make JSON files
26 | echo. htmlhelp to make HTML files and a HTML help project
27 | echo. qthelp to make HTML files and a qthelp project
28 | echo. devhelp to make HTML files and a Devhelp project
29 | echo. epub to make an epub
30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
31 | echo. text to make text files
32 | echo. man to make manual pages
33 | echo. texinfo to make Texinfo files
34 | echo. gettext to make PO message catalogs
35 | echo. changes to make an overview over all changed/added/deprecated items
36 | echo. xml to make Docutils-native XML files
37 | echo. pseudoxml to make pseudoxml-XML files for display purposes
38 | echo. linkcheck to check all external links for integrity
39 | echo. doctest to run all doctests embedded in the documentation if enabled
40 | echo. coverage to run coverage check of the documentation if enabled
41 | goto end
42 | )
43 |
44 | if "%1" == "clean" (
45 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
46 | del /q /s %BUILDDIR%\*
47 | goto end
48 | )
49 |
50 |
51 | REM Check if sphinx-build is available and fall back to the Python module if it is not
52 | %SPHINXBUILD% 1>NUL 2>NUL
53 | if errorlevel 9009 goto sphinx_python
54 | goto sphinx_ok
55 |
56 | :sphinx_python
57 |
58 | set SPHINXBUILD=python -m sphinx.__init__
59 | %SPHINXBUILD% 2> nul
60 | if errorlevel 9009 (
61 | echo.
62 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
63 | echo.installed, then set the SPHINXBUILD environment variable to point
64 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
65 | echo.may add the Sphinx directory to PATH.
66 | echo.
67 | echo.If you don't have Sphinx installed, grab it from
68 | echo.http://sphinx-doc.org/
69 | exit /b 1
70 | )
71 |
72 | :sphinx_ok
73 |
74 |
75 | if "%1" == "html" (
76 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
77 | if errorlevel 1 exit /b 1
78 | echo.
79 | echo.Build finished. The HTML pages are in %BUILDDIR%/html.
80 | goto end
81 | )
82 |
83 | if "%1" == "dirhtml" (
84 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
85 | if errorlevel 1 exit /b 1
86 | echo.
87 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
88 | goto end
89 | )
90 |
91 | if "%1" == "singlehtml" (
92 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
93 | if errorlevel 1 exit /b 1
94 | echo.
95 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
96 | goto end
97 | )
98 |
99 | if "%1" == "pickle" (
100 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
101 | if errorlevel 1 exit /b 1
102 | echo.
103 | echo.Build finished; now you can process the pickle files.
104 | goto end
105 | )
106 |
107 | if "%1" == "json" (
108 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
109 | if errorlevel 1 exit /b 1
110 | echo.
111 | echo.Build finished; now you can process the JSON files.
112 | goto end
113 | )
114 |
115 | if "%1" == "htmlhelp" (
116 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
117 | if errorlevel 1 exit /b 1
118 | echo.
119 | echo.Build finished; now you can run HTML Help Workshop with the ^
120 | .hhp project file in %BUILDDIR%/htmlhelp.
121 | goto end
122 | )
123 |
124 | if "%1" == "qthelp" (
125 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
126 | if errorlevel 1 exit /b 1
127 | echo.
128 | echo.Build finished; now you can run "qcollectiongenerator" with the ^
129 | .qhcp project file in %BUILDDIR%/qthelp, like this:
130 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Qfactor.qhcp
131 | echo.To view the help file:
132 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Qfactor.ghc
133 | goto end
134 | )
135 |
136 | if "%1" == "devhelp" (
137 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
138 | if errorlevel 1 exit /b 1
139 | echo.
140 | echo.Build finished.
141 | goto end
142 | )
143 |
144 | if "%1" == "epub" (
145 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
146 | if errorlevel 1 exit /b 1
147 | echo.
148 | echo.Build finished. The epub file is in %BUILDDIR%/epub.
149 | goto end
150 | )
151 |
152 | if "%1" == "latex" (
153 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
154 | if errorlevel 1 exit /b 1
155 | echo.
156 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
157 | goto end
158 | )
159 |
160 | if "%1" == "latexpdf" (
161 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
162 | cd %BUILDDIR%/latex
163 | make all-pdf
164 | cd %~dp0
165 | echo.
166 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
167 | goto end
168 | )
169 |
170 | if "%1" == "latexpdfja" (
171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
172 | cd %BUILDDIR%/latex
173 | make all-pdf-ja
174 | cd %~dp0
175 | echo.
176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
177 | goto end
178 | )
179 |
180 | if "%1" == "text" (
181 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
182 | if errorlevel 1 exit /b 1
183 | echo.
184 | echo.Build finished. The text files are in %BUILDDIR%/text.
185 | goto end
186 | )
187 |
188 | if "%1" == "man" (
189 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
190 | if errorlevel 1 exit /b 1
191 | echo.
192 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
193 | goto end
194 | )
195 |
196 | if "%1" == "texinfo" (
197 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
198 | if errorlevel 1 exit /b 1
199 | echo.
200 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
201 | goto end
202 | )
203 |
204 | if "%1" == "gettext" (
205 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
206 | if errorlevel 1 exit /b 1
207 | echo.
208 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
209 | goto end
210 | )
211 |
212 | if "%1" == "changes" (
213 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
214 | if errorlevel 1 exit /b 1
215 | echo.
216 | echo.The overview file is in %BUILDDIR%/changes.
217 | goto end
218 | )
219 |
220 | if "%1" == "linkcheck" (
221 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
222 | if errorlevel 1 exit /b 1
223 | echo.
224 | echo.Link check complete; look for any errors in the above output ^
225 | or in %BUILDDIR%/linkcheck/output.txt.
226 | goto end
227 | )
228 |
229 | if "%1" == "doctest" (
230 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
231 | if errorlevel 1 exit /b 1
232 | echo.
233 | echo.Testing of doctests in the sources finished, look at the ^
234 | results in %BUILDDIR%/doctest/output.txt.
235 | goto end
236 | )
237 |
238 | if "%1" == "coverage" (
239 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
240 | if errorlevel 1 exit /b 1
241 | echo.
242 | echo.Testing of coverage in the sources finished, look at the ^
243 | results in %BUILDDIR%/coverage/python.txt.
244 | goto end
245 | )
246 |
247 | if "%1" == "xml" (
248 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
249 | if errorlevel 1 exit /b 1
250 | echo.
251 | echo.Build finished. The XML files are in %BUILDDIR%/xml.
252 | goto end
253 | )
254 |
255 | if "%1" == "pseudoxml" (
256 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
257 | if errorlevel 1 exit /b 1
258 | echo.
259 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
260 | goto end
261 | )
262 |
263 | :end
264 |
--------------------------------------------------------------------------------
/docs/source/api-reference.rst:
--------------------------------------------------------------------------------
1 | .. _api-reference:
2 |
3 | API
4 | ===
5 |
6 | The pyfolio API is organized into various modules:
7 |
8 | * Tear Sheets: :mod:`pyfolio.tears`
9 | * Time Series Metrics: :mod:`pyfolio.timeseries`
10 | * Plotting Functions: :mod:`pyfolio.plotting`
11 | * Performance Attribution: :mod:`pyfolio.perf_attrib`
12 | * Utilities: :mod:`pyfolio.utils`
13 |
14 | Tear Sheets
15 | -----------
16 |
17 | Pyfolio combines key portfolio metrics in thematic plots and summary tear sheets.
18 |
19 | .. automodule:: pyfolio.tears
20 | :members:
21 | :undoc-members:
22 | :show-inheritance:
23 |
24 | Time Series Metrics
25 | -------------------
26 |
27 | The module :mod:`pyfolio.timeseries` provides performance and risk metrics.
28 |
29 | .. automodule:: pyfolio.timeseries
30 | :members:
31 | :undoc-members:
32 | :show-inheritance:
33 |
34 | Performance Attribution
35 | -----------------------
36 |
37 | The module :mod:`pyfolio.perf_attrib` attributes performance to common risk factors.
38 |
39 | .. automodule:: pyfolio.perf_attrib
40 | :members:
41 | :undoc-members:
42 | :show-inheritance:
43 |
44 | Plotting Functions
45 | ------------------
46 |
47 | The module :mod:`pyfolio.plotting` facilitates the visualization of performance metrics.
48 |
49 | .. automodule:: pyfolio.plotting
50 | :members:
51 | :undoc-members:
52 | :show-inheritance:
53 |
54 | Utilities
55 | ---------
56 |
57 | The module :mod:`pyfolio.utils` contains various helper functions, e.g. to format factor data into the requisite input format.
58 |
59 | .. automodule:: pyfolio.utils
60 | :members:
61 | :undoc-members:
62 | :show-inheritance:
63 |
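A minimal end-to-end sketch of the package-level API (the returns series
below is synthetic and purely illustrative):

.. code-block:: python

    import numpy as np
    import pandas as pd
    import pyfolio as pf

    # Synthetic daily returns standing in for a real strategy.
    idx = pd.date_range("2020-01-01", periods=252, freq="B")
    returns = pd.Series(np.random.normal(0.0005, 0.01, len(idx)), index=idx)

    # Returns-based summary statistics and plots.
    pf.create_returns_tear_sheet(returns)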
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import sys
3 | from pathlib import Path
4 | import pydata_sphinx_theme
5 |
6 | sys.path.insert(0, Path("../..").resolve(strict=True).as_posix())
7 | from pyfolio import __version__ as version  # noqa: E402 - needs the path inserted above
8 |
9 | extensions = [
10 | "sphinx.ext.autodoc",
11 | "numpydoc",
12 | "m2r2",
13 | "sphinx_markdown_tables",
14 | "nbsphinx",
15 | "sphinx.ext.mathjax",
16 | "sphinx_copybutton",
17 | ]
18 |
19 | templates_path = ["_templates"]
20 |
21 | source_suffix = {".rst": "restructuredtext", ".md": "markdown"}
22 |
23 | master_doc = "index"
24 |
25 | project = "pyfolio"
26 | copyright = "2016, Quantopian, Inc."
27 | author = "Quantopian, Inc."
28 |
29 | release = version
30 | language = "en"
31 |
32 | exclude_patterns = []
33 |
34 | highlight_language = "python"
35 |
36 | pygments_style = "sphinx"
37 |
38 | todo_include_todos = False
39 |
40 | html_theme = "pydata_sphinx_theme"
41 | html_theme_path = pydata_sphinx_theme.get_html_theme_path()
42 |
43 | html_theme_options = {
44 | "github_url": "https://github.com/stefan-jansen/pyfolio-reloaded",
45 | "twitter_url": "https://twitter.com/ml4trading",
46 | "external_links": [
47 | {"name": "ML for Trading", "url": "https://ml4trading.io"},
48 | {"name": "Community", "url": "https://exchange.ml4trading.io"},
49 | ],
50 | "google_analytics_id": "UA-74956955-3",
51 | "use_edit_page_button": True,
52 | "favicons": [
53 | {
54 | "rel": "icon",
55 | "sizes": "16x16",
56 | "href": "assets/favicon16x16.ico",
57 | },
58 | {
59 | "rel": "icon",
60 | "sizes": "32x32",
61 | "href": "assets/favicon32x32.ico",
62 | },
63 | ],
64 | }
65 |
66 | html_context = {
67 | "github_url": "https://github.com",
68 | "github_user": "stefan-jansen",
69 | "github_repo": "pyfolio-reloaded",
70 | "github_version": "main",
71 | "doc_path": "docs/source",
72 | }
73 |
74 | html_static_path = []
75 |
76 | htmlhelp_basename = "Pyfoliodoc"
77 |
78 | latex_elements = {}
79 |
80 | latex_documents = [
81 | (
82 | master_doc,
83 | "Pyfolio.tex",
84 | "Pyfolio Documentation",
85 | "Quantopian, Inc.",
86 | "manual",
87 | )
88 | ]
89 |
90 | man_pages = [(master_doc, "pyfolio", "Pyfolio Documentation", [author], 1)]
91 |
92 | texinfo_documents = [
93 | (
94 | master_doc,
95 | "Pyfolio",
96 | "Pyfolio Documentation",
97 | author,
98 | "Pyfolio",
99 | "One line description of project.",
100 | "Miscellaneous",
101 | )
102 | ]
103 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. title:: pyfolio
2 |
3 | .. mdinclude:: ../../README.md
4 |
5 | .. toctree::
6 | :maxdepth: 4
7 |
8 | notebooks/single_stock_example
9 | notebooks/zipline_algo_example
10 | notebooks/round_trip_tear_sheet_example
11 | notebooks/sector_mappings_example
12 | api-reference
13 |
--------------------------------------------------------------------------------
/docs/source/notebooks/results.pickle:
--------------------------------------------------------------------------------
1 | ../../../pyfolio/examples/results.pickle
--------------------------------------------------------------------------------
/docs/source/notebooks/round_trip_tear_sheet_example.ipynb:
--------------------------------------------------------------------------------
1 | ../../../pyfolio/examples/round_trip_tear_sheet_example.ipynb
--------------------------------------------------------------------------------
/docs/source/notebooks/sector_mappings_example.ipynb:
--------------------------------------------------------------------------------
1 | ../../../pyfolio/examples/sector_mappings_example.ipynb
--------------------------------------------------------------------------------
/docs/source/notebooks/single_stock_example.ipynb:
--------------------------------------------------------------------------------
1 | ../../../pyfolio/examples/single_stock_example.ipynb
--------------------------------------------------------------------------------
/docs/source/notebooks/slippage_example.ipynb:
--------------------------------------------------------------------------------
1 | ../../../pyfolio/examples/slippage_example.ipynb
--------------------------------------------------------------------------------
/docs/source/notebooks/zipline_algo_example.ipynb:
--------------------------------------------------------------------------------
1 | ../../../pyfolio/examples/zipline_algo_example.ipynb
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: pyfolio
2 | repo_url: https://github.com/quantopian/pyfolio
3 | site_author: Quantopian Inc.
4 |
5 | pages:
6 | - Overview: 'index.md'
7 | - Releases: 'whatsnew.md'
8 | - Tutorial:
9 | - 'Single stock': 'notebooks/single_stock_example.md'
10 | - 'Zipline algorithm': 'notebooks/zipline_algo_example.md'
11 | - 'Sector analysis': 'notebooks/sector_mappings_example.md'
12 | - 'Round trip analysis': 'notebooks/round_trip_tear_sheet_example.md'
13 | - 'Slippage analysis': 'notebooks/slippage_example.md'
14 |
15 | extra_css: [extra.css]
16 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "pyfolio-reloaded"
3 | description = "Performance and risk analysis of financial portfolios with Python"
4 |
5 | requires-python = '>=3.9'
6 | dynamic = ["version"]
7 | readme = "README.md"
8 | authors = [
9 | { name = 'Quantopian Inc' },
10 | { email = 'pm@ml4trading.io' }
11 | ]
12 | maintainers = [
13 | { name = 'Stefan Jansen' },
14 | { email = 'pm@ml4trading.io' }
15 | ]
16 | license = { file = "LICENSE" }
17 |
18 | classifiers = [
19 | 'Development Status :: 4 - Beta',
20 | 'License :: OSI Approved :: Apache Software License',
21 | 'Natural Language :: English',
22 | 'Programming Language :: Python',
23 | 'Programming Language :: Python :: 3.9',
24 | 'Programming Language :: Python :: 3.10',
25 | 'Programming Language :: Python :: 3.11',
26 | 'Programming Language :: Python :: 3.12',
27 | 'Programming Language :: Python :: 3.13',
28 | 'Operating System :: OS Independent',
29 | 'Intended Audience :: Science/Research',
30 | 'Topic :: Office/Business :: Financial :: Investment',
31 | 'Topic :: Scientific/Engineering :: Information Analysis',
32 | ]
33 |
34 | dependencies = [
35 | # following pandas
36 | "numpy>=1.23.5; python_version<'3.12'",
37 | "numpy>=1.26.0; python_version>='3.12'",
38 | "pandas >=1.5.0,<3.0",
39 | "ipython >=3.2.3",
40 | "matplotlib >=1.4.0",
41 | "pytz >=2014.10",
42 | "scipy >=0.14.0",
43 | "scikit-learn >=0.16.1",
44 | "seaborn >=0.7.1",
45 | "empyrical-reloaded >=0.5.9",
46 | ]
47 |
48 | [project.urls]
49 | homepage = 'https://ml4trading.io'
50 | repository = 'https://github.com/stefan-jansen/pyfolio-reloaded'
51 | documentation = 'https://pyfolio.ml4trading.io'
52 |
53 | [build-system]
54 | requires = [
55 | 'setuptools>=54.0.0',
56 | "setuptools_scm[toml]>=6.2",
57 | 'wheel>=0.31.0',
58 | 'oldest-supported-numpy; python_version>="3.8"',
59 | ]
60 | build-backend = 'setuptools.build_meta'
61 |
62 | [project.optional-dependencies]
63 | test = [
64 | "tox >=2.3.1",
65 | "coverage >=4.0.3",
66 | "coveralls ==3.0.1",
67 | "pytest >=6.2",
68 | 'pytest-xdist >=2.5.0',
69 | "pytest-cov >=2.12",
70 | "parameterized >=0.6.1",
71 | "pytest-rerunfailures",
72 | "flake8 >=3.9.1",
73 | "black",
74 | ]
75 | dev = [
76 | "flake8 >=3.9.1",
77 | "black",
78 | "pre-commit >=2.12.1",
79 | ]
80 | docs = [
81 | 'Cython',
82 | 'Sphinx >=1.3.2',
83 | 'numpydoc >=0.5.0',
84 | 'sphinx-autobuild >=0.6.0',
85 | 'pydata-sphinx-theme',
86 | 'sphinx-markdown-tables',
87 | "sphinx_copybutton",
88 | 'm2r2'
89 | ]
90 |
91 | [tool.setuptools]
92 | include-package-data = true
93 | zip-safe = false
94 |
95 | [tool.setuptools.packages.find]
96 | where = ['src']
97 | exclude = ['tests*']
98 |
99 | [tool.setuptools_scm]
100 | write_to = "src/pyfolio/_version.py"
101 | version_scheme = 'guess-next-dev'
102 | local_scheme = 'dirty-tag'
103 |
104 |
105 | [tool.pytest.ini_options]
106 | pythonpath = ['src']
107 | minversion = "6.0"
108 | testpaths = 'tests'
109 | addopts = '-v'
110 |
111 |
112 | [tool.cibuildwheel]
113 | test-extras = "test"
114 | test-command = "pytest -n 2 {package}/tests"
115 | build-verbosity = 3
116 |
117 |
118 | [tool.cibuildwheel.macos]
119 | archs = ["x86_64", "arm64", "universal2"]
120 | test-skip = ["*universal2:arm64"]
121 |
122 |
123 | [tool.cibuildwheel.linux]
124 | archs = ["auto64"]
125 | skip = "*musllinux*"
126 |
127 |
128 | [tool.black]
129 | line-length = 88
130 | target-version = ['py38', 'py39', 'py310']
131 | include = '\.pyi?$'
132 | extend-exclude = '''
133 | \(
134 | docs/source/conf.py
135 | \)
136 | '''
137 |
138 | [tool.tox]
139 | legacy_tox_ini = """
140 | [tox]
141 |
142 | envlist =
143 | py310-pandas{15,20,21,22}-numpy1
144 | py311-pandas{15,20,21,22}-numpy1
145 | py312-pandas{15,20,21,22}-numpy1
146 | py310-pandas222-numpy2{0,1,2}
147 | py311-pandas222-numpy2{0,1,2}
148 |     py312-pandas222-numpy2{0,1,2}
149 | py313-pandas222-numpy2{1,2}
150 |
151 | isolated_build = True
152 | skip_missing_interpreters = True
153 | minversion = 3.23.0
154 |
155 | [gh-actions]
156 | python =
157 | 3.10: py310
158 | 3.11: py311
159 | 3.12: py312
160 | 3.13: py313
161 |
162 | [testenv]
163 | usedevelop = True
164 | setenv =
165 | MPLBACKEND = Agg
166 |
167 | changedir = tmp
168 | extras = test
169 | deps =
170 | pandas15: pandas>=1.5.0,<1.6
171 | pandas20: pandas>=2.0,<2.1
172 | pandas21: pandas>=2.1,<2.2
173 | pandas22: pandas>=2.2,<2.3
174 | pandas222: pandas>=2.2.2,<2.3
175 | numpy1: numpy>=1.23.5,<2.0
176 | numpy20: numpy>=2.0,<2.1
177 | numpy21: numpy>=2.1,<2.2
178 | numpy22: numpy>=2.2,<2.3
179 |
180 |
181 | commands =
182 | pytest -n 2 --cov={toxinidir}/src --cov-report term --cov-report=xml --cov-report=html:htmlcov {toxinidir}/tests
183 | """
184 |
--------------------------------------------------------------------------------
/src/pyfolio/__init__.py:
--------------------------------------------------------------------------------
1 | from . import capacity
2 | from . import interesting_periods
3 | from . import perf_attrib
4 | from . import pos
5 | from . import round_trips
6 | from . import timeseries
7 | from . import txn
8 | from . import utils
9 | from .plotting import * # noqa
10 | from .tears import * # noqa
11 |
12 | try:
13 | from ._version import version as __version__
14 | from ._version import version_tuple
15 | except ImportError:
16 | __version__ = "unknown version"
17 | version_tuple = (0, 0, "unknown version")
18 |
19 | __all__ = [
20 | "utils",
21 | "timeseries",
22 | "pos",
23 | "txn",
24 | "interesting_periods",
25 | "capacity",
26 | "round_trips",
27 | "perf_attrib",
28 | ]
29 |
--------------------------------------------------------------------------------
/src/pyfolio/capacity.py:
--------------------------------------------------------------------------------
1 | import empyrical as ep
2 | import numpy as np
3 | import pandas as pd
4 |
5 | from . import pos
6 |
7 |
8 | def daily_txns_with_bar_data(transactions, market_data):
9 | """
10 | Sums the absolute value of shares traded in each name on each day.
11 | Adds columns containing the closing price and total daily volume for
12 | each day-ticker combination.
13 |
14 | Parameters
15 | ----------
16 | transactions : pd.DataFrame
17 | Prices and amounts of executed trades. One row per trade.
18 | - See full explanation in tears.create_full_tear_sheet
19 | market_data : pd.DataFrame
20 |         Daily market data.
21 |         - DataFrame with a two-level index of (date, field), where field
22 |           is 'price' or 'volume'; equities as columns.
23 |
24 | Returns
25 | -------
26 | txn_daily : pd.DataFrame
27 | Daily totals for transacted shares in each traded name.
28 |         The price and volume columns hold the close price and daily volume
29 |         of the corresponding ticker, respectively.
30 | """
31 |
32 | transactions.index.name = "date"
33 | txn_daily = pd.DataFrame(
34 | transactions.assign(amount=abs(transactions.amount))
35 | .groupby(["symbol", pd.Grouper(freq="D")])
36 | .sum()["amount"]
37 | )
38 |
39 | txn_daily["price"] = market_data.xs("price", level=1).unstack()
40 | txn_daily["volume"] = market_data.xs("volume", level=1).unstack()
41 |
42 | txn_daily = txn_daily.reset_index().set_index("date").sort_index().asfreq("D")
43 |
44 | return txn_daily
45 |
46 |
47 | def days_to_liquidate_positions(
48 | positions,
49 | market_data,
50 | max_bar_consumption=0.2,
51 | capital_base=1e6,
52 | mean_volume_window=5,
53 | ):
54 | """
55 | Compute the number of days that would have been required
56 | to fully liquidate each position on each day based on the
57 | trailing n day mean daily bar volume and a limit on the proportion
58 | of a daily bar that we are allowed to consume.
59 |
60 | This analysis uses portfolio allocations and a provided capital base
61 | rather than the dollar values in the positions DataFrame to remove the
62 | effect of compounding on days to liquidate. In other words, this function
63 | assumes that the net liquidation portfolio value will always remain
64 | constant at capital_base.
65 |
66 | Parameters
67 | ----------
68 | positions: pd.DataFrame
69 | Contains daily position values including cash
70 | - See full explanation in tears.create_full_tear_sheet
71 | market_data : pd.DataFrame
72 |         Daily market data.
73 |         - DataFrame with a two-level index of (date, field), where field
74 |           is 'price' or 'volume'; equities as columns.
75 | max_bar_consumption : float
76 | Max proportion of a daily bar that can be consumed in the
77 | process of liquidating a position.
78 | capital_base : integer
79 | Capital base multiplied by portfolio allocation to compute
80 | position value that needs liquidating.
81 |     mean_volume_window : integer
82 | Trailing window to use in mean volume calculation.
83 |
84 | Returns
85 | -------
86 | days_to_liquidate : pd.DataFrame
87 | Number of days required to fully liquidate daily positions.
88 | Datetime index, symbols as columns.
89 | """
90 |
91 | DV = market_data.xs("volume", level=1) * market_data.xs("price", level=1)
92 | roll_mean_dv = DV.rolling(window=mean_volume_window, center=False).mean().shift()
93 | roll_mean_dv = roll_mean_dv.replace(0, np.nan)
94 |
95 | positions_alloc = pos.get_percent_alloc(positions)
96 | positions_alloc = positions_alloc.drop("cash", axis=1)
97 |
98 | days_to_liquidate = (positions_alloc * capital_base) / (
99 | max_bar_consumption * roll_mean_dv
100 | )
101 |
102 | return days_to_liquidate.iloc[mean_volume_window:]
103 |
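# Worked example (hypothetical numbers): with capital_base=1e6 and
# max_bar_consumption=0.2, a 10% allocation in a name whose trailing mean
# daily dollar volume is $2.5MM takes
# (0.10 * 1e6) / (0.2 * 2.5e6) = 0.2 days to liquidate.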
104 |
105 | def get_max_days_to_liquidate_by_ticker(
106 | positions,
107 | market_data,
108 | max_bar_consumption=0.2,
109 | capital_base=1e6,
110 | mean_volume_window=5,
111 | last_n_days=None,
112 | ):
113 | """
114 | Finds the longest estimated liquidation time for each traded
115 | name over the course of backtest (or last n days of the backtest).
116 |
117 | Parameters
118 | ----------
119 | positions: pd.DataFrame
120 | Contains daily position values including cash
121 | - See full explanation in tears.create_full_tear_sheet
122 | market_data : pd.DataFrame
123 |         Daily market data.
124 |         - DataFrame with a two-level index of (date, field), where field
125 |           is 'price' or 'volume'; equities as columns.
126 | max_bar_consumption : float
127 | Max proportion of a daily bar that can be consumed in the
128 | process of liquidating a position.
129 | capital_base : integer
130 | Capital base multiplied by portfolio allocation to compute
131 | position value that needs liquidating.
132 |     mean_volume_window : integer
133 | Trailing window to use in mean volume calculation.
134 | last_n_days : integer
135 | Compute for only the last n days of the passed backtest data.
136 |
137 | Returns
138 | -------
139 | days_to_liquidate : pd.DataFrame
140 | Max Number of days required to fully liquidate each traded name.
141 | Index of symbols. Columns for days_to_liquidate and the corresponding
142 | date and position_alloc on that day.
143 | """
144 |
145 | dtlp = days_to_liquidate_positions(
146 | positions,
147 | market_data,
148 | max_bar_consumption=max_bar_consumption,
149 | capital_base=capital_base,
150 | mean_volume_window=mean_volume_window,
151 | )
152 |
153 | if last_n_days is not None:
154 | dtlp = dtlp.loc[dtlp.index.max() - pd.Timedelta(days=last_n_days) :]
155 |
156 | pos_alloc = pos.get_percent_alloc(positions)
157 | pos_alloc = pos_alloc.drop("cash", axis=1)
158 |
159 | liq_desc = pd.DataFrame()
160 | liq_desc["days_to_liquidate"] = dtlp.unstack()
161 | liq_desc["pos_alloc_pct"] = pos_alloc.unstack() * 100
162 | liq_desc.index.set_names(["symbol", "date"], inplace=True)
163 |
164 | worst_liq = (
165 | liq_desc.reset_index()
166 | .sort_values("days_to_liquidate", ascending=False)
167 | .groupby("symbol")
168 | .first()
169 | )
170 |
171 | return worst_liq
172 |
173 |
174 | def get_low_liquidity_transactions(transactions, market_data, last_n_days=None):
175 | """
176 | For each traded name, find the daily transaction total that consumed
177 | the greatest proportion of available daily bar volume.
178 |
179 | Parameters
180 | ----------
181 | transactions : pd.DataFrame
182 | Prices and amounts of executed trades. One row per trade.
183 | - See full explanation in create_full_tear_sheet.
184 | market_data : pd.DataFrame
185 |         Daily market data.
186 |         - DataFrame with a two-level index of (date, field), where field
187 |           is 'price' or 'volume'; equities as columns.
188 | last_n_days : integer
189 | Compute for only the last n days of the passed backtest data.
190 | """
191 |
192 | txn_daily_w_bar = daily_txns_with_bar_data(transactions, market_data)
193 | txn_daily_w_bar.index.name = "date"
194 | txn_daily_w_bar = txn_daily_w_bar.reset_index()
195 |
196 | if last_n_days is not None:
197 | md = txn_daily_w_bar.date.max() - pd.Timedelta(days=last_n_days)
198 | txn_daily_w_bar = txn_daily_w_bar[txn_daily_w_bar.date > md]
199 |
200 | bar_consumption = txn_daily_w_bar.assign(
201 | max_pct_bar_consumed=txn_daily_w_bar.amount.div(txn_daily_w_bar.volume).mul(100)
202 | ).sort_values("max_pct_bar_consumed", ascending=False)
203 | max_bar_consumption = bar_consumption.groupby("symbol").first()
204 |
205 | return max_bar_consumption[["date", "max_pct_bar_consumed"]]
206 |
207 |
208 | def apply_slippage_penalty(
209 | returns,
210 | txn_daily,
211 | simulate_starting_capital,
212 | backtest_starting_capital,
213 | impact=0.1,
214 | ):
215 | """
216 |     Applies a quadratic volume-share slippage model to daily returns based
217 | on the proportion of the observed historical daily bar dollar volume
218 | consumed by the strategy's trades. Scales the size of trades based
219 | on the ratio of the starting capital we wish to test to the starting
220 | capital of the passed backtest data.
221 |
222 | Parameters
223 | ----------
224 | returns : pd.Series
225 | Time series of daily returns.
226 |     txn_daily : pd.DataFrame
227 |         Daily transaction totals, closing price, and daily volume for
228 |         each traded name. See daily_txns_with_bar_data for more details.
229 |     simulate_starting_capital : integer
230 |         Capital base at which we want to test.
231 |     backtest_starting_capital : integer
232 |         Capital base at which the backtest was originally run.
233 |     impact : float
234 |         Scales the size of the slippage penalty (see the Zipline volume-share slippage model).
235 |
236 | Returns
237 | -------
238 | adj_returns : pd.Series
239 | Slippage penalty adjusted daily returns.
240 | """
241 |
242 | mult = simulate_starting_capital / backtest_starting_capital
243 | simulate_traded_shares = abs(mult * txn_daily.amount)
244 | simulate_traded_dollars = txn_daily.price * simulate_traded_shares
245 | simulate_pct_volume_used = simulate_traded_shares / txn_daily.volume
246 |
247 | penalties = simulate_pct_volume_used**2 * impact * simulate_traded_dollars
248 |
249 | daily_penalty = penalties.resample("D").sum()
250 | daily_penalty = daily_penalty.reindex(returns.index).fillna(0)
251 |
252 | # Since we are scaling the numerator of the penalties linearly
253 | # by capital base, it makes the most sense to scale the denominator
254 | # similarly. In other words, since we aren't applying compounding to
255 | # simulate_traded_shares, we shouldn't apply compounding to pv.
256 | portfolio_value = (
257 | ep.cum_returns(returns, starting_value=backtest_starting_capital) * mult
258 | )
259 |
260 | adj_returns = returns - (daily_penalty / portfolio_value)
261 |
262 | return adj_returns
263 |
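if __name__ == "__main__":
    # Minimal sketch of the quadratic volume-share penalty on synthetic
    # data: consuming 1% of a daily bar with impact=0.1 deducts
    # 0.01 ** 2 * 0.1 = 1e-5 of the traded dollars from that day's return.
    idx = pd.date_range("2020-01-01", periods=3, freq="D")
    rets = pd.Series([0.010, -0.005, 0.002], index=idx)
    txns = pd.DataFrame(
        {
            "amount": [1_000, 500, 750],
            "price": [10.0, 10.1, 10.2],
            "volume": [100_000, 100_000, 100_000],
        },
        index=idx,
    )
    print(apply_slippage_penalty(rets, txns, 1e6, 1e6))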
--------------------------------------------------------------------------------
/src/pyfolio/deprecate.py:
--------------------------------------------------------------------------------
1 | """Utilities for marking deprecated functions."""
2 | # Copyright 2018 Quantopian, Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import warnings
17 | from functools import wraps
18 |
19 |
20 | def deprecated(msg=None, stacklevel=2):
21 | """
22 | Used to mark a function as deprecated.
23 | Parameters
24 | ----------
25 | msg : str
26 | The message to display in the deprecation warning.
27 | stacklevel : int
28 | How far up the stack the warning needs to go, before
29 | showing the relevant calling lines.
30 | Usage
31 | -----
32 | @deprecated(msg='function_a is deprecated! Use function_b instead.')
33 | def function_a(*args, **kwargs):
34 | """
35 |
36 | def deprecated_dec(fn):
37 | @wraps(fn)
38 | def wrapper(*args, **kwargs):
39 | warnings.warn(
40 | msg or "Function %s is deprecated." % fn.__name__,
41 | category=DeprecationWarning,
42 | stacklevel=stacklevel,
43 | )
44 | return fn(*args, **kwargs)
45 |
46 | return wrapper
47 |
48 | return deprecated_dec
49 |
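if __name__ == "__main__":
    # Minimal sketch: calling the decorated function emits a
    # DeprecationWarning carrying the supplied message.
    @deprecated(msg="function_a is deprecated! Use function_b instead.")
    def function_a():
        return 42

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        function_a()
        print(caught[0].message)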
--------------------------------------------------------------------------------
/src/pyfolio/examples/results.pickle:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/stefan-jansen/pyfolio-reloaded/6b55da7fce365e325046bb080cb13ced1822de95/src/pyfolio/examples/results.pickle
--------------------------------------------------------------------------------
/src/pyfolio/examples/round_trip_tear_sheet_example.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Round Trip Analysis"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "When evaluating the performance of an investing strategy, it is helpful to quantify the frequency, duration, and profitability of its independent bets, or \"round trip\" trades. A round trip trade is started when a new long or short position is opened and then later completely or partially closed out.\n",
15 | "\n",
16 | "The intent of the round trip tearsheet is to help differentiate strategies that profited off a few lucky trades from strategies that profited repeatedly from genuine alpha. Breaking down round trip profitability by traded name and sector can also help inform universe selection and identify exposure risks. For example, even if your equity curve looks robust, if only two securities in your universe of fifteen names contributed to overall profitability, you may have reason to question the logic of your strategy.\n",
17 | "\n",
18 | "To identify round trips, pyfolio reconstructs the complete portfolio based on the transactions that you pass in. When you make a trade, pyfolio checks if shares are already present in your portfolio purchased at a certain price. If there are, we compute the PnL, returns and duration of that round trip trade. In calculating round trips, pyfolio will also append position closing transactions at the last timestamp in the positions data. This closing transaction will cause the PnL from any open positions to realized as completed round trips."
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {},
24 | "source": [
25 | "## Imports & Settins"
26 | ]
27 | },
28 | {
29 | "cell_type": "code",
30 | "execution_count": 1,
31 | "metadata": {
32 | "ExecuteTime": {
33 | "end_time": "2021-09-07T02:43:48.874652Z",
34 | "start_time": "2021-09-07T02:43:48.864891Z"
35 | }
36 | },
37 | "source": [
38 | "# silence warnings\n",
39 | "import warnings\n",
40 | "warnings.filterwarnings('ignore')"
41 | ],
42 | "outputs": []
43 | },
44 | {
45 | "cell_type": "code",
46 | "execution_count": 2,
47 | "metadata": {
48 | "ExecuteTime": {
49 | "end_time": "2021-09-07T02:43:50.546591Z",
50 | "start_time": "2021-09-07T02:43:48.989776Z"
51 | }
52 | },
53 | "source": [
54 | "import pyfolio as pf\n",
55 | "%matplotlib inline\n",
56 | "import gzip\n",
57 | "import os\n",
58 | "import pandas as pd"
59 | ],
60 | "outputs": []
61 | },
62 | {
63 | "cell_type": "markdown",
64 | "metadata": {},
65 | "source": [
66 | "## Load Data"
67 | ]
68 | },
69 | {
70 | "cell_type": "code",
71 | "execution_count": 3,
72 | "metadata": {
73 | "ExecuteTime": {
74 | "end_time": "2021-09-07T02:43:51.697971Z",
75 | "start_time": "2021-09-07T02:43:51.616535Z"
76 | }
77 | },
78 | "source": [
79 | "transactions = pd.read_csv(gzip.open('../tests/test_data/test_txn.csv.gz'),\n",
80 | " index_col=0, parse_dates=True)\n",
81 | "positions = pd.read_csv(gzip.open('../tests/test_data/test_pos.csv.gz'),\n",
82 | " index_col=0, parse_dates=True)\n",
83 | "returns = pd.read_csv(gzip.open('../tests/test_data/test_returns.csv.gz'),\n",
84 | " index_col=0, parse_dates=True, header=None)[1]"
85 | ],
86 | "outputs": []
87 | },
88 | {
89 | "cell_type": "markdown",
90 | "metadata": {},
91 | "source": [
92 | "## Add Sector Mapping"
93 | ]
94 | },
95 | {
96 | "cell_type": "code",
97 | "execution_count": 4,
98 | "metadata": {
99 | "ExecuteTime": {
100 | "end_time": "2021-09-07T02:43:52.439793Z",
101 | "start_time": "2021-09-07T02:43:52.437219Z"
102 | }
103 | },
104 | "source": [
105 | "# Optional: Sector mappings may be passed in as a dict or pd.Series. If a mapping is\n",
106 | "# provided, PnL from symbols with mappings will be summed to display profitability by sector.\n",
107 | "sect_map = {'COST': 'Consumer Goods', 'INTC':'Technology', 'CERN':'Healthcare', 'GPS':'Technology',\n",
108 | " 'MMM': 'Construction', 'DELL': 'Technology', 'AMD':'Technology'}"
109 | ],
110 | "outputs": []
111 | },
112 | {
113 | "cell_type": "markdown",
114 | "metadata": {},
115 | "source": [
116 | "## Run Round Trip Tear Sheet"
117 | ]
118 | },
119 | {
120 | "cell_type": "markdown",
121 | "metadata": {},
122 | "source": [
123 | "The easiest way to run the analysis is to call `pyfolio.create_round_trip_tear_sheet()`. Passing in a sector map is optional. You can also pass `round_trips=True` to `pyfolio.create_full_tear_sheet()` to have this be created along all the other analyses."
124 | ]
125 | },
126 | {
127 | "cell_type": "code",
128 | "execution_count": 5,
129 | "metadata": {
130 | "ExecuteTime": {
131 | "end_time": "2021-09-07T02:44:23.168045Z",
132 | "start_time": "2021-09-07T02:43:54.942707Z"
133 | },
134 | "scrolled": false
135 | },
136 | "source": [
137 | "pf.create_round_trip_tear_sheet(returns, positions, transactions, sector_mappings=sect_map)"
138 | ],
139 | "outputs": []
140 | },
141 | {
142 | "cell_type": "markdown",
143 | "metadata": {},
144 | "source": [
145 | "## Explore underlying functions"
146 | ]
147 | },
148 | {
149 | "cell_type": "markdown",
150 | "metadata": {},
151 | "source": [
152 | "Under the hood, several functions are being called. `extract_round_trips()` does the portfolio reconstruction and creates the round-trip trades."
153 | ]
154 | },
155 | {
156 | "cell_type": "code",
157 | "execution_count": 6,
158 | "metadata": {
159 | "ExecuteTime": {
160 | "end_time": "2021-09-07T02:44:28.339232Z",
161 | "start_time": "2021-09-07T02:44:23.171046Z"
162 | }
163 | },
164 | "source": [
165 | "rts = pf.round_trips.extract_round_trips(transactions, \n",
166 | " portfolio_value=positions.sum(axis='columns') / (returns + 1))"
167 | ],
168 | "outputs": []
169 | },
170 | {
171 | "cell_type": "code",
172 | "execution_count": 7,
173 | "metadata": {
174 | "ExecuteTime": {
175 | "end_time": "2021-09-07T02:44:28.358198Z",
176 | "start_time": "2021-09-07T02:44:28.340552Z"
177 | }
178 | },
179 | "source": [
180 | "rts.head()"
181 | ],
182 | "outputs": []
183 | },
184 | {
185 | "cell_type": "code",
186 | "execution_count": 8,
187 | "metadata": {
188 | "ExecuteTime": {
189 | "end_time": "2021-09-07T02:44:28.443008Z",
190 | "start_time": "2021-09-07T02:44:28.359105Z"
191 | }
192 | },
193 | "source": [
194 | "pf.round_trips.print_round_trip_stats(rts)"
195 | ],
196 | "outputs": []
197 | }
198 | ],
199 | "metadata": {
200 | "kernelspec": {
201 | "display_name": "Python 3",
202 | "language": "python",
203 | "name": "python3"
204 | },
205 | "language_info": {
206 | "codemirror_mode": {
207 | "name": "ipython",
208 | "version": 3
209 | },
210 | "file_extension": ".py",
211 | "mimetype": "text/x-python",
212 | "name": "python",
213 | "nbconvert_exporter": "python",
214 | "pygments_lexer": "ipython3",
215 | "version": "3.8.8"
216 | },
217 | "toc": {
218 | "base_numbering": 1,
219 | "nav_menu": {},
220 | "number_sections": true,
221 | "sideBar": true,
222 | "skip_h1_title": false,
223 | "title_cell": "Table of Contents",
224 | "title_sidebar": "Contents",
225 | "toc_cell": false,
226 | "toc_position": {},
227 | "toc_section_display": true,
228 | "toc_window_display": false
229 | }
230 | },
231 | "nbformat": 4,
232 | "nbformat_minor": 1
233 | }
234 |
--------------------------------------------------------------------------------
/src/pyfolio/interesting_periods.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2016 Quantopian, Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | """Generates a list of historical event dates that may have had
17 | significant impact on markets. See extract_interesting_date_ranges."""
18 |
19 | import datetime as dt
20 | from collections import OrderedDict
21 |
22 | import pandas as pd
23 |
24 | PERIODS = OrderedDict()
25 | # Dotcom bubble
26 | PERIODS["Dotcom"] = (
27 | pd.Timestamp("20000310", tzinfo=dt.timezone.utc),
28 | pd.Timestamp("20000910", tzinfo=dt.timezone.utc),
29 | )
30 |
31 | # Lehman Brothers
32 | PERIODS["Lehman"] = (
33 | pd.Timestamp("20080801", tzinfo=dt.timezone.utc),
34 | pd.Timestamp("20081001", tzinfo=dt.timezone.utc),
35 | )
36 |
37 | # 9/11
38 | PERIODS["9/11"] = (
39 | pd.Timestamp("20010911", tzinfo=dt.timezone.utc),
40 | pd.Timestamp("20011011", tzinfo=dt.timezone.utc),
41 | )
42 |
43 | # 05/08/11 US downgrade and European Debt Crisis 2011
44 | PERIODS["US downgrade/European Debt Crisis"] = (
45 | pd.Timestamp("20110805", tzinfo=dt.timezone.utc),
46 | pd.Timestamp("20110905", tzinfo=dt.timezone.utc),
47 | )
48 |
49 | # 16/03/11 Fukushima meltdown 2011
50 | PERIODS["Fukushima"] = (
51 | pd.Timestamp("20110316", tzinfo=dt.timezone.utc),
52 | pd.Timestamp("20110416", tzinfo=dt.timezone.utc),
53 | )
54 |
55 | # 01/08/03 US Housing Bubble 2003
56 | PERIODS["US Housing"] = (
57 | pd.Timestamp("20030108", tzinfo=dt.timezone.utc),
58 | pd.Timestamp("20030208", tzinfo=dt.timezone.utc),
59 | )
60 |
61 | # 06/09/12 EZB IR Event 2012
62 | PERIODS["EZB IR Event"] = (
63 | pd.Timestamp("20120910", tzinfo=dt.timezone.utc),
64 | pd.Timestamp("20121010", tzinfo=dt.timezone.utc),
65 | )
66 |
67 | # August 2007, March and September of 2008, Q1 & Q2 2009,
68 | PERIODS["Aug07"] = (
69 | pd.Timestamp("20070801", tzinfo=dt.timezone.utc),
70 | pd.Timestamp("20070901", tzinfo=dt.timezone.utc),
71 | )
72 | PERIODS["Mar08"] = (
73 | pd.Timestamp("20080301", tzinfo=dt.timezone.utc),
74 | pd.Timestamp("20080401", tzinfo=dt.timezone.utc),
75 | )
76 | PERIODS["Sept08"] = (
77 | pd.Timestamp("20080901", tzinfo=dt.timezone.utc),
78 | pd.Timestamp("20081001", tzinfo=dt.timezone.utc),
79 | )
80 | PERIODS["2009Q1"] = (
81 | pd.Timestamp("20090101", tzinfo=dt.timezone.utc),
82 | pd.Timestamp("20090301", tzinfo=dt.timezone.utc),
83 | )
84 | PERIODS["2009Q2"] = (
85 | pd.Timestamp("20090301", tzinfo=dt.timezone.utc),
86 | pd.Timestamp("20090601", tzinfo=dt.timezone.utc),
87 | )
88 |
89 | # Flash Crash (May 6, 2010 + 1 week post),
90 | PERIODS["Flash Crash"] = (
91 | pd.Timestamp("20100505", tzinfo=dt.timezone.utc),
92 | pd.Timestamp("20100510", tzinfo=dt.timezone.utc),
93 | )
94 |
95 | # April and October 2014).
96 | PERIODS["Apr14"] = (
97 | pd.Timestamp("20140401", tzinfo=dt.timezone.utc),
98 | pd.Timestamp("20140501", tzinfo=dt.timezone.utc),
99 | )
100 | PERIODS["Oct14"] = (
101 | pd.Timestamp("20141001", tzinfo=dt.timezone.utc),
102 | pd.Timestamp("20141101", tzinfo=dt.timezone.utc),
103 | )
104 |
105 | # Market down-turn in August/Sept 2015
106 | PERIODS["Fall2015"] = (
107 | pd.Timestamp("20150815", tzinfo=dt.timezone.utc),
108 | pd.Timestamp("20150930", tzinfo=dt.timezone.utc),
109 | )
110 |
111 | # Market regimes
112 | PERIODS["Low Volatility Bull Market"] = (
113 | pd.Timestamp("20050101", tzinfo=dt.timezone.utc),
114 | pd.Timestamp("20070801", tzinfo=dt.timezone.utc),
115 | )
116 |
117 | PERIODS["GFC Crash"] = (
118 | pd.Timestamp("20070801", tzinfo=dt.timezone.utc),
119 | pd.Timestamp("20090401", tzinfo=dt.timezone.utc),
120 | )
121 |
122 | PERIODS["Recovery"] = (
123 | pd.Timestamp("20090401", tzinfo=dt.timezone.utc),
124 | pd.Timestamp("20130101", tzinfo=dt.timezone.utc),
125 | )
126 |
127 | PERIODS["New Normal"] = (
128 | pd.Timestamp("20130101", tzinfo=dt.timezone.utc),
129 | pd.Timestamp("20180921", tzinfo=dt.timezone.utc),
130 | )
131 |
132 | PERIODS["Covid"] = (
133 | pd.Timestamp("20200211", tzinfo=dt.timezone.utc),
134 | pd.Timestamp("today", tzinfo=dt.timezone.utc),
135 | )
136 |
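if __name__ == "__main__":
    # Minimal sketch: each entry maps a period name to a (start, end) pair
    # of UTC timestamps, suitable for slicing a tz-aware returns series,
    # e.g. returns.loc[start:end].
    start, end = PERIODS["Lehman"]
    print(start, end)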
--------------------------------------------------------------------------------
/src/pyfolio/ipycompat.py:
--------------------------------------------------------------------------------
1 | import IPython
2 |
3 | IPY_MAJOR = IPython.version_info[0]
4 | if IPY_MAJOR < 3:
5 | raise ImportError("IPython version %d is not supported." % IPY_MAJOR)
6 |
7 | IPY3 = IPY_MAJOR == 3
8 |
9 | # IPython underwent a major refactor between versions 3 and 4. Many of the
10 | # imports in version 4 have aliases to their old locations in 3, but they raise
11 | # noisy deprecation warnings. By conditionally importing here, we can support
12 | # older versions without triggering warnings for users on new versions.
13 | if IPY3:
14 | from IPython.nbformat import read
15 | else:
16 | from nbformat import read
17 |
18 |
19 | __all__ = ["read"]
20 |
--------------------------------------------------------------------------------
/src/pyfolio/pos.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2016 Quantopian, Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | import pandas as pd
16 | import numpy as np
17 | import warnings
18 |
19 | try:
20 | from zipline.assets import Equity, Future
21 |
22 | ZIPLINE = True
23 | except ImportError:
24 | ZIPLINE = False
25 | warnings.warn(
26 | 'Module "zipline.assets" not found; multipliers will not be applied'
27 | " to position notionals."
28 | )
29 |
30 |
31 | def get_percent_alloc(values):
32 | """
33 | Determines a portfolio's allocations.
34 |
35 | Parameters
36 | ----------
37 | values : pd.DataFrame
38 | Contains position values or amounts.
39 |
40 | Returns
41 | -------
42 | allocations : pd.DataFrame
43 | Positions and their allocations.
44 | """
45 |
46 | return values.divide(values.sum(axis="columns"), axis="rows")
47 |
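# Worked example: position values {A: 50, B: 30, cash: 20} on a given day
# sum to 100, so the row-wise division yields allocations
# {A: 0.5, B: 0.3, cash: 0.2}.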
48 |
49 | def get_top_long_short_abs(positions, top=10):
50 | """
51 | Finds the top long, short, and absolute positions.
52 |
53 | Parameters
54 | ----------
55 | positions : pd.DataFrame
56 | The positions that the strategy takes over time.
57 | top : int, optional
58 | How many of each to find (default 10).
59 |
60 | Returns
61 | -------
62 | df_top_long : pd.DataFrame
63 | Top long positions.
64 | df_top_short : pd.DataFrame
65 | Top short positions.
66 | df_top_abs : pd.DataFrame
67 | Top absolute positions.
68 | """
69 |
70 | positions = positions.drop("cash", axis="columns")
71 | df_max = positions.max()
72 | df_min = positions.min()
73 | df_abs_max = positions.abs().max()
74 | df_top_long = df_max[df_max > 0].nlargest(top)
75 | df_top_short = df_min[df_min < 0].nsmallest(top)
76 | df_top_abs = df_abs_max.nlargest(top)
77 | return df_top_long, df_top_short, df_top_abs
78 |
79 |
80 | def get_max_median_position_concentration(positions):
81 | """
82 | Finds the max and median long and short position concentrations
83 | in each time period specified by the index of positions.
84 |
85 | Parameters
86 | ----------
87 | positions : pd.DataFrame
88 | The positions that the strategy takes over time.
89 |
90 | Returns
91 | -------
92 | pd.DataFrame
93 | Columns are max long, max short, median long, and median short
94 | position concentrations. Rows are timeperiods.
95 | """
96 |
97 | expos = get_percent_alloc(positions)
98 | expos = expos.drop("cash", axis=1)
99 |
100 | longs = expos.where(expos.applymap(lambda x: x > 0))
101 | shorts = expos.where(expos.applymap(lambda x: x < 0))
102 |
103 | alloc_summary = pd.DataFrame()
104 | alloc_summary["max_long"] = longs.max(axis=1)
105 | alloc_summary["median_long"] = longs.median(axis=1)
106 | alloc_summary["median_short"] = shorts.median(axis=1)
107 | alloc_summary["max_short"] = shorts.min(axis=1)
108 |
109 | return alloc_summary
110 |
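# Worked example: daily allocations {A: 0.6, B: 0.2, C: -0.3} give
# max_long=0.6, median_long=0.4, median_short=-0.3 and max_short=-0.3.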
111 |
112 | def extract_pos(positions, cash):
113 | """
114 | Extract position values from backtest object as returned by
115 | get_backtest() on the Quantopian research platform.
116 |
117 | Parameters
118 | ----------
119 | positions : pd.DataFrame
120 | timeseries containing one row per symbol (and potentially
121 | duplicate datetime indices) and columns for amount and
122 | last_sale_price.
123 | cash : pd.Series
124 | timeseries containing cash in the portfolio.
125 |
126 | Returns
127 | -------
128 | pd.DataFrame
129 | Daily net position values.
130 | - See full explanation in tears.create_full_tear_sheet.
131 | """
132 |
133 | positions = positions.copy()
134 | positions["values"] = positions.amount * positions.last_sale_price
135 |
136 | cash.name = "cash"
137 |
138 | values = positions.reset_index().pivot_table(
139 | index="index", columns="sid", values="values"
140 | )
141 |
142 | if ZIPLINE:
143 | for asset in values.columns:
144 | if type(asset) in [Equity, Future]:
145 | values[asset] = values[asset] * asset.price_multiplier
146 |
147 | values = values.join(cash).fillna(0)
148 |
149 | # NOTE: Set name of DataFrame.columns to sid, to match the behavior
150 | # of DataFrame.join in earlier versions of pandas.
151 | values.columns.name = "sid"
152 |
153 | return values
154 |
155 |
156 | def get_sector_exposures(positions, symbol_sector_map):
157 | """
158 | Sum position exposures by sector.
159 |
160 | Parameters
161 | ----------
162 | positions : pd.DataFrame
163 | Contains position values or amounts.
164 | - Example
165 | index 'AAPL' 'MSFT' 'CHK' cash
166 | 2004-01-09 13939.380 -15012.993 -403.870 1477.483
167 | 2004-01-12 14492.630 -18624.870 142.630 3989.610
168 | 2004-01-13 -13853.280 13653.640 -100.980 100.000
169 | symbol_sector_map : dict or pd.Series
170 | Security identifier to sector mapping.
171 | Security ids as keys/index, sectors as values.
172 | - Example:
173 | {'AAPL' : 'Technology'
174 | 'MSFT' : 'Technology'
175 | 'CHK' : 'Natural Resources'}
176 |
177 | Returns
178 | -------
179 | sector_exp : pd.DataFrame
180 | Sectors and their allocations.
181 | - Example:
182 | index 'Technology' 'Natural Resources' cash
183 | 2004-01-09 -1073.613 -403.870 1477.4830
184 | 2004-01-12 -4132.240 142.630 3989.6100
185 | 2004-01-13 -199.640 -100.980 100.0000
186 | """
187 |
188 | cash = positions["cash"]
189 | positions = positions.drop("cash", axis=1)
190 |
191 | unmapped_pos = np.setdiff1d(
192 | positions.columns.values, list(symbol_sector_map.keys())
193 | )
194 | if len(unmapped_pos) > 0:
195 | warn_message = """Warning: Symbols {} have no sector mapping.
196 | They will not be included in sector allocations""".format(
197 | ", ".join(map(str, unmapped_pos))
198 | )
199 | warnings.warn(warn_message, UserWarning)
200 |
201 | sector_exp = positions.groupby(by=symbol_sector_map, axis=1).sum()
202 |
203 | sector_exp["cash"] = cash
204 |
205 | return sector_exp
206 |
207 |
208 | def get_long_short_pos(positions):
209 | """
210 | Determines the long and short allocations in a portfolio.
211 |
212 | Parameters
213 | ----------
214 | positions : pd.DataFrame
215 | The positions that the strategy takes over time.
216 |
217 | Returns
218 | -------
219 | df_long_short : pd.DataFrame
220 | Long and short allocations as a decimal
221 | percentage of the total net liquidation
222 | """
223 |
224 | pos_wo_cash = positions.drop("cash", axis=1)
225 | longs = pos_wo_cash[pos_wo_cash > 0].sum(axis=1).fillna(0)
226 | shorts = pos_wo_cash[pos_wo_cash < 0].sum(axis=1).fillna(0)
227 | cash = positions.cash
228 | net_liquidation = longs + shorts + cash
229 | df_pos = pd.DataFrame(
230 | {
231 | "long": longs.divide(net_liquidation, axis="index"),
232 | "short": shorts.divide(net_liquidation, axis="index"),
233 | }
234 | )
235 | df_pos["net exposure"] = df_pos["long"] + df_pos["short"]
236 | return df_pos
237 |
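if __name__ == "__main__":
    # Minimal sketch on synthetic data: net liquidation on the first day
    # is 100 - 50 + 50 = 100, so long=1.0, short=-0.5, net exposure=0.5.
    positions = pd.DataFrame(
        {"AAPL": [100.0, 120.0], "MSFT": [-50.0, -40.0], "cash": [50.0, 20.0]},
        index=pd.date_range("2020-01-01", periods=2),
    )
    print(get_long_short_pos(positions))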
--------------------------------------------------------------------------------
/src/pyfolio/round_trips.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2016 Quantopian, Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | import warnings
16 | from collections import deque, OrderedDict
17 | from math import copysign
18 |
19 | import numpy as np
20 | import pandas as pd
21 |
22 | from .utils import print_table, format_asset
23 |
24 | PNL_STATS = [
25 | ("Total profit", lambda x: x.sum()),
26 | ("Gross profit", lambda x: x[x > 0].sum()),
27 | ("Gross loss", lambda x: x[x < 0].sum()),
28 | (
29 | "Profit factor",
30 | lambda x: (
31 | x[x > 0].sum() / x[x < 0].abs().sum()
32 | if x[x < 0].abs().sum() != 0
33 | else np.nan
34 | ),
35 | ),
36 | ("Avg. trade net profit", "mean"),
37 | ("Avg. winning trade", lambda x: x[x > 0].mean()),
38 | ("Avg. losing trade", lambda x: x[x < 0].mean()),
39 | (
40 | "Ratio Avg. Win:Avg. Loss",
41 | lambda x: (
42 | x[x > 0].mean() / x[x < 0].abs().mean()
43 | if x[x < 0].abs().mean() != 0
44 | else np.nan
45 | ),
46 | ),
47 | ("Largest winning trade", "max"),
48 | ("Largest losing trade", "min"),
49 | ]
50 |
51 | SUMMARY_STATS = [
52 | ("Total number of round_trips", "count"),
53 | ("Percent profitable", lambda x: len(x[x > 0]) / float(len(x))),
54 | ("Winning round_trips", lambda x: len(x[x > 0])),
55 | ("Losing round_trips", lambda x: len(x[x < 0])),
56 | ("Even round_trips", lambda x: len(x[x == 0])),
57 | ]
58 |
59 | RETURN_STATS = [
60 | ("Avg returns all round_trips", lambda x: x.mean()),
61 | ("Avg returns winning", lambda x: x[x > 0].mean()),
62 | ("Avg returns losing", lambda x: x[x < 0].mean()),
63 | ("Median returns all round_trips", lambda x: x.median()),
64 | ("Median returns winning", lambda x: x[x > 0].median()),
65 | ("Median returns losing", lambda x: x[x < 0].median()),
66 | ("Largest winning trade", "max"),
67 | ("Largest losing trade", "min"),
68 | ]
69 |
70 | DURATION_STATS = [
71 | ("Avg duration", lambda x: x.mean()),
72 | ("Median duration", lambda x: x.median()),
73 | ("Longest duration", lambda x: x.max()),
74 | ("Shortest duration", lambda x: x.min()),
75 | # FIXME: Instead of x.max() - x.min() this should be
76 | # rts.close_dt.max() - rts.open_dt.min() which is not
77 | # available here. As it would require a new approach here
78 | # that passes in multiple fields we disable these measures
79 | # for now.
80 | # ('Avg # round_trips per day', lambda x: float(len(x)) /
81 | # (x.max() - x.min()).days),
82 | # ('Avg # round_trips per month', lambda x: float(len(x)) /
83 | # (((x.max() - x.min()).days) / APPROX_BDAYS_PER_MONTH)),
84 | ]
85 |
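# Worked example for the PnL stats above: round-trip PnLs of [+10, -5, +15]
# give a gross profit of 25, a gross loss of -5, and a profit factor of
# 25 / 5 = 5.0.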
86 |
87 | def agg_all_long_short(round_trips, col, stats_dict):
88 | stats_all = (
89 | round_trips.assign(ones=1)
90 | .groupby("ones")[col]
91 | .agg(stats_dict)
92 | .T.rename(columns={1.0: "All trades"})
93 | )
94 | stats_long_short = (
95 | round_trips.groupby("long")[col]
96 | .agg(stats_dict)
97 | .T.rename(columns={False: "Short trades", True: "Long trades"})
98 | )
99 |
100 | return stats_all.join(stats_long_short)
101 |
102 |
103 | def _groupby_consecutive(txn, max_delta=pd.Timedelta("8h")):
104 | """Merge transactions of the same direction separated by less than
105 | max_delta time duration.
106 |
107 | Parameters
108 | ----------
109 | transactions : pd.DataFrame
110 |         Prices and amounts of executed trades. One row per trade.
111 | - See full explanation in tears.create_full_tear_sheet
112 |
113 | max_delta : pandas.Timedelta (optional)
114 | Merge transactions in the same direction separated by less
115 | than max_delta time duration.
116 |
117 |
118 | Returns
119 | -------
120 | transactions : pd.DataFrame
121 |
122 | """
123 |
124 | def vwap(transaction):
125 | if transaction.amount.sum() == 0:
126 | warnings.warn("Zero transacted shares, setting vwap to nan.")
127 | return np.nan
128 | return (transaction.amount * transaction.price).sum() / transaction.amount.sum()
129 |
130 | out = []
131 | for _, t in txn.groupby("symbol"):
132 | t = t.sort_index()
133 | t.index.name = "dt"
134 | t = t.reset_index()
135 |
136 | t["order_sign"] = t.amount > 0
137 | t["block_dir"] = (t.order_sign.shift(1) != t.order_sign).astype(int).cumsum()
138 | t["block_time"] = ((t.dt.sub(t.dt.shift(1))) > max_delta).astype(int).cumsum()
139 | grouped_price = t.groupby(["block_dir", "block_time"]).apply(vwap)
140 | grouped_price.name = "price"
141 | grouped_rest = t.groupby(["block_dir", "block_time"]).agg(
142 | {"amount": "sum", "symbol": "first", "dt": "first"}
143 | )
144 |
145 | grouped = grouped_rest.join(grouped_price)
146 |
147 | out.append(grouped)
148 |
149 | out = pd.concat(out)
150 | out = out.set_index("dt")
151 | return out
152 |
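# Minimal sketch (toy data): two same-direction buys ten minutes apart are
# merged into one transaction priced at their volume-weighted average (75.0).
if __name__ == "__main__":
    import pandas as pd

    txn = pd.DataFrame(
        {"symbol": ["AAPL", "AAPL"], "amount": [10, 10],
         "price": [50.0, 100.0]},
        index=pd.to_datetime(["2004-01-09 12:18:01", "2004-01-09 12:28:01"]),
    )
    print(_groupby_consecutive(txn))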
153 |
154 | def extract_round_trips(transactions, portfolio_value=None):
155 |     """Group transactions into "round trips". First, transactions are
156 |     merged by direction and proximity in time. Then, long and short
157 |     transactions are matched to create round-trip trades for which
158 |     PnL, duration and returns are computed. Crossings where a position
159 |     changes from long to short and vice-versa are handled correctly.
160 |
161 | Under the hood, we reconstruct the individual shares in a
162 |     portfolio over time and match trades in FIFO order.
163 |
164 | For example, the following transactions would constitute one round trip:
165 | index amount price symbol
166 | 2004-01-09 12:18:01 10 50 'AAPL'
167 | 2004-01-09 15:12:53 10 100 'AAPL'
168 | 2004-01-13 14:41:23 -10 100 'AAPL'
169 | 2004-01-13 15:23:34 -10 200 'AAPL'
170 |
171 |     First, the first two and last two transactions will be merged into two
172 | single transactions (computing the price via vwap). Then, during
173 | the portfolio reconstruction, the two resulting transactions will
174 | be merged and result in 1 round-trip trade with a PnL of
175 | (150 * 20) - (75 * 20) = 1500.
176 |
177 |     Note that round trips do not have to close out positions
178 | completely. For example, we could have removed the last
179 | transaction in the example above and still generated a round-trip
180 | over 10 shares with 10 shares left in the portfolio to be matched
181 | with a later transaction.
182 |
183 | Parameters
184 | ----------
185 | transactions : pd.DataFrame
186 |         Prices and amounts of executed trades. One row per trade.
187 | - See full explanation in tears.create_full_tear_sheet
188 |
189 | portfolio_value : pd.Series (optional)
190 | Portfolio value (all net assets including cash) over time.
191 |         Note that portfolio_value needs to be the beginning-of-day value,
192 |         so use .shift() or positions.sum(axis='columns') / (1 + returns).
193 |
194 | Returns
195 | -------
196 | round_trips : pd.DataFrame
197 |         DataFrame with one row per round trip. The returns column
198 |         contains returns with respect to the total portfolio value,
199 |         while rt_returns are the returns on the capital invested
200 |         in that particular round trip.
201 | """
202 |
203 | transactions = _groupby_consecutive(transactions)
204 | roundtrips = []
205 |
206 | for sym, trans_sym in transactions.groupby("symbol"):
207 | trans_sym = trans_sym.sort_index()
208 | price_stack = deque()
209 | dt_stack = deque()
210 | trans_sym["signed_price"] = trans_sym.price * np.sign(trans_sym.amount)
211 | trans_sym["abs_amount"] = trans_sym.amount.abs().astype(int)
212 | for dt, t in trans_sym.iterrows():
213 | if t.price < 0:
214 |                 warnings.warn("Negative price detected, ignoring for round-trip.")
215 | continue
216 |
217 | indiv_prices = [t.signed_price] * t.abs_amount
218 | if (len(price_stack) == 0) or (
219 | copysign(1, price_stack[-1]) == copysign(1, t.amount)
220 | ):
221 | price_stack.extend(indiv_prices)
222 | dt_stack.extend([dt] * len(indiv_prices))
223 | else:
224 | # Close round-trip
225 | pnl = 0
226 | invested = 0
227 | cur_open_dts = []
228 |
229 | for price in indiv_prices:
230 | if len(price_stack) != 0 and (
231 | copysign(1, price_stack[-1]) != copysign(1, price)
232 | ):
233 | # Retrieve first dt, stock-price pair from
234 | # stack
235 | prev_price = price_stack.popleft()
236 | prev_dt = dt_stack.popleft()
237 |
238 | pnl += -(price + prev_price)
239 | cur_open_dts.append(prev_dt)
240 | invested += abs(prev_price)
241 |
242 | else:
243 | # Push additional stock-prices onto stack
244 | price_stack.append(price)
245 | dt_stack.append(dt)
246 |
247 | roundtrips.append(
248 | {
249 | "pnl": pnl,
250 | "open_dt": cur_open_dts[0],
251 | "close_dt": dt,
252 | "long": price < 0,
253 | "rt_returns": pnl / invested,
254 | "symbol": sym,
255 | }
256 | )
257 |
258 | roundtrips = pd.DataFrame(roundtrips)
259 |
260 | roundtrips["duration"] = roundtrips["close_dt"].sub(roundtrips["open_dt"])
261 |
262 | if portfolio_value is not None:
263 | # Need to normalize so that we can join
264 | pv = pd.DataFrame(portfolio_value, columns=["portfolio_value"]).assign(
265 | date=portfolio_value.index
266 | )
267 |
268 | roundtrips["date"] = roundtrips.close_dt.apply(
269 | lambda x: x.replace(hour=0, minute=0, second=0)
270 | )
271 |
272 | tmp = (
273 | roundtrips.set_index("date")
274 | .join(pv.set_index("date"), lsuffix="_")
275 | .reset_index()
276 | )
277 |
278 | roundtrips["returns"] = tmp.pnl / tmp.portfolio_value
279 | roundtrips = roundtrips.drop("date", axis="columns")
280 |
281 | return roundtrips
282 |
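# Minimal sketch reproducing the docstring example above (toy data): the four
# fills collapse into one round trip with pnl == 1500.0.
if __name__ == "__main__":
    import pandas as pd

    txns = pd.DataFrame(
        {"amount": [10, 10, -10, -10],
         "price": [50.0, 100.0, 100.0, 200.0],
         "symbol": ["AAPL"] * 4},
        index=pd.to_datetime(["2004-01-09 12:18:01", "2004-01-09 15:12:53",
                              "2004-01-13 14:41:23", "2004-01-13 15:23:34"]),
    )
    print(extract_round_trips(txns))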
283 |
284 | def add_closing_transactions(positions, transactions):
285 | """
286 | Appends transactions that close out all positions at the end of
287 | the timespan covered by positions data. Utilizes pricing information
288 | in the positions DataFrame to determine closing price.
289 |
290 | Parameters
291 | ----------
292 | positions : pd.DataFrame
293 | The positions that the strategy takes over time.
294 | transactions : pd.DataFrame
295 |         Prices and amounts of executed trades. One row per trade.
296 | - See full explanation in tears.create_full_tear_sheet
297 |
298 | Returns
299 | -------
300 | closed_txns : pd.DataFrame
301 | Transactions with closing transactions appended.
302 | """
303 |
304 | closed_txns = transactions[["symbol", "amount", "price"]]
305 |
306 | pos_at_end = positions.drop("cash", axis=1).iloc[-1]
307 | open_pos = pos_at_end.replace(0, np.nan).dropna()
308 |     # Add closing transactions one second after the close to be sure
309 |     # they don't conflict with other transactions executed at that time.
310 | end_dt = open_pos.name + pd.Timedelta(seconds=1)
311 |
312 | for sym, ending_val in open_pos.items():
313 | txn_sym = transactions[transactions.symbol == sym]
314 |
315 | ending_amount = txn_sym.amount.sum()
316 |
317 | ending_price = ending_val / ending_amount
318 | closing_txn = OrderedDict(
319 | [
320 | ("amount", -ending_amount),
321 | ("price", ending_price),
322 | ("symbol", sym),
323 | ]
324 | )
325 |
326 | closing_txn = pd.DataFrame(closing_txn, index=[end_dt])
327 | closed_txns = pd.concat([closed_txns, closing_txn])
328 |
329 | closed_txns = closed_txns[closed_txns.amount != 0]
330 |
331 | return closed_txns
332 |
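# Minimal sketch (toy data): a $100 long position held through the last bar
# gets a synthetic closing sale (amount -10 at price 10.0) appended one second
# after the final timestamp.
if __name__ == "__main__":
    import pandas as pd

    dates = pd.date_range("2015-01-01", periods=2)
    positions = pd.DataFrame({"A": [100.0, 100.0], "cash": [0.0, 0.0]},
                             index=dates)
    txns = pd.DataFrame({"symbol": ["A"], "amount": [10], "price": [10.0]},
                        index=dates[:1])
    print(add_closing_transactions(positions, txns))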
333 |
334 | def apply_sector_mappings_to_round_trips(round_trips, sector_mappings):
335 | """
336 | Translates round trip symbols to sectors.
337 |
338 | Parameters
339 | ----------
340 | round_trips : pd.DataFrame
341 | DataFrame with one row per round trip trade.
342 | - See full explanation in round_trips.extract_round_trips
343 |     sector_mappings : dict or pd.Series
344 | Security identifier to sector mapping.
345 | Security ids as keys, sectors as values.
346 |
347 | Returns
348 | -------
349 | sector_round_trips : pd.DataFrame
350 | Round trips with symbol names replaced by sector names.
351 | """
352 |
353 | sector_round_trips = round_trips.copy()
354 | sector_round_trips.symbol = sector_round_trips.symbol.apply(
355 | lambda x: sector_mappings.get(x, "No Sector Mapping")
356 | )
357 | sector_round_trips = sector_round_trips.dropna(axis=0)
358 |
359 | return sector_round_trips
360 |
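# Minimal sketch (toy data): symbols with no entry in the mapping fall back to
# "No Sector Mapping".
if __name__ == "__main__":
    import pandas as pd

    rts = pd.DataFrame({"symbol": ["AAPL", "XOM"], "pnl": [1.0, -1.0]})
    print(apply_sector_mappings_to_round_trips(rts, {"AAPL": "Tech"}))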
361 |
362 | def gen_round_trip_stats(round_trips):
363 | """Generate various round-trip statistics.
364 |
365 | Parameters
366 | ----------
367 | round_trips : pd.DataFrame
368 | DataFrame with one row per round trip trade.
369 | - See full explanation in round_trips.extract_round_trips
370 |
371 | Returns
372 | -------
373 | stats : dict
374 | A dictionary where each value is a pandas DataFrame containing
375 | various round-trip statistics.
376 |
377 | See also
378 | --------
379 | round_trips.print_round_trip_stats
380 | """
381 |
382 | stats = {}
383 | stats["pnl"] = agg_all_long_short(round_trips, "pnl", PNL_STATS)
384 | stats["summary"] = agg_all_long_short(round_trips, "pnl", SUMMARY_STATS)
385 | stats["duration"] = agg_all_long_short(round_trips, "duration", DURATION_STATS)
386 | stats["returns"] = agg_all_long_short(round_trips, "returns", RETURN_STATS)
387 |
388 | stats["symbols"] = round_trips.groupby("symbol")["returns"].agg(RETURN_STATS).T
389 |
390 | return stats
391 |
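# Minimal sketch (toy data): the input frame needs 'pnl', 'returns', 'long',
# 'duration' and 'symbol' columns so all four stats tables can be built.
if __name__ == "__main__":
    import pandas as pd

    rts = pd.DataFrame({
        "pnl": [100.0, -50.0],
        "returns": [0.01, -0.005],
        "long": [True, False],
        "duration": pd.to_timedelta(["1 days", "2 days"]),
        "symbol": ["A", "B"],
    })
    print(gen_round_trip_stats(rts)["pnl"])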
392 |
393 | def print_round_trip_stats(round_trips, hide_pos=False):
394 |     """Print various round-trip statistics. Tries to pretty-print the
395 |     tables as HTML when run inside a Jupyter/IPython notebook.
396 |
397 | Parameters
398 | ----------
399 | round_trips : pd.DataFrame
400 | DataFrame with one row per round trip trade.
401 | - See full explanation in round_trips.extract_round_trips
402 |
403 | See also
404 | --------
405 | round_trips.gen_round_trip_stats
406 | """
407 |
408 | stats = gen_round_trip_stats(round_trips)
409 |
410 | print_table(stats["summary"], float_format="{:.2f}".format, name="Summary stats")
411 | print_table(stats["pnl"], float_format="${:.2f}".format, name="PnL stats")
412 | print_table(stats["duration"], float_format="{:.2f}".format, name="Duration stats")
413 | print_table(
414 | stats["returns"] * 100,
415 | float_format="{:.2f}%".format,
416 | name="Return stats",
417 | )
418 |
419 | if not hide_pos:
420 | stats["symbols"].columns = stats["symbols"].columns.map(format_asset)
421 | print_table(
422 | stats["symbols"] * 100,
423 | float_format="{:.2f}%".format,
424 | name="Symbol stats",
425 | )
426 |
--------------------------------------------------------------------------------
/src/pyfolio/txn.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2016 Quantopian, Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | import pandas as pd
16 |
17 |
18 | def map_transaction(txn):
19 | """
20 | Maps a single transaction row to a dictionary.
21 |
22 | Parameters
23 | ----------
24 |     txn : dict or pd.Series
25 |         A single transaction record to convert to a dictionary.
26 |
27 | Returns
28 | -------
29 | dict
30 | Mapped transaction.
31 | """
32 |
33 | if isinstance(txn["sid"], dict):
34 | sid = txn["sid"]["sid"]
35 | symbol = txn["sid"]["symbol"]
36 | else:
37 | sid = txn["sid"]
38 | symbol = txn["sid"]
39 |
40 | return {
41 | "sid": sid,
42 | "symbol": symbol,
43 | "price": txn["price"],
44 | "order_id": txn["order_id"],
45 | "amount": txn["amount"],
46 | "commission": txn["commission"],
47 | "dt": txn["dt"],
48 | }
49 |
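# Minimal sketch: a zipline-style transaction whose 'sid' field is a nested
# dict carrying both the security id and its symbol. Relies on this module's
# pandas import; the field values are made up.
if __name__ == "__main__":
    txn = {
        "sid": {"sid": 24, "symbol": "AAPL"},
        "price": 100.0,
        "order_id": "abc123",
        "amount": 10,
        "commission": 0.0,
        "dt": pd.Timestamp("2004-01-09 12:18:01"),
    }
    print(map_transaction(txn))  # {'sid': 24, 'symbol': 'AAPL', ...}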
50 |
51 | def make_transaction_frame(transactions):
52 | """
53 | Formats a transaction DataFrame.
54 |
55 | Parameters
56 | ----------
57 | transactions : pd.DataFrame
58 | Contains improperly formatted transactional data.
59 |
60 | Returns
61 | -------
62 | df : pd.DataFrame
63 |         Daily transaction volume and dollar amount.
64 | - See full explanation in tears.create_full_tear_sheet.
65 | """
66 |
67 | transaction_list = []
68 | for dt in transactions.index:
69 | txns = transactions.loc[dt]
70 | if len(txns) == 0:
71 | continue
72 |
73 | for txn in txns:
74 | txn = map_transaction(txn)
75 | transaction_list.append(txn)
76 | df = pd.DataFrame(sorted(transaction_list, key=lambda x: x["dt"]))
77 | df["txn_dollars"] = -df["amount"] * df["price"]
78 |
79 | df.index = list(map(pd.Timestamp, df.dt.values))
80 | return df
81 |
82 |
83 | def get_txn_vol(transactions):
84 | """
85 | Extract daily transaction data from set of transaction objects.
86 |
87 | Parameters
88 | ----------
89 | transactions : pd.DataFrame
90 | Time series containing one row per symbol (and potentially
91 | duplicate datetime indices) and columns for amount and
92 | price.
93 |
94 | Returns
95 | -------
96 | pd.DataFrame
97 | Daily transaction volume and number of shares.
98 | - See full explanation in tears.create_full_tear_sheet.
99 | """
100 |
101 | txn_norm = transactions.copy()
102 | txn_norm.index = txn_norm.index.normalize()
103 | amounts = txn_norm.amount.abs()
104 | prices = txn_norm.price
105 | values = amounts * prices
106 | daily_amounts = amounts.groupby(amounts.index).sum()
107 | daily_values = values.groupby(values.index).sum()
108 | daily_amounts.name = "txn_shares"
109 | daily_values.name = "txn_volume"
110 | return pd.concat([daily_values, daily_amounts], axis=1)
111 |
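# Minimal sketch (toy data): two fills on the same day aggregate into a single
# row of daily dollar volume (1500.0) and shares traded (15).
if __name__ == "__main__":
    txns = pd.DataFrame(
        {"amount": [10, -5], "price": [100.0, 100.0]},
        index=pd.to_datetime(["2015-01-01 10:00", "2015-01-01 14:00"]),
    )
    print(get_txn_vol(txns))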
112 |
113 | def adjust_returns_for_slippage(returns, positions, transactions, slippage_bps):
114 | """
115 | Apply a slippage penalty for every dollar traded.
116 |
117 | Parameters
118 | ----------
119 | returns : pd.Series
120 | Daily returns of the strategy, noncumulative.
121 | - See full explanation in create_full_tear_sheet.
122 | positions : pd.DataFrame
123 | Daily net position values.
124 | - See full explanation in create_full_tear_sheet.
125 | transactions : pd.DataFrame
126 | Prices and amounts of executed trades. One row per trade.
127 | - See full explanation in create_full_tear_sheet.
128 | slippage_bps: int/float
129 | Basis points of slippage to apply.
130 |
131 | Returns
132 | -------
133 | pd.Series
134 | Time series of daily returns, adjusted for slippage.
135 | """
136 |
137 | slippage = 0.0001 * slippage_bps
138 | portfolio_value = positions.sum(axis=1)
139 | pnl = portfolio_value * returns
140 | traded_value = get_txn_vol(transactions).txn_volume
141 | slippage_dollars = traded_value * slippage
142 | adjusted_pnl = pnl.add(-slippage_dollars, fill_value=0)
143 | adjusted_returns = returns * adjusted_pnl / pnl
144 |
145 | return adjusted_returns
146 |
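# Minimal sketch (toy data): 10 bps of slippage on $1,500 traded against a
# $10,000 book trims the day's 1% return to 0.985%.
if __name__ == "__main__":
    dates = pd.to_datetime(["2015-01-01"])
    returns = pd.Series([0.01], index=dates)
    positions = pd.DataFrame({"A": [9000.0], "cash": [1000.0]}, index=dates)
    txns = pd.DataFrame({"amount": [15], "price": [100.0]}, index=dates)
    print(adjust_returns_for_slippage(returns, positions, txns,
                                      slippage_bps=10))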
147 |
148 | def get_turnover(positions, transactions, denominator="AGB"):
149 | """
150 |     Value of purchases and sales divided by either the actual
151 |     gross book or the portfolio value for the time step.
152 |
153 | Parameters
154 | ----------
155 | positions : pd.DataFrame
156 | Contains daily position values including cash.
157 | - See full explanation in tears.create_full_tear_sheet
158 | transactions : pd.DataFrame
159 | Prices and amounts of executed trades. One row per trade.
160 | - See full explanation in tears.create_full_tear_sheet
161 | denominator : str, optional
162 | Either 'AGB' or 'portfolio_value', default AGB.
163 | - AGB (Actual gross book) is the gross market
164 | value (GMV) of the specific algo being analyzed.
165 | Swapping out an entire portfolio of stocks for
166 | another will yield 200% turnover, not 100%, since
167 | transactions are being made for both sides.
168 | - We use average of the previous and the current end-of-period
169 | AGB to avoid singularities when trading only into or
170 | out of an entire book in one trading period.
171 | - portfolio_value is the total value of the algo's
172 | positions end-of-period, including cash.
173 |
174 | Returns
175 | -------
176 | turnover_rate : pd.Series
177 | timeseries of portfolio turnover rates.
178 | """
179 |
180 | txn_vol = get_txn_vol(transactions)
181 | traded_value = txn_vol.txn_volume
182 |
183 | if denominator == "AGB":
184 | # Actual gross book is the same thing as the algo's GMV
185 | # We want our denom to be avg(AGB previous, AGB current)
186 | AGB = positions.drop("cash", axis=1).abs().sum(axis=1)
187 | denom = AGB.rolling(2).mean()
188 |
189 | # Since the first value of pd.rolling returns NaN, we
190 | # set our "day 0" AGB to 0.
191 | denom.iloc[0] = AGB.iloc[0] / 2
192 | elif denominator == "portfolio_value":
193 | denom = positions.sum(axis=1)
194 | else:
195 | raise ValueError(
196 | "Unexpected value for denominator '{}'. The "
197 | "denominator parameter must be either 'AGB'"
198 | " or 'portfolio_value'.".format(denominator)
199 | )
200 |
201 | denom.index = denom.index.normalize()
202 | turnover = traded_value.div(denom, axis="index")
203 | turnover = turnover.fillna(0)
204 | return turnover
205 |
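# Minimal sketch (toy data): trading $500 against an average gross book of
# $1,000 yields 50% turnover on day two (day one has no trades).
if __name__ == "__main__":
    dates = pd.date_range("2015-01-01", periods=2)
    positions = pd.DataFrame({"A": [1000.0, 1000.0], "cash": [0.0, 0.0]},
                             index=dates)
    txns = pd.DataFrame({"amount": [5], "price": [100.0]}, index=dates[1:])
    print(get_turnover(positions, txns))  # [0.0, 0.5]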
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/stefan-jansen/pyfolio-reloaded/6b55da7fce365e325046bb080cb13ced1822de95/tests/__init__.py
--------------------------------------------------------------------------------
/tests/matplotlibrc:
--------------------------------------------------------------------------------
1 | backend : Agg
2 |
--------------------------------------------------------------------------------
/tests/test_capacity.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from parameterized import parameterized
3 | from datetime import datetime
4 | import pandas as pd
5 |
6 | from pandas.testing import assert_frame_equal, assert_series_equal
7 |
8 | from pyfolio.capacity import (
9 | days_to_liquidate_positions,
10 | get_max_days_to_liquidate_by_ticker,
11 | get_low_liquidity_transactions,
12 | daily_txns_with_bar_data,
13 | apply_slippage_penalty,
14 | )
15 |
16 |
17 | class CapacityTestCase(TestCase):
18 | dates = pd.date_range(start="2015-01-01", freq="D", periods=3)
19 |
20 | positions = pd.DataFrame(
21 | [[1.0, 3.0, 0.0], [0.0, 1.0, 1.0], [3.0, 0.0, 1.0]],
22 | columns=["A", "B", "cash"],
23 | index=dates,
24 | )
25 |
26 | columns = ["sid", "amount", "price", "symbol"]
27 | transactions = pd.DataFrame(
28 | data=[[1, 100000, 10, "A"]] * len(dates), columns=columns, index=dates
29 | )
30 |
31 | volume = pd.DataFrame(
32 | [[1.0, 3.0], [2.0, 2.0], [3.0, 1.0]], columns=["A", "B"], index=dates
33 | )
34 |
35 | volume.index.name = "dt"
36 | volume *= 1000000
37 | volume["market_data"] = "volume"
38 |
39 | price = pd.DataFrame([[1.0, 1.0]] * len(dates), columns=["A", "B"], index=dates)
40 | price.index.name = "dt"
41 | price["market_data"] = "price"
42 |
43 | market_data = (
44 | pd.concat([volume, price]).reset_index().set_index(["dt", "market_data"])
45 | )
46 |
47 | def test_days_to_liquidate_positions(self):
48 | dtlp = days_to_liquidate_positions(
49 | self.positions,
50 | self.market_data,
51 | max_bar_consumption=1,
52 | capital_base=1e6,
53 | mean_volume_window=1,
54 | )
55 |
56 | expected = pd.DataFrame(
57 | [[0.0, 0.5 / 3], [0.75 / 2, 0.0]],
58 | columns=["A", "B"],
59 | index=self.dates[1:],
60 | )
61 | assert_frame_equal(dtlp, expected)
62 |
63 | def test_get_max_days_to_liquidate_by_ticker(self):
64 | mdtl = get_max_days_to_liquidate_by_ticker(
65 | self.positions,
66 | self.market_data,
67 | max_bar_consumption=1,
68 | capital_base=1e6,
69 | mean_volume_window=1,
70 | )
71 |
72 | expected = pd.DataFrame(
73 | [
74 | [datetime(2015, 1, 3), 0.75 / 2, 75.0],
75 | [datetime(2015, 1, 2), 0.5 / 3, 50.0],
76 | ],
77 | columns=["date", "days_to_liquidate", "pos_alloc_pct"],
78 | index=["A", "B"],
79 | )
80 | expected.index.name = "symbol"
81 |
82 | assert_frame_equal(mdtl, expected)
83 |
84 | @parameterized.expand(
85 | [
86 | (
87 | pd.DataFrame(
88 | [
89 | [datetime(2015, 1, 1), 100.0],
90 | [datetime(2015, 1, 2), 100.0],
91 | ],
92 | columns=["date", "max_pct_bar_consumed"],
93 | index=["A", "B"],
94 | ),
95 | None,
96 | ),
97 | (
98 | pd.DataFrame(
99 | [[datetime(2015, 1, 3), 100 / 3]],
100 | columns=["date", "max_pct_bar_consumed"],
101 | index=["A"],
102 | ),
103 | 1,
104 | ),
105 | ]
106 | )
107 | def test_get_low_liquidity_transactions(self, expected, last_n_days):
108 | txn_daily = pd.DataFrame(
109 | data=[
110 | [1, 1000000, 1, "A"],
111 | [2, 2000000, 1, "B"],
112 | [1, 1000000, 1, "A"],
113 | ],
114 | columns=["sid", "amount", "price", "symbol"],
115 | index=self.dates,
116 | )
117 |
118 | llt = get_low_liquidity_transactions(
119 | txn_daily, self.market_data, last_n_days=last_n_days
120 | )
121 | expected.index.name = "symbol"
122 | assert_frame_equal(llt, expected)
123 |
124 | def test_daily_txns_with_bar_data(self):
125 | daily_txn = daily_txns_with_bar_data(self.transactions, self.market_data)
126 |
127 | columns = ["symbol", "amount", "price", "volume"]
128 | expected = pd.DataFrame(
129 | data=[
130 | ["A", 100000, 1.0, 1000000.0],
131 | ["A", 100000, 1.0, 2000000.0],
132 | ["A", 100000, 1.0, 3000000.0],
133 | ],
134 | columns=columns,
135 | index=self.dates,
136 | )
137 |
138 | assert_frame_equal(daily_txn, expected)
139 |
140 | @parameterized.expand(
141 | [
142 | (1000000, 1, [0.9995, 0.9999375, 0.99998611]),
143 | (10000000, 1, [0.95, 0.99375, 0.998611]),
144 | (100000, 1, [0.999995, 0.999999375, 0.9999998611]),
145 | (1000000, 0.1, [0.99995, 0.99999375, 0.999998611]),
146 | ]
147 | )
148 | def test_apply_slippage_penalty(self, starting_base, impact, expected_adj_returns):
149 | returns = pd.Series([1.0, 1.0, 1.0], index=self.dates)
150 | daily_txn = daily_txns_with_bar_data(self.transactions, self.market_data)
151 |
152 | adj_returns = apply_slippage_penalty(
153 | returns, daily_txn, starting_base, 1000000, impact=impact
154 | )
155 | expected_adj_returns = pd.Series(expected_adj_returns, index=self.dates)
156 |
157 | assert_series_equal(adj_returns, expected_adj_returns)
158 |
--------------------------------------------------------------------------------
/tests/test_data/intercepts.csv:
--------------------------------------------------------------------------------
1 | 19001,0.0
2 | 19002,0.0
3 |
--------------------------------------------------------------------------------
/tests/test_data/positions.csv:
--------------------------------------------------------------------------------
1 | ,19001,19002,cash
2 | 2016-01-04,1.0,1.0,0.0
3 | 2016-01-05,1.0,1.0,0.0
4 | 2016-01-06,1.0,1.0,0.0
5 | 2016-01-07,1.0,1.0,0.0
6 | 2016-01-08,1.0,1.0,0.0
7 | 2016-01-11,1.0,1.0,0.0
8 | 2016-01-12,1.0,1.0,0.0
9 | 2016-01-13,1.0,1.0,0.0
10 | 2016-01-14,1.0,1.0,0.0
11 | 2016-01-15,1.0,1.0,0.0
12 | 2016-01-18,1.0,1.0,0.0
13 | 2016-01-19,1.0,1.0,0.0
14 | 2016-01-20,1.0,1.0,0.0
15 | 2016-01-21,1.0,1.0,0.0
16 | 2016-01-22,1.0,1.0,0.0
17 | 2016-01-25,1.0,1.0,0.0
18 | 2016-01-26,1.0,1.0,0.0
19 | 2016-01-27,1.0,1.0,0.0
20 | 2016-01-28,1.0,1.0,0.0
21 | 2016-01-29,1.0,1.0,0.0
22 | 2016-02-01,1.0,1.0,0.0
23 | 2016-02-02,1.0,1.0,0.0
24 | 2016-02-03,1.0,1.0,0.0
25 | 2016-02-04,1.0,1.0,0.0
26 | 2016-02-05,1.0,1.0,0.0
27 | 2016-02-08,1.0,1.0,0.0
28 | 2016-02-09,1.0,1.0,0.0
29 | 2016-02-10,1.0,1.0,0.0
30 | 2016-02-11,1.0,1.0,0.0
31 | 2016-02-12,1.0,1.0,0.0
32 | 2016-02-15,1.0,1.0,0.0
33 | 2016-02-16,1.0,1.0,0.0
34 | 2016-02-17,1.0,1.0,0.0
35 | 2016-02-18,1.0,1.0,0.0
36 | 2016-02-19,1.0,1.0,0.0
37 | 2016-02-22,1.0,1.0,0.0
38 | 2016-02-23,1.0,1.0,0.0
39 | 2016-02-24,1.0,1.0,0.0
40 | 2016-02-25,1.0,1.0,0.0
41 | 2016-02-26,1.0,1.0,0.0
42 | 2016-02-29,1.0,1.0,0.0
43 | 2016-03-01,1.0,1.0,0.0
44 | 2016-03-02,1.0,1.0,0.0
45 | 2016-03-03,1.0,1.0,0.0
46 | 2016-03-04,1.0,1.0,0.0
47 | 2016-03-07,1.0,1.0,0.0
48 | 2016-03-08,1.0,1.0,0.0
49 | 2016-03-09,1.0,1.0,0.0
50 | 2016-03-10,1.0,1.0,0.0
51 | 2016-03-11,1.0,1.0,0.0
52 | 2016-03-14,1.0,1.0,0.0
53 | 2016-03-15,1.0,1.0,0.0
54 | 2016-03-16,1.0,1.0,0.0
55 | 2016-03-17,1.0,1.0,0.0
56 | 2016-03-18,1.0,1.0,0.0
57 | 2016-03-21,1.0,1.0,0.0
58 | 2016-03-22,1.0,1.0,0.0
59 | 2016-03-23,1.0,1.0,0.0
60 | 2016-03-24,1.0,1.0,0.0
61 | 2016-03-25,1.0,1.0,0.0
62 | 2016-03-28,1.0,1.0,0.0
63 | 2016-03-29,1.0,1.0,0.0
64 | 2016-03-30,1.0,1.0,0.0
65 | 2016-03-31,1.0,1.0,0.0
66 | 2016-04-01,1.0,1.0,0.0
67 | 2016-04-04,1.0,1.0,0.0
68 | 2016-04-05,1.0,1.0,0.0
69 | 2016-04-06,1.0,1.0,0.0
70 | 2016-04-07,1.0,1.0,0.0
71 | 2016-04-08,1.0,1.0,0.0
72 | 2016-04-11,1.0,1.0,0.0
73 | 2016-04-12,1.0,1.0,0.0
74 | 2016-04-13,1.0,1.0,0.0
75 | 2016-04-14,1.0,1.0,0.0
76 | 2016-04-15,1.0,1.0,0.0
77 | 2016-04-18,1.0,1.0,0.0
78 | 2016-04-19,1.0,1.0,0.0
79 | 2016-04-20,1.0,1.0,0.0
80 | 2016-04-21,1.0,1.0,0.0
81 | 2016-04-22,1.0,1.0,0.0
82 | 2016-04-25,1.0,1.0,0.0
83 | 2016-04-26,1.0,1.0,0.0
84 | 2016-04-27,1.0,1.0,0.0
85 | 2016-04-28,1.0,1.0,0.0
86 | 2016-04-29,1.0,1.0,0.0
87 | 2016-05-02,1.0,1.0,0.0
88 | 2016-05-03,1.0,1.0,0.0
89 | 2016-05-04,1.0,1.0,0.0
90 | 2016-05-05,1.0,1.0,0.0
91 | 2016-05-06,1.0,1.0,0.0
92 | 2016-05-09,1.0,1.0,0.0
93 | 2016-05-10,1.0,1.0,0.0
94 | 2016-05-11,1.0,1.0,0.0
95 | 2016-05-12,1.0,1.0,0.0
96 | 2016-05-13,1.0,1.0,0.0
97 | 2016-05-16,1.0,1.0,0.0
98 | 2016-05-17,1.0,1.0,0.0
99 | 2016-05-18,1.0,1.0,0.0
100 | 2016-05-19,1.0,1.0,0.0
101 | 2016-05-20,1.0,1.0,0.0
102 | 2016-05-23,1.0,1.0,0.0
103 | 2016-05-24,1.0,1.0,0.0
104 | 2016-05-25,1.0,1.0,0.0
105 | 2016-05-26,1.0,1.0,0.0
106 | 2016-05-27,1.0,1.0,0.0
107 | 2016-05-30,1.0,1.0,0.0
108 | 2016-05-31,1.0,1.0,0.0
109 | 2016-06-01,1.0,1.0,0.0
110 | 2016-06-02,1.0,1.0,0.0
111 | 2016-06-03,1.0,1.0,0.0
112 | 2016-06-06,1.0,1.0,0.0
113 | 2016-06-07,1.0,1.0,0.0
114 | 2016-06-08,1.0,1.0,0.0
115 | 2016-06-09,1.0,1.0,0.0
116 | 2016-06-10,1.0,1.0,0.0
117 | 2016-06-13,1.0,1.0,0.0
118 | 2016-06-14,1.0,1.0,0.0
119 | 2016-06-15,1.0,1.0,0.0
120 | 2016-06-16,1.0,1.0,0.0
121 | 2016-06-17,1.0,1.0,0.0
122 | 2016-06-20,1.0,1.0,0.0
123 | 2016-06-21,1.0,1.0,0.0
124 | 2016-06-22,1.0,1.0,0.0
125 | 2016-06-23,1.0,1.0,0.0
126 | 2016-06-24,1.0,1.0,0.0
127 | 2016-06-27,1.0,1.0,0.0
128 | 2016-06-28,1.0,1.0,0.0
129 | 2016-06-29,1.0,1.0,0.0
130 | 2016-06-30,1.0,1.0,0.0
131 | 2016-07-01,1.0,1.0,0.0
132 | 2016-07-04,1.0,1.0,0.0
133 | 2016-07-05,1.0,1.0,0.0
134 | 2016-07-06,1.0,1.0,0.0
135 | 2016-07-07,1.0,1.0,0.0
136 | 2016-07-08,1.0,1.0,0.0
137 | 2016-07-11,1.0,1.0,0.0
138 | 2016-07-12,1.0,1.0,0.0
139 | 2016-07-13,1.0,1.0,0.0
140 | 2016-07-14,1.0,1.0,0.0
141 | 2016-07-15,1.0,1.0,0.0
142 | 2016-07-18,1.0,1.0,0.0
143 | 2016-07-19,1.0,1.0,0.0
144 | 2016-07-20,1.0,1.0,0.0
145 | 2016-07-21,1.0,1.0,0.0
146 | 2016-07-22,1.0,1.0,0.0
147 | 2016-07-25,1.0,1.0,0.0
148 | 2016-07-26,1.0,1.0,0.0
149 | 2016-07-27,1.0,1.0,0.0
150 | 2016-07-28,1.0,1.0,0.0
151 | 2016-07-29,1.0,1.0,0.0
152 | 2016-08-01,1.0,1.0,0.0
153 | 2016-08-02,1.0,1.0,0.0
154 | 2016-08-03,1.0,1.0,0.0
155 | 2016-08-04,1.0,1.0,0.0
156 | 2016-08-05,1.0,1.0,0.0
157 | 2016-08-08,1.0,1.0,0.0
158 | 2016-08-09,1.0,1.0,0.0
159 | 2016-08-10,1.0,1.0,0.0
160 | 2016-08-11,1.0,1.0,0.0
161 | 2016-08-12,1.0,1.0,0.0
162 | 2016-08-15,1.0,1.0,0.0
163 | 2016-08-16,1.0,1.0,0.0
164 | 2016-08-17,1.0,1.0,0.0
165 | 2016-08-18,1.0,1.0,0.0
166 | 2016-08-19,1.0,1.0,0.0
167 | 2016-08-22,1.0,1.0,0.0
168 | 2016-08-23,1.0,1.0,0.0
169 | 2016-08-24,1.0,1.0,0.0
170 | 2016-08-25,1.0,1.0,0.0
171 | 2016-08-26,1.0,1.0,0.0
172 | 2016-08-29,1.0,1.0,0.0
173 | 2016-08-30,1.0,1.0,0.0
174 | 2016-08-31,1.0,1.0,0.0
175 | 2016-09-01,1.0,1.0,0.0
176 | 2016-09-02,1.0,1.0,0.0
177 | 2016-09-05,1.0,1.0,0.0
178 | 2016-09-06,1.0,1.0,0.0
179 | 2016-09-07,1.0,1.0,0.0
180 | 2016-09-08,1.0,1.0,0.0
181 | 2016-09-09,1.0,1.0,0.0
182 | 2016-09-12,1.0,1.0,0.0
183 | 2016-09-13,1.0,1.0,0.0
184 | 2016-09-14,1.0,1.0,0.0
185 | 2016-09-15,1.0,1.0,0.0
186 | 2016-09-16,1.0,1.0,0.0
187 | 2016-09-19,1.0,1.0,0.0
188 | 2016-09-20,1.0,1.0,0.0
189 | 2016-09-21,1.0,1.0,0.0
190 | 2016-09-22,1.0,1.0,0.0
191 | 2016-09-23,1.0,1.0,0.0
192 | 2016-09-26,1.0,1.0,0.0
193 | 2016-09-27,1.0,1.0,0.0
194 | 2016-09-28,1.0,1.0,0.0
195 | 2016-09-29,1.0,1.0,0.0
196 | 2016-09-30,1.0,1.0,0.0
197 | 2016-10-03,1.0,1.0,0.0
198 | 2016-10-04,1.0,1.0,0.0
199 | 2016-10-05,1.0,1.0,0.0
200 | 2016-10-06,1.0,1.0,0.0
201 | 2016-10-07,1.0,1.0,0.0
202 | 2016-10-10,1.0,1.0,0.0
203 | 2016-10-11,1.0,1.0,0.0
204 | 2016-10-12,1.0,1.0,0.0
205 | 2016-10-13,1.0,1.0,0.0
206 | 2016-10-14,1.0,1.0,0.0
207 | 2016-10-17,1.0,1.0,0.0
208 | 2016-10-18,1.0,1.0,0.0
209 | 2016-10-19,1.0,1.0,0.0
210 | 2016-10-20,1.0,1.0,0.0
211 | 2016-10-21,1.0,1.0,0.0
212 | 2016-10-24,1.0,1.0,0.0
213 | 2016-10-25,1.0,1.0,0.0
214 | 2016-10-26,1.0,1.0,0.0
215 | 2016-10-27,1.0,1.0,0.0
216 | 2016-10-28,1.0,1.0,0.0
217 | 2016-10-31,1.0,1.0,0.0
218 | 2016-11-01,1.0,1.0,0.0
219 | 2016-11-02,1.0,1.0,0.0
220 | 2016-11-03,1.0,1.0,0.0
221 | 2016-11-04,1.0,1.0,0.0
222 | 2016-11-07,1.0,1.0,0.0
223 | 2016-11-08,1.0,1.0,0.0
224 | 2016-11-09,1.0,1.0,0.0
225 | 2016-11-10,1.0,1.0,0.0
226 | 2016-11-11,1.0,1.0,0.0
227 | 2016-11-14,1.0,1.0,0.0
228 | 2016-11-15,1.0,1.0,0.0
229 | 2016-11-16,1.0,1.0,0.0
230 | 2016-11-17,1.0,1.0,0.0
231 | 2016-11-18,1.0,1.0,0.0
232 | 2016-11-21,1.0,1.0,0.0
233 | 2016-11-22,1.0,1.0,0.0
234 | 2016-11-23,1.0,1.0,0.0
235 | 2016-11-24,1.0,1.0,0.0
236 | 2016-11-25,1.0,1.0,0.0
237 | 2016-11-28,1.0,1.0,0.0
238 | 2016-11-29,1.0,1.0,0.0
239 | 2016-11-30,1.0,1.0,0.0
240 | 2016-12-01,1.0,1.0,0.0
241 | 2016-12-02,1.0,1.0,0.0
242 | 2016-12-05,1.0,1.0,0.0
243 | 2016-12-06,1.0,1.0,0.0
244 | 2016-12-07,1.0,1.0,0.0
245 | 2016-12-08,1.0,1.0,0.0
246 | 2016-12-09,1.0,1.0,0.0
247 | 2016-12-12,1.0,1.0,0.0
248 | 2016-12-13,1.0,1.0,0.0
249 | 2016-12-14,1.0,1.0,0.0
250 | 2016-12-15,1.0,1.0,0.0
251 | 2016-12-16,1.0,1.0,0.0
252 | 2016-12-19,1.0,1.0,0.0
253 | 2016-12-20,1.0,1.0,0.0
254 | 2016-12-21,1.0,1.0,0.0
255 | 2016-12-22,1.0,1.0,0.0
256 | 2016-12-23,1.0,1.0,0.0
257 | 2016-12-26,1.0,1.0,0.0
258 | 2016-12-27,1.0,1.0,0.0
259 | 2016-12-28,1.0,1.0,0.0
260 | 2016-12-29,1.0,1.0,0.0
261 | 2016-12-30,1.0,1.0,0.0
262 | 2017-01-02,1.0,1.0,0.0
263 | 2017-01-03,1.0,1.0,0.0
264 | 2017-01-04,1.0,1.0,0.0
265 | 2017-01-05,1.0,1.0,0.0
266 | 2017-01-06,1.0,1.0,0.0
267 | 2017-01-09,1.0,1.0,0.0
268 | 2017-01-10,1.0,1.0,0.0
269 | 2017-01-11,1.0,1.0,0.0
270 | 2017-01-12,1.0,1.0,0.0
271 | 2017-01-13,1.0,1.0,0.0
272 | 2017-01-16,1.0,1.0,0.0
273 | 2017-01-17,1.0,1.0,0.0
274 | 2017-01-18,1.0,1.0,0.0
275 | 2017-01-19,1.0,1.0,0.0
276 | 2017-01-20,1.0,1.0,0.0
277 | 2017-01-23,1.0,1.0,0.0
278 | 2017-01-24,1.0,1.0,0.0
279 | 2017-01-25,1.0,1.0,0.0
280 | 2017-01-26,1.0,1.0,0.0
281 | 2017-01-27,1.0,1.0,0.0
282 | 2017-01-30,1.0,1.0,0.0
283 | 2017-01-31,1.0,1.0,0.0
284 | 2017-02-01,1.0,1.0,0.0
285 | 2017-02-02,1.0,1.0,0.0
286 | 2017-02-03,1.0,1.0,0.0
287 | 2017-02-06,1.0,1.0,0.0
288 | 2017-02-07,1.0,1.0,0.0
289 | 2017-02-08,1.0,1.0,0.0
290 | 2017-02-09,1.0,1.0,0.0
291 | 2017-02-10,1.0,1.0,0.0
292 | 2017-02-13,1.0,1.0,0.0
293 | 2017-02-14,1.0,1.0,0.0
294 | 2017-02-15,1.0,1.0,0.0
295 | 2017-02-16,1.0,1.0,0.0
296 | 2017-02-17,1.0,1.0,0.0
297 | 2017-02-20,1.0,1.0,0.0
298 | 2017-02-21,1.0,1.0,0.0
299 | 2017-02-22,1.0,1.0,0.0
300 | 2017-02-23,1.0,1.0,0.0
301 | 2017-02-24,1.0,1.0,0.0
302 | 2017-02-27,1.0,1.0,0.0
303 | 2017-02-28,1.0,1.0,0.0
304 | 2017-03-01,1.0,1.0,0.0
305 | 2017-03-02,1.0,1.0,0.0
306 | 2017-03-03,1.0,1.0,0.0
307 | 2017-03-06,1.0,1.0,0.0
308 | 2017-03-07,1.0,1.0,0.0
309 | 2017-03-08,1.0,1.0,0.0
310 | 2017-03-09,1.0,1.0,0.0
311 | 2017-03-10,1.0,1.0,0.0
312 | 2017-03-13,1.0,1.0,0.0
313 | 2017-03-14,1.0,1.0,0.0
314 | 2017-03-15,1.0,1.0,0.0
315 | 2017-03-16,1.0,1.0,0.0
316 | 2017-03-17,1.0,1.0,0.0
317 | 2017-03-20,1.0,1.0,0.0
318 | 2017-03-21,1.0,1.0,0.0
319 | 2017-03-22,1.0,1.0,0.0
320 | 2017-03-23,1.0,1.0,0.0
321 | 2017-03-24,1.0,1.0,0.0
322 | 2017-03-27,1.0,1.0,0.0
323 | 2017-03-28,1.0,1.0,0.0
324 | 2017-03-29,1.0,1.0,0.0
325 | 2017-03-30,1.0,1.0,0.0
326 | 2017-03-31,1.0,1.0,0.0
327 | 2017-04-03,1.0,1.0,0.0
328 | 2017-04-04,1.0,1.0,0.0
329 | 2017-04-05,1.0,1.0,0.0
330 | 2017-04-06,1.0,1.0,0.0
331 | 2017-04-07,1.0,1.0,0.0
332 | 2017-04-10,1.0,1.0,0.0
333 | 2017-04-11,1.0,1.0,0.0
334 | 2017-04-12,1.0,1.0,0.0
335 | 2017-04-13,1.0,1.0,0.0
336 | 2017-04-14,1.0,1.0,0.0
337 | 2017-04-17,1.0,1.0,0.0
338 | 2017-04-18,1.0,1.0,0.0
339 | 2017-04-19,1.0,1.0,0.0
340 | 2017-04-20,1.0,1.0,0.0
341 | 2017-04-21,1.0,1.0,0.0
342 | 2017-04-24,1.0,1.0,0.0
343 | 2017-04-25,1.0,1.0,0.0
344 | 2017-04-26,1.0,1.0,0.0
345 | 2017-04-27,1.0,1.0,0.0
346 | 2017-04-28,1.0,1.0,0.0
347 | 2017-05-01,1.0,1.0,0.0
348 | 2017-05-02,1.0,1.0,0.0
349 | 2017-05-03,1.0,1.0,0.0
350 | 2017-05-04,1.0,1.0,0.0
351 | 2017-05-05,1.0,1.0,0.0
352 | 2017-05-08,1.0,1.0,0.0
353 | 2017-05-09,1.0,1.0,0.0
354 | 2017-05-10,1.0,1.0,0.0
355 | 2017-05-11,1.0,1.0,0.0
356 | 2017-05-12,1.0,1.0,0.0
357 | 2017-05-15,1.0,1.0,0.0
358 | 2017-05-16,1.0,1.0,0.0
359 | 2017-05-17,1.0,1.0,0.0
360 | 2017-05-18,1.0,1.0,0.0
361 | 2017-05-19,1.0,1.0,0.0
362 | 2017-05-22,1.0,1.0,0.0
363 | 2017-05-23,1.0,1.0,0.0
364 | 2017-05-24,1.0,1.0,0.0
365 | 2017-05-25,1.0,1.0,0.0
366 | 2017-05-26,1.0,1.0,0.0
367 | 2017-05-29,1.0,1.0,0.0
368 | 2017-05-30,1.0,1.0,0.0
369 | 2017-05-31,1.0,1.0,0.0
370 | 2017-06-01,1.0,1.0,0.0
371 | 2017-06-02,1.0,1.0,0.0
372 | 2017-06-05,1.0,1.0,0.0
373 | 2017-06-06,1.0,1.0,0.0
374 | 2017-06-07,1.0,1.0,0.0
375 | 2017-06-08,1.0,1.0,0.0
376 | 2017-06-09,1.0,1.0,0.0
377 | 2017-06-12,1.0,1.0,0.0
378 | 2017-06-13,1.0,1.0,0.0
379 | 2017-06-14,1.0,1.0,0.0
380 | 2017-06-15,1.0,1.0,0.0
381 | 2017-06-16,1.0,1.0,0.0
382 | 2017-06-19,1.0,1.0,0.0
383 | 2017-06-20,1.0,1.0,0.0
384 | 2017-06-21,1.0,1.0,0.0
385 | 2017-06-22,1.0,1.0,0.0
386 | 2017-06-23,1.0,1.0,0.0
387 | 2017-06-26,1.0,1.0,0.0
388 | 2017-06-27,1.0,1.0,0.0
389 | 2017-06-28,1.0,1.0,0.0
390 | 2017-06-29,1.0,1.0,0.0
391 | 2017-06-30,1.0,1.0,0.0
392 | 2017-07-03,1.0,1.0,0.0
393 | 2017-07-04,1.0,1.0,0.0
394 | 2017-07-05,1.0,1.0,0.0
395 | 2017-07-06,1.0,1.0,0.0
396 | 2017-07-07,1.0,1.0,0.0
397 | 2017-07-10,1.0,1.0,0.0
398 | 2017-07-11,1.0,1.0,0.0
399 | 2017-07-12,1.0,1.0,0.0
400 | 2017-07-13,1.0,1.0,0.0
401 | 2017-07-14,1.0,1.0,0.0
402 | 2017-07-17,1.0,1.0,0.0
403 | 2017-07-18,1.0,1.0,0.0
404 | 2017-07-19,1.0,1.0,0.0
405 | 2017-07-20,1.0,1.0,0.0
406 | 2017-07-21,1.0,1.0,0.0
407 | 2017-07-24,1.0,1.0,0.0
408 | 2017-07-25,1.0,1.0,0.0
409 | 2017-07-26,1.0,1.0,0.0
410 | 2017-07-27,1.0,1.0,0.0
411 | 2017-07-28,1.0,1.0,0.0
412 | 2017-07-31,1.0,1.0,0.0
413 | 2017-08-01,1.0,1.0,0.0
414 | 2017-08-02,1.0,1.0,0.0
415 | 2017-08-03,1.0,1.0,0.0
416 | 2017-08-04,1.0,1.0,0.0
417 | 2017-08-07,1.0,1.0,0.0
418 | 2017-08-08,1.0,1.0,0.0
419 | 2017-08-09,1.0,1.0,0.0
420 | 2017-08-10,1.0,1.0,0.0
421 | 2017-08-11,1.0,1.0,0.0
422 | 2017-08-14,1.0,1.0,0.0
423 | 2017-08-15,1.0,1.0,0.0
424 | 2017-08-16,1.0,1.0,0.0
425 | 2017-08-17,1.0,1.0,0.0
426 | 2017-08-18,1.0,1.0,0.0
427 | 2017-08-21,1.0,1.0,0.0
428 | 2017-08-22,1.0,1.0,0.0
429 | 2017-08-23,1.0,1.0,0.0
430 | 2017-08-24,1.0,1.0,0.0
431 | 2017-08-25,1.0,1.0,0.0
432 | 2017-08-28,1.0,1.0,0.0
433 | 2017-08-29,1.0,1.0,0.0
434 | 2017-08-30,1.0,1.0,0.0
435 | 2017-08-31,1.0,1.0,0.0
436 | 2017-09-01,1.0,1.0,0.0
437 | 2017-09-04,1.0,1.0,0.0
438 | 2017-09-05,1.0,1.0,0.0
439 | 2017-09-06,1.0,1.0,0.0
440 | 2017-09-07,1.0,1.0,0.0
441 | 2017-09-08,1.0,1.0,0.0
442 | 2017-09-11,1.0,1.0,0.0
443 | 2017-09-12,1.0,1.0,0.0
444 | 2017-09-13,1.0,1.0,0.0
445 | 2017-09-14,1.0,1.0,0.0
446 | 2017-09-15,1.0,1.0,0.0
447 | 2017-09-18,1.0,1.0,0.0
448 | 2017-09-19,1.0,1.0,0.0
449 | 2017-09-20,1.0,1.0,0.0
450 | 2017-09-21,1.0,1.0,0.0
451 | 2017-09-22,1.0,1.0,0.0
452 | 2017-09-25,1.0,1.0,0.0
453 | 2017-09-26,1.0,1.0,0.0
454 | 2017-09-27,1.0,1.0,0.0
455 | 2017-09-28,1.0,1.0,0.0
456 | 2017-09-29,1.0,1.0,0.0
457 | 2017-10-02,1.0,1.0,0.0
458 | 2017-10-03,1.0,1.0,0.0
459 | 2017-10-04,1.0,1.0,0.0
460 | 2017-10-05,1.0,1.0,0.0
461 | 2017-10-06,1.0,1.0,0.0
462 | 2017-10-09,1.0,1.0,0.0
463 | 2017-10-10,1.0,1.0,0.0
464 | 2017-10-11,1.0,1.0,0.0
465 | 2017-10-12,1.0,1.0,0.0
466 | 2017-10-13,1.0,1.0,0.0
467 | 2017-10-16,1.0,1.0,0.0
468 | 2017-10-17,1.0,1.0,0.0
469 | 2017-10-18,1.0,1.0,0.0
470 | 2017-10-19,1.0,1.0,0.0
471 | 2017-10-20,1.0,1.0,0.0
472 | 2017-10-23,1.0,1.0,0.0
473 | 2017-10-24,1.0,1.0,0.0
474 | 2017-10-25,1.0,1.0,0.0
475 | 2017-10-26,1.0,1.0,0.0
476 | 2017-10-27,1.0,1.0,0.0
477 | 2017-10-30,1.0,1.0,0.0
478 | 2017-10-31,1.0,1.0,0.0
479 | 2017-11-01,1.0,1.0,0.0
480 | 2017-11-02,1.0,1.0,0.0
481 | 2017-11-03,1.0,1.0,0.0
482 | 2017-11-06,1.0,1.0,0.0
483 | 2017-11-07,1.0,1.0,0.0
484 | 2017-11-08,1.0,1.0,0.0
485 | 2017-11-09,1.0,1.0,0.0
486 | 2017-11-10,1.0,1.0,0.0
487 | 2017-11-13,1.0,1.0,0.0
488 | 2017-11-14,1.0,1.0,0.0
489 | 2017-11-15,1.0,1.0,0.0
490 | 2017-11-16,1.0,1.0,0.0
491 | 2017-11-17,1.0,1.0,0.0
492 | 2017-11-20,1.0,1.0,0.0
493 | 2017-11-21,1.0,1.0,0.0
494 | 2017-11-22,1.0,1.0,0.0
495 | 2017-11-23,1.0,1.0,0.0
496 | 2017-11-24,1.0,1.0,0.0
497 | 2017-11-27,1.0,1.0,0.0
498 | 2017-11-28,1.0,1.0,0.0
499 | 2017-11-29,1.0,1.0,0.0
500 | 2017-11-30,1.0,1.0,0.0
501 | 2017-12-01,1.0,1.0,0.0
502 | 2017-12-04,1.0,1.0,0.0
503 | 2017-12-05,1.0,1.0,0.0
504 | 2017-12-06,1.0,1.0,0.0
505 | 2017-12-07,1.0,1.0,0.0
506 |
--------------------------------------------------------------------------------
/tests/test_data/residuals.csv:
--------------------------------------------------------------------------------
1 | ,19001,19002
2 | 2016-01-04,0.0,0.0
3 | 2016-01-05,0.0,0.0
4 | 2016-01-06,0.0,0.0
5 | 2016-01-07,0.0,0.0
6 | 2016-01-08,0.0,0.0
7 | 2016-01-11,0.0,0.0
8 | 2016-01-12,0.0,0.0
9 | 2016-01-13,0.0,0.0
10 | 2016-01-14,0.0,0.0
11 | 2016-01-15,0.0,0.0
12 | 2016-01-18,0.0,0.0
13 | 2016-01-19,0.0,0.0
14 | 2016-01-20,0.0,0.0
15 | 2016-01-21,0.0,0.0
16 | 2016-01-22,0.0,0.0
17 | 2016-01-25,0.0,0.0
18 | 2016-01-26,0.0,0.0
19 | 2016-01-27,0.0,0.0
20 | 2016-01-28,0.0,0.0
21 | 2016-01-29,0.0,0.0
22 | 2016-02-01,0.0,0.0
23 | 2016-02-02,0.0,0.0
24 | 2016-02-03,0.0,0.0
25 | 2016-02-04,0.0,0.0
26 | 2016-02-05,0.0,0.0
27 | 2016-02-08,0.0,0.0
28 | 2016-02-09,0.0,0.0
29 | 2016-02-10,0.0,0.0
30 | 2016-02-11,0.0,0.0
31 | 2016-02-12,0.0,0.0
32 | 2016-02-15,0.0,0.0
33 | 2016-02-16,0.0,0.0
34 | 2016-02-17,0.0,0.0
35 | 2016-02-18,0.0,0.0
36 | 2016-02-19,0.0,0.0
37 | 2016-02-22,0.0,0.0
38 | 2016-02-23,0.0,0.0
39 | 2016-02-24,0.0,0.0
40 | 2016-02-25,0.0,0.0
41 | 2016-02-26,0.0,0.0
42 | 2016-02-29,0.0,0.0
43 | 2016-03-01,0.0,0.0
44 | 2016-03-02,0.0,0.0
45 | 2016-03-03,0.0,0.0
46 | 2016-03-04,0.0,0.0
47 | 2016-03-07,0.0,0.0
48 | 2016-03-08,0.0,0.0
49 | 2016-03-09,0.0,0.0
50 | 2016-03-10,0.0,0.0
51 | 2016-03-11,0.0,0.0
52 | 2016-03-14,0.0,0.0
53 | 2016-03-15,0.0,0.0
54 | 2016-03-16,0.0,0.0
55 | 2016-03-17,0.0,0.0
56 | 2016-03-18,0.0,0.0
57 | 2016-03-21,0.0,0.0
58 | 2016-03-22,0.0,0.0
59 | 2016-03-23,0.0,0.0
60 | 2016-03-24,0.0,0.0
61 | 2016-03-25,0.0,0.0
62 | 2016-03-28,0.0,0.0
63 | 2016-03-29,0.0,0.0
64 | 2016-03-30,0.0,0.0
65 | 2016-03-31,0.0,0.0
66 | 2016-04-01,0.0,0.0
67 | 2016-04-04,0.0,0.0
68 | 2016-04-05,0.0,0.0
69 | 2016-04-06,0.0,0.0
70 | 2016-04-07,0.0,0.0
71 | 2016-04-08,0.0,0.0
72 | 2016-04-11,0.0,0.0
73 | 2016-04-12,0.0,0.0
74 | 2016-04-13,0.0,0.0
75 | 2016-04-14,0.0,0.0
76 | 2016-04-15,0.0,0.0
77 | 2016-04-18,0.0,0.0
78 | 2016-04-19,0.0,0.0
79 | 2016-04-20,0.0,0.0
80 | 2016-04-21,0.0,0.0
81 | 2016-04-22,0.0,0.0
82 | 2016-04-25,0.0,0.0
83 | 2016-04-26,0.0,0.0
84 | 2016-04-27,0.0,0.0
85 | 2016-04-28,0.0,0.0
86 | 2016-04-29,0.0,0.0
87 | 2016-05-02,0.0,0.0
88 | 2016-05-03,0.0,0.0
89 | 2016-05-04,0.0,0.0
90 | 2016-05-05,0.0,0.0
91 | 2016-05-06,0.0,0.0
92 | 2016-05-09,0.0,0.0
93 | 2016-05-10,0.0,0.0
94 | 2016-05-11,0.0,0.0
95 | 2016-05-12,0.0,0.0
96 | 2016-05-13,0.0,0.0
97 | 2016-05-16,0.0,0.0
98 | 2016-05-17,0.0,0.0
99 | 2016-05-18,0.0,0.0
100 | 2016-05-19,0.0,0.0
101 | 2016-05-20,0.0,0.0
102 | 2016-05-23,0.0,0.0
103 | 2016-05-24,0.0,0.0
104 | 2016-05-25,0.0,0.0
105 | 2016-05-26,0.0,0.0
106 | 2016-05-27,0.0,0.0
107 | 2016-05-30,0.0,0.0
108 | 2016-05-31,0.0,0.0
109 | 2016-06-01,0.0,0.0
110 | 2016-06-02,0.0,0.0
111 | 2016-06-03,0.0,0.0
112 | 2016-06-06,0.0,0.0
113 | 2016-06-07,0.0,0.0
114 | 2016-06-08,0.0,0.0
115 | 2016-06-09,0.0,0.0
116 | 2016-06-10,0.0,0.0
117 | 2016-06-13,0.0,0.0
118 | 2016-06-14,0.0,0.0
119 | 2016-06-15,0.0,0.0
120 | 2016-06-16,0.0,0.0
121 | 2016-06-17,0.0,0.0
122 | 2016-06-20,0.0,0.0
123 | 2016-06-21,0.0,0.0
124 | 2016-06-22,0.0,0.0
125 | 2016-06-23,0.0,0.0
126 | 2016-06-24,0.0,0.0
127 | 2016-06-27,0.0,0.0
128 | 2016-06-28,0.0,0.0
129 | 2016-06-29,0.0,0.0
130 | 2016-06-30,0.0,0.0
131 | 2016-07-01,0.0,0.0
132 | 2016-07-04,0.0,0.0
133 | 2016-07-05,0.0,0.0
134 | 2016-07-06,0.0,0.0
135 | 2016-07-07,0.0,0.0
136 | 2016-07-08,0.0,0.0
137 | 2016-07-11,0.0,0.0
138 | 2016-07-12,0.0,0.0
139 | 2016-07-13,0.0,0.0
140 | 2016-07-14,0.0,0.0
141 | 2016-07-15,0.0,0.0
142 | 2016-07-18,0.0,0.0
143 | 2016-07-19,0.0,0.0
144 | 2016-07-20,0.0,0.0
145 | 2016-07-21,0.0,0.0
146 | 2016-07-22,0.0,0.0
147 | 2016-07-25,0.0,0.0
148 | 2016-07-26,0.0,0.0
149 | 2016-07-27,0.0,0.0
150 | 2016-07-28,0.0,0.0
151 | 2016-07-29,0.0,0.0
152 | 2016-08-01,0.0,0.0
153 | 2016-08-02,0.0,0.0
154 | 2016-08-03,0.0,0.0
155 | 2016-08-04,0.0,0.0
156 | 2016-08-05,0.0,0.0
157 | 2016-08-08,0.0,0.0
158 | 2016-08-09,0.0,0.0
159 | 2016-08-10,0.0,0.0
160 | 2016-08-11,0.0,0.0
161 | 2016-08-12,0.0,0.0
162 | 2016-08-15,0.0,0.0
163 | 2016-08-16,0.0,0.0
164 | 2016-08-17,0.0,0.0
165 | 2016-08-18,0.0,0.0
166 | 2016-08-19,0.0,0.0
167 | 2016-08-22,0.0,0.0
168 | 2016-08-23,0.0,0.0
169 | 2016-08-24,0.0,0.0
170 | 2016-08-25,0.0,0.0
171 | 2016-08-26,0.0,0.0
172 | 2016-08-29,0.0,0.0
173 | 2016-08-30,0.0,0.0
174 | 2016-08-31,0.0,0.0
175 | 2016-09-01,0.0,0.0
176 | 2016-09-02,0.0,0.0
177 | 2016-09-05,0.0,0.0
178 | 2016-09-06,0.0,0.0
179 | 2016-09-07,0.0,0.0
180 | 2016-09-08,0.0,0.0
181 | 2016-09-09,0.0,0.0
182 | 2016-09-12,0.0,0.0
183 | 2016-09-13,0.0,0.0
184 | 2016-09-14,0.0,0.0
185 | 2016-09-15,0.0,0.0
186 | 2016-09-16,0.0,0.0
187 | 2016-09-19,0.0,0.0
188 | 2016-09-20,0.0,0.0
189 | 2016-09-21,0.0,0.0
190 | 2016-09-22,0.0,0.0
191 | 2016-09-23,0.0,0.0
192 | 2016-09-26,0.0,0.0
193 | 2016-09-27,0.0,0.0
194 | 2016-09-28,0.0,0.0
195 | 2016-09-29,0.0,0.0
196 | 2016-09-30,0.0,0.0
197 | 2016-10-03,0.0,0.0
198 | 2016-10-04,0.0,0.0
199 | 2016-10-05,0.0,0.0
200 | 2016-10-06,0.0,0.0
201 | 2016-10-07,0.0,0.0
202 | 2016-10-10,0.0,0.0
203 | 2016-10-11,0.0,0.0
204 | 2016-10-12,0.0,0.0
205 | 2016-10-13,0.0,0.0
206 | 2016-10-14,0.0,0.0
207 | 2016-10-17,0.0,0.0
208 | 2016-10-18,0.0,0.0
209 | 2016-10-19,0.0,0.0
210 | 2016-10-20,0.0,0.0
211 | 2016-10-21,0.0,0.0
212 | 2016-10-24,0.0,0.0
213 | 2016-10-25,0.0,0.0
214 | 2016-10-26,0.0,0.0
215 | 2016-10-27,0.0,0.0
216 | 2016-10-28,0.0,0.0
217 | 2016-10-31,0.0,0.0
218 | 2016-11-01,0.0,0.0
219 | 2016-11-02,0.0,0.0
220 | 2016-11-03,0.0,0.0
221 | 2016-11-04,0.0,0.0
222 | 2016-11-07,0.0,0.0
223 | 2016-11-08,0.0,0.0
224 | 2016-11-09,0.0,0.0
225 | 2016-11-10,0.0,0.0
226 | 2016-11-11,0.0,0.0
227 | 2016-11-14,0.0,0.0
228 | 2016-11-15,0.0,0.0
229 | 2016-11-16,0.0,0.0
230 | 2016-11-17,0.0,0.0
231 | 2016-11-18,0.0,0.0
232 | 2016-11-21,0.0,0.0
233 | 2016-11-22,0.0,0.0
234 | 2016-11-23,0.0,0.0
235 | 2016-11-24,0.0,0.0
236 | 2016-11-25,0.0,0.0
237 | 2016-11-28,0.0,0.0
238 | 2016-11-29,0.0,0.0
239 | 2016-11-30,0.0,0.0
240 | 2016-12-01,0.0,0.0
241 | 2016-12-02,0.0,0.0
242 | 2016-12-05,0.0,0.0
243 | 2016-12-06,0.0,0.0
244 | 2016-12-07,0.0,0.0
245 | 2016-12-08,0.0,0.0
246 | 2016-12-09,0.0,0.0
247 | 2016-12-12,0.0,0.0
248 | 2016-12-13,0.0,0.0
249 | 2016-12-14,0.0,0.0
250 | 2016-12-15,0.0,0.0
251 | 2016-12-16,0.0,0.0
252 | 2016-12-19,0.0,0.0
253 | 2016-12-20,0.0,0.0
254 | 2016-12-21,0.0,0.0
255 | 2016-12-22,0.0,0.0
256 | 2016-12-23,0.0,0.0
257 | 2016-12-26,0.0,0.0
258 | 2016-12-27,0.0,0.0
259 | 2016-12-28,0.0,0.0
260 | 2016-12-29,0.0,0.0
261 | 2016-12-30,0.0,0.0
262 | 2017-01-02,0.0,0.0
263 | 2017-01-03,0.0,0.0
264 | 2017-01-04,0.0,0.0
265 | 2017-01-05,0.0,0.0
266 | 2017-01-06,0.0,0.0
267 | 2017-01-09,0.0,0.0
268 | 2017-01-10,0.0,0.0
269 | 2017-01-11,0.0,0.0
270 | 2017-01-12,0.0,0.0
271 | 2017-01-13,0.0,0.0
272 | 2017-01-16,0.0,0.0
273 | 2017-01-17,0.0,0.0
274 | 2017-01-18,0.0,0.0
275 | 2017-01-19,0.0,0.0
276 | 2017-01-20,0.0,0.0
277 | 2017-01-23,0.0,0.0
278 | 2017-01-24,0.0,0.0
279 | 2017-01-25,0.0,0.0
280 | 2017-01-26,0.0,0.0
281 | 2017-01-27,0.0,0.0
282 | 2017-01-30,0.0,0.0
283 | 2017-01-31,0.0,0.0
284 | 2017-02-01,0.0,0.0
285 | 2017-02-02,0.0,0.0
286 | 2017-02-03,0.0,0.0
287 | 2017-02-06,0.0,0.0
288 | 2017-02-07,0.0,0.0
289 | 2017-02-08,0.0,0.0
290 | 2017-02-09,0.0,0.0
291 | 2017-02-10,0.0,0.0
292 | 2017-02-13,0.0,0.0
293 | 2017-02-14,0.0,0.0
294 | 2017-02-15,0.0,0.0
295 | 2017-02-16,0.0,0.0
296 | 2017-02-17,0.0,0.0
297 | 2017-02-20,0.0,0.0
298 | 2017-02-21,0.0,0.0
299 | 2017-02-22,0.0,0.0
300 | 2017-02-23,0.0,0.0
301 | 2017-02-24,0.0,0.0
302 | 2017-02-27,0.0,0.0
303 | 2017-02-28,0.0,0.0
304 | 2017-03-01,0.0,0.0
305 | 2017-03-02,0.0,0.0
306 | 2017-03-03,0.0,0.0
307 | 2017-03-06,0.0,0.0
308 | 2017-03-07,0.0,0.0
309 | 2017-03-08,0.0,0.0
310 | 2017-03-09,0.0,0.0
311 | 2017-03-10,0.0,0.0
312 | 2017-03-13,0.0,0.0
313 | 2017-03-14,0.0,0.0
314 | 2017-03-15,0.0,0.0
315 | 2017-03-16,0.0,0.0
316 | 2017-03-17,0.0,0.0
317 | 2017-03-20,0.0,0.0
318 | 2017-03-21,0.0,0.0
319 | 2017-03-22,0.0,0.0
320 | 2017-03-23,0.0,0.0
321 | 2017-03-24,0.0,0.0
322 | 2017-03-27,0.0,0.0
323 | 2017-03-28,0.0,0.0
324 | 2017-03-29,0.0,0.0
325 | 2017-03-30,0.0,0.0
326 | 2017-03-31,0.0,0.0
327 | 2017-04-03,0.0,0.0
328 | 2017-04-04,0.0,0.0
329 | 2017-04-05,0.0,0.0
330 | 2017-04-06,0.0,0.0
331 | 2017-04-07,0.0,0.0
332 | 2017-04-10,0.0,0.0
333 | 2017-04-11,0.0,0.0
334 | 2017-04-12,0.0,0.0
335 | 2017-04-13,0.0,0.0
336 | 2017-04-14,0.0,0.0
337 | 2017-04-17,0.0,0.0
338 | 2017-04-18,0.0,0.0
339 | 2017-04-19,0.0,0.0
340 | 2017-04-20,0.0,0.0
341 | 2017-04-21,0.0,0.0
342 | 2017-04-24,0.0,0.0
343 | 2017-04-25,0.0,0.0
344 | 2017-04-26,0.0,0.0
345 | 2017-04-27,0.0,0.0
346 | 2017-04-28,0.0,0.0
347 | 2017-05-01,0.0,0.0
348 | 2017-05-02,0.0,0.0
349 | 2017-05-03,0.0,0.0
350 | 2017-05-04,0.0,0.0
351 | 2017-05-05,0.0,0.0
352 | 2017-05-08,0.0,0.0
353 | 2017-05-09,0.0,0.0
354 | 2017-05-10,0.0,0.0
355 | 2017-05-11,0.0,0.0
356 | 2017-05-12,0.0,0.0
357 | 2017-05-15,0.0,0.0
358 | 2017-05-16,0.0,0.0
359 | 2017-05-17,0.0,0.0
360 | 2017-05-18,0.0,0.0
361 | 2017-05-19,0.0,0.0
362 | 2017-05-22,0.0,0.0
363 | 2017-05-23,0.0,0.0
364 | 2017-05-24,0.0,0.0
365 | 2017-05-25,0.0,0.0
366 | 2017-05-26,0.0,0.0
367 | 2017-05-29,0.0,0.0
368 | 2017-05-30,0.0,0.0
369 | 2017-05-31,0.0,0.0
370 | 2017-06-01,0.0,0.0
371 | 2017-06-02,0.0,0.0
372 | 2017-06-05,0.0,0.0
373 | 2017-06-06,0.0,0.0
374 | 2017-06-07,0.0,0.0
375 | 2017-06-08,0.0,0.0
376 | 2017-06-09,0.0,0.0
377 | 2017-06-12,0.0,0.0
378 | 2017-06-13,0.0,0.0
379 | 2017-06-14,0.0,0.0
380 | 2017-06-15,0.0,0.0
381 | 2017-06-16,0.0,0.0
382 | 2017-06-19,0.0,0.0
383 | 2017-06-20,0.0,0.0
384 | 2017-06-21,0.0,0.0
385 | 2017-06-22,0.0,0.0
386 | 2017-06-23,0.0,0.0
387 | 2017-06-26,0.0,0.0
388 | 2017-06-27,0.0,0.0
389 | 2017-06-28,0.0,0.0
390 | 2017-06-29,0.0,0.0
391 | 2017-06-30,0.0,0.0
392 | 2017-07-03,0.0,0.0
393 | 2017-07-04,0.0,0.0
394 | 2017-07-05,0.0,0.0
395 | 2017-07-06,0.0,0.0
396 | 2017-07-07,0.0,0.0
397 | 2017-07-10,0.0,0.0
398 | 2017-07-11,0.0,0.0
399 | 2017-07-12,0.0,0.0
400 | 2017-07-13,0.0,0.0
401 | 2017-07-14,0.0,0.0
402 | 2017-07-17,0.0,0.0
403 | 2017-07-18,0.0,0.0
404 | 2017-07-19,0.0,0.0
405 | 2017-07-20,0.0,0.0
406 | 2017-07-21,0.0,0.0
407 | 2017-07-24,0.0,0.0
408 | 2017-07-25,0.0,0.0
409 | 2017-07-26,0.0,0.0
410 | 2017-07-27,0.0,0.0
411 | 2017-07-28,0.0,0.0
412 | 2017-07-31,0.0,0.0
413 | 2017-08-01,0.0,0.0
414 | 2017-08-02,0.0,0.0
415 | 2017-08-03,0.0,0.0
416 | 2017-08-04,0.0,0.0
417 | 2017-08-07,0.0,0.0
418 | 2017-08-08,0.0,0.0
419 | 2017-08-09,0.0,0.0
420 | 2017-08-10,0.0,0.0
421 | 2017-08-11,0.0,0.0
422 | 2017-08-14,0.0,0.0
423 | 2017-08-15,0.0,0.0
424 | 2017-08-16,0.0,0.0
425 | 2017-08-17,0.0,0.0
426 | 2017-08-18,0.0,0.0
427 | 2017-08-21,0.0,0.0
428 | 2017-08-22,0.0,0.0
429 | 2017-08-23,0.0,0.0
430 | 2017-08-24,0.0,0.0
431 | 2017-08-25,0.0,0.0
432 | 2017-08-28,0.0,0.0
433 | 2017-08-29,0.0,0.0
434 | 2017-08-30,0.0,0.0
435 | 2017-08-31,0.0,0.0
436 | 2017-09-01,0.0,0.0
437 | 2017-09-04,0.0,0.0
438 | 2017-09-05,0.0,0.0
439 | 2017-09-06,0.0,0.0
440 | 2017-09-07,0.0,0.0
441 | 2017-09-08,0.0,0.0
442 | 2017-09-11,0.0,0.0
443 | 2017-09-12,0.0,0.0
444 | 2017-09-13,0.0,0.0
445 | 2017-09-14,0.0,0.0
446 | 2017-09-15,0.0,0.0
447 | 2017-09-18,0.0,0.0
448 | 2017-09-19,0.0,0.0
449 | 2017-09-20,0.0,0.0
450 | 2017-09-21,0.0,0.0
451 | 2017-09-22,0.0,0.0
452 | 2017-09-25,0.0,0.0
453 | 2017-09-26,0.0,0.0
454 | 2017-09-27,0.0,0.0
455 | 2017-09-28,0.0,0.0
456 | 2017-09-29,0.0,0.0
457 | 2017-10-02,0.0,0.0
458 | 2017-10-03,0.0,0.0
459 | 2017-10-04,0.0,0.0
460 | 2017-10-05,0.0,0.0
461 | 2017-10-06,0.0,0.0
462 | 2017-10-09,0.0,0.0
463 | 2017-10-10,0.0,0.0
464 | 2017-10-11,0.0,0.0
465 | 2017-10-12,0.0,0.0
466 | 2017-10-13,0.0,0.0
467 | 2017-10-16,0.0,0.0
468 | 2017-10-17,0.0,0.0
469 | 2017-10-18,0.0,0.0
470 | 2017-10-19,0.0,0.0
471 | 2017-10-20,0.0,0.0
472 | 2017-10-23,0.0,0.0
473 | 2017-10-24,0.0,0.0
474 | 2017-10-25,0.0,0.0
475 | 2017-10-26,0.0,0.0
476 | 2017-10-27,0.0,0.0
477 | 2017-10-30,0.0,0.0
478 | 2017-10-31,0.0,0.0
479 | 2017-11-01,0.0,0.0
480 | 2017-11-02,0.0,0.0
481 | 2017-11-03,0.0,0.0
482 | 2017-11-06,0.0,0.0
483 | 2017-11-07,0.0,0.0
484 | 2017-11-08,0.0,0.0
485 | 2017-11-09,0.0,0.0
486 | 2017-11-10,0.0,0.0
487 | 2017-11-13,0.0,0.0
488 | 2017-11-14,0.0,0.0
489 | 2017-11-15,0.0,0.0
490 | 2017-11-16,0.0,0.0
491 | 2017-11-17,0.0,0.0
492 | 2017-11-20,0.0,0.0
493 | 2017-11-21,0.0,0.0
494 | 2017-11-22,0.0,0.0
495 | 2017-11-23,0.0,0.0
496 | 2017-11-24,0.0,0.0
497 | 2017-11-27,0.0,0.0
498 | 2017-11-28,0.0,0.0
499 | 2017-11-29,0.0,0.0
500 | 2017-11-30,0.0,0.0
501 | 2017-12-01,0.0,0.0
502 | 2017-12-04,0.0,0.0
503 | 2017-12-05,0.0,0.0
504 | 2017-12-06,0.0,0.0
505 | 2017-12-07,0.0,0.0
506 |
--------------------------------------------------------------------------------
/tests/test_data/returns.csv:
--------------------------------------------------------------------------------
1 | 2016-01-04,2.16420955433
2 | 2016-01-05,3.21963118331
3 | 2016-01-06,0.890280110274
4 | 2016-01-07,0.798731209228
5 | 2016-01-08,0.307379650145
6 | 2016-01-11,1.59831707812
7 | 2016-01-12,0.88271274164
8 | 2016-01-13,0.77753756012
9 | 2016-01-14,1.28892080939
10 | 2016-01-15,-0.541028037651
11 | 2016-01-18,-1.89937122039
12 | 2016-01-19,0.122271178453
13 | 2016-01-20,0.815388949389
14 | 2016-01-21,-0.141425332724
15 | 2016-01-22,3.00213798426
16 | 2016-01-25,0.533109945299
17 | 2016-01-26,-2.86858221585
18 | 2016-01-27,-0.191563180222
19 | 2016-01-28,2.43267052951
20 | 2016-01-29,-0.689629567983
21 | 2016-02-01,-2.46857090225
22 | 2016-02-02,0.244505204607
23 | 2016-02-03,-0.947726483363
24 | 2016-02-04,-0.475305004218
25 | 2016-02-05,-1.82663812777
26 | 2016-02-08,-0.508564063334
27 | 2016-02-09,-1.69143732169
28 | 2016-02-10,0.400149642192
29 | 2016-02-11,0.368989120123
30 | 2016-02-12,-0.997063259668
31 | 2016-02-15,-1.03201360932
32 | 2016-02-16,-2.53942888438
33 | 2016-02-17,-0.224354793955
34 | 2016-02-18,-1.16741609144
35 | 2016-02-19,-0.855352968587
36 | 2016-02-22,0.858073472935
37 | 2016-02-23,-0.0954251358104
38 | 2016-02-24,-0.282468449763
39 | 2016-02-25,-1.44964681395
40 | 2016-02-26,-0.255387189898
41 | 2016-02-29,-0.264323353829
42 | 2016-03-01,-1.07058124655
43 | 2016-03-02,3.38414136983
44 | 2016-03-03,0.998854735347
45 | 2016-03-04,-0.0163008945794
46 | 2016-03-07,0.819268123409
47 | 2016-03-08,1.18491401456
48 | 2016-03-09,1.06293956537
49 | 2016-03-10,1.79637051463
50 | 2016-03-11,0.528901456148
51 | 2016-03-14,0.535391635914
52 | 2016-03-15,-0.301088290328
53 | 2016-03-16,0.770497780535
54 | 2016-03-17,-1.1610737922
55 | 2016-03-18,3.40345681791
56 | 2016-03-21,2.7736036187
57 | 2016-03-22,1.04883926804
58 | 2016-03-23,0.534453024845
59 | 2016-03-24,0.792241874683
60 | 2016-03-25,1.53628604191
61 | 2016-03-28,-0.722975259429
62 | 2016-03-29,1.62462407089
63 | 2016-03-30,-0.844202400059
64 | 2016-03-31,1.41411017676
65 | 2016-04-01,1.07975659325
66 | 2016-04-04,-0.230666883153
67 | 2016-04-05,-0.642502102383
68 | 2016-04-06,0.0405872165676
69 | 2016-04-07,0.368292061037
70 | 2016-04-08,-0.697054796069
71 | 2016-04-11,-1.05186589144
72 | 2016-04-12,0.801704932265
73 | 2016-04-13,3.32762426185
74 | 2016-04-14,0.204194062652
75 | 2016-04-15,-1.77749201533
76 | 2016-04-18,1.64510111632
77 | 2016-04-19,-1.57119336071
78 | 2016-04-20,-0.761930810788
79 | 2016-04-21,0.0467044137431
80 | 2016-04-22,-1.58528869716
81 | 2016-04-25,1.43149960312
82 | 2016-04-26,1.03697204831
83 | 2016-04-27,-0.381072542429
84 | 2016-04-28,-2.54498644417
85 | 2016-04-29,1.50497240428
86 | 2016-05-02,1.23958647672
87 | 2016-05-03,0.205805018603
88 | 2016-05-04,-0.352648323503
89 | 2016-05-05,-1.49295944192
90 | 2016-05-06,-0.438053344492
91 | 2016-05-09,-1.72894520467
92 | 2016-05-10,-2.86702155506
93 | 2016-05-11,-0.97682620458
94 | 2016-05-12,-1.05221826017
95 | 2016-05-13,0.803451599015
96 | 2016-05-16,-1.02580604037
97 | 2016-05-17,-1.20737631597
98 | 2016-05-18,0.35173032931
99 | 2016-05-19,1.59529470518
100 | 2016-05-20,3.49976389872
101 | 2016-05-23,-0.608561015518
102 | 2016-05-24,1.75492332661
103 | 2016-05-25,-0.976824518213
104 | 2016-05-26,-0.762357033605
105 | 2016-05-27,0.1817742094
106 | 2016-05-30,1.22739712328
107 | 2016-05-31,0.319908865373
108 | 2016-06-01,-1.35449594912
109 | 2016-06-02,0.362131321694
110 | 2016-06-03,2.21705179903
111 | 2016-06-06,-1.30192677619
112 | 2016-06-07,0.0178854991274
113 | 2016-06-08,-1.47753502024
114 | 2016-06-09,0.388687574166
115 | 2016-06-10,-0.835237798701
116 | 2016-06-13,-1.91738079234
117 | 2016-06-14,-0.126811429755
118 | 2016-06-15,-0.374984330112
119 | 2016-06-16,-0.575500480522
120 | 2016-06-17,1.10316676581
121 | 2016-06-20,-1.03470883988
122 | 2016-06-21,-0.430671456989
123 | 2016-06-22,-1.98501677538
124 | 2016-06-23,2.23195015682
125 | 2016-06-24,-2.27978858701
126 | 2016-06-27,-0.0547230933603
127 | 2016-06-28,-0.177375253824
128 | 2016-06-29,1.38628789473
129 | 2016-06-30,-2.10896133386
130 | 2016-07-01,-0.972559018228
131 | 2016-07-04,-1.69567561208
132 | 2016-07-05,-0.64888133472
133 | 2016-07-06,-1.74750120905
134 | 2016-07-07,0.612313110879
135 | 2016-07-08,-0.21348600543
136 | 2016-07-11,-2.37354641079
137 | 2016-07-12,2.34600563094
138 | 2016-07-13,-1.04336195757
139 | 2016-07-14,0.377637838315
140 | 2016-07-15,0.0338083935778
141 | 2016-07-18,0.909632054483
142 | 2016-07-19,0.844327206461
143 | 2016-07-20,0.895187523368
144 | 2016-07-21,0.165891923536
145 | 2016-07-22,1.9916643941
146 | 2016-07-25,-1.1091146781
147 | 2016-07-26,1.24390087496
148 | 2016-07-27,1.00094166192
149 | 2016-07-28,0.680678647468
150 | 2016-07-29,-0.0293931414154
151 | 2016-08-01,0.351603827883
152 | 2016-08-02,-0.798342249125
153 | 2016-08-03,0.205663294643
154 | 2016-08-04,-2.6809759772
155 | 2016-08-05,0.534199714544
156 | 2016-08-08,0.944042246308
157 | 2016-08-09,-1.85750356162
158 | 2016-08-10,-0.290528219864
159 | 2016-08-11,-0.32905864368
160 | 2016-08-12,-0.168931678387
161 | 2016-08-15,-1.53259737711
162 | 2016-08-16,-0.616398725272
163 | 2016-08-17,-1.46964751032
164 | 2016-08-18,2.09905648113
165 | 2016-08-19,0.238560449113
166 | 2016-08-22,-0.441756620999
167 | 2016-08-23,-0.410627662791
168 | 2016-08-24,-2.05285271364
169 | 2016-08-25,-1.30495612163
170 | 2016-08-26,0.975539898453
171 | 2016-08-29,0.615123595465
172 | 2016-08-30,-1.90191501412
173 | 2016-08-31,-0.721278127477
174 | 2016-09-01,-0.207989689119
175 | 2016-09-02,0.928175954722
176 | 2016-09-05,-2.20193539771
177 | 2016-09-06,0.675082663553
178 | 2016-09-07,-1.17348291224
179 | 2016-09-08,-2.3210435542
180 | 2016-09-09,0.140702484336
181 | 2016-09-12,0.702228038194
182 | 2016-09-13,1.27181335792
183 | 2016-09-14,0.145246056696
184 | 2016-09-15,-0.585503007615
185 | 2016-09-16,-1.39574486836
186 | 2016-09-19,-0.712681905613
187 | 2016-09-20,0.592172683913
188 | 2016-09-21,0.543331757931
189 | 2016-09-22,-0.927308943571
190 | 2016-09-23,0.673275235917
191 | 2016-09-26,-1.31082534404
192 | 2016-09-27,-3.27807107304
193 | 2016-09-28,-1.61808455048
194 | 2016-09-29,-2.45734574515
195 | 2016-09-30,1.81236268769
196 | 2016-10-03,0.344615177338
197 | 2016-10-04,-1.96990593741
198 | 2016-10-05,-1.05332957456
199 | 2016-10-06,1.99902579095
200 | 2016-10-07,2.31913065504
201 | 2016-10-10,-1.71455092288
202 | 2016-10-11,1.12295599912
203 | 2016-10-12,-1.41305665793
204 | 2016-10-13,0.873445411669
205 | 2016-10-14,-0.992702158626
206 | 2016-10-17,-0.646236750223
207 | 2016-10-18,-0.542581106315
208 | 2016-10-19,2.41722229378
209 | 2016-10-20,0.512886806468
210 | 2016-10-21,3.23958416818
211 | 2016-10-24,1.51172970288
212 | 2016-10-25,-1.97088115697
213 | 2016-10-26,-0.0361537248081
214 | 2016-10-27,-1.79663107987
215 | 2016-10-28,-0.299407698529
216 | 2016-10-31,-1.88375165918
217 | 2016-11-01,1.14583539274
218 | 2016-11-02,-0.656287365929
219 | 2016-11-03,0.826878358349
220 | 2016-11-04,0.878824978593
221 | 2016-11-07,-1.55464949905
222 | 2016-11-08,0.108362171074
223 | 2016-11-09,0.7607252931
224 | 2016-11-10,-0.507196407513
225 | 2016-11-11,-0.893018454854
226 | 2016-11-14,-0.23438062666
227 | 2016-11-15,0.742226093711
228 | 2016-11-16,2.3599476867
229 | 2016-11-17,-2.67030547347
230 | 2016-11-18,0.148696655935
231 | 2016-11-21,-1.49634890187
232 | 2016-11-22,-0.257851092584
233 | 2016-11-23,1.9096369789
234 | 2016-11-24,-1.75362174434
235 | 2016-11-25,-2.03713562499
236 | 2016-11-28,-2.55586126117
237 | 2016-11-29,-0.985398500407
238 | 2016-11-30,2.73326706877
239 | 2016-12-01,0.436718057752
240 | 2016-12-02,1.62459501086
241 | 2016-12-05,1.80084477746
242 | 2016-12-06,-1.33308086694
243 | 2016-12-07,-1.79302308165
244 | 2016-12-08,2.06646014678
245 | 2016-12-09,0.174803695097
246 | 2016-12-12,-1.3798786479
247 | 2016-12-13,2.39830631055
248 | 2016-12-14,2.62229938628
249 | 2016-12-15,-1.17278693274
250 | 2016-12-16,-1.09589663123
251 | 2016-12-19,0.34849014948
252 | 2016-12-20,0.862131044321
253 | 2016-12-21,-0.928719129359
254 | 2016-12-22,-3.20040225054
255 | 2016-12-23,0.122270141027
256 | 2016-12-26,2.27022433928
257 | 2016-12-27,-3.30083634438
258 | 2016-12-28,-0.484237366838
259 | 2016-12-29,1.54666243088
260 | 2016-12-30,2.02694845146
261 | 2017-01-02,-1.13568489899
262 | 2017-01-03,-2.57018957359
263 | 2017-01-04,-0.646602296369
264 | 2017-01-05,2.34907016957
265 | 2017-01-06,-1.50553460473
266 | 2017-01-09,-1.83810500357
267 | 2017-01-10,1.28972667054
268 | 2017-01-11,-1.86512037748
269 | 2017-01-12,-0.443890229501
270 | 2017-01-13,-0.312779620076
271 | 2017-01-16,-0.995093604823
272 | 2017-01-17,1.27624134049
273 | 2017-01-18,-0.828481516298
274 | 2017-01-19,-1.48098736263
275 | 2017-01-20,0.549474843283
276 | 2017-01-23,0.260249928374
277 | 2017-01-24,0.674873372985
278 | 2017-01-25,0.619820009087
279 | 2017-01-26,-2.34383963544
280 | 2017-01-27,-2.10949881089
281 | 2017-01-30,1.96666125501
282 | 2017-01-31,-1.58649315855
283 | 2017-02-01,-0.532487258066
284 | 2017-02-02,0.971644247506
285 | 2017-02-03,0.535632107372
286 | 2017-02-06,-1.37595849837
287 | 2017-02-07,0.804908129643
288 | 2017-02-08,0.226021010764
289 | 2017-02-09,-1.92393843186
290 | 2017-02-10,1.00202586802
291 | 2017-02-13,-2.61169583121
292 | 2017-02-14,-0.354844934186
293 | 2017-02-15,-1.02494728473
294 | 2017-02-16,0.228443680958
295 | 2017-02-17,-3.43853205295
296 | 2017-02-20,0.98235484906
297 | 2017-02-21,-1.303577649
298 | 2017-02-22,0.731015644217
299 | 2017-02-23,-0.686764353276
300 | 2017-02-24,-1.10874559461
301 | 2017-02-27,-1.13311052405
302 | 2017-02-28,-0.706265342992
303 | 2017-03-01,-1.99602056214
304 | 2017-03-02,-1.77118921694
305 | 2017-03-03,-0.26399968974
306 | 2017-03-06,-3.04559895192
307 | 2017-03-07,1.50067606963
308 | 2017-03-08,0.272853172261
309 | 2017-03-09,0.553466545441
310 | 2017-03-10,-0.221014391134
311 | 2017-03-13,0.294451776784
312 | 2017-03-14,-0.526508664707
313 | 2017-03-15,-1.60134330844
314 | 2017-03-16,1.85428223205
315 | 2017-03-17,-0.0575180631839
316 | 2017-03-20,-0.804773583575
317 | 2017-03-21,0.0959239853297
318 | 2017-03-22,-0.0505395008888
319 | 2017-03-23,-0.665508142742
320 | 2017-03-24,2.18027033894
321 | 2017-03-27,1.27721523253
322 | 2017-03-28,0.0381972461105
323 | 2017-03-29,-1.52290214945
324 | 2017-03-30,0.956648485035
325 | 2017-03-31,0.951585622391
326 | 2017-04-03,-2.03368978779
327 | 2017-04-04,0.837201240864
328 | 2017-04-05,0.675320754703
329 | 2017-04-06,-1.38567147857
330 | 2017-04-07,-1.31631979878
331 | 2017-04-10,-2.1958092599
332 | 2017-04-11,0.550385238052
333 | 2017-04-12,-1.09750329041
334 | 2017-04-13,1.05577162309
335 | 2017-04-14,-1.62733919465
336 | 2017-04-17,-2.430297819
337 | 2017-04-18,-2.8584865773
338 | 2017-04-19,0.612572489773
339 | 2017-04-20,0.0780394187355
340 | 2017-04-21,1.81907008147
341 | 2017-04-24,0.533016516702
342 | 2017-04-25,1.62280310702
343 | 2017-04-26,-3.49101818025
344 | 2017-04-27,0.505912618034
345 | 2017-04-28,2.34497727936
346 | 2017-05-01,1.27982322983
347 | 2017-05-02,-3.28006352412
348 | 2017-05-03,0.558046942455
349 | 2017-05-04,-1.14088576872
350 | 2017-05-05,1.27990250842
351 | 2017-05-08,-2.6554831932
352 | 2017-05-09,0.305969120203
353 | 2017-05-10,2.36697493652
354 | 2017-05-11,0.901350548961
355 | 2017-05-12,1.47657485082
356 | 2017-05-15,-0.0249465082623
357 | 2017-05-16,-0.986723754665
358 | 2017-05-17,1.22650120974
359 | 2017-05-18,-1.26747907878
360 | 2017-05-19,0.469249912172
361 | 2017-05-22,-0.897163586484
362 | 2017-05-23,-0.201564266035
363 | 2017-05-24,-2.48901699082
364 | 2017-05-25,0.310530342949
365 | 2017-05-26,1.39993342151
366 | 2017-05-29,-1.32114985926
367 | 2017-05-30,-1.55939770421
368 | 2017-05-31,0.251878743216
369 | 2017-06-01,-0.720543762919
370 | 2017-06-02,-1.09234543399
371 | 2017-06-05,-2.31782526342
372 | 2017-06-06,1.62199773143
373 | 2017-06-07,-0.209915230395
374 | 2017-06-08,0.730383073908
375 | 2017-06-09,-1.52065275148
376 | 2017-06-12,-0.888903454012
377 | 2017-06-13,2.14437685725
378 | 2017-06-14,0.80654823367
379 | 2017-06-15,-0.0369352471997
380 | 2017-06-16,-1.52722797628
381 | 2017-06-19,-0.185615062136
382 | 2017-06-20,0.747712618986
383 | 2017-06-21,-0.382922482812
384 | 2017-06-22,-0.0824178900418
385 | 2017-06-23,1.63542459048
386 | 2017-06-26,-0.477665414151
387 | 2017-06-27,-0.726359595805
388 | 2017-06-28,-2.15638276459
389 | 2017-06-29,-0.376129645064
390 | 2017-06-30,-1.69955745668
391 | 2017-07-03,2.01065971035
392 | 2017-07-04,-0.729569532852
393 | 2017-07-05,0.625347950302
394 | 2017-07-06,0.951673860043
395 | 2017-07-07,-1.40118153706
396 | 2017-07-10,-0.80795495471
397 | 2017-07-11,0.415069440239
398 | 2017-07-12,-1.75791454491
399 | 2017-07-13,-1.00251266286
400 | 2017-07-14,-1.25462789997
401 | 2017-07-17,2.19697589072
402 | 2017-07-18,-0.448686570639
403 | 2017-07-19,1.3461216949
404 | 2017-07-20,0.471860167339
405 | 2017-07-21,-1.80069601033
406 | 2017-07-24,0.112565354251
407 | 2017-07-25,0.353891388233
408 | 2017-07-26,2.20426423196
409 | 2017-07-27,1.0142090195
410 | 2017-07-28,-0.829626091563
411 | 2017-07-31,0.000353288028221
412 | 2017-08-01,-1.42886114567
413 | 2017-08-02,-0.340757690955
414 | 2017-08-03,2.55597944625
415 | 2017-08-04,0.861145764153
416 | 2017-08-07,1.32198759659
417 | 2017-08-08,-0.0390397541084
418 | 2017-08-09,0.918851571578
419 | 2017-08-10,-1.17398999163
420 | 2017-08-11,0.781880216401
421 | 2017-08-14,-0.130218406447
422 | 2017-08-15,3.10640403635
423 | 2017-08-16,0.213238792126
424 | 2017-08-17,0.216607652142
425 | 2017-08-18,-0.716881597089
426 | 2017-08-21,-3.73674699662
427 | 2017-08-22,-1.70135071407
428 | 2017-08-23,-1.46939143935
429 | 2017-08-24,-2.04903708979
430 | 2017-08-25,-0.509864956148
431 | 2017-08-28,1.32668844699
432 | 2017-08-29,0.120516478373
433 | 2017-08-30,-0.789345873489
434 | 2017-08-31,0.193975917066
435 | 2017-09-01,-0.505107059727
436 | 2017-09-04,0.450000046009
437 | 2017-09-05,-1.11952813426
438 | 2017-09-06,-0.361841803858
439 | 2017-09-07,-1.08139691805
440 | 2017-09-08,-1.74327499448
441 | 2017-09-11,0.361855218159
442 | 2017-09-12,-0.152628361654
443 | 2017-09-13,-1.64989464856
444 | 2017-09-14,0.410757950451
445 | 2017-09-15,-0.530326700757
446 | 2017-09-18,-0.17493428176
447 | 2017-09-19,0.755092093784
448 | 2017-09-20,0.57603620811
449 | 2017-09-21,-2.39813670791
450 | 2017-09-22,2.19039229392
451 | 2017-09-25,-2.14517245505
452 | 2017-09-26,0.557856453616
453 | 2017-09-27,0.970994402874
454 | 2017-09-28,-1.7062662684
455 | 2017-09-29,2.289756245
456 | 2017-10-02,-2.21884039066
457 | 2017-10-03,-1.01688534564
458 | 2017-10-04,-0.259175509346
459 | 2017-10-05,-0.319289896615
460 | 2017-10-06,0.200042182949
461 | 2017-10-09,-0.0226113761569
462 | 2017-10-10,1.53034661666
463 | 2017-10-11,2.38475882145
464 | 2017-10-12,-0.53600982685
465 | 2017-10-13,1.83580320538
466 | 2017-10-16,1.33419812274
467 | 2017-10-17,-1.0697522211
468 | 2017-10-18,-1.1522665034
469 | 2017-10-19,0.674744963968
470 | 2017-10-20,-1.32389256982
471 | 2017-10-23,1.66367405489
472 | 2017-10-24,3.24047024041
473 | 2017-10-25,0.184048461979
474 | 2017-10-26,1.71065006077
475 | 2017-10-27,0.391009250722
476 | 2017-10-30,-0.703045138945
477 | 2017-10-31,0.990963037634
478 | 2017-11-01,0.775091407101
479 | 2017-11-02,0.0587659177434
480 | 2017-11-03,1.0674859235
481 | 2017-11-06,0.57254145092
482 | 2017-11-07,1.12671933158
483 | 2017-11-08,-0.570907316663
484 | 2017-11-09,1.58149159817
485 | 2017-11-10,1.48710113275
486 | 2017-11-13,0.310956546026
487 | 2017-11-14,1.61472697925
488 | 2017-11-15,1.70729437889
489 | 2017-11-16,-1.27034812155
490 | 2017-11-17,-0.525604960667
491 | 2017-11-20,0.214937582637
492 | 2017-11-21,0.702985855346
493 | 2017-11-22,-0.504772278
494 | 2017-11-23,0.318426777681
495 | 2017-11-24,1.0821632933
496 | 2017-11-27,0.619825773006
497 | 2017-11-28,-0.558634889801
498 | 2017-11-29,0.701991325725
499 | 2017-11-30,-0.10420659651
500 | 2017-12-01,-1.50572502032
501 | 2017-12-04,1.44843656704
502 | 2017-12-05,-0.317600794692
503 | 2017-12-06,0.429533271829
504 | 2017-12-07,-1.27730404508
505 |
--------------------------------------------------------------------------------
/tests/test_data/test_gross_lev.csv.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/stefan-jansen/pyfolio-reloaded/6b55da7fce365e325046bb080cb13ced1822de95/tests/test_data/test_gross_lev.csv.gz
--------------------------------------------------------------------------------
/tests/test_data/test_pos.csv.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/stefan-jansen/pyfolio-reloaded/6b55da7fce365e325046bb080cb13ced1822de95/tests/test_data/test_pos.csv.gz
--------------------------------------------------------------------------------
/tests/test_data/test_returns.csv.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/stefan-jansen/pyfolio-reloaded/6b55da7fce365e325046bb080cb13ced1822de95/tests/test_data/test_returns.csv.gz
--------------------------------------------------------------------------------
/tests/test_data/test_txn.csv.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/stefan-jansen/pyfolio-reloaded/6b55da7fce365e325046bb080cb13ced1822de95/tests/test_data/test_txn.csv.gz
--------------------------------------------------------------------------------
/tests/test_pos.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from parameterized import parameterized
3 | from collections import OrderedDict
4 | import os
5 | import gzip
6 |
7 | from pandas import Series, DataFrame, date_range, Timestamp, read_csv
8 | from pandas.testing import assert_frame_equal
9 |
10 | from numpy import (
11 | arange,
12 | zeros_like,
13 | nan,
14 | )
15 |
16 | import warnings
17 |
18 | from pyfolio.utils import (
19 | to_utc,
20 | to_series,
21 | check_intraday,
22 | detect_intraday,
23 | estimate_intraday,
24 | )
25 | from pyfolio.pos import (
26 | get_percent_alloc,
27 | extract_pos,
28 | get_sector_exposures,
29 | get_max_median_position_concentration,
30 | )
31 |
32 |
33 | class PositionsTestCase(TestCase):
34 | dates = date_range(start="2015-01-01", freq="D", periods=20)
35 |
36 | def test_get_percent_alloc(self):
37 | raw_data = arange(15, dtype=float).reshape(5, 3)
38 | # Make the first column negative to test absolute magnitudes.
39 | raw_data[:, 0] *= -1
40 |
41 | frame = DataFrame(
42 | raw_data,
43 | index=date_range("01-01-2015", freq="D", periods=5),
44 | columns=["A", "B", "C"],
45 | )
46 |
47 | result = get_percent_alloc(frame)
48 | expected_raw = zeros_like(raw_data)
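        # Each row's expected allocation is the day's position divided by that day's signed net total.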
49 | for idx, row in enumerate(raw_data):
50 | expected_raw[idx] = row / row.sum()
51 |
52 | expected = DataFrame(
53 | expected_raw,
54 | index=frame.index,
55 | columns=frame.columns,
56 | )
57 |
58 | assert_frame_equal(result, expected)
59 |
60 | def test_extract_pos(self):
61 | index_dup = [
62 | Timestamp("2015-06-08", tz="UTC"),
63 | Timestamp("2015-06-08", tz="UTC"),
64 | Timestamp("2015-06-09", tz="UTC"),
65 | Timestamp("2015-06-09", tz="UTC"),
66 | ]
67 | index = [
68 | Timestamp("2015-06-08", tz="UTC"),
69 | Timestamp("2015-06-09", tz="UTC"),
70 | ]
71 |
72 | positions = DataFrame(
73 | {
74 | "amount": [100.0, 200.0, 300.0, 400.0],
75 | "last_sale_price": [10.0, 20.0, 30.0, 40.0],
76 | "sid": [1, 2, 1, 2],
77 | },
78 | index=index_dup,
79 | )
80 | cash = Series([100.0, 200.0], index=index)
81 |
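        # extract_pos should pivot to one column per sid, valued at amount * last_sale_price, plus a cash column.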
82 | result = extract_pos(positions, cash)
83 |
84 | expected = DataFrame(
85 | OrderedDict(
86 | [
87 | (1, [100.0 * 10.0, 300.0 * 30.0]),
88 | (2, [200.0 * 20.0, 400.0 * 40.0]),
89 | ("cash", [100.0, 200.0]),
90 | ]
91 | ),
92 | index=index,
93 | )
94 | expected.index.name = "index"
95 | expected.columns.name = "sid"
96 |
97 | assert_frame_equal(result, expected)
98 |
99 | @parameterized.expand(
100 | [
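            # Each case: (positions frame, sector mapping, expected sector exposures, warning_expected).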
101 | (
102 | DataFrame(
103 | [[1.0, 2.0, 3.0, 10.0]] * len(dates),
104 | columns=[0, 1, 2, "cash"],
105 | index=dates,
106 | ),
107 | {0: "A", 1: "B", 2: "A"},
108 | DataFrame(
109 | [[4.0, 2.0, 10.0]] * len(dates),
110 | columns=["A", "B", "cash"],
111 | index=dates,
112 | ),
113 | False,
114 | ),
115 | (
116 | DataFrame(
117 | [[1.0, 2.0, 3.0, 10.0]] * len(dates),
118 | columns=[0, 1, 2, "cash"],
119 | index=dates,
120 | ),
121 | Series(index=[0, 1, 2], data=["A", "B", "A"]),
122 | DataFrame(
123 | [[4.0, 2.0, 10.0]] * len(dates),
124 | columns=["A", "B", "cash"],
125 | index=dates,
126 | ),
127 | False,
128 | ),
129 | (
130 | DataFrame(
131 | [[1.0, 2.0, 3.0, 10.0]] * len(dates),
132 | columns=[0, 1, 2, "cash"],
133 | index=dates,
134 | ),
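            # sid 2 is missing from this mapping: its exposure should be dropped with a warning.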
135 | {0: "A", 1: "B"},
136 | DataFrame(
137 | [[1.0, 2.0, 10.0]] * len(dates),
138 | columns=["A", "B", "cash"],
139 | index=dates,
140 | ),
141 | True,
142 | ),
143 | ]
144 | )
145 | def test_sector_exposure(
146 | self, positions, mapping, expected_sector_exposure, warning_expected
147 | ):
148 | """
149 | Tests sector exposure mapping and rollup.
150 |
151 | """
152 | with warnings.catch_warnings(record=True) as w:
153 | result_sector_exposure = get_sector_exposures(positions, mapping)
154 |
155 | assert_frame_equal(result_sector_exposure, expected_sector_exposure)
156 | # avoids test failure due to DeprecationWarning for pandas>=1.0, <1.1
157 | w_ = [warn for warn in w if issubclass(warn.category, UserWarning)]
158 | if warning_expected:
159 | self.assertEqual(len(w_), 1)
160 | else:
161 | self.assertEqual(len(w_), 0)
162 |
163 | @parameterized.expand(
164 | [
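            # Each case: (positions frame including cash, expected max/median long and short concentrations).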
165 | (
166 | DataFrame(
167 | [[1.0, 2.0, 3.0, 14.0]] * len(dates),
168 | columns=[0, 1, 2, "cash"],
169 | index=dates,
170 | ),
171 | DataFrame(
172 | [[0.15, 0.1, nan, nan]] * len(dates),
173 | columns=[
174 | "max_long",
175 | "median_long",
176 | "median_short",
177 | "max_short",
178 | ],
179 | index=dates,
180 | ),
181 | ),
182 | (
183 | DataFrame(
184 | [[1.0, -2.0, -13.0, 15.0]] * len(dates),
185 | columns=[0, 1, 2, "cash"],
186 | index=dates,
187 | ),
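                # Longs: only 1.0. Shorts: median of (-2, -13) is -7.5; the largest short is -13.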
188 | DataFrame(
189 | [[1.0, 1.0, -7.5, -13.0]] * len(dates),
190 | columns=[
191 | "max_long",
192 | "median_long",
193 | "median_short",
194 | "max_short",
195 | ],
196 | index=dates,
197 | ),
198 | ),
199 | (
200 | DataFrame(
201 | [[nan, 2.0, nan, 8.0]] * len(dates),
202 | columns=[0, 1, 2, "cash"],
203 | index=dates,
204 | ),
205 | DataFrame(
206 | [[0.2, 0.2, nan, nan]] * len(dates),
207 | columns=[
208 | "max_long",
209 | "median_long",
210 | "median_short",
211 | "max_short",
212 | ],
213 | index=dates,
214 | ),
215 | ),
216 | ]
217 | )
218 | def test_max_median_exposure(self, positions, expected):
219 | alloc_summary = get_max_median_position_concentration(positions)
220 | assert_frame_equal(expected, alloc_summary)
221 |
222 | __location__ = os.path.realpath(
223 | os.path.join(os.getcwd(), os.path.dirname(__file__))
224 | )
225 |
226 | test_returns = read_csv(
227 | gzip.open(__location__ + "/test_data/test_returns.csv.gz"),
228 | index_col=0,
229 | parse_dates=True,
230 | )
231 | test_returns = to_series(to_utc(test_returns))
232 | test_txn = to_utc(
233 | read_csv(
234 | gzip.open(__location__ + "/test_data/test_txn.csv.gz"),
235 | index_col=0,
236 | parse_dates=True,
237 | )
238 | )
239 | test_pos = to_utc(
240 | read_csv(
241 | gzip.open(__location__ + "/test_data/test_pos.csv.gz"),
242 | index_col=0,
243 | parse_dates=True,
244 | )
245 | )
246 |
247 | @parameterized.expand(
248 | [
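            # Daily positions should not be flagged as intraday; weekly-sampled positions with the same daily transactions should be.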
249 | (test_pos, test_txn, False),
250 | (test_pos.resample("1W").last(), test_txn, True),
251 | ]
252 | )
253 | def test_detect_intraday(self, positions, transactions, expected):
254 | detected = detect_intraday(positions, transactions, threshold=0.25)
255 | assert detected == expected
256 |
257 | @parameterized.expand(
258 | [
259 | ("infer", test_returns, test_pos, test_txn, test_pos),
260 | (False, test_returns, test_pos, test_txn, test_pos),
261 | ]
262 | )
263 | def test_check_intraday(self, estimate, returns, positions, transactions, expected):
264 | detected = check_intraday(estimate, returns, positions, transactions)
265 | assert_frame_equal(detected, expected)
266 |
267 | @parameterized.expand(
268 | [
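            # Only the shape of the estimated intraday positions frame is checked here.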
269 | (test_returns, test_pos, test_txn, (1506, 8)),
270 | (
271 | test_returns,
272 | test_pos.resample("1W").last(),
273 | test_txn,
274 | (1819, 8),
275 | ),
276 | ]
277 | )
278 | def test_estimate_intraday(self, returns, positions, transactions, expected):
279 | intraday_pos = estimate_intraday(returns, positions, transactions)
280 | assert intraday_pos.shape == expected
281 |
--------------------------------------------------------------------------------
/tests/test_round_trips.py:
--------------------------------------------------------------------------------
1 | from parameterized import parameterized
2 |
3 | from unittest import TestCase
4 |
5 | from pandas import (
6 | Series,
7 | DataFrame,
8 | DatetimeIndex,
9 | date_range,
10 | Timedelta,
11 | read_csv,
12 | )
13 | from pandas.testing import assert_frame_equal
14 |
15 | import os
16 | import gzip
17 |
18 | from pyfolio.round_trips import (
19 | extract_round_trips,
20 | add_closing_transactions,
21 | _groupby_consecutive,
22 | )
23 |
24 |
25 | class RoundTripTestCase(TestCase):
26 | dates = date_range(start="2015-01-01", freq="D", periods=20)
27 | dates_intraday = date_range(start="2015-01-01", freq="2BH", periods=8)
28 |
29 | @parameterized.expand(
30 | [
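            # Each case: (raw transactions, expected result after merging consecutive same-direction trades at their volume-weighted price).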
31 | (
32 | DataFrame(
33 | data=[
34 | [2, 10.0, "A"],
35 | [2, 20.0, "A"],
36 | [-2, 20.0, "A"],
37 | [-2, 10.0, "A"],
38 | ],
39 | columns=["amount", "price", "symbol"],
40 | index=dates_intraday[:4],
41 | ),
42 | DataFrame(
43 | data=[
44 | [4, 15.0, "A"],
45 | [-4, 15.0, "A"],
46 | ],
47 | columns=["amount", "price", "symbol"],
48 | index=dates_intraday[[0, 2]],
49 | ).rename_axis("dt", axis="index"),
50 | ),
51 | (
52 | DataFrame(
53 | data=[
54 | [2, 10.0, "A"],
55 | [2, 20.0, "A"],
56 | [2, 20.0, "A"],
57 | [2, 10.0, "A"],
58 | ],
59 | columns=["amount", "price", "symbol"],
60 | index=dates_intraday[[0, 1, 4, 5]],
61 | ),
62 | DataFrame(
63 | data=[
64 | [4, 15.0, "A"],
65 | [4, 15.0, "A"],
66 | ],
67 | columns=["amount", "price", "symbol"],
68 | index=dates_intraday[[0, 4]],
69 | ).rename_axis("dt", axis="index"),
70 | ),
71 | ]
72 | )
73 | def test_groupby_consecutive(self, transactions, expected):
74 | grouped_txn = _groupby_consecutive(transactions)
75 | assert_frame_equal(
76 | grouped_txn.sort_index(axis="columns"),
77 | expected.sort_index(axis="columns"),
78 | )
79 |
80 | @parameterized.expand(
81 | [
82 | # Simple round-trip
83 | (
84 | DataFrame(
85 | data=[[2, 10.0, "A"], [-2, 15.0, "A"]],
86 | columns=["amount", "price", "symbol"],
87 | index=dates[:2],
88 | ),
89 | DataFrame(
90 | data=[
91 | [
92 | dates[0],
93 | dates[1],
94 | Timedelta(days=1),
95 | 10.0,
96 | 0.5,
97 | True,
98 | "A",
99 | ]
100 | ],
101 | columns=[
102 | "open_dt",
103 | "close_dt",
104 | "duration",
105 | "pnl",
106 | "rt_returns",
107 | "long",
108 | "symbol",
109 | ],
110 | index=[0],
111 | ),
112 | ),
113 | # Round-trip with left-over txn that shouldn't be counted
114 | (
115 | DataFrame(
116 | data=[[2, 10.0, "A"], [2, 15.0, "A"], [-9, 10.0, "A"]],
117 | columns=["amount", "price", "symbol"],
118 | index=dates[:3],
119 | ),
120 | DataFrame(
121 | data=[
122 | [
123 | dates[0],
124 | dates[2],
125 | Timedelta(days=2),
126 | -10.0,
127 | -0.2,
128 | True,
129 | "A",
130 | ]
131 | ],
132 | columns=[
133 | "open_dt",
134 | "close_dt",
135 | "duration",
136 | "pnl",
137 | "rt_returns",
138 | "long",
139 | "symbol",
140 | ],
141 | index=[0],
142 | ),
143 | ),
144 | # Round-trip with sell that crosses 0 and should be split
145 | (
146 | DataFrame(
147 | data=[[2, 10.0, "A"], [-4, 15.0, "A"], [3, 20.0, "A"]],
148 | columns=["amount", "price", "symbol"],
149 | index=dates[:3],
150 | ),
151 | DataFrame(
152 | data=[
153 | [
154 | dates[0],
155 | dates[1],
156 | Timedelta(days=1),
157 | 10.0,
158 | 0.5,
159 | True,
160 | "A",
161 | ],
162 | [
163 | dates[1],
164 | dates[2],
165 | Timedelta(days=1),
166 | -10,
167 | (-1.0 / 3),
168 | False,
169 | "A",
170 | ],
171 | ],
172 | columns=[
173 | "open_dt",
174 | "close_dt",
175 | "duration",
176 | "pnl",
177 | "rt_returns",
178 | "long",
179 | "symbol",
180 | ],
181 | index=[0, 1],
182 | ),
183 | ),
184 | # Round-trip that does not cross 0
185 | (
186 | DataFrame(
187 | data=[[4, 10.0, "A"], [-2, 15.0, "A"], [2, 20.0, "A"]],
188 | columns=["amount", "price", "symbol"],
189 | index=dates[:3],
190 | ),
191 | DataFrame(
192 | data=[
193 | [
194 | dates[0],
195 | dates[1],
196 | Timedelta(days=1),
197 | 10.0,
198 | 0.5,
199 | True,
200 | "A",
201 | ]
202 | ],
203 | columns=[
204 | "open_dt",
205 | "close_dt",
206 | "duration",
207 | "pnl",
208 | "rt_returns",
209 | "long",
210 | "symbol",
211 | ],
212 | index=[0],
213 | ),
214 | ),
215 | # Round-trip that does not cross 0 and has portfolio value
216 | (
217 | DataFrame(
218 | data=[[4, 10.0, "A"], [-2, 15.0, "A"], [2, 20.0, "A"]],
219 | columns=["amount", "price", "symbol"],
220 | index=dates[:3],
221 | ),
222 | DataFrame(
223 | data=[
224 | [
225 | dates[0],
226 | dates[1],
227 | Timedelta(days=1),
228 | 10.0,
229 | 0.5,
230 | True,
231 | "A",
232 | 0.1,
233 | ]
234 | ],
235 | columns=[
236 | "open_dt",
237 | "close_dt",
238 | "duration",
239 | "pnl",
240 | "rt_returns",
241 | "long",
242 | "symbol",
243 | "returns",
244 | ],
245 | index=[0],
246 | ),
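            # A constant portfolio value of 100 turns the 10.0 pnl into a 0.1 entry in the extra "returns" column.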
247 | Series([100.0, 100.0, 100.0], index=dates[:3]),
248 | ),
249 | ]
250 | )
251 | def test_extract_round_trips(self, transactions, expected, portfolio_value=None):
252 | round_trips = extract_round_trips(transactions, portfolio_value=portfolio_value)
253 |
254 | assert_frame_equal(
255 | round_trips.sort_index(axis="columns"),
256 | expected.sort_index(axis="columns"),
257 | )
258 |
259 | def test_add_closing_trades(self):
260 | dates = date_range(start="2015-01-01", periods=20)
261 | transactions = DataFrame(
262 | data=[[2, 10, "A"], [-5, 10, "A"], [-1, 10, "B"]],
263 | columns=["amount", "price", "symbol"],
264 | index=dates[:3],
265 | )
266 | positions = DataFrame(
267 | data=[[20, 10, 0], [-30, 10, 30], [-60, 0, 30]],
268 | columns=["A", "B", "cash"],
269 | index=dates[:3],
270 | )
271 |
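        # The synthetic closing trade should flatten the remaining -3 shares of A at 20.0 (-60 / -3), one second after the last bar.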
272 | expected_ix = dates[:3].append(DatetimeIndex([dates[2] + Timedelta(seconds=1)]))
273 | expected = DataFrame(
274 | data=[
275 | ["A", 2, 10],
276 | ["A", -5, 10],
277 | ["B", -1, 10.0],
278 | [
279 | "A",
280 | 3,
281 | 20.0,
282 | ],
283 | ],
284 | columns=["symbol", "amount", "price"],
285 | index=expected_ix,
286 | )
287 |
288 | transactions_closed = add_closing_transactions(positions, transactions)
289 | assert_frame_equal(transactions_closed, expected)
290 |
291 | def test_txn_pnl_matches_round_trip_pnl(self):
292 | __location__ = os.path.realpath(
293 | os.path.join(os.getcwd(), os.path.dirname(__file__))
294 | )
295 |
296 | test_txn = read_csv(
297 | gzip.open(__location__ + "/test_data/test_txn.csv.gz"),
298 | index_col=0,
299 | parse_dates=True,
300 | )
301 | test_pos = read_csv(
302 | gzip.open(__location__ + "/test_data/test_pos.csv.gz"),
303 | index_col=0,
304 | parse_dates=True,
305 | )
306 |
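        # With every position force-closed, total round-trip pnl should equal the sum of signed transaction dollars.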
307 | transactions_closed = add_closing_transactions(test_pos, test_txn)
308 | transactions_closed["txn_dollars"] = (
309 | transactions_closed.amount * -1.0 * transactions_closed.price
310 | )
311 | round_trips = extract_round_trips(transactions_closed)
312 |
313 | self.assertAlmostEqual(
314 | round_trips.pnl.sum(), transactions_closed.txn_dollars.sum()
315 | )
316 |
--------------------------------------------------------------------------------
/tests/test_tears.py:
--------------------------------------------------------------------------------
1 | import functools
2 | from pathlib import Path
3 | import gzip
4 | import inspect
5 | import os
6 | import warnings
7 | from contextlib import contextmanager
8 | from unittest import TestCase
9 |
10 | import matplotlib
11 | import matplotlib.pyplot as plt
12 | from pandas import read_csv
13 | from parameterized import parameterized
14 |
15 | from pyfolio.tears import (
16 | create_full_tear_sheet,
17 | create_simple_tear_sheet,
18 | create_returns_tear_sheet,
19 | create_position_tear_sheet,
20 | create_txn_tear_sheet,
21 | create_round_trip_tear_sheet,
22 | create_interesting_times_tear_sheet,
23 | )
24 | from pyfolio.utils import to_utc, to_series
25 |
26 |
27 | @contextmanager
28 | def _cleanup_cm():
29 | orig_units_registry = matplotlib.units.registry.copy()
30 | try:
31 | with warnings.catch_warnings(), matplotlib.rc_context():
32 | yield
33 | finally:
34 | matplotlib.units.registry.clear()
35 | matplotlib.units.registry.update(orig_units_registry)
36 | plt.close("all")
37 |
38 |
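# This decorator mirrors matplotlib.testing.decorators.cleanup so each test runs with isolated style and unit-registry state.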
39 | def cleanup(style=None):
40 | """
41 | A decorator to ensure that any global state is reset before
42 | running a test.
43 |
44 | Parameters
45 | ----------
46 | style : str, dict, or list, optional
47 | The style(s) to apply. Defaults to ``["classic",
48 | "_classic_test_patch"]``.
49 | """
50 |
51 | # If cleanup is used without arguments, *style* will be a callable, and we
52 |     # pass it directly to the wrapper generator. If cleanup is called with an
53 | # argument, it is a string naming a style, and the function will be passed
54 | # as an argument to what we return. This is a confusing, but somewhat
55 | # standard, pattern for writing a decorator with optional arguments.
56 |
57 | def make_cleanup(func):
58 | if inspect.isgeneratorfunction(func):
59 |
60 | @functools.wraps(func)
61 | def wrapped_callable(*args, **kwargs):
62 | with _cleanup_cm(), matplotlib.style.context(style):
63 | yield from func(*args, **kwargs)
64 |
65 | else:
66 |
67 | @functools.wraps(func)
68 | def wrapped_callable(*args, **kwargs):
69 | with _cleanup_cm(), matplotlib.style.context(style):
70 | func(*args, **kwargs)
71 |
72 | return wrapped_callable
73 |
74 | if callable(style):
75 | result = make_cleanup(style)
76 | # Default of mpl_test_settings fixture and image_comparison too.
77 | style = ["classic", "_classic_test_patch"]
78 | return result
79 | else:
80 | return make_cleanup
81 |
82 |
83 | class PositionsTestCase(TestCase):
84 | TEST_DATA = Path(__file__).parent / "test_data"
88 |
89 | test_returns = read_csv(
90 | gzip.open(TEST_DATA / "test_returns.csv.gz"),
91 | index_col=0,
92 | parse_dates=True,
93 | )
94 | test_returns = to_series(to_utc(test_returns))
95 | test_txn = to_utc(
96 | read_csv(
97 | gzip.open(TEST_DATA / "test_txn.csv.gz"),
98 | index_col=0,
99 | parse_dates=True,
100 | )
101 | )
102 | test_pos = to_utc(
103 | read_csv(
104 | gzip.open(TEST_DATA / "test_pos.csv.gz"),
105 | index_col=0,
106 | parse_dates=True,
107 | )
108 | )
109 |
110 | @parameterized.expand(
111 | [
112 | ({},),
113 | ({"slippage": 1},),
114 | ({"live_start_date": test_returns.index[-20]},),
115 | ({"round_trips": True},),
116 | ({"hide_positions": True},),
117 | ({"cone_std": 1},),
118 | ({"bootstrap": True},),
119 | ]
120 | )
121 | @cleanup
122 | def test_create_full_tear_sheet_breakdown(self, kwargs):
123 | create_full_tear_sheet(
124 | self.test_returns,
125 | positions=self.test_pos,
126 | transactions=self.test_txn,
127 | benchmark_rets=self.test_returns,
128 | **kwargs,
129 | )
130 |
131 | @parameterized.expand(
132 | [
133 | ({},),
134 | ({"slippage": 1},),
135 | ({"live_start_date": test_returns.index[-20]},),
136 | ]
137 | )
138 | @cleanup
139 | def test_create_simple_tear_sheet_breakdown(self, kwargs):
140 | create_simple_tear_sheet(
141 | self.test_returns,
142 | positions=self.test_pos,
143 | transactions=self.test_txn,
144 | **kwargs,
145 | )
146 |
147 | @parameterized.expand(
148 | [
149 | ({},),
150 | ({"live_start_date": test_returns.index[-20]},),
151 | ({"cone_std": 1},),
152 | ({"bootstrap": True},),
153 | ]
154 | )
155 | @cleanup
156 | def test_create_returns_tear_sheet_breakdown(self, kwargs):
157 | create_returns_tear_sheet(
158 | self.test_returns, benchmark_rets=self.test_returns, **kwargs
159 | )
160 |
161 | @parameterized.expand(
162 | [
163 | ({},),
164 | ({"hide_positions": True},),
165 | ({"show_and_plot_top_pos": 0},),
166 | ({"show_and_plot_top_pos": 1},),
167 | ]
168 | )
169 | @cleanup
170 | def test_create_position_tear_sheet_breakdown(self, kwargs):
171 | create_position_tear_sheet(self.test_returns, self.test_pos, **kwargs)
172 |
173 | @parameterized.expand(
174 | [
175 | ({},),
176 | ({"unadjusted_returns": test_returns},),
177 | ]
178 | )
179 | @cleanup
180 | def test_create_txn_tear_sheet_breakdown(self, kwargs):
181 | create_txn_tear_sheet(self.test_returns, self.test_pos, self.test_txn, **kwargs)
182 |
183 | @parameterized.expand(
184 | [
185 | ({},),
186 | ({"sector_mappings": {}},),
187 | ]
188 | )
189 | @cleanup
190 | def test_create_round_trip_tear_sheet_breakdown(self, kwargs):
191 | create_round_trip_tear_sheet(
192 | self.test_returns, self.test_pos, self.test_txn, **kwargs
193 | )
194 |
195 | @parameterized.expand(
196 | [
197 | ({},),
198 | ({"legend_loc": 1},),
199 | ]
200 | )
201 | @cleanup
202 | def test_create_interesting_times_tear_sheet_breakdown(self, kwargs):
203 | create_interesting_times_tear_sheet(
204 | self.test_returns, self.test_returns, **kwargs
205 | )
206 |
--------------------------------------------------------------------------------
/tests/test_timeseries.py:
--------------------------------------------------------------------------------
1 | import os
2 | from unittest import TestCase, skipIf
3 | from parameterized import parameterized
4 | from numpy.testing import assert_allclose, assert_almost_equal
5 | from pandas.testing import assert_series_equal
6 | import numpy as np
7 | import pandas as pd
8 |
9 | from pyfolio import timeseries
10 | from pyfolio.utils import to_utc, to_series, pandas_one_point_three_or_less
11 |
12 | import gzip
13 |
14 | DECIMAL_PLACES = 8
15 |
16 |
17 | class TestDrawdown(TestCase):
18 | drawdown_list = np.array([100, 90, 75]) / 10.0
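    # Prices fall from 10.0 to 7.5 with no recovery: a 25% drawdown that begins on the first day.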
19 | dt = pd.date_range("2000-1-3", periods=3, freq="D")
20 |
21 | drawdown_serie = pd.Series(drawdown_list, index=dt)
22 |
23 | @parameterized.expand([(drawdown_serie,)])
24 | def test_get_max_drawdown_begins_first_day(self, px):
25 | rets = px.pct_change()
26 | drawdowns = timeseries.gen_drawdown_table(rets, top=1)
27 | self.assertEqual(drawdowns.loc[0, "Net drawdown in %"], 25)
28 |
29 | drawdown_list = (
30 | np.array(
31 | [
32 | 100,
33 | 110,
34 | 120,
35 | 150,
36 | 180,
37 | 200,
38 | 100,
39 | 120,
40 | 160,
41 | 180,
42 | 200,
43 | 300,
44 | 400,
45 | 500,
46 | 600,
47 | 800,
48 | 900,
49 | 1000,
50 | 650,
51 | 600,
52 | ]
53 | )
54 | / 10.0
55 | )
56 | dt = pd.date_range("2000-1-3", periods=20, freq="D")
57 |
58 | drawdown_serie = pd.Series(drawdown_list, index=dt)
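    # This series has two drawdowns: 200 -> 100 (-50%), recovered by 2000-01-13, and 1000 -> 600 (-40%), never recovered.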
59 |
60 | @parameterized.expand(
61 | [
62 | (
63 | drawdown_serie,
64 | pd.Timestamp("2000-01-08"),
65 | pd.Timestamp("2000-01-09"),
66 | pd.Timestamp("2000-01-13"),
67 | 50,
68 | pd.Timestamp("2000-01-20"),
69 | pd.Timestamp("2000-01-22"),
70 | None,
71 | 40,
72 | )
73 | ]
74 | )
75 | def test_gen_drawdown_table_relative(
76 | self,
77 | px,
78 | first_expected_peak,
79 | first_expected_valley,
80 | first_expected_recovery,
81 | first_net_drawdown,
82 | second_expected_peak,
83 | second_expected_valley,
84 | second_expected_recovery,
85 | second_net_drawdown,
86 | ):
87 |
88 | rets = px.pct_change()
89 |
90 | drawdowns = timeseries.gen_drawdown_table(rets, top=2)
91 |
92 | self.assertEqual(
93 | np.round(drawdowns.loc[0, "Net drawdown in %"]), first_net_drawdown
94 | )
95 | self.assertEqual(drawdowns.loc[0, "Peak date"], first_expected_peak)
96 | self.assertEqual(drawdowns.loc[0, "Valley date"], first_expected_valley)
97 | self.assertEqual(drawdowns.loc[0, "Recovery date"], first_expected_recovery)
98 |
99 | self.assertEqual(
100 | np.round(drawdowns.loc[1, "Net drawdown in %"]),
101 | second_net_drawdown,
102 | )
103 | self.assertEqual(drawdowns.loc[1, "Peak date"], second_expected_peak)
104 | self.assertEqual(drawdowns.loc[1, "Valley date"], second_expected_valley)
105 | self.assertTrue(pd.isnull(drawdowns.loc[1, "Recovery date"]))
106 |
107 | px_list_1 = np.array([100, 120, 100, 80, 70, 110, 180, 150]) / 100.0 # Simple
108 | px_list_2 = (
109 | np.array([100, 120, 100, 80, 70, 80, 90, 90]) / 100.0
110 | ) # Ends in drawdown
111 | dt = pd.date_range("2000-1-3", periods=8, freq="D")
112 |
113 | @parameterized.expand(
114 | [
115 | (
116 | pd.Series(px_list_1, index=dt),
117 | pd.Timestamp("2000-1-4"),
118 | pd.Timestamp("2000-1-7"),
119 | pd.Timestamp("2000-1-9"),
120 | ),
121 | (
122 | pd.Series(px_list_2, index=dt),
123 | pd.Timestamp("2000-1-4"),
124 | pd.Timestamp("2000-1-7"),
125 | None,
126 | ),
127 | ]
128 | )
129 | def test_get_max_drawdown(
130 | self, px, expected_peak, expected_valley, expected_recovery
131 | ):
132 | rets = px.pct_change().iloc[1:]
133 |
134 | peak, valley, recovery = timeseries.get_max_drawdown(rets)
135 | # Need to use isnull because the result can be NaN, NaT, etc.
136 | (
137 | self.assertTrue(pd.isnull(peak))
138 | if expected_peak is None
139 | else self.assertEqual(peak, expected_peak)
140 | )
141 | (
142 | self.assertTrue(pd.isnull(valley))
143 | if expected_valley is None
144 | else self.assertEqual(valley, expected_valley)
145 | )
146 | (
147 | self.assertTrue(pd.isnull(recovery))
148 | if expected_recovery is None
149 | else self.assertEqual(recovery, expected_recovery)
150 | )
151 |
152 | @parameterized.expand(
153 | [
154 | (
155 | pd.Series(px_list_2, index=dt),
156 | pd.Timestamp("2000-1-4"),
157 | pd.Timestamp("2000-1-7"),
158 | None,
159 | None,
160 | ),
161 | (
162 | pd.Series(px_list_1, index=dt),
163 | pd.Timestamp("2000-1-4"),
164 | pd.Timestamp("2000-1-7"),
165 | pd.Timestamp("2000-1-9"),
166 | 4,
167 | ),
168 | ]
169 | )
170 | def test_gen_drawdown_table(
171 | self,
172 | px,
173 | expected_peak,
174 | expected_valley,
175 | expected_recovery,
176 | expected_duration,
177 | ):
178 | rets = px.pct_change().iloc[1:]
179 |
180 | drawdowns = timeseries.gen_drawdown_table(rets, top=1)
181 | (
182 | self.assertTrue(pd.isnull(drawdowns.loc[0, "Peak date"]))
183 | if expected_peak is None
184 | else self.assertEqual(drawdowns.loc[0, "Peak date"], expected_peak)
185 | )
186 | (
187 | self.assertTrue(pd.isnull(drawdowns.loc[0, "Valley date"]))
188 | if expected_valley is None
189 | else self.assertEqual(drawdowns.loc[0, "Valley date"], expected_valley)
190 | )
191 | (
192 | self.assertTrue(pd.isnull(drawdowns.loc[0, "Recovery date"]))
193 | if expected_recovery is None
194 | else self.assertEqual(drawdowns.loc[0, "Recovery date"], expected_recovery)
195 | )
196 | (
197 | self.assertTrue(pd.isnull(drawdowns.loc[0, "Duration"]))
198 | if expected_duration is None
199 | else self.assertEqual(drawdowns.loc[0, "Duration"], expected_duration)
200 | )
201 |
202 | def test_drawdown_overlaps(self):
203 | rand = np.random.RandomState(1337)
204 | n_samples = 252 * 5
205 | spy_returns = pd.Series(
206 | rand.standard_t(3.1, n_samples),
207 | pd.date_range("2005-01-02", periods=n_samples),
208 | )
209 | spy_drawdowns = timeseries.gen_drawdown_table(spy_returns, top=20).sort_values(
210 | by="Peak date"
211 | )
212 | # Compare the recovery date of each drawdown with the peak of the next
213 |         # The last pair may contain a NaT if the final drawdown never finished, so ignore it
214 | pairs = list(
215 | zip(
216 | spy_drawdowns["Recovery date"],
217 | spy_drawdowns["Peak date"].shift(-1),
218 | )
219 | )[:-1]
220 | self.assertGreater(len(pairs), 0)
221 | for recovery, peak in pairs:
222 | if not pd.isnull(recovery):
223 | self.assertLessEqual(recovery, peak)
224 |
225 | @parameterized.expand(
226 | [
227 | (
228 | pd.Series(px_list_1, index=dt),
229 | 1,
230 | [
231 | (
232 | pd.Timestamp("2000-01-03 00:00:00"),
233 | pd.Timestamp("2000-01-03 00:00:00"),
234 | pd.Timestamp("2000-01-03 00:00:00"),
235 | )
236 | ],
237 | )
238 | ]
239 | )
240 | def test_top_drawdowns(self, returns, top, expected):
241 | self.assertEqual(timeseries.get_top_drawdowns(returns, top=top), expected)
242 |
243 |
244 | class TestVariance(TestCase):
245 | @parameterized.expand([(1e7, 0.5, 1, 1, -10000000.0)])
246 | def test_var_cov_var_normal(self, P, c, mu, sigma, expected):
247 | self.assertEqual(timeseries.var_cov_var_normal(P, c, mu, sigma), expected)
248 |
249 |
250 | class TestNormalize(TestCase):
251 | dt = pd.date_range("2000-1-3", periods=8, freq="D")
252 | px_list = [1.0, 1.2, 1.0, 0.8, 0.7, 0.8, 0.8, 0.8]
253 |
254 | @parameterized.expand(
255 | [
256 | (
257 | pd.Series(np.array(px_list) * 100, index=dt),
258 | pd.Series(px_list, index=dt),
259 | )
260 | ]
261 | )
262 | def test_normalize(self, returns, expected):
263 | self.assertTrue(timeseries.normalize(returns).equals(expected))
264 |
265 |
266 | class TestStats(TestCase):
267 | simple_rets = pd.Series(
268 | [0.1] * 3 + [0] * 497, pd.date_range("2000-1-3", periods=500, freq="D")
269 | )
270 |
271 | simple_week_rets = pd.Series(
272 | [0.1] * 3 + [0] * 497,
273 | pd.date_range("2000-1-31", periods=500, freq="W"),
274 | )
275 |
276 | simple_month_rets = pd.Series(
277 | [0.1] * 3 + [0] * 497,
278 | pd.date_range("2000-1-31", periods=500, freq="M"),
279 | )
280 |
281 | simple_benchmark = pd.Series(
282 | [0.03] * 4 + [0] * 496,
283 | pd.date_range("2000-1-1", periods=500, freq="D"),
284 | )
285 | px_list = np.array([10, -10, 10]) / 100.0 # Ends in drawdown
286 | dt = pd.date_range("2000-1-3", periods=3, freq="D")
287 |
288 | px_list_2 = [1.0, 1.2, 1.0, 0.8, 0.7, 0.8, 0.8, 0.8]
289 | dt_2 = pd.date_range("2000-1-3", periods=8, freq="D")
290 |
291 | @parameterized.expand(
292 | [
293 | (
294 | simple_rets[:5],
295 | 2,
296 | [np.nan, np.inf, np.inf, 11.224972160321, np.nan],
297 | )
298 | ]
299 | )
300 |     @skipIf(pandas_one_point_three_or_less, "pandas<=1.3 returns np.inf not np.nan")
301 | def test_sharpe_2(self, returns, rolling_sharpe_window, expected):
302 | np.testing.assert_array_almost_equal(
303 | timeseries.rolling_sharpe(returns, rolling_sharpe_window).to_numpy(),
304 | np.asarray(expected),
305 | )
306 |
307 | @parameterized.expand([(simple_rets[:5], simple_benchmark, 2, 0)])
308 | def test_beta(self, returns, benchmark_rets, rolling_window, expected):
309 | actual = timeseries.rolling_beta(
310 | returns,
311 | benchmark_rets,
312 | rolling_window=rolling_window,
313 | ).values.tolist()[2]
314 |
315 | np.testing.assert_almost_equal(actual, expected)
316 |
317 |
318 | class TestCone(TestCase):
319 | def test_bootstrap_cone_against_linear_cone_normal_returns(self):
320 | random_seed = 100
321 | np.random.seed(random_seed)
322 | days_forward = 200
323 | cone_stdevs = (1.0, 1.5, 2.0)
324 | mu = 0.005
325 | sigma = 0.002
326 | rets = pd.Series(np.random.normal(mu, sigma, 10000))
327 |
328 | midline = np.cumprod(1 + (rets.mean() * np.ones(days_forward)))
329 | stdev = rets.std() * midline * np.sqrt(np.arange(days_forward) + 1)
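        # Linear cone: compound the mean return forward; the band widens with std * sqrt(days elapsed).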
330 |
331 | normal_cone = pd.DataFrame(columns=pd.Index([], dtype="float64"))
332 | for s in cone_stdevs:
333 | normal_cone[s] = midline + s * stdev
334 | normal_cone[-s] = midline - s * stdev
335 |
336 | bootstrap_cone = timeseries.forecast_cone_bootstrap(
337 | rets,
338 | days_forward,
339 | cone_stdevs,
340 | starting_value=1,
341 | random_seed=random_seed,
342 | num_samples=10000,
343 | )
344 |
345 | for col, vals in bootstrap_cone.items():
346 | expected = normal_cone[col].values
347 | assert_allclose(vals.values, expected, rtol=0.005)
348 |
349 |
350 | class TestBootstrap(TestCase):
351 | @parameterized.expand(
352 | [
353 | (0.0, 1.0, 1000),
354 | (1.0, 2.0, 500),
355 | (-1.0, 0.1, 10),
356 | ]
357 | )
358 | def test_calc_bootstrap(self, true_mean, true_sd, n):
359 | """Compare bootstrap distribution of the mean to sampling distribution
360 | of the mean.
361 |
362 | """
363 | np.random.seed(123)
364 | func = np.mean
365 | returns = pd.Series((np.random.randn(n) * true_sd) + true_mean)
366 |
367 | samples = timeseries.calc_bootstrap(func, returns, n_samples=10000)
368 |
369 | # Calculate statistics of sampling distribution of the mean
370 | mean_of_mean = np.mean(returns)
371 | sd_of_mean = np.std(returns) / np.sqrt(n)
372 |
373 | assert_almost_equal(
374 | np.mean(samples),
375 | mean_of_mean,
376 | 3,
377 |             "Mean of bootstrap does not match theoretical mean of "
378 | "sampling distribution",
379 | )
380 |
381 | assert_almost_equal(
382 | np.std(samples),
383 | sd_of_mean,
384 | 3,
385 |             "SD of bootstrap does not match theoretical SD of " "sampling distribution",
386 | )
387 |
388 |
389 | class TestGrossLev(TestCase):
390 | __location__ = os.path.realpath(
391 | os.path.join(os.getcwd(), os.path.dirname(__file__))
392 | )
393 |
394 | test_pos = to_utc(
395 | pd.read_csv(
396 | gzip.open(__location__ + "/test_data/test_pos.csv.gz"),
397 | index_col=0,
398 | parse_dates=True,
399 | )
400 | )
401 | test_gross_lev = pd.read_csv(
402 | gzip.open(__location__ + "/test_data/test_gross_lev.csv.gz"),
403 | index_col=0,
404 | parse_dates=True,
405 | )
406 | test_gross_lev = to_series(to_utc(test_gross_lev))
407 |
408 | def test_gross_lev_calculation(self):
409 | assert_series_equal(
410 | timeseries.gross_lev(self.test_pos)["2004-02-01":],
411 | self.test_gross_lev["2004-02-01":],
412 | check_names=False,
413 | )
414 |
--------------------------------------------------------------------------------
/tests/test_txn.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | import pandas as pd
4 |
5 | from pandas.testing import assert_series_equal
6 |
7 | from pyfolio.txn import get_turnover, adjust_returns_for_slippage
8 |
9 |
10 | class TransactionsTestCase(TestCase):
11 | def test_get_turnover(self):
12 | """
13 |         Tests turnover using a 20-day period.
14 | 
15 |         With no transactions, the turnover should be 0.
16 | 
17 |         With $20 traded each day against an average AGB of $25,
18 |         the daily turnover rate should be 0.8.
19 | """
20 | dates = pd.date_range(start="2015-01-01", freq="D", periods=20)
21 |
22 | # In this test, there is one sid (0) and a cash column
23 | positions = pd.DataFrame(
24 | [[10.0, 10.0]] * len(dates), columns=[0, "cash"], index=dates
25 | )
26 |
27 | # Set every other non-cash position to 40
28 | positions[0][::2] = 40
29 |
30 | transactions = pd.DataFrame(
31 | data=[], columns=["sid", "amount", "price", "symbol"], index=dates
32 | )
33 |
34 | # Test with no transactions
35 | expected = pd.Series([0.0] * len(dates), index=dates)
36 | result = get_turnover(positions, transactions).asfreq("D")
37 | assert_series_equal(result, expected)
38 |
39 | transactions = pd.DataFrame(
40 | data=[[1, 1, 10, 0]] * len(dates) + [[2, -1, 10, 0]] * len(dates),
41 | columns=["sid", "amount", "price", "symbol"],
42 | index=dates.append(dates),
43 | ).sort_index()
44 |
45 | # Turnover is more on day 1, because the day 0 AGB is set to zero
46 | # in get_turnover. On most days, we get 0.8 because we have 20
47 | # transacted and mean(10, 40) = 25, so 20/25.
48 | expected = pd.Series([1.0] + [0.8] * (len(dates) - 1), index=dates)
49 | result = get_turnover(positions, transactions).asfreq("D")
50 |
51 | assert_series_equal(result, expected)
52 |
53 | # Test with denominator = 'portfolio_value'
54 | result = get_turnover(
55 | positions, transactions, denominator="portfolio_value"
56 | ).asfreq("D")
57 |
58 | # Our portfolio value alternates between $20 and $50 so turnover
59 | # should alternate between 20/20 = 1.0 and 20/50 = 0.4.
60 | expected = pd.Series([0.4, 1.0] * (int((len(dates) - 1) / 2) + 1), index=dates)
61 |
62 | assert_series_equal(result, expected)
63 |
64 | def test_adjust_returns_for_slippage(self):
65 | dates = pd.date_range(start="2015-01-01", freq="D", periods=20)
66 |
67 | positions = pd.DataFrame(
68 | [[0.0, 10.0]] * len(dates), columns=[0, "cash"], index=dates
69 | )
70 |
71 | # 100% total, 50% average daily turnover
72 | transactions = pd.DataFrame(
73 | data=[[1, 1, 10, "A"]] * len(dates),
74 | columns=["sid", "amount", "price", "symbol"],
75 | index=dates,
76 | )
77 |
78 | returns = pd.Series([0.05] * len(dates), index=dates)
79 |         # 10 bps = 0.1% slippage per dollar traded
80 | slippage_bps = 10
81 | expected = pd.Series([0.049] * len(dates), index=dates)
82 |
83 | result = adjust_returns_for_slippage(
84 | returns, positions, transactions, slippage_bps
85 | )
86 |
87 | assert_series_equal(result, expected)
88 |
--------------------------------------------------------------------------------