├── .gitattributes ├── .gitignore ├── LICENSE ├── MANIFEST.in ├── README.md ├── bayesalpha ├── __init__.py ├── _version.py ├── author_model.py ├── base.py ├── dists.py ├── load.py ├── plotting.py ├── returns_model.py └── serialize.py ├── docs └── bayesalpha.gif ├── examples ├── author_model_example.ipynb └── factor_gains.ipynb ├── jenkins.sh ├── setup.cfg ├── setup.py ├── tests ├── test_cov.py └── test_data │ ├── author_model_test_returns.csv │ └── author_model_test_sharpes.csv ├── tox.ini └── versioneer.py /.gitattributes: -------------------------------------------------------------------------------- 1 | bayesalpha/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### JetBrains template 3 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm 4 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 5 | 6 | # User-specific stuff: 7 | .idea/**/workspace.xml 8 | .idea/**/tasks.xml 9 | .idea/dictionaries 10 | 11 | # Sensitive or high-churn files: 12 | .idea/**/dataSources/ 13 | .idea/**/dataSources.ids 14 | .idea/**/dataSources.xml 15 | .idea/**/dataSources.local.xml 16 | .idea/**/sqlDataSources.xml 17 | .idea/**/dynamic.xml 18 | .idea/**/uiDesigner.xml 19 | 20 | # Gradle: 21 | .idea/**/gradle.xml 22 | .idea/**/libraries 23 | 24 | # CMake 25 | cmake-build-debug/ 26 | 27 | # Mongo Explorer plugin: 28 | .idea/**/mongoSettings.xml 29 | 30 | ## File-based project format: 31 | *.iws 32 | 33 | ## Plugin-specific files: 34 | 35 | # IntelliJ 36 | out/ 37 | 38 | # mpeltonen/sbt-idea plugin 39 | .idea_modules/ 40 | 41 | # JIRA plugin 42 | atlassian-ide-plugin.xml 43 | 44 | # Cursive Clojure plugin 45 | .idea/replstate.xml 46 | 47 | # Crashlytics plugin (for Android Studio and IntelliJ) 48 | 
com_crashlytics_export_strings.xml 49 | crashlytics.properties 50 | crashlytics-build.properties 51 | fabric.properties 52 | ### Python template 53 | # Byte-compiled / optimized / DLL files 54 | __pycache__/ 55 | *.py[cod] 56 | *$py.class 57 | 58 | # C extensions 59 | *.so 60 | 61 | # Distribution / packaging 62 | .Python 63 | build/ 64 | develop-eggs/ 65 | dist/ 66 | downloads/ 67 | eggs/ 68 | .eggs/ 69 | lib/ 70 | lib64/ 71 | parts/ 72 | sdist/ 73 | var/ 74 | wheels/ 75 | *.egg-info/ 76 | .installed.cfg 77 | *.egg 78 | MANIFEST 79 | 80 | # PyInstaller 81 | # Usually these files are written by a python script from a template 82 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 83 | *.manifest 84 | *.spec 85 | 86 | # Installer logs 87 | pip-log.txt 88 | pip-delete-this-directory.txt 89 | 90 | # Unit test / coverage reports 91 | htmlcov/ 92 | .tox/ 93 | .coverage 94 | .coverage.* 95 | .cache 96 | nosetests.xml 97 | coverage.xml 98 | *.cover 99 | .hypothesis/ 100 | 101 | # Translations 102 | *.mo 103 | *.pot 104 | 105 | # Django stuff: 106 | *.log 107 | local_settings.py 108 | 109 | # Flask stuff: 110 | instance/ 111 | .webassets-cache 112 | 113 | # Scrapy stuff: 114 | .scrapy 115 | 116 | # Sphinx documentation 117 | docs/_build/ 118 | 119 | # PyBuilder 120 | target/ 121 | 122 | # Jupyter Notebook 123 | .ipynb_checkpoints 124 | 125 | # pyenv 126 | .python-version 127 | 128 | # celery beat schedule file 129 | celerybeat-schedule 130 | 131 | # SageMath parsed files 132 | *.sage.py 133 | 134 | # Environments 135 | .env 136 | .venv 137 | env/ 138 | venv/ 139 | ENV/ 140 | env.bak/ 141 | venv.bak/ 142 | 143 | # Spyder project settings 144 | .spyderproject 145 | .spyproject 146 | 147 | # Rope project settings 148 | .ropeproject 149 | 150 | # mkdocs documentation 151 | /site 152 | 153 | # mypy 154 | .mypy_cache/ 155 | ### VirtualEnv template 156 | # Virtualenv 157 | # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ 158 | [Bb]in 159 | 
[Ii]nclude 160 | [Ll]ib 161 | [Ll]ib64 162 | [Ll]ocal 163 | [Ss]cripts 164 | pyvenv.cfg 165 | pip-selfcheck.json 166 | 167 | 168 | .DS_Store 169 | .pytest_cache 170 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | ======= 2 | License 3 | ======= 4 | 5 | BayesAlpha is distributed under the Apache License, Version 2.0 6 | 7 | Copyright (c) 2018-2020 Quantopian Inc. 8 | All rights reserved. 9 | 10 | Apache License 11 | Version 2.0, January 2004 12 | http://www.apache.org/licenses/ 13 | 14 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 15 | 16 | 1. Definitions. 17 | 18 | "License" shall mean the terms and conditions for use, reproduction, 19 | and distribution as defined by Sections 1 through 9 of this document. 20 | 21 | "Licensor" shall mean the copyright owner or entity authorized by 22 | the copyright owner that is granting the License. 23 | 24 | "Legal Entity" shall mean the union of the acting entity and all 25 | other entities that control, are controlled by, or are under common 26 | control with that entity. For the purposes of this definition, 27 | "control" means (i) the power, direct or indirect, to cause the 28 | direction or management of such entity, whether by contract or 29 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 30 | outstanding shares, or (iii) beneficial ownership of such entity. 31 | 32 | "You" (or "Your") shall mean an individual or Legal Entity 33 | exercising permissions granted by this License. 34 | 35 | "Source" form shall mean the preferred form for making modifications, 36 | including but not limited to software source code, documentation 37 | source, and configuration files. 
38 | 39 | "Object" form shall mean any form resulting from mechanical 40 | transformation or translation of a Source form, including but 41 | not limited to compiled object code, generated documentation, 42 | and conversions to other media types. 43 | 44 | "Work" shall mean the work of authorship, whether in Source or 45 | Object form, made available under the License, as indicated by a 46 | copyright notice that is included in or attached to the work 47 | (an example is provided in the Appendix below). 48 | 49 | "Derivative Works" shall mean any work, whether in Source or Object 50 | form, that is based on (or derived from) the Work and for which the 51 | editorial revisions, annotations, elaborations, or other modifications 52 | represent, as a whole, an original work of authorship. For the purposes 53 | of this License, Derivative Works shall not include works that remain 54 | separable from, or merely link (or bind by name) to the interfaces of, 55 | the Work and Derivative Works thereof. 56 | 57 | "Contribution" shall mean any work of authorship, including 58 | the original version of the Work and any modifications or additions 59 | to that Work or Derivative Works thereof, that is intentionally 60 | submitted to Licensor for inclusion in the Work by the copyright owner 61 | or by an individual or Legal Entity authorized to submit on behalf of 62 | the copyright owner. For the purposes of this definition, "submitted" 63 | means any form of electronic, verbal, or written communication sent 64 | to the Licensor or its representatives, including but not limited to 65 | communication on electronic mailing lists, source code control systems, 66 | and issue tracking systems that are managed by, or on behalf of, the 67 | Licensor for the purpose of discussing and improving the Work, but 68 | excluding communication that is conspicuously marked or otherwise 69 | designated in writing by the copyright owner as "Not a Contribution." 
70 | 71 | "Contributor" shall mean Licensor and any individual or Legal Entity 72 | on behalf of whom a Contribution has been received by Licensor and 73 | subsequently incorporated within the Work. 74 | 75 | 2. Grant of Copyright License. Subject to the terms and conditions of 76 | this License, each Contributor hereby grants to You a perpetual, 77 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 78 | copyright license to reproduce, prepare Derivative Works of, 79 | publicly display, publicly perform, sublicense, and distribute the 80 | Work and such Derivative Works in Source or Object form. 81 | 82 | 3. Grant of Patent License. Subject to the terms and conditions of 83 | this License, each Contributor hereby grants to You a perpetual, 84 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 85 | (except as stated in this section) patent license to make, have made, 86 | use, offer to sell, sell, import, and otherwise transfer the Work, 87 | where such license applies only to those patent claims licensable 88 | by such Contributor that are necessarily infringed by their 89 | Contribution(s) alone or by combination of their Contribution(s) 90 | with the Work to which such Contribution(s) was submitted. If You 91 | institute patent litigation against any entity (including a 92 | cross-claim or counterclaim in a lawsuit) alleging that the Work 93 | or a Contribution incorporated within the Work constitutes direct 94 | or contributory patent infringement, then any patent licenses 95 | granted to You under this License for that Work shall terminate 96 | as of the date such litigation is filed. 97 | 98 | 4. Redistribution. 
You may reproduce and distribute copies of the 99 | Work or Derivative Works thereof in any medium, with or without 100 | modifications, and in Source or Object form, provided that You 101 | meet the following conditions: 102 | 103 | (a) You must give any other recipients of the Work or 104 | Derivative Works a copy of this License; and 105 | 106 | (b) You must cause any modified files to carry prominent notices 107 | stating that You changed the files; and 108 | 109 | (c) You must retain, in the Source form of any Derivative Works 110 | that You distribute, all copyright, patent, trademark, and 111 | attribution notices from the Source form of the Work, 112 | excluding those notices that do not pertain to any part of 113 | the Derivative Works; and 114 | 115 | (d) If the Work includes a "NOTICE" text file as part of its 116 | distribution, then any Derivative Works that You distribute must 117 | include a readable copy of the attribution notices contained 118 | within such NOTICE file, excluding those notices that do not 119 | pertain to any part of the Derivative Works, in at least one 120 | of the following places: within a NOTICE text file distributed 121 | as part of the Derivative Works; within the Source form or 122 | documentation, if provided along with the Derivative Works; or, 123 | within a display generated by the Derivative Works, if and 124 | wherever such third-party notices normally appear. The contents 125 | of the NOTICE file are for informational purposes only and 126 | do not modify the License. You may add Your own attribution 127 | notices within Derivative Works that You distribute, alongside 128 | or as an addendum to the NOTICE text from the Work, provided 129 | that such additional attribution notices cannot be construed 130 | as modifying the License. 
131 | 132 | You may add Your own copyright statement to Your modifications and 133 | may provide additional or different license terms and conditions 134 | for use, reproduction, or distribution of Your modifications, or 135 | for any such Derivative Works as a whole, provided Your use, 136 | reproduction, and distribution of the Work otherwise complies with 137 | the conditions stated in this License. 138 | 139 | 5. Submission of Contributions. Unless You explicitly state otherwise, 140 | any Contribution intentionally submitted for inclusion in the Work 141 | by You to the Licensor shall be under the terms and conditions of 142 | this License, without any additional terms or conditions. 143 | Notwithstanding the above, nothing herein shall supersede or modify 144 | the terms of any separate license agreement you may have executed 145 | with Licensor regarding such Contributions. 146 | 147 | 6. Trademarks. This License does not grant permission to use the trade 148 | names, trademarks, service marks, or product names of the Licensor, 149 | except as required for reasonable and customary use in describing the 150 | origin of the Work and reproducing the content of the NOTICE file. 151 | 152 | 7. Disclaimer of Warranty. Unless required by applicable law or 153 | agreed to in writing, Licensor provides the Work (and each 154 | Contributor provides its Contributions) on an "AS IS" BASIS, 155 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 156 | implied, including, without limitation, any warranties or conditions 157 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 158 | PARTICULAR PURPOSE. You are solely responsible for determining the 159 | appropriateness of using or redistributing the Work and assume any 160 | risks associated with Your exercise of permissions under this License. 161 | 162 | 8. Limitation of Liability. 
In no event and under no legal theory, 163 | whether in tort (including negligence), contract, or otherwise, 164 | unless required by applicable law (such as deliberate and grossly 165 | negligent acts) or agreed to in writing, shall any Contributor be 166 | liable to You for damages, including any direct, indirect, special, 167 | incidental, or consequential damages of any character arising as a 168 | result of this License or out of the use or inability to use the 169 | Work (including but not limited to damages for loss of goodwill, 170 | work stoppage, computer failure or malfunction, or any and all 171 | other commercial damages or losses), even if such Contributor 172 | has been advised of the possibility of such damages. 173 | 174 | 9. Accepting Warranty or Additional Liability. While redistributing 175 | the Work or Derivative Works thereof, You may choose to offer, 176 | and charge a fee for, acceptance of support, warranty, indemnity, 177 | or other liability obligations and/or rights consistent with this 178 | License. However, in accepting such obligations, You may act only 179 | on Your own behalf and on Your sole responsibility, not on behalf 180 | of any other Contributor, and only if You agree to indemnify, 181 | defend, and hold each Contributor harmless for any liability 182 | incurred by, or claims asserted against, such Contributor by reason 183 | of your accepting any such warranty or additional liability. 184 | 185 | END OF TERMS AND CONDITIONS 186 | 187 | APPENDIX: How to apply the Apache License to your work. 188 | 189 | To apply the Apache License to your work, attach the following 190 | boilerplate notice, with the fields enclosed by brackets "[]" 191 | replaced with your own identifying information. (Don't include 192 | the brackets!) The text should be enclosed in the appropriate 193 | comment syntax for the file format. 
We also recommend that a 194 | file or class name and description of purpose be included on the 195 | same "printed page" as the copyright notice for easier 196 | identification within third-party archives. 197 | 198 | Copyright 2020 The PyMC Developers 199 | 200 | Licensed under the Apache License, Version 2.0 (the "License"); 201 | you may not use this file except in compliance with the License. 202 | You may obtain a copy of the License at 203 | 204 | http://www.apache.org/licenses/LICENSE-2.0 205 | 206 | Unless required by applicable law or agreed to in writing, software 207 | distributed under the License is distributed on an "AS IS" BASIS, 208 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 209 | See the License for the specific language governing permissions and 210 | limitations under the License. 211 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include versioneer.py 2 | include bayesalpha/_version.py 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![bayesalpha](docs/bayesalpha.gif) 2 | 3 | # BayesAlpha 4 | 5 | Bayesian models for alpha estimation. 6 | 7 | This project is no longer actively developed but pull requests will be evaluated. 8 | 9 | ## Models 10 | 11 | There are currently two models: 12 | 13 | - the **returns model**, which ingests a returns-stream. It computes (among 14 | other things) a forwards-looking gains parameter (which is basically a 15 | Sharpe ratio). Of interest is `P(gains > 0)`; that is, the probability that 16 | the algorithm will make money. Originally authored by Adrian Seyboldt. 17 | 18 | - the **author model**, which ingests the in-sample Sharpe ratios of user-run 19 | backtests. 
It computes (among other things) average Sharpe delivered at a 20 | population-, author- and algorithm-level. Originally authored by George Ho. 21 | 22 | ## Installation and Usage 23 | 24 | To install: 25 | 26 | ```bash 27 | git clone git@github.com:quantopian/bayesalpha.git 28 | cd bayesalpha 29 | pip install -e . 30 | ``` 31 | 32 | To use (this snippet should demonstrate 95% of all use cases): 33 | 34 | ```python 35 | import bayesalpha as ba 36 | 37 | # Fit returns model 38 | trace = ba.fit_returns_population(data, ...) 39 | trace = ba.fit_returns_single(data, ...) 40 | 41 | # Fit author model 42 | trace = ba.fit_authors(data, ...) 43 | 44 | # Save to netcdf file 45 | trace.save('foo.nc') 46 | del trace 47 | 48 | # Load from netcdf file 49 | trace = ba.load('foo.nc') 50 | ``` 51 | -------------------------------------------------------------------------------- /bayesalpha/__init__.py: -------------------------------------------------------------------------------- 1 | from .load import load 2 | from .returns_model import fit_returns_single, fit_returns_population 3 | from .author_model import fit_authors 4 | 5 | from ._version import get_versions 6 | __version__ = get_versions()['version'] 7 | del get_versions 8 | -------------------------------------------------------------------------------- /bayesalpha/_version.py: -------------------------------------------------------------------------------- 1 | 2 | # This file helps to compute a version number in source trees obtained from 3 | # git-archive tarball (such as those provided by githubs download-from-tag 4 | # feature). Distribution tarballs (built by setup.py sdist) and build 5 | # directories (produced by setup.py build) will contain a much shorter file 6 | # that just contains the computed version number. 7 | 8 | # This file is released into the public domain. 
Generated by 9 | # versioneer-0.18 (https://github.com/warner/python-versioneer) 10 | 11 | """Git implementation of _version.py.""" 12 | 13 | import errno 14 | import os 15 | import re 16 | import subprocess 17 | import sys 18 | 19 | 20 | def get_keywords(): 21 | """Get the keywords needed to look up the version information.""" 22 | # these strings will be replaced by git during git-archive. 23 | # setup.py/versioneer.py will grep for the variable names, so they must 24 | # each be defined on a line of their own. _version.py will just call 25 | # get_keywords(). 26 | git_refnames = " (HEAD -> master)" 27 | git_full = "676f4f194ad20211fd040d3b0c6e82969aafb87e" 28 | git_date = "2020-06-29 20:07:27 +0200" 29 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 30 | return keywords 31 | 32 | 33 | class VersioneerConfig: 34 | """Container for Versioneer configuration parameters.""" 35 | 36 | 37 | def get_config(): 38 | """Create, populate and return the VersioneerConfig() object.""" 39 | # these strings are filled in when 'setup.py versioneer' creates 40 | # _version.py 41 | cfg = VersioneerConfig() 42 | cfg.VCS = "git" 43 | cfg.style = "pep440" 44 | cfg.tag_prefix = "" 45 | cfg.parentdir_prefix = "bayesalpha-" 46 | cfg.versionfile_source = "bayesalpha/_version.py" 47 | cfg.verbose = False 48 | return cfg 49 | 50 | 51 | class NotThisMethod(Exception): 52 | """Exception raised if a method is not valid for the current scenario.""" 53 | 54 | 55 | LONG_VERSION_PY = {} 56 | HANDLERS = {} 57 | 58 | 59 | def register_vcs_handler(vcs, method): # decorator 60 | """Decorator to mark a method as the handler for a particular VCS.""" 61 | def decorate(f): 62 | """Store f in HANDLERS[vcs][method].""" 63 | if vcs not in HANDLERS: 64 | HANDLERS[vcs] = {} 65 | HANDLERS[vcs][method] = f 66 | return f 67 | return decorate 68 | 69 | 70 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 71 | env=None): 72 | """Call the given command(s).""" 
73 | assert isinstance(commands, list) 74 | p = None 75 | for c in commands: 76 | try: 77 | dispcmd = str([c] + args) 78 | # remember shell=False, so use git.cmd on windows, not just git 79 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 80 | stdout=subprocess.PIPE, 81 | stderr=(subprocess.PIPE if hide_stderr 82 | else None)) 83 | break 84 | except EnvironmentError: 85 | e = sys.exc_info()[1] 86 | if e.errno == errno.ENOENT: 87 | continue 88 | if verbose: 89 | print("unable to run %s" % dispcmd) 90 | print(e) 91 | return None, None 92 | else: 93 | if verbose: 94 | print("unable to find command, tried %s" % (commands,)) 95 | return None, None 96 | stdout = p.communicate()[0].strip() 97 | if sys.version_info[0] >= 3: 98 | stdout = stdout.decode() 99 | if p.returncode != 0: 100 | if verbose: 101 | print("unable to run %s (error)" % dispcmd) 102 | print("stdout was %s" % stdout) 103 | return None, p.returncode 104 | return stdout, p.returncode 105 | 106 | 107 | def versions_from_parentdir(parentdir_prefix, root, verbose): 108 | """Try to determine the version from the parent directory name. 109 | 110 | Source tarballs conventionally unpack into a directory that includes both 111 | the project name and a version string. 
We will also support searching up 112 | two directory levels for an appropriately named parent directory 113 | """ 114 | rootdirs = [] 115 | 116 | for i in range(3): 117 | dirname = os.path.basename(root) 118 | if dirname.startswith(parentdir_prefix): 119 | return {"version": dirname[len(parentdir_prefix):], 120 | "full-revisionid": None, 121 | "dirty": False, "error": None, "date": None} 122 | else: 123 | rootdirs.append(root) 124 | root = os.path.dirname(root) # up a level 125 | 126 | if verbose: 127 | print("Tried directories %s but none started with prefix %s" % 128 | (str(rootdirs), parentdir_prefix)) 129 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 130 | 131 | 132 | @register_vcs_handler("git", "get_keywords") 133 | def git_get_keywords(versionfile_abs): 134 | """Extract version information from the given file.""" 135 | # the code embedded in _version.py can just fetch the value of these 136 | # keywords. When used from setup.py, we don't want to import _version.py, 137 | # so we do it with a regexp instead. This function is not used from 138 | # _version.py. 
139 | keywords = {} 140 | try: 141 | f = open(versionfile_abs, "r") 142 | for line in f.readlines(): 143 | if line.strip().startswith("git_refnames ="): 144 | mo = re.search(r'=\s*"(.*)"', line) 145 | if mo: 146 | keywords["refnames"] = mo.group(1) 147 | if line.strip().startswith("git_full ="): 148 | mo = re.search(r'=\s*"(.*)"', line) 149 | if mo: 150 | keywords["full"] = mo.group(1) 151 | if line.strip().startswith("git_date ="): 152 | mo = re.search(r'=\s*"(.*)"', line) 153 | if mo: 154 | keywords["date"] = mo.group(1) 155 | f.close() 156 | except EnvironmentError: 157 | pass 158 | return keywords 159 | 160 | 161 | @register_vcs_handler("git", "keywords") 162 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 163 | """Get version information from git keywords.""" 164 | if not keywords: 165 | raise NotThisMethod("no keywords at all, weird") 166 | date = keywords.get("date") 167 | if date is not None: 168 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 169 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 170 | # -like" string, which we must then edit to make compliant), because 171 | # it's been around since git-1.5.3, and it's too difficult to 172 | # discover which version we're using, or to work around using an 173 | # older one. 174 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 175 | refnames = keywords["refnames"].strip() 176 | if refnames.startswith("$Format"): 177 | if verbose: 178 | print("keywords are unexpanded, not using") 179 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 180 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 181 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 182 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 183 | TAG = "tag: " 184 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 185 | if not tags: 186 | # Either we're using git < 1.8.3, or there really are no tags. 
We use 187 | # a heuristic: assume all version tags have a digit. The old git %d 188 | # expansion behaves like git log --decorate=short and strips out the 189 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 190 | # between branches and tags. By ignoring refnames without digits, we 191 | # filter out many common branch names like "release" and 192 | # "stabilization", as well as "HEAD" and "master". 193 | tags = set([r for r in refs if re.search(r'\d', r)]) 194 | if verbose: 195 | print("discarding '%s', no digits" % ",".join(refs - tags)) 196 | if verbose: 197 | print("likely tags: %s" % ",".join(sorted(tags))) 198 | for ref in sorted(tags): 199 | # sorting will prefer e.g. "2.0" over "2.0rc1" 200 | if ref.startswith(tag_prefix): 201 | r = ref[len(tag_prefix):] 202 | if verbose: 203 | print("picking %s" % r) 204 | return {"version": r, 205 | "full-revisionid": keywords["full"].strip(), 206 | "dirty": False, "error": None, 207 | "date": date} 208 | # no suitable tags, so version is "0+unknown", but full hex is still there 209 | if verbose: 210 | print("no suitable tags, using unknown + full revision id") 211 | return {"version": "0+unknown", 212 | "full-revisionid": keywords["full"].strip(), 213 | "dirty": False, "error": "no suitable tags", "date": None} 214 | 215 | 216 | @register_vcs_handler("git", "pieces_from_vcs") 217 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 218 | """Get version from 'git describe' in the root of the source tree. 219 | 220 | This only gets called if the git-archive 'subst' keywords were *not* 221 | expanded, and _version.py hasn't already been rewritten with a short 222 | version string, meaning we're inside a checked out source tree. 
223 | """ 224 | GITS = ["git"] 225 | if sys.platform == "win32": 226 | GITS = ["git.cmd", "git.exe"] 227 | 228 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, 229 | hide_stderr=True) 230 | if rc != 0: 231 | if verbose: 232 | print("Directory %s not under git control" % root) 233 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 234 | 235 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 236 | # if there isn't one, this yields HEX[-dirty] (no NUM) 237 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", 238 | "--always", "--long", 239 | "--match", "%s*" % tag_prefix], 240 | cwd=root) 241 | # --long was added in git-1.5.5 242 | if describe_out is None: 243 | raise NotThisMethod("'git describe' failed") 244 | describe_out = describe_out.strip() 245 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 246 | if full_out is None: 247 | raise NotThisMethod("'git rev-parse' failed") 248 | full_out = full_out.strip() 249 | 250 | pieces = {} 251 | pieces["long"] = full_out 252 | pieces["short"] = full_out[:7] # maybe improved later 253 | pieces["error"] = None 254 | 255 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 256 | # TAG might have hyphens. 257 | git_describe = describe_out 258 | 259 | # look for -dirty suffix 260 | dirty = git_describe.endswith("-dirty") 261 | pieces["dirty"] = dirty 262 | if dirty: 263 | git_describe = git_describe[:git_describe.rindex("-dirty")] 264 | 265 | # now we have TAG-NUM-gHEX or HEX 266 | 267 | if "-" in git_describe: 268 | # TAG-NUM-gHEX 269 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 270 | if not mo: 271 | # unparseable. Maybe git-describe is misbehaving? 
272 | pieces["error"] = ("unable to parse git-describe output: '%s'" 273 | % describe_out) 274 | return pieces 275 | 276 | # tag 277 | full_tag = mo.group(1) 278 | if not full_tag.startswith(tag_prefix): 279 | if verbose: 280 | fmt = "tag '%s' doesn't start with prefix '%s'" 281 | print(fmt % (full_tag, tag_prefix)) 282 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 283 | % (full_tag, tag_prefix)) 284 | return pieces 285 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 286 | 287 | # distance: number of commits since tag 288 | pieces["distance"] = int(mo.group(2)) 289 | 290 | # commit: short hex revision ID 291 | pieces["short"] = mo.group(3) 292 | 293 | else: 294 | # HEX: no tags 295 | pieces["closest-tag"] = None 296 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], 297 | cwd=root) 298 | pieces["distance"] = int(count_out) # total number of commits 299 | 300 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 301 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], 302 | cwd=root)[0].strip() 303 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 304 | 305 | return pieces 306 | 307 | 308 | def plus_or_dot(pieces): 309 | """Return a + if we don't already have one, else return a .""" 310 | if "+" in pieces.get("closest-tag", ""): 311 | return "." 312 | return "+" 313 | 314 | 315 | def render_pep440(pieces): 316 | """Build up version string, with post-release "local version identifier". 317 | 318 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 319 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 320 | 321 | Exceptions: 322 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 323 | """ 324 | if pieces["closest-tag"]: 325 | rendered = pieces["closest-tag"] 326 | if pieces["distance"] or pieces["dirty"]: 327 | rendered += plus_or_dot(pieces) 328 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 329 | if pieces["dirty"]: 330 | rendered += ".dirty" 331 | else: 332 | # exception #1 333 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 334 | pieces["short"]) 335 | if pieces["dirty"]: 336 | rendered += ".dirty" 337 | return rendered 338 | 339 | 340 | def render_pep440_pre(pieces): 341 | """TAG[.post.devDISTANCE] -- No -dirty. 342 | 343 | Exceptions: 344 | 1: no tags. 0.post.devDISTANCE 345 | """ 346 | if pieces["closest-tag"]: 347 | rendered = pieces["closest-tag"] 348 | if pieces["distance"]: 349 | rendered += ".post.dev%d" % pieces["distance"] 350 | else: 351 | # exception #1 352 | rendered = "0.post.dev%d" % pieces["distance"] 353 | return rendered 354 | 355 | 356 | def render_pep440_post(pieces): 357 | """TAG[.postDISTANCE[.dev0]+gHEX] . 358 | 359 | The ".dev0" means dirty. Note that .dev0 sorts backwards 360 | (a dirty tree will appear "older" than the corresponding clean one), 361 | but you shouldn't be releasing software with -dirty anyways. 362 | 363 | Exceptions: 364 | 1: no tags. 0.postDISTANCE[.dev0] 365 | """ 366 | if pieces["closest-tag"]: 367 | rendered = pieces["closest-tag"] 368 | if pieces["distance"] or pieces["dirty"]: 369 | rendered += ".post%d" % pieces["distance"] 370 | if pieces["dirty"]: 371 | rendered += ".dev0" 372 | rendered += plus_or_dot(pieces) 373 | rendered += "g%s" % pieces["short"] 374 | else: 375 | # exception #1 376 | rendered = "0.post%d" % pieces["distance"] 377 | if pieces["dirty"]: 378 | rendered += ".dev0" 379 | rendered += "+g%s" % pieces["short"] 380 | return rendered 381 | 382 | 383 | def render_pep440_old(pieces): 384 | """TAG[.postDISTANCE[.dev0]] . 385 | 386 | The ".dev0" means dirty. 387 | 388 | Eexceptions: 389 | 1: no tags. 
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag and not (pieces["distance"] or pieces["dirty"]):
        # Exact clean tag: just the tag itself.
        return tag
    base = tag if tag else "0"
    version = base + ".post%d" % pieces["distance"]
    if pieces["dirty"]:
        version += ".dev0"
    return version


def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"]:
            version = "%s-%d-g%s" % (version, pieces["distance"],
                                     pieces["short"])
    else:
        # Exception #1: no tag reachable, fall back to the bare hash.
        version = pieces["short"]
    return version + "-dirty" if pieces["dirty"] else version
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        version = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # Exception #1: no tag reachable, fall back to the bare hash.
        version = pieces["short"]
    if pieces["dirty"]:
        version += "-dirty"
    return version


def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # An earlier extraction step failed; surface the error verbatim.
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    def _done(version):
        # Shared success envelope for every style.
        return {"version": version, "full-revisionid": pieces["long"],
                "dirty": pieces["dirty"], "error": None,
                "date": pieces.get("date")}

    if style == "pep440":
        return _done(render_pep440(pieces))
    if style == "pep440-pre":
        return _done(render_pep440_pre(pieces))
    if style == "pep440-post":
        return _done(render_pep440_post(pieces))
    if style == "pep440-old":
        return _done(render_pep440_old(pieces))
    if style == "git-describe":
        return _done(render_git_describe(pieces))
    if style == "git-describe-long":
        return _done(render_git_describe_long(pieces))
    raise ValueError("unknown style '%s'" % style)


def get_versions():
    """Get version information or return default if unable to do so."""
    # This file lives at ROOT/VERSIONFILE_SOURCE.  When __file__ is
    # available we can walk back up from here to the source root.  Some
    # py2exe/bbfreeze/non-CPython implementations don't provide __file__,
    # in which case only the expanded keywords can be used.
    cfg = get_config()
    verbose = cfg.verbose

    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the
        # source tree (where .git might live) to this file.  Strip one
        # path component per segment to recover the root.
        for _ in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}

    try:
        return render(git_pieces_from_vcs(cfg.tag_prefix, root, verbose),
                      cfg.style)
    except NotThisMethod:
        pass

    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
""" 2 | 3 | import random 4 | import warnings 5 | import json 6 | from datetime import datetime 7 | 8 | import numpy as np 9 | import pandas as pd 10 | from sklearn.preprocessing import LabelEncoder 11 | from sklearn.covariance import LedoitWolf 12 | import pymc3 as pm 13 | import theano.tensor as tt 14 | import xarray as xr 15 | from .serialize import to_xarray 16 | from ._version import get_versions 17 | from .base import BayesAlphaResult 18 | 19 | AUTHOR_MODEL_TYPE = 'author-model' 20 | APPROX_BDAYS_PER_YEAR = 252 21 | 22 | 23 | class AuthorModelBuilder(object): 24 | """ Class to build the author model. """ 25 | 26 | def __init__(self, sharpes, returns): 27 | """ 28 | Initialize AuthorModelBuilder object. 29 | 30 | Parameters 31 | ---------- 32 | sharpes : pd.DataFrame 33 | Long-format DataFrame of in-sample Sharpe ratios (from user-run 34 | backtests), indexed by user, algorithm and code ID. 35 | Note that currently, backtests are deduplicated based on code id. 36 | See fit_authors for more information. 37 | """ 38 | self.num_authors = sharpes.meta_user_id.nunique() 39 | self.num_algos = sharpes.meta_algorithm_id.nunique() 40 | # For num_backtests, nunique() and count() should be the same 41 | self.num_backtests = sharpes.meta_code_id.nunique() 42 | 43 | # Which algos correspond to which authors? 44 | df = (sharpes.loc[:, ['meta_user_id', 'meta_algorithm_id']] 45 | .drop_duplicates(subset='meta_algorithm_id', keep='first') 46 | .reset_index() 47 | .meta_user_id 48 | .astype(str)) 49 | self.author_to_algo_encoding = LabelEncoder().fit_transform(df) 50 | 51 | # Which backtests correspond to which algos? 52 | df = sharpes.meta_algorithm_id.astype(str) 53 | self.algo_to_backtest_encoding = LabelEncoder().fit_transform(df) 54 | 55 | # Which backtests correspond to which authors? 56 | df = sharpes.meta_user_id.astype(str) 57 | self.author_to_backtest_encoding = LabelEncoder().fit_transform(df) 58 | 59 | # Construct correlation matrix. 
60 | # 0 is a better estimate for mean returns than the sample mean! 61 | returns_ = returns / returns.std() 62 | self.corr = LedoitWolf(assume_centered=True).fit(returns_).covariance_ 63 | 64 | self.model = self._build_model(sharpes, self.corr) 65 | 66 | self.coords = { 67 | 'meta_user_id': sharpes.meta_user_id.drop_duplicates().values, 68 | 'meta_algorithm_id': sharpes.meta_algorithm_id.drop_duplicates().values, 69 | 'meta_code_id': sharpes.meta_code_id.values 70 | } 71 | 72 | self.dims = { 73 | 'mu_global': (), 74 | 'mu_author': ('meta_user_id', ), 75 | 'mu_author_raw': ('meta_user_id', ), 76 | 'mu_author_sd': (), 77 | 'mu_algo': ('meta_algorithm_id', ), 78 | 'mu_algo_raw': ('meta_algorithm_id', ), 79 | 'mu_algo_sd': (), 80 | 'mu_backtest': ('meta_code_id', ), 81 | 'sigma_backtest': ('meta_code_id', ), 82 | 'alpha_author': ('meta_user_id', ), 83 | 'alpha_algo': ('meta_algorithm_id', ) 84 | } 85 | 86 | def _build_model(self, sharpes, corr): 87 | """ 88 | Build the entire author model (in one function). The model is 89 | sufficiently simple to specify entirely in one function. 90 | 91 | Parameters 92 | ---------- 93 | sharpes : pd.DataFrame 94 | Long-format DataFrame of in-sample Sharpe ratios (from user-run 95 | backtests), indexed by user, algorithm and code ID. 96 | Note that currently, backtests are deduplicated based on code id. 97 | See fit_authors for more information. 98 | corr : np.ndarray 99 | Correlation matrix of returns streams (from backtests), estimated 100 | using Ledoit-Wolf shrinkage. 101 | See fit_authors for more information. 
102 | """ 103 | with pm.Model() as model: 104 | mu_global = pm.Normal('mu_global', mu=0, sd=3) 105 | 106 | mu_author_sd = pm.HalfNormal('mu_author_sd', sd=1) 107 | mu_author_raw = pm.Normal('mu_author_raw', mu=0, sd=1, 108 | shape=self.num_authors) 109 | mu_author = pm.Deterministic('mu_author', 110 | mu_author_sd * mu_author_raw) 111 | 112 | mu_algo_sd = pm.HalfNormal('mu_algo_sd', sd=1) 113 | mu_algo_raw = pm.Normal('mu_algo_raw', mu=0, sd=1, 114 | shape=self.num_algos) 115 | mu_algo = pm.Deterministic('mu_algo', mu_algo_sd * mu_algo_raw) 116 | 117 | mu_backtest = \ 118 | pm.Deterministic('mu_backtest', 119 | mu_global 120 | + mu_author[self.author_to_backtest_encoding] 121 | + mu_algo[self.algo_to_backtest_encoding]) 122 | 123 | sigma_backtest = pm.Deterministic( 124 | 'sigma_backtest', 125 | tt.sqrt(APPROX_BDAYS_PER_YEAR / sharpes.meta_trading_days) 126 | ) 127 | 128 | cov = corr * sigma_backtest[:, None] * sigma_backtest[None, :] 129 | 130 | alpha_author = pm.Deterministic('alpha_author', 131 | mu_global + mu_author) 132 | 133 | alpha_algo = \ 134 | pm.Deterministic('alpha_algo', 135 | mu_global 136 | + mu_author[self.author_to_algo_encoding] 137 | + mu_algo) 138 | 139 | sharpe = pm.MvNormal('sharpe', 140 | mu=mu_backtest, 141 | cov=cov, 142 | shape=self.num_backtests, 143 | observed=sharpes.sharpe_ratio) 144 | 145 | return model 146 | 147 | 148 | class AuthorModelResult(BayesAlphaResult): 149 | def rebuild_model(self, sharpes=None, returns=None): 150 | """ Return an AuthorModelBuilder that recreates the original model. 
""" 151 | if sharpes is None: 152 | sharpes = (self.trace 153 | ._sharpes 154 | .to_pandas() 155 | .reset_index() 156 | .copy()) 157 | 158 | if returns is None: 159 | returns = (self.trace 160 | ._returns 161 | .to_pandas() 162 | .copy()) 163 | 164 | return AuthorModelBuilder(sharpes, returns) 165 | 166 | 167 | def fit_authors(sharpes, 168 | returns, 169 | sampler_type='mcmc', 170 | sampler_args=None, 171 | seed=None, 172 | save_data=True, 173 | **params): 174 | """ 175 | Fit author model to population of authors, with algos and backtests. 176 | 177 | Parameters 178 | ---------- 179 | sharpes : pd.DataFrame 180 | Long-format DataFrame of in-sample Sharpe ratios (from user-run 181 | backtests), indexed by user, algorithm and code ID. 182 | Note that currently, backtests are deduplicated based on code id. 183 | :: 184 | meta_user_id meta_algorithm_id meta_code_id meta_trading_days sharpe_ratio 185 | 0 abcdef123456 ghijkl789123 abcdef000000 136 0.919407 186 | 1 abcdef123456 ghijkl789123 abcdef000001 271 1.129353 187 | 2 abcdef123456 ghijkl789123 abcdef000002 229 -0.005934 188 | 189 | returns : pd.DataFrame 190 | Wide-format DataFrame of in-sample returns of user-run backtests, 191 | indexed by time. Columns are code ids, rows are time (the format of 192 | time does not matter). 193 | :: 194 | abcd1234 efgh5678 ijkl9123 195 | 2013-06-03 -0.000326 0.002815 0.002110 196 | 2013-06-04 0.000326 -0.000135 -0.001211 197 | 2013-06-05 0.000326 0.001918 0.002911 198 | 199 | sampler_type : str 200 | Whether to use Markov chain Monte Carlo or variational inference. 201 | Either 'mcmc' or 'vi'. Defaults to 'mcmc'. 202 | sampler_args : dict 203 | Additional parameters for `pm.sample`. 204 | save_data : bool 205 | Whether to store the dataset in the result object. 206 | seed : int 207 | Seed for random number generation in PyMC3. 
208 | """ 209 | if params: 210 | raise ValueError('Unnecessary kwargs passed to fit_authors.') 211 | 212 | if sampler_type not in {'mcmc', 'vi'}: 213 | raise ValueError("sampler_type not in {'mcmc', 'vi'}") 214 | 215 | # Check data 216 | _check_data(sharpes, returns) 217 | 218 | if seed is None: 219 | seed = int(random.getrandbits(31)) 220 | else: 221 | seed = int(seed) 222 | 223 | builder = AuthorModelBuilder(sharpes, returns) 224 | model, coords, dims = builder.model, builder.coords, builder.dims 225 | 226 | timestamp = datetime.isoformat(datetime.now()) 227 | 228 | with model: 229 | args = {} if sampler_args is None else sampler_args 230 | 231 | with warnings.catch_warnings(record=True) as warns: 232 | if sampler_type == 'mcmc': 233 | trace = pm.sample(**args) 234 | else: 235 | trace = pm.fit(**args).sample(args.get('draws', 500)) 236 | 237 | if warns: 238 | warnings.warn('Problems during sampling. Inspect `result.warnings`.') 239 | 240 | trace = to_xarray(trace, coords, dims) 241 | # Author model takes no parameters, so this will always be empty. 242 | trace.attrs['params'] = json.dumps(params) 243 | trace.attrs['timestamp'] = timestamp 244 | trace.attrs['warnings'] = json.dumps([str(warn) for warn in warns]) 245 | trace.attrs['seed'] = seed 246 | trace.attrs['model-version'] = get_versions()['version'] 247 | trace.attrs['model-type'] = AUTHOR_MODEL_TYPE 248 | 249 | if save_data: 250 | # Store the data in long format to avoid creating more dimensions 251 | trace['_sharpes'] = xr.DataArray(sharpes, dims=['sharpes_index', 252 | 'sharpes_columns']) 253 | trace['_returns'] = xr.DataArray(returns, dims=['returns_index', 254 | 'returns_columns']) 255 | 256 | return AuthorModelResult(trace) 257 | 258 | 259 | def _check_data(sharpes, returns): 260 | """ 261 | Run basic sanity checks on the data set. 
262 | 263 | Parameters 264 | ---------- 265 | sharpes : pd.DataFrame 266 | Long-format DataFrame of in-sample Sharpe ratios (from user-run 267 | backtests), indexed by user, algorithm and code ID. 268 | Note that currently, backtests are deduplicated based on code id. 269 | See fit_authors for more information. 270 | returns : pd.DataFrame 271 | Wide-format DataFrame of in-sample returns of user-run backtests, 272 | indexed by time. Columns are code ids, rows are time (the format of 273 | time does not matter). 274 | See fit_authors for more information. 275 | """ 276 | 277 | # FIXME deduplicating based on code id is not perfect. Ideally we would 278 | # deduplicate on backtest id. 279 | if sharpes.meta_code_id.nunique() != sharpes.shape[0]: 280 | warnings.warn('Data set contains duplicate backtests.') 281 | 282 | if (sharpes.groupby('meta_algorithm_id')['sharpe_ratio'] 283 | .count() < 5).any(): 284 | warnings.warn('Data set contains algorithms with fewer than 5 ' 285 | 'backtests.') 286 | 287 | if (sharpes.groupby('meta_user_id')['meta_algorithm_id'].nunique() < 5).any(): 288 | warnings.warn('Data set contains users with fewer than 5 algorithms.') 289 | 290 | if ((sharpes.sharpe_ratio > 20) 291 | | (sharpes.sharpe_ratio < -20)).any(): 292 | raise ValueError('`sharpes` contains unrealistic values: greater than ' 293 | '20 in magnitude.') 294 | 295 | if pd.isnull(sharpes).any().any(): 296 | raise ValueError('`sharpes` contains NaNs.') 297 | 298 | # FIXME remove this check once all feature factory features are debugged. 
299 | if (sharpes == -99999).any().any(): 300 | raise ValueError('`sharpes` contains -99999s.') 301 | 302 | if pd.isnull(returns).any().any(): 303 | raise ValueError('`returns` contains NaNs.') 304 | 305 | if returns.columns.duplicated().any(): 306 | raise ValueError('`returns` contains duplicated code ids.') 307 | 308 | if len(sharpes.meta_code_id) != len(returns.columns): 309 | raise ValueError('`sharpes` and `returns` are different lengths.') 310 | 311 | if not set(sharpes.meta_code_id) == set(returns.columns): 312 | raise ValueError('`sharpes` and `returns` are the same length, but ' 313 | 'contain different code ids.') 314 | 315 | if not (sharpes.meta_code_id == returns.columns).all(): 316 | raise ValueError('`sharpes` and `returns` contain the same code ids, ' 317 | 'but are ordered differently.') 318 | -------------------------------------------------------------------------------- /bayesalpha/base.py: -------------------------------------------------------------------------------- 1 | """ Base classes shared across all models. """ 2 | 3 | import json 4 | import hashlib 5 | 6 | 7 | class BayesAlphaResult(object): 8 | """ A wrapper around a PyMC3 trace as a xarray Dataset. 
""" 9 | 10 | def __init__(self, trace): 11 | self._trace = trace 12 | 13 | def save(self, filename, group=None, **args): 14 | """Save the results to a netcdf file.""" 15 | self._trace.to_netcdf(filename, group=group, **args) 16 | 17 | @classmethod 18 | def _load(cls, trace): 19 | return cls(trace=trace) 20 | 21 | def rebuild_model(self, **kwargs): 22 | pass 23 | 24 | @property 25 | def trace(self): 26 | return self._trace 27 | 28 | @property 29 | def params(self): 30 | return json.loads(self._trace.attrs['params']) 31 | 32 | @property 33 | def timestamp(self): 34 | return self._trace.attrs['timestamp'] 35 | 36 | @property 37 | def model_version(self): 38 | return self._trace.attrs['model-version'] 39 | 40 | @property 41 | def model_type(self): 42 | return self._trace.attrs['model-type'] 43 | 44 | @property 45 | def params_hash(self): 46 | params = json.dumps(self.params, sort_keys=True) 47 | hasher = hashlib.sha256(params.encode()) 48 | return hasher.hexdigest()[:16] 49 | 50 | @property 51 | def ok(self): 52 | return len(self.warnings) == 0 53 | 54 | @property 55 | def warnings(self): 56 | return json.loads(self._trace.attrs['warnings']) 57 | 58 | @property 59 | def seed(self): 60 | return self._trace.attrs['seed'] 61 | 62 | @property 63 | def id(self): 64 | hasher = hashlib.sha256() 65 | hasher.update(self.params_hash.encode()) 66 | hasher.update(self.model_version.encode()) 67 | hasher.update(str(self.seed).encode()) 68 | return hasher.hexdigest()[:16] 69 | 70 | def raise_ok(self): 71 | if not self.ok: 72 | warnings = self.warnings 73 | raise RuntimeError('Problems during sampling: %s' % warnings) 74 | -------------------------------------------------------------------------------- /bayesalpha/dists.py: -------------------------------------------------------------------------------- 1 | import theano.tensor as tt 2 | import theano 3 | import theano.tensor.extra_ops 4 | import theano.sparse 5 | import theano.scalar 6 | import pymc3 as pm 7 | import numpy as np 8 | 
class NormalNonZero(pm.Normal):
    """Normal likelihood that skips observations that are exactly zero.

    Zero entries contribute a log-probability of 0, i.e. they are treated
    as missing rather than as observed zeros.
    """

    def logp(self, value):
        all_logp = super(NormalNonZero, self).logp(value)
        # Mask out exact zeros so they do not affect the likelihood.
        return tt.switch(tt.eq(value, 0), 0., all_logp)


class ScaledSdMvNormalNonZero(pm.MvNormal):
    """MvNormal with an extra per-dimension sd scaling and zero-masking.

    The base MvNormal is constructed with a zero mean; `mu` and `scale_sd`
    are applied manually in `logp`/`random`.  Rows of `value` that contain
    any exact zero contribute a log-probability of 0.
    """

    def __init__(self, *args, **kwargs):
        # `scale_sd` and `mu` are consumed here; the parent sees mu=0 so
        # that logp can evaluate the standardized residual z instead.
        self.scale_sd = kwargs.pop('scale_sd')
        assert not args
        self._mu = kwargs.pop('mu')
        if isinstance(self._mu, tt.Variable):
            kwargs['mu'] = tt.zeros_like(self._mu)
        else:
            kwargs['mu'] = np.zeros_like(self._mu)
        super(ScaledSdMvNormalNonZero, self).__init__(**kwargs)

    def logp(self, value):
        scale_sd = self.scale_sd
        mu = self._mu

        # properly broadcast values to work in unified way
        if scale_sd.ndim == 0:
            scale_sd = tt.repeat(scale_sd, value.shape[-1])
        if scale_sd.ndim == 1:
            scale_sd = scale_sd[None, :]

        # Jacobian correction for the change of variables z = (value-mu)/sd.
        detfix = -tt.log(scale_sd).sum(axis=-1)
        z = (value - mu)/scale_sd
        logp = super(ScaledSdMvNormalNonZero, self).logp(z) + detfix
        # Rows containing any exact zero are treated as missing.
        logp = tt.switch(tt.eq(value, 0).any(-1), 0., logp)
        return logp

    def random(self, point=None, size=None):
        # Draw from the zero-mean parent, then rescale and shift.
        r = super(ScaledSdMvNormalNonZero, self).random(point=point, size=size)
        shape = r.shape
        scale_sd, mu = draw_values([self.scale_sd, self._mu], point=point)
        if scale_sd.ndim == 0:
            scale_sd = np.repeat(scale_sd, r.shape[-1])
        if scale_sd.ndim == 1:
            scale_sd = scale_sd[None, :]
        r *= scale_sd
        r += mu
        # reshape back just in case
        return r.reshape(shape)
def bspline_basis(n, eval_points, degree=3):
    """Evaluate a clamped B-spline basis of `n` functions on [0, 1].

    Parameters
    ----------
    n : int
        Number of basis functions.
    eval_points : array-like
        Points in [0, 1] at which to evaluate the basis.
    degree : int
        Spline degree (default cubic).

    Returns
    -------
    scipy.sparse.csr_matrix
        Matrix of shape (len(eval_points), n); column j holds basis
        function j evaluated at `eval_points`.
    """
    n_knots = n + degree + 1
    # Uniform interior knots on [0, 1], clamped by repeating the
    # endpoints `degree` extra times.
    interior = np.linspace(0, 1, n_knots - 2 * degree)
    knots = np.concatenate([np.zeros(degree), interior, np.ones(degree)])
    # Coefficients eye(n) evaluate every basis function at once.
    basis = interpolate.BSpline(knots, np.eye(n), k=degree)
    return sparse.csr_matrix(basis(eval_points))
class Dot(theano.gof.op.Op):
    # See doc in instance of this Op or function after this class definition.
    # Vendored/adapted sparse-dot Op; see the `dot` wrapper below for the
    # user-facing documentation.
    __props__ = ()

    def __str__(self):
        return "Sparse" + self.__class__.__name__

    def infer_shape(self, node, shapes):
        # Output shape follows ordinary matrix/vector product rules for
        # each combination of 1-d and 2-d operands.
        xshp, yshp = shapes
        x, y = node.inputs
        if x.ndim == 2 and y.ndim == 2:
            return [(xshp[0], yshp[1])]
        if x.ndim == 1 and y.ndim == 2:
            return [(yshp[1],)]
        if x.ndim == 2 and y.ndim == 1:
            return [(xshp[0],)]
        if x.ndim == 1 and y.ndim == 1:
            return [()]
        raise NotImplementedError()

    def make_node(self, x, y):
        dtype_out = theano.scalar.upcast(x.dtype, y.dtype)

        # Sparse dot product should have at least one sparse variable
        # as input. If the other one is not sparse, it has to be converted
        # into a tensor.
        if isinstance(x, sparse.spmatrix):
            x = theano.sparse.as_sparse_variable(x)
        if isinstance(y, sparse.spmatrix):
            y = theano.sparse.as_sparse_variable(y)
        x_is_sparse_var = theano.sparse.basic._is_sparse_variable(x)
        y_is_sparse_var = theano.sparse.basic._is_sparse_variable(y)

        if not x_is_sparse_var and not y_is_sparse_var:
            raise TypeError(
                "Sparse dot product should have at least one "
                "sparse variable as inputs, but the inputs are "
                "%s (%s) and %s (%s)." % (x, x.type, y, y.type))

        if x_is_sparse_var:
            broadcast_x = (False,) * x.ndim
        else:
            # x is dense, so y must be the sparse operand.
            x = tt.as_tensor_variable(x)
            broadcast_x = x.type.broadcastable
            assert y.format in ["csr", "csc"]
            if x.ndim not in (1, 2):
                raise TypeError(
                    'theano.sparse.Dot: input 0 (0-indexed) must have ndim of '
                    '1 or 2, %d given.' % x.ndim)

        if y_is_sparse_var:
            broadcast_y = (False,) * y.ndim
        else:
            # y is dense, so x must be the sparse operand.
            y = tt.as_tensor_variable(y)
            broadcast_y = y.type.broadcastable
            assert x.format in ["csr", "csc"]
            if y.ndim not in (1, 2):
                raise TypeError(
                    'theano.sparse.Dot: input 1 (1-indexed) must have ndim of '
                    '1 or 2, %d given.' % y.ndim)

        # Output broadcast pattern per matrix-product shape rules.
        if len(broadcast_y) == 2:
            broadcast_out = broadcast_x[:-1] + broadcast_y[1:]
        elif len(broadcast_y) == 1:
            broadcast_out = broadcast_x[:-1]
        return theano.gof.Apply(
            self, [x, y], [tt.tensor(dtype=dtype_out,
                                     broadcastable=broadcast_out)])

    def perform(self, node, inputs, out):
        x, y = inputs
        out = out[0]
        x_is_sparse = theano.sparse.basic._is_sparse(x)
        y_is_sparse = theano.sparse.basic._is_sparse(y)

        if not x_is_sparse and not y_is_sparse:
            raise TypeError(x)

        # scipy's `*` on a sparse operand is matrix multiplication.
        rval = x * y

        if x_is_sparse and y_is_sparse:
            # Output of this Op is declared dense; densify sparse*sparse.
            rval = rval.toarray()

        out[0] = theano._asarray(rval, dtype=node.outputs[0].dtype)

    def grad(self, inputs, gout):
        (x, y) = inputs
        (gz,) = gout
        assert (theano.sparse.basic._is_sparse_variable(x)
                or theano.sparse.basic._is_sparse_variable(y))
        rval = []

        # Standard matmul gradients, routed through the sparse dot when
        # the operand is sparse.
        if theano.sparse.basic._is_dense_variable(y):
            rval.append(tt.dot(gz, y.T))
        else:
            rval.append(dot(gz, y.T))
        if theano.sparse.basic._is_dense_variable(x):
            rval.append(tt.dot(x.T, gz))
        else:
            rval.append(dot(x.T, gz))

        return rval


# Singleton instance used by the `dot` wrapper below.
_dot = Dot()
def dot(x, y):
    """
    Operation for efficiently calculating the dot product when
    one or all operands is sparse. Supported format are CSC and CSR.
    The output of the operation is dense.

    Parameters
    ----------
    x
        Sparse or dense matrix variable.
    y
        Sparse or dense matrix variable.

    Returns
    -------
    The dot product `x`.`y` in a dense format.

    Notes
    -----
    The grad implemented is regular, i.e. not structured.

    At least one of `x` or `y` must be a sparse matrix.

    When the operation has the form dot(csr_matrix, dense)
    the gradient of this operation can be performed inplace
    by UsmmCscDense. This leads to significant speed-ups.

    """
    # scipy sparse matrices expose getnnz; wrap them as sparse variables.
    if hasattr(x, 'getnnz'):
        x = theano.sparse.as_sparse_variable(x)
    if hasattr(y, 'getnnz'):
        y = theano.sparse.as_sparse_variable(y)

    x_is_sparse_variable = theano.sparse.basic._is_sparse_variable(x)
    y_is_sparse_variable = theano.sparse.basic._is_sparse_variable(y)

    if not x_is_sparse_variable and not y_is_sparse_variable:
        raise TypeError()

    return _dot(x, y)


class BatchedMatrixInverse(tt.Op):
    """Computes the inverse of a matrix :math:`A`.

    Given a square matrix :math:`A`, ``matrix_inverse`` returns a square
    matrix :math:`A_{inv}` such that the dot product :math:`A \\cdot A_{inv}`
    and :math:`A_{inv} \\cdot A` equals the identity matrix :math:`I`.

    The input is a batch of matrices: a 3-d tensor whose leading axis
    indexes the batch.

    Notes
    -----
    When possible, the call to this op will be optimized to the call
    of ``solve``.

    """

    __props__ = ()

    def __init__(self):
        pass

    def make_node(self, x):
        x = tt.as_tensor_variable(x)
        # BUG FIX: theano tensor variables have `.ndim`, not `.dim`;
        # the original `assert x.dim == 3` raised AttributeError on
        # every application of the op.
        assert x.ndim == 3
        return tt.Apply(self, [x], [x.type()])

    def perform(self, node, inputs, outputs):
        (x,) = inputs
        (z,) = outputs
        # np.linalg.inv broadcasts over the leading batch axis.
        z[0] = np.linalg.inv(x).astype(x.dtype)

    def grad(self, inputs, g_outputs):
        r"""The gradient function should return

        .. math:: V\frac{\partial X^{-1}}{\partial X},

        where :math:`V` corresponds to ``g_outputs`` and :math:`X` to
        ``inputs``. Using the `matrix cookbook
        <http://www2.imm.dtu.dk/pubdb/views/publication_details.php?id=3274>`_,
        one can deduce that the relation corresponds to

        .. math:: (X^{-1} \cdot V^{T} \cdot X^{-1})^T.

        """
        x, = inputs
        xi = self.__call__(x)
        gz, = g_outputs
        # TT.dot(gz.T,xi)
        gx = tt.batched_dot(xi, gz.transpose(0, 2, 1))
        gx = tt.batched_dot(gx, xi)
        gx = -gx.transpose(0, 2, 1)
        return [gx]

    def R_op(self, inputs, eval_points):
        r"""The gradient function should return

        .. math:: \frac{\partial X^{-1}}{\partial X}V,

        where :math:`V` corresponds to ``g_outputs`` and :math:`X` to
        ``inputs``. Using the `matrix cookbook
        <http://www2.imm.dtu.dk/pubdb/views/publication_details.php?id=3274>`_,
        one can deduce that the relation corresponds to

        .. math:: X^{-1} \cdot V \cdot X^{-1}.

        """
        x, = inputs
        xi = self.__call__(x)
        ev, = eval_points

        if ev is None:
            return [None]

        r = tt.batched_dot(xi, ev)
        r = tt.batched_dot(r, xi)
        r = -r
        return [r]

    def infer_shape(self, node, shapes):
        # The inverse has the same shape as the input batch.
        return shapes


batched_matrix_inverse = BatchedMatrixInverse()
class EQCorrMvNormal(pm.Continuous):
    """Multivariate normal with block-equicorrelated correlation structure.

    Dimensions are partitioned into clusters (`clust`); within cluster c all
    pairs share a single correlation `corr[c]`, and distinct clusters are
    uncorrelated.  This block structure gives closed-form determinants and
    inverses, avoiding a Cholesky factorization in `logp`.  When `nonzero`
    is True, rows of the value containing any exact zero contribute a
    log-probability of 0 (treated as missing).
    """

    def __init__(self, mu, std, corr, clust, nonzero=True, *args, **kwargs):
        super(EQCorrMvNormal, self).__init__(*args, **kwargs)
        self.mu, self.std, self.corr, self.clust = map(
            tt.as_tensor_variable, [mu, std, corr, clust]
        )
        self.nonzero = nonzero

    def logp(self, x):
        # -1/2 (x-mu) @ Sigma^-1 @ (x-mu)^T - 1/2 log(2pi^k|Sigma|)
        # Sigma = diag(std) @ Corr @ diag(std)
        # Sigma^-1 = diag(std^-1) @ Corr^-1 @ diag(std^-1)
        # Corr is block diagonal; block c is an equicorrelation matrix B_c
        # with ones on the diagonal and b_c off-diagonal:
        # Corr = [[B1, 0, 0, ..., 0]
        #         [ 0, B2, 0, ..., 0]
        #         [ 0, 0, B3, ..., 0]
        #         [        ...      ]
        #         [ 0, 0, 0, ..., Bk]]
        # so Corr^-1 is block diagonal with blocks B_c^-1.
        #
        # |B| matrix of rank r is easy
        # https://math.stackexchange.com/a/1732839
        # Let D = eye(r) * (1-b)
        # Then B = D + b * ones((r, r))
        # |B| = (1-b) ** r + b * r * (1-b) ** (r-1)
        # |B| = (1.-b) ** (r-1) * (1. + b * (r - 1))
        # log(|B|) = log(1-b)*(r-1) + log1p(b*(r-1))
        #
        # Inverse B^-1 is easy as well
        # https://math.stackexchange.com/a/1766118
        # let
        # c = 1/b + r*1/(1-b)
        # (B^-1)ii = 1/(1-b) - 1/(c*(1-b)**2)
        # (B^-1)ij = - 1/(c*(1-b)**2)
        #
        # assuming
        # z = (x - mu) / std
        # we have det fix
        # detfix = -sum(log(std))
        #
        # now we need to compute z @ Corr^-1 @ z^T
        # note that B can be unique per timestep
        # so we need z_t @ Corr_t^-1 @ z_t^T in perfect
        # z_t @ Corr_t^-1 @ z_t^T is a sum of block terms
        # quad = z_ct @ B_ct^-1 @ z_ct^T = (B^-1)_iict * sum(z_ct**2) + (B^-1)_ijct*sum_{i!=j}(z_ict * z_jct)
        #
        # finally all terms are computed explicitly
        # logp = detfix - 1/2 * ( quad + log(pi*2) * k + log(|B|) )

        x = tt.as_tensor_variable(x)
        clust_ids, clust_pos, clust_counts = \
            tt.extra_ops.Unique(return_inverse=True,
                                return_counts=True)(self.clust)
        # Permutation that groups dimensions of the same cluster together,
        # matching the block-diagonal layout assumed above.
        clust_order = tt.argsort(clust_pos)
        mu = self.mu
        corr = self.corr[..., clust_ids]
        std = self.std
        # Broadcast std/corr to a common (time, dim) layout.
        if std.ndim == 0:
            std = tt.repeat(std, x.shape[-1])
        if std.ndim == 1:
            std = std[None, :]
        if corr.ndim == 1:
            corr = corr[None, :]
        z = (x - mu)/std
        z = z[..., clust_order]
        detfix = -tt.log(std).sum(-1)
        # following the notation above
        r = clust_counts
        b = corr
        # detB = (1.-b) ** (r-1) * (1. + b * (r - 1))
        logdetB = tt.log1p(-b) * (r-1) + tt.log1p(b * (r - 1))
        c = 1 / b + r / (1. - b)
        invBij = -1./(c*(1.-b)**2)
        invBii = 1./(1.-b) + invBij
        # Expand per-cluster scalars to one entry per dimension.
        invBij = tt.repeat(invBij, clust_counts, axis=-1)
        invBii = tt.repeat(invBii, clust_counts, axis=-1)

        # to compute (Corr^-1)_ijt*sum_{i!=j}(z_it * z_jt)
        # we use masked cross products
        mask = tt.arange(x.shape[-1])[None, :]
        mask = tt.repeat(mask, x.shape[-1], axis=0)
        mask = tt.maximum(mask, mask.T)
        block_end_pos = tt.cumsum(r)
        block_end_pos = tt.repeat(block_end_pos, clust_counts)
        mask = tt.lt(mask, block_end_pos)
        mask = tt.and_(mask, mask.T)
        mask = tt.fill_diagonal(mask.astype('float32'), 0.)
        # type: tt.TensorVariable

        invBiizizi_sum = ((z**2) * invBii).sum(-1)
        invBijzizj_sum = (
            (z.dimshuffle(0, 1, 'x')
             * mask.dimshuffle('x', 0, 1)
             * z.dimshuffle(0, 'x', 1))
            * invBij.dimshuffle(0, 1, 'x')
        ).sum([-1, -2])
        quad = invBiizizi_sum + invBijzizj_sum
        k = pm.floatX(x.shape[-1])
        logp = (
            detfix
            - .5 * (
                quad
                + pm.floatX(np.log(np.pi*2)) * k
                + logdetB.sum(-1)
            )
        )
        if self.nonzero:
            # Rows containing any exact zero are treated as missing.
            logp = tt.switch(tt.eq(x, 0).any(-1), 0., logp)
        return bound(logp,
                     tt.gt(corr, -1.),
                     tt.lt(corr, 1.),
                     tt.gt(std, 0.),
                     broadcast_conditions=False)

    def random(self, point=None, size=None):
        mu, std, corr, clust = draw_values(
            [self.mu, self.std, self.corr, self.clust],
            point=point
        )
        return self.st_random(mu, std,
                              corr, clust,
                              size=size,
                              _dist_shape=self.shape)

    @staticmethod
    def st_random(mu, std, corr, clust, size=None, _dist_shape=None):
        # NumPy-side sampler mirroring `logp`: build the dense block
        # covariance explicitly and draw via its Cholesky factor.
        mu, std, corr, clust = map(np.asarray, [mu, std, corr, clust])
        size = pm.distributions.distribution.to_tuple(size)
        _dist_shape = pm.distributions.distribution.to_tuple(_dist_shape)
        k = mu.shape[-1]
        if corr.ndim == 1:
            corr = corr[None, :]
        dist_shape = np.broadcast(
            np.zeros(_dist_shape),
            mu, std,
            np.zeros((corr.shape[0], k))
        ).shape

        out_shape = size + dist_shape
        if std.ndim == 0:
            std = np.repeat(std, k)
        if std.ndim == 1:
            std = std[None, :]
        clust_ids, clust_pos, clust_counts = np.unique(
            clust, return_inverse=True, return_counts=True
        )
        # inner representation for clusters
        clust_order = np.argsort(clust_pos)
        # this order aligns means and std with block matrix representation
        # so first step is to apply this ordering for means and std
        mu = mu[..., clust_order]
        std = std[..., clust_order]
        # expected output order of clusters
        # inverse permutation
        inv_clust_order = np.zeros_like(clust_order)
        for i in range(len(clust_order)):
            inv_clust_order[clust_order[i]] = i

        corr = corr[..., clust_ids]
        block_end_pos = np.cumsum(clust_counts)
        block_end_pos = np.repeat(block_end_pos, clust_counts)
        mask = np.arange(k)[None, :]
        mask = np.repeat(mask, k, axis=0)
        mask = np.maximum(mask, mask.T)
        # Boolean block-diagonal mask: True where i, j share a cluster.
        mask = (mask < block_end_pos) & (mask < block_end_pos).T
        corr = np.repeat(corr, clust_counts, axis=-1)[..., None]
        corr = corr * mask[None, :]
        corr[:, np.arange(k), np.arange(k)] = 1
        std = std[..., None]
        cov = std * corr * std.swapaxes(-1, -2)
        chol = np.linalg.cholesky(cov)
        standard_normal = np.random.standard_normal(tuple(size) + dist_shape)
        # we need dot product for last dim with possibly many chols
        # in simple case we do z @ chol.T
        # as it done row by col we do not transpose chol
        # before elemwise multiplication
        sample = mu + np.sum(standard_normal[..., None, :] * chol, -1)
        # recall old ordering
        # we also get rid of unused dimension
        return sample[..., inv_clust_order].reshape(out_shape)
np.sum(standard_normal[..., None, :] * chol, -1) 536 | # recall old ordering 537 | # we also get rid of unused dimension 538 | return sample[..., inv_clust_order].reshape(out_shape) 539 | -------------------------------------------------------------------------------- /bayesalpha/load.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | from .returns_model import ReturnsModelResult 3 | from .author_model import AuthorModelResult 4 | 5 | 6 | def load(filename, group=None): 7 | trace = xr.open_dataset(filename, group=group) 8 | model_type = trace.attrs.get('model-type') 9 | if model_type == 'returns-model': 10 | return ReturnsModelResult._load(trace) 11 | elif model_type == 'author-model': 12 | return AuthorModelResult._load(trace) 13 | # Default to returns model, so we can still load old traces 14 | elif model_type is None: 15 | return ReturnsModelResult._load(trace) 16 | else: 17 | ValueError('Unknown model type: {}.'.format(model_type)) 18 | -------------------------------------------------------------------------------- /bayesalpha/plotting.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import warnings 3 | import functools 4 | try: 5 | import matplotlib.pyplot as plt 6 | import seaborn as sns 7 | _has_mpl = True 8 | except ImportError: 9 | warnings.warn('Could not import matplotlib: Plotting unavailable.') 10 | _has_mpl = False 11 | plt = None 12 | sns = None 13 | 14 | 15 | def _require_mpl(func): 16 | @functools.wraps(func) 17 | def inner(*args, **kwargs): 18 | if not _has_mpl: 19 | raise RuntimeError('Matplotlib is unavailable.') 20 | return func(*args, **kwargs) 21 | 22 | return inner 23 | 24 | 25 | def _get_height(k): 26 | return 2 + 14 * (1 - np.exp(-0.02 * k)) 27 | 28 | 29 | @_require_mpl 30 | def plot_horizontal_dots(vals, sort=True, ax=None, title=None, **kwargs): 31 | if ax is None: 32 | height = _get_height(len(vals)) 33 | _, ax = 
@_require_mpl
def plot_horizontal_dots(vals, sort=True, ax=None, title=None, **kwargs):
    """Draw a horizontal dot chart of per-algo values.

    Parameters
    ----------
    vals : pd.Series
        Values to plot; the index supplies the y tick labels.
    sort : bool
        Sort the series by value before plotting.
    ax : matplotlib axis, optional
        Axis to draw into; a new figure is created when omitted.
    title : str, optional
        Axis title.
    **kwargs
        Extra keyword arguments forwarded to ``ax.set``.
    """
    if ax is None:
        _, ax = plt.subplots(1, 1, figsize=(4, _get_height(len(vals))))

    if sort:
        vals = vals.sort_values()

    positions = -np.arange(len(vals))

    axis_settings = dict(
        xlim=(0, 1),
        yticks=positions,
        yticklabels=vals.index,
        ylim=(-len(positions) + .5, .5),
    )
    axis_settings.update(kwargs)

    ax.grid(axis='x', color='w', zorder=-5)
    ax.scatter(vals.values, positions, marker='d', zorder=5)
    ax.axvline(0.5, alpha=0.3, color='black')
    if title:
        ax.set_title(title)

    ax.set(**axis_settings)

    # Alternating gray/white bands behind the dots for readability.
    if len(vals) > 1:
        ax.barh(positions, [max(ax.get_xticks())] * len(positions),
                height=(positions[1] - positions[0]),
                color=['lightgray', 'w'],
                zorder=-10, alpha=.25)

    return ax
@_require_mpl
def plot_correlations(corr_xarray, corr_threshold=.33, ax=None,
                      cmap=None, **heatmap_kwargs):
    """Plot posterior correlation summaries as four lower-triangle heatmaps.

    The panels show the posterior mean, the posterior std, the probability
    that |corr| exceeds `corr_threshold`, and the mean/std ratio.

    Parameters
    ----------
    corr_xarray : xr.DataArray
        Posterior correlations with dims ('chain', 'sample', 'algo', 'algo_').
    corr_threshold : float
        Threshold used for the exceedance-probability panel.
    ax : array of matplotlib axes, optional
        2x2 axes to draw into; a new figure is created when omitted.
    cmap : dict, optional
        Override colormaps for any of 'mean', 'std', 'prob', 'snr'.
    **heatmap_kwargs
        Extra keyword arguments forwarded to ``sns.heatmap``.
    """
    k = len(corr_xarray.coords['algo'])
    if ax is None:
        w, h = _get_height(k) * 2 + 1, _get_height(k) * 2
        fig, ax = plt.subplots(2, 2, figsize=(w, h))
    else:
        fig = None
    _cmap = dict(
        mean='bwr',
        std='magma',
        prob='pink',
        snr='PuOr'
    )
    if cmap is not None:
        _cmap.update(cmap)
    ax = ax.flat
    mean = corr_xarray.mean(['chain', 'sample']).values
    std = corr_xarray.std(['chain', 'sample']).values
    prob = (abs(corr_xarray) > corr_threshold).mean(['chain', 'sample']).values
    snr = mean / std
    snr[range(k), range(k)] = 0
    # BUG FIX: `np.bool` was deprecated in numpy 1.20 and removed in 1.24;
    # the builtin `bool` is the documented replacement.
    mask = np.zeros(mean.shape, dtype=bool)
    mask[np.triu_indices_from(mask)] = True
    kwargs = dict(mask=mask, square=True, linewidths=.5)
    kwargs.update(heatmap_kwargs)
    sns.heatmap(mean, ax=ax[0], vmin=-1, vmax=1, cmap=_cmap['mean'], **kwargs)
    ax[0].set_title('E[corr]')
    sns.heatmap(std, ax=ax[1], cmap=_cmap['std'], **kwargs)
    ax[1].set_title('std[corr]')
    sns.heatmap(prob, ax=ax[2], vmin=0, vmax=1, cmap=_cmap['prob'], **kwargs)
    ax[2].set_title('P(|corr|>{})'.format(corr_threshold))
    sns.heatmap(snr, ax=ax[3], center=0, cmap=_cmap['snr'], **kwargs)
    ax[3].set_title('E[corr]/std[corr]')
    for i in range(len(ax)):
        ax[i].set_xticklabels(corr_xarray.coords['algo'].values, rotation=90)
        ax[i].set_yticklabels(corr_xarray.coords['algo'].values, rotation=0)
    if fig is not None:
        fig.tight_layout()
    return ax.base
50 | self._predict = predict 51 | self.data = data 52 | # The build functions pop parameters they use 53 | self.params = params.copy() 54 | self.algos = algos 55 | if factors is None: 56 | factors = pd.DataFrame(index=data.index, columns=[]) 57 | factors.columns.name = 'factor' 58 | 59 | if gains_factors is None: 60 | gains_factors = pd.DataFrame(index=data.index, columns=[]) 61 | gains_factors.columns.name = 'gains_factor' 62 | 63 | # The build functions add items when appropriate 64 | self.coords = { 65 | 'algo': data.columns, 66 | 'time': data.index, 67 | 'factor': factors.columns, 68 | 'gains_factor': gains_factors.columns, 69 | } 70 | # The build functions add items when appropriate 71 | self.dims = {} 72 | self.n_algos = len(data.columns) 73 | self.n_time = len(data.index) 74 | self.n_factors = len(factors.columns) 75 | self.n_gains_factors = len(gains_factors.columns) 76 | self.factors = factors 77 | self.gains_factors = gains_factors 78 | 79 | if (not self._predict 80 | and factors is not None 81 | and not factors.index.equals(data.index)): 82 | raise ValueError('Factors must have the same index as data.') 83 | 84 | if (not self._predict 85 | and gains_factors is not None 86 | and not gains_factors.index.equals(data.index)): 87 | raise ValueError('Gains factors must have the same index as data.') 88 | 89 | self.model = pm.Model() 90 | with self.model: 91 | in_sample = self._build_in_sample() 92 | Bx_log_vlt, Bx_gains = self._build_splines() 93 | 94 | vlt = self._build_volatility(Bx_log_vlt) 95 | 96 | gains = self._build_gains_mu(in_sample) 97 | if self.params.pop('gains_time', False): 98 | gains_time = self._build_gains_time(Bx_gains) 99 | gains = gains + gains_time 100 | 101 | if len(gains_factors.columns) > 0 and not self._predict: 102 | factors_gains = self._build_gains_factors() 103 | gains = gains + factors_gains 104 | 105 | mu = gains * vlt 106 | if len(factors.columns) > 0 and not self._predict: 107 | factors_mu = self._build_returns_factors() 108 | 
mu = mu + factors_mu 109 | 110 | self._build_likelihood(mu, vlt, observed=data.values.T) 111 | 112 | if self.params: 113 | raise ValueError('Unused params: %s' % params.keys()) 114 | 115 | def _build_in_sample(self): 116 | data, algos = self.data, self.algos 117 | is_author_is = np.zeros(data.shape, dtype=np.int8) 118 | for i, algo in enumerate(data): 119 | isnull = pd.isnull(algos.created_at.loc[algo]) 120 | if algo not in algos.index or isnull: 121 | raise ValueError('No `created_at` value for algo %s' % algo) 122 | is_author_is[:, i] = data.index < algos.created_at.loc[algo] 123 | return is_author_is 124 | 125 | def _build_splines(self): 126 | data = self.data 127 | n, k = data.shape 128 | 129 | duration = data.index[-1] - data.index[0] 130 | 131 | # Find knot positions for gains and vlt splines 132 | n_knots_vlt = duration.days // 5 133 | time_vlt = np.linspace(0, 1, n) 134 | Bx_log_vlt = bspline_basis(n_knots_vlt, time_vlt) 135 | Bx_log_vlt = sparse.csr_matrix(Bx_log_vlt) 136 | 137 | n_knots_gains = duration.days // 10 138 | time_gains = np.linspace(0, 1, n) 139 | Bx_gains = bspline_basis(n_knots_gains, time_gains) 140 | Bx_gains = sparse.csr_matrix(Bx_gains) 141 | 142 | Bx_log_vlt = theano.sparse.as_sparse_variable(Bx_log_vlt) 143 | Bx_gains = theano.sparse.as_sparse_variable(Bx_gains) 144 | 145 | self.coords['time_raw_gains'] = list(range(n_knots_gains)) 146 | self.coords['time_raw_vlt'] = list(range(n_knots_vlt)) 147 | 148 | return Bx_log_vlt, Bx_gains 149 | 150 | def _build_log_volatility_mean(self): 151 | self.corr_type = corr_type = self.params.pop('corr_type') 152 | k = self.n_algos 153 | if corr_type == 'diag': 154 | log_vlt_mu = pm.Normal('log_vlt_mu', mu=-6, sd=0.5, shape=k) 155 | elif corr_type == 'dense': 156 | vlt_mu_dist = pm.Lognormal.dist(mu=-2, sd=0.5, shape=k) 157 | chol_cov_packed = pm.LKJCholeskyCov( 158 | 'chol_cov_packed_mu', n=k, eta=2, sd_dist=vlt_mu_dist) 159 | chol_cov = pm.expand_packed_triangular(k, chol_cov_packed) / np.exp(4) 160 
# Methods of ReturnsModelBuilder (the class statement lives outside this chunk).

def _build_log_volatility_mean(self):
    """Build the per-algo mean of log-volatility.

    For `corr_type == 'diag'` this is an independent Normal per algo.
    For `'dense'` it is derived from an LKJ-Cholesky covariance, and the
    covariance/correlation deterministics are registered on the model.
    """
    self.corr_type = corr_type = self.params.pop('corr_type')
    k = self.n_algos
    if corr_type == 'diag':
        log_vlt_mu = pm.Normal('log_vlt_mu', mu=-6, sd=0.5, shape=k)
    elif corr_type == 'dense':
        vlt_mu_dist = pm.Lognormal.dist(mu=-2, sd=0.5, shape=k)
        chol_cov_packed = pm.LKJCholeskyCov(
            'chol_cov_packed_mu', n=k, eta=2, sd_dist=vlt_mu_dist)
        # The /exp(4) rescaling keeps the sampler in a well-behaved range.
        chol_cov = pm.expand_packed_triangular(k, chol_cov_packed) / np.exp(4)
        cov = tt.dot(chol_cov, chol_cov.T)
        variance_mu = tt.diag(cov)
        corr = cov / tt.sqrt(variance_mu[:, None] * variance_mu[None, :])
        pm.Deterministic('chol_cov_mu', chol_cov)
        pm.Deterministic('cov_mu', cov)
        pm.Deterministic('corr_mu', corr)
        # important, add new coordinate
        self.coords['algo_chol'] = pd.RangeIndex(k * (k + 1) // 2)
        self.coords['algo_'] = self.coords['algo']
        self.dims['chol_cov_packed_mu'] = ('algo_chol',)
        self.dims['cov_mu'] = ('algo', 'algo_')
        self.dims['corr_mu'] = ('algo', 'algo_')
        self.dims['chol_cov_mu'] = ('algo', 'algo_')
        log_vlt_mu = pm.Deterministic('log_vlt_mu', tt.log(variance_mu) / 2.)
    else:
        raise NotImplementedError
    self.dims['log_vlt_mu'] = ('algo',)
    return log_vlt_mu


def _build_log_volatility_time(self):
    """Build the time-varying component of log-volatility on the knot grid.

    Returns a (algo, knot) tensor: per-algo scale times a GP-exponential
    smooth over knots.
    """
    k = self.n_algos
    n_knots_vlt = len(self.coords['time_raw_vlt'])

    log_vlt_time_alpha = pm.HalfNormal('log_vlt_time_alpha', sd=0.1)
    log_vlt_time_sd = pm.HalfNormal('log_vlt_time_sd', sd=0.5, shape=k)
    self.dims['log_vlt_time_sd'] = ('algo',)
    log_vlt_time_raw = GPExponential(
        'log_vlt_time_raw', mu=0, alpha=log_vlt_time_alpha,
        sigma=1, shape=(k, n_knots_vlt))
    self.dims['log_vlt_time_raw'] = ('algo', 'time_raw_vlt')
    return log_vlt_time_sd[:, None] * log_vlt_time_raw


def _build_volatility(self, Bx_log_vlt):
    """Combine mean and time components into a (algo, time) volatility."""
    log_vlt_mu = self._build_log_volatility_mean()
    log_vlt_time_raw = self._build_log_volatility_time()
    # Project the knot-level smooth onto the daily grid via the spline basis.
    log_vlt_time = sparse_dot(Bx_log_vlt, log_vlt_time_raw.T).T
    log_vlt = log_vlt_time + log_vlt_mu[:, None]
    pm.Deterministic('log_vlt_time', log_vlt_time)
    pm.Deterministic('log_vlt', log_vlt)
    self.dims['log_vlt_time'] = ('algo', 'time')
    self.dims['log_vlt'] = ('algo', 'time')
    return tt.exp(log_vlt)
# Methods of ReturnsModelBuilder (the class statement lives outside this chunk).

def _build_gains_mu(self, is_author_is):
    """Build the long-term gains term, returning a (time, algo) tensor (.T).

    The `shrinkage` parameter selects one of several priors on per-algo
    gains; each branch also adds an `author_is` adjustment applied only to
    author-in-sample days (`is_author_is` is the 0/1 indicator from
    `_build_in_sample`).
    """
    self.dims.update({
        'gains_theta': ('algo',),
        'gains_eta': ('algo',),
        'author_is': ('algo',),
        'author_is_raw': ('algo',),
        'gains': ('algo',),
        'gains_raw': ('algo',),
    })
    shrinkage = self.params.pop('shrinkage')
    k = self.n_algos

    # Define shrinkage model on the long-term gains.
    if shrinkage == 'exponential-mix':
        gains_sd = pm.HalfNormal('gains_sd', sd=0.2)
        pm.Deterministic('log_gains_sd', tt.log(gains_sd))
        gains_theta = pm.Exponential('gains_theta', lam=1, shape=k)
        gains_eta = pm.Normal('gains_eta', shape=k)

        author_is = pm.Normal('author_is', shape=k)
        gains = gains_sd * gains_theta * gains_eta
        gains = pm.Deterministic('gains', gains)
        gains_all = gains[None, :] + author_is[None, :] * is_author_is
    elif shrinkage == 'exponential':
        gains_sd = pm.HalfNormal('gains_sd', sd=0.1)
        pm.Deterministic('log_gains_sd', tt.log(gains_sd))
        gains_raw = pm.Laplace('gains_raw', mu=0, b=1, shape=k)

        author_is = pm.Normal('author_is', shape=k)
        gains = pm.Deterministic('gains', gains_sd * gains_raw)
        gains_all = gains[None, :] + author_is[None, :] * is_author_is
    elif shrinkage == 'student':
        gains_sd = pm.HalfNormal('gains_sd', sd=0.2)
        pm.Deterministic('log_gains_sd', tt.log(gains_sd))
        gains_raw = pm.StudentT('gains_raw', nu=4, mu=0, sd=1, shape=k)

        author_is = pm.Normal('author_is', shape=k)
        gains = pm.Deterministic('gains', gains_sd * gains_raw)
        gains_all = gains[None, :] + author_is[None, :] * is_author_is
    elif shrinkage == 'normal':
        gains_sd = pm.HalfNormal('gains_sd', sd=0.1)
        pm.Deterministic('log_gains_sd', tt.log(gains_sd))
        gains_mu = pm.Normal('gains_mu', mu=0.05, sd=0.1)
        gains_raw = pm.Normal('gains_raw', shape=k)

        author_is = pm.HalfNormal('author_is', shape=k, sd=0.1)
        gains = pm.Deterministic('gains', gains_sd * gains_raw + gains_mu)
        gains_all = gains[None, :] + author_is[None, :] * is_author_is
    elif shrinkage == 'skew-neg2-normal':
        gains_sd = pm.HalfNormal('gains_sd', sd=0.1)
        pm.Deterministic('log_gains_sd', tt.log(gains_sd))
        gains_mu = pm.Normal('gains_mu', sd=0.1)
        gains_raw = pm.SkewNormal(
            'gains_raw', sd=1, mu=0, alpha=-4, shape=k)

        author_is = pm.Normal('author_is', shape=k, sd=0.4, mu=0.0)
        # NOTE: unlike the other branches, in-sample days REPLACE the
        # long-term gains with author_is rather than adding to them.
        gains = pm.Deterministic('gains', gains_sd * gains_raw + gains_mu)
        gains_all = (
            (1 - is_author_is) * gains[None, :]
            + author_is[None, :] * is_author_is
        )
    elif shrinkage == 'skew-normal':
        gains_sd = pm.HalfNormal('gains_sd', sd=0.1)
        pm.Deterministic('log_gains_sd', tt.log(gains_sd))
        gains_alpha = pm.Normal('gains_alpha', sd=0.3)
        gains_mu = pm.Normal('gains_mu', mu=0.05, sd=0.1)
        gains_raw = pm.SkewNormal(
            'gains_raw', sd=1, mu=0, alpha=gains_alpha, shape=k)

        author_is = pm.Normal('author_is', shape=k)
        gains = pm.Deterministic('gains', gains_sd * gains_raw + gains_mu)
        gains_all = gains[None, :] + author_is[None, :] * is_author_is
    elif shrinkage == 'trace-exponential':
        # Hyperparameters taken from a previous trace.
        mu = self.params.pop('log_gains_sd_trace_mu')
        sd = self.params.pop('log_gains_sd_trace_sd')
        log_gains_sd = pm.Normal('log_gains_sd', mu=mu, sd=sd)
        gains_sd = pm.Deterministic('gains_sd', tt.exp(log_gains_sd))
        gains_raw = pm.Laplace('gains_raw', mu=0, b=1, shape=k)
        author_is = pm.Normal('author_is', shape=k)
        gains = pm.Deterministic('gains', gains_sd * gains_raw)
        gains_all = gains[None, :] + author_is[None, :] * is_author_is
    elif shrinkage == 'trace-normal':
        # Hyperparameters taken from a previous trace.
        mu = self.params.pop('log_gains_sd_trace_mu')
        sd = self.params.pop('log_gains_sd_trace_sd')
        log_gains_sd = pm.Normal('log_gains_sd', mu=mu, sd=sd)
        gains_sd = pm.Deterministic('gains_sd', tt.exp(log_gains_sd))
        gains_raw = pm.Normal('gains_raw', shape=k)
        author_is = pm.Normal('author_is', shape=k)
        gains = pm.Deterministic('gains', gains_sd * gains_raw)
        gains_all = gains[None, :] + author_is[None, :] * is_author_is
    else:
        raise ValueError('Unknown gains model: %s' % shrinkage)

    return gains_all.T


def _build_gains_time(self, Bx_gains):
    """Build the time-varying gains component as a (algo, time) tensor.

    A GP-exponential smooth over the gains knot grid, scaled per algo and
    projected to daily resolution with the sparse spline basis.
    """
    self.dims.update({
        'gains_time_sd_raw': ('algo',),
        'gains_time_sd': ('algo',),
        'gains_time_raw': ('algo', 'time_raw_gains'),
        'gains_time': ('algo', 'time'),
    })
    k = self.n_algos
    n_knots_gains = len(self.coords['time_raw_gains'])

    gains_time_alpha = pm.HalfNormal('gains_time_alpha', sd=0.1)
    if 'log_gains_time_sd_sd_trace_mu' in self.params:
        # Hyperparameters taken from a previous trace.
        mu = self.params.pop('log_gains_time_sd_sd_trace_mu')
        sd = self.params.pop('log_gains_time_sd_sd_trace_sd')
        log_gains_time_sd_sd = pm.Normal(
            'log_gains_time_sd_sd', mu=mu, sd=sd)
        gains_time_sd_sd = pm.Deterministic(
            'gains_time_sd_sd', tt.exp(log_gains_time_sd_sd))
    else:
        gains_time_sd_sd = pm.HalfStudentT(
            'gains_time_sd_sd', nu=3, sd=0.1)
        pm.Deterministic('log_gains_time_sd_sd', tt.log(gains_time_sd_sd))
    gains_time_sd_raw = pm.HalfNormal('gains_time_sd_raw', shape=k)
    gains_time_sd = pm.Deterministic(
        'gains_time_sd', gains_time_sd_sd * gains_time_sd_raw)
    gains_time_raw = GPExponential(
        'gains_time_raw', mu=0, alpha=gains_time_alpha,
        sigma=1, shape=(k, n_knots_gains))
    gains_time = gains_time_sd[:, None] * gains_time_raw
    gains_time = sparse_dot(Bx_gains, gains_time.T).T

    pm.Deterministic('gains_time', gains_time)
    return gains_time
# Methods of ReturnsModelBuilder (the class statement lives outside this chunk).

def _build_likelihood(self, mu, sd, observed):
    """Attach the observation likelihood to the model.

    `mu` and `sd` have shape (algo, time); the multivariate likelihood
    needs (time, algo), hence the transposes in the 'dense' branch.
    """
    corr_type = self.corr_type
    if corr_type == 'diag':
        NormalNonZero('y', mu=mu, sd=sd, observed=observed)
    elif corr_type == 'dense':
        ScaledSdMvNormalNonZero(
            'y', mu=mu.T,
            chol=self.model.named_vars['chol_cov_mu'],
            scale_sd=tt.exp(self.model.named_vars['log_vlt_time'].T),
            observed=observed.T)
    else:
        raise NotImplementedError
    if self._predict:
        # Merged the two identical `if self._predict` blocks; the
        # registration order (mu then vlt) is preserved.
        self.dims['mu'] = ('algo', 'time')
        pm.Deterministic('mu', mu)
        self.dims['vlt'] = ('algo', 'time')
        pm.Deterministic('vlt', sd)


def _build_gains_factors(self):
    """Build the gains-factor term, a (time, algo) array added to gains."""
    self.dims.update({
        'gains_factor_algo': ('gains_factor', 'algo'),
        'gains_factor_algo_raw': ('gains_factor', 'algo'),
        'gains_factor_algo_sd': ('gains_factor',),
    })
    gains_factors = self.gains_factors
    n_algos, n_gains_factors = self.n_algos, self.n_gains_factors
    sd = pm.HalfNormal('gains_factor_algo_sd', sd=0.4,
                       shape=n_gains_factors)
    raw = pm.StudentT('gains_factor_algo_raw', nu=7, mu=0, sd=1,
                      shape=(n_gains_factors, n_algos))
    vals = sd[:, None] * raw
    pm.Deterministic('gains_factor_algo', vals)
    return (vals[:, None, :] * gains_factors.values.T[:, :, None]).sum(0).T


def _build_returns_factors(self):
    """Build the returns-factor term, a (time, algo) array added to mu."""
    self.dims.update({
        'factor_algo': ('factor', 'algo'),
    })
    factors = self.factors
    n_algos, n_factors = self.n_algos, self.n_factors
    factor_algo = pm.StudentT('factor_algo', nu=3, mu=0, sd=2,
                              shape=(n_factors, n_algos))
    return (factor_algo[:, None, :]
            * factors.values.T[:, :, None]).sum(0).T


def make_predict_function(self, factor_scale_halflife=None):
    """Compile and return a `predict(point) -> xr.Dataset` closure.

    The closure resamples the time-varying latent variables, evaluates
    mu/volatility through a compiled theano function, and simulates daily
    returns (plus optional factor returns scaled by an EWM std estimate).

    Raises
    ------
    ValueError
        If the model was not built with `predict=True`.
    """
    if not self._predict:
        raise ValueError('Model was not built for predictions.')

    if factor_scale_halflife is not None:
        factor_scales = (self.factors
                         .ewm(halflife=factor_scale_halflife)
                         .std()
                         .iloc[-1])

    n_gains = len(self.coords['time_raw_gains'])
    n_vlt = len(self.coords['time_raw_vlt'])
    n_algos = self.n_algos
    resample_vars = {
        'log_vlt_time_raw': lambda: np.random.randn(n_algos, n_vlt),
        'gains_time_raw': lambda: np.random.randn(n_algos, n_gains),
    }

    if self.corr_type == 'diag':
        compute_vars = ['mu', 'vlt']
    elif self.corr_type == 'dense':
        compute_vars = ['mu', 'log_vlt_time', 'chol_cov_mu']
    else:
        # BUG FIX: corrected message typo ("Unkown").
        raise NotImplementedError('Unknown correlation type.')

    delete_vars = ['gains_time', 'log_vlt', 'mu', 'vlt', 'log_vlt_time']
    input_vars = [var.name for var in self.model.unobserved_RVs
                  if (not var.name.endswith('_')
                      and var.name not in delete_vars)]
    outputs = [getattr(self.model, var) for var in compute_vars]
    inputs = [getattr(self.model, var) for var in input_vars]
    # Downcast inputs if needed.
    vals_func = theano.function(inputs, outputs, on_unused_input='ignore',
                                allow_input_downcast=True)

    algos = self.coords['algo']
    factors = self.coords['factor']
    time = self.coords['time']

    def predict(point):
        if factor_scale_halflife is not None:
            factor_exposures = point['factor_algo']

        for var, draw in resample_vars.items():
            point[var] = draw()
        point = {var: point[var] for var in input_vars}

        if self.corr_type == 'diag':
            mu, sd = vals_func(**point)
            returns = stats.norm(loc=mu, scale=sd).rvs()
        elif self.corr_type == 'dense':
            mu, log_vlt_time, chol = vals_func(**point)
            returns = np.random.randn(len(algos), len(time))
            returns = np.dot(chol, returns)
            returns[...] *= np.exp(log_vlt_time)
            returns[...] += mu

        if factor_scale_halflife is not None and len(factor_scales) > 0:
            factor_rets = np.random.randn(len(factor_scales), len(time))
            factor_rets = factor_rets * factor_scales[:, None]
            factor_rets = factor_rets[None, :, :] * factor_exposures.T[:, :, None]
            factor_rets = factor_rets.sum(1)
            returns[...] += factor_rets
            exposures = xr.DataArray(factor_exposures, coords=[factors, algos])
        else:
            exposures = None

        returns = xr.DataArray(returns, coords=[algos, time])

        return xr.Dataset({'returns': returns, 'exposures': exposures})

    return predict
# Methods of ReturnsModelResult (the class statement lives outside this chunk).

def plot_prob(self,
              algos=None,
              ax=None,
              sort=True,
              rope=False,
              rope_upper=.05,
              rope_lower=None,
              title=None):
    """Plot per-algo posterior probabilities as a horizontal dot chart.

    With `rope=True` plots P(gains within the ROPE interval), otherwise
    P(gains > 0). `algos` optionally restricts the plotted subset.
    """
    if rope:
        prob_func = partial(self.gains_rope, rope_upper, lower=rope_lower)
    else:
        prob_func = self.gains_pos_prob

    vals = prob_func()
    if algos is not None:
        vals = vals.loc[algos]

    xlabel = 'P(gains ~ 0)' if rope else 'P(gains > 0)'

    return bayesalpha.plotting.plot_horizontal_dots(
        vals,
        sort=sort,
        ax=ax,
        xlabel=xlabel,
        title=title
    )


def plot_corr(self, algos=None, corr_threshold=.33,
              ax=None, cmap=None, **heatmap_kwargs):
    """Plot the posterior correlation heatmaps for `corr_mu`."""
    corr = self.trace['corr_mu']
    if algos is not None:
        corr = corr.loc[dict(algo=algos, algo_=algos)]
    return bayesalpha.plotting.plot_correlations(
        corr_xarray=corr, ax=ax, corr_threshold=corr_threshold,
        cmap=cmap, **heatmap_kwargs)


def gains_pos_prob(self):
    """Posterior probability of positive gains, as a series per algo."""
    positive = self.trace['gains'] > 0
    return (positive
            .mean(['sample', 'chain'])
            .to_series()
            .rename('gains_pos_prob'))


def gains_rope(self, upper, lower=None):
    """Posterior probability that gains lie in (lower, upper).

    `lower` defaults to `-upper` (a symmetric ROPE).
    """
    if lower is None:
        lower = -upper
    inside = (self.trace['gains'] > lower) & (self.trace['gains'] < upper)
    return (inside
            .mean(['sample', 'chain'])
            .to_series()
            .rename('gains_rope'))


def _points(self, include_transformed=True):
    """Yield ((chain, sample), point-dict) for every posterior draw."""
    for chain in self.trace.chain:
        for sample in self.trace.sample:
            vals = self.trace.sel(chain=chain, sample=sample)
            data = {}
            for var in self.trace.data_vars:
                # Transformed variables are stored with a '_' prefix.
                if not include_transformed and var.startswith('_'):
                    continue
                data[var] = vals[var].values
            yield (chain, sample), data


def _random_point_iter(self, include_transformed=True):
    """Yield point-dicts drawn uniformly at random from the trace, forever."""
    while True:
        chain = np.random.randint(len(self.trace.chain))
        sample = np.random.randint(len(self.trace.sample))
        data = {}
        vals = self.trace.isel(chain=chain, sample=sample)
        for var in self.trace.data_vars:
            if not include_transformed and var.startswith('_'):
                continue
            data[var] = vals[var].values
        yield data


def rebuild_model(self, data=None, algos=None, factors=None,
                  gains_factors=None, **extra_params):
    """Return a ReturnsModelBuilder that recreates the original model."""
    if data is None:
        data = self.trace._data.to_pandas().copy()
    if algos is None:
        algos = self.trace._algos.to_pandas().copy()
    if factors is None:
        factors = self.trace._factors.to_pandas().copy()
    if gains_factors is None:
        gains_factors = self.trace._gains_factors.to_pandas().copy()
    params = self.params.copy()
    params.update(extra_params)
    return ReturnsModelBuilder(data, algos, factors=factors,
                               gains_factors=gains_factors, **params)
# Methods of ReturnsModelResult (the class statement lives outside this chunk).

def _make_prediction_model(self, n_days):
    """Rebuild the model in prediction mode over `n_days` business days.

    The prediction index starts at the last trace timestamp, returns are
    zero-filled, and every algo's `created_at` is set to the start so no
    day counts as author-in-sample.
    """
    start = pd.Timestamp(self.trace.time[-1].values)
    index = pd.date_range(start, periods=n_days, freq='B', name='time')
    columns = self.trace.algo

    data = pd.DataFrame(index=index, columns=columns)
    data.values[...] = 0.
    algos = self.trace.algo.to_pandas().copy().to_frame()
    algos['created_at'] = start
    return self.rebuild_model(data, algos, predict=True)


def predict(self, n_days, n_repl=None, factor_scale_halflife=None):
    """Simulate future daily returns for every posterior draw.

    Returns an xr.Dataset with 'returns' and 'exposures'; with `n_repl`
    an extra 'sim_repl' dimension holds repeated simulations per draw.
    """
    model = self._make_prediction_model(n_days)
    predict_func = model.make_predict_function(factor_scale_halflife)
    coords = [self.trace.chain, self.trace.sample,
              self.trace.algo, model.coords['time']]
    coords_exposures = [self.trace.chain, self.trace.sample,
                        self.trace.factor, self.trace.algo]
    if n_repl is not None:
        repl_coord = pd.RangeIndex(n_repl, name='sim_repl')
        coords.append(repl_coord)
        coords_exposures.append(repl_coord)
    shape = [len(vals) for vals in coords]

    returns_data = np.zeros(shape)
    exposure_data = np.zeros([len(v) for v in coords_exposures])
    returns = xr.DataArray(returns_data, coords=coords)
    exposures = xr.DataArray(exposure_data, coords=coords_exposures)
    for (chain, sample), point in self._points(include_transformed=False):
        if n_repl is None:
            prediction = predict_func(point)
            returns.loc[chain, sample, :, :] = prediction.returns
            exposures.loc[chain, sample, :, :] = prediction.exposures
        else:
            for repl in repl_coord:
                prediction = predict_func(point)
                returns.loc[chain, sample, :, :, repl] = prediction.returns
                exposures.loc[chain, sample, :, :, repl] = prediction.exposures
    return xr.Dataset({'returns': returns, 'exposures': exposures})


def predict_value(self, n_days, n_repl=None, factor_scale_halflife=None):
    """Simulate cumulative final returns over `n_days` per posterior draw."""
    model = self._make_prediction_model(n_days)
    predict_func = model.make_predict_function(factor_scale_halflife)
    coords = [self.trace.chain, self.trace.sample, self.trace.algo]
    if n_repl is not None:
        repl_coord = pd.RangeIndex(n_repl, name='sim_repl')
        coords.append(repl_coord)
    shape = [len(vals) for vals in coords]

    prediction_data = np.zeros(shape)
    predictions = xr.DataArray(prediction_data, coords=coords)
    for (chain, sample), point in self._points(include_transformed=False):
        if n_repl is None:
            returns = predict_func(point).to_pandas().T
            cum_returns = returns.apply(empyrical.cum_returns_final)
            predictions.loc[chain, sample, :] = cum_returns
        else:
            for repl in repl_coord:
                returns = predict_func(point).to_pandas().T
                cum_returns = returns.apply(empyrical.cum_returns_final)
                predictions.loc[chain, sample, :, repl] = cum_returns
    return predictions


def prediction_iter(self, n_days):
    """Yield simulated predictions for randomly chosen posterior draws."""
    model = self._make_prediction_model(n_days)
    predict_func = model.make_predict_function()
    for point in self._random_point_iter(include_transformed=False):
        yield predict_func(point)
# Methods of Optimizer (the class statement lives outside this chunk).

def __init__(self, predictions, utility='isoelastic', lmda=None,
             factor_penalty=None, max_weights=None, exposure_limit=None,
             exposure_penalty=None):
    """Compute a portfolio based on model predictions.

    Parameters
    ----------
    predictions : xr.Dataset
        Predictions as returned by fit.predict_value
    utility : ['isoelastic', 'exp'], default='isoelastic'
        The utility function to use.
    lmda : float
        Risk aversion parameter. This value can be overridden
        by passing a different value to `solve`.
    factor_penalty : float
        Add a penalty during the optimization for portfolios that have
        exposure to risk factors. This uses the estimates of risk exposure
        from the regression in bayesalpha. High values mean that we are
        willing to take hits on the predicted portfolio in order to
        decrease risk exposure.
    max_weights : list
        A maximum weight for each algo.
    exposure_limit : float
        A hard limit for risk exposures of each weighted algo in the
        portfolio. This uses the position based risk exposure passed in as
        `predictions.position_exposures`, and limits the maximum risk
        exposure of each algo over that time period.
    exposure_penalty : float
        This also uses the position based exposure, but adds a quadratic
        penalty term during optimization instead of a hard limit.
    """
    if cvxpy is None:
        raise RuntimeError('Optimization requires cvxpy>=1.0')
    self._predictions = predictions
    self._problem = self._build_problem(lmda, utility, factor_penalty,
                                        exposure_limit, exposure_penalty)
    if max_weights is None:
        max_weights = [1] * len(predictions.algo)
    self._max_weights = max_weights


def _build_problem(self, lmda_vals, utility, factor_penalty=None,
                   exposure_limit=None, exposure_penalty=None):
    """Construct the cvxpy minimization problem and cache its parameters.

    Stores handles to the cvxpy Parameters/Variables on `self` so that
    `solve` can update them between runs.
    """
    n_predict = (len(self._predictions.chain)
                 * len(self._predictions.sample)
                 * len(self._predictions.sim_repl))
    n_algos = len(self._predictions.algo)
    n_factors = len(self._predictions.factor)
    lmda = cvxpy.Parameter(name='lambda', nonneg=True)
    returns = cvxpy.Parameter(shape=(n_predict, n_algos), name='returns')
    max_weights = cvxpy.Parameter(shape=(n_algos), name='max_weights')
    weights = cvxpy.Variable(shape=(n_algos,), name='weights')
    portfolio_returns = returns * weights
    if utility == 'exp':
        risk = cvxpy.log_sum_exp(-lmda * portfolio_returns)
    elif utility == 'isoelastic':
        risk = cvxpy.log_sum_exp(-lmda * cvxpy.log(portfolio_returns))
    else:
        raise ValueError('Unknown utility: %s' % utility)

    if factor_penalty is not None:
        # Quadratic penalty on exposure to each estimated risk factor.
        penalty = cvxpy.Parameter(shape=(), name='factor_penalty', nonneg=True)
        self._factor_penalty_p = penalty
        for i in range(n_factors):
            exposures = cvxpy.Parameter(shape=(n_predict, n_algos),
                                        name='exposures_%s' % i)
            exposures.value = self._predictions.exposures.isel(factor=i).stack(
                prediction=('chain', 'sample', 'sim_repl')).values.T
            risk_factor = cvxpy.sum_squares(exposures * weights)
            risk = risk + penalty * risk_factor

    if exposure_penalty is not None:
        # Quadratic penalty on historical position-based exposures,
        # rescaled so it is comparable to the prediction-based terms.
        penalty = cvxpy.Parameter(shape=(), name='exposure_penalty',
                                  nonneg=True)
        self._exposure_penalty_p = penalty
        exposure_data = self._predictions.position_exposures
        n_history = len(exposure_data.time_hist)
        exposures = cvxpy.Parameter(shape=(n_history, n_algos),
                                    name='position_exposures')
        exposures.value = exposure_data.values
        risk_factor = cvxpy.sum_squares(exposures * weights)
        risk = risk + penalty * risk_factor * n_predict / n_history

    # Long-only, fully invested, per-algo capped weights.
    constraints = [cvxpy.sum(weights) == 1,
                   weights >= 0,
                   weights <= max_weights]
    if exposure_limit is not None:
        limit = cvxpy.Parameter(name='exposure_limit', nonneg=True)
        self._exposure_limit = limit
        limit.value = exposure_limit
        exposures_lower = cvxpy.Parameter(shape=(n_algos,),
                                          name='exposures_lower')
        exposures_upper = cvxpy.Parameter(shape=(n_algos,),
                                          name='exposures_upper')
        exposure_data = self._predictions.position_exposures
        exposures_lower.value = exposure_data.sel(quantile='lower').values
        exposures_upper.value = exposure_data.sel(quantile='upper').values
        lower = cvxpy.sum(weights * exposures_lower) >= -limit
        upper = cvxpy.sum(weights * exposures_upper) <= limit
        constraints.extend([lower, upper])

    problem = cvxpy.Problem(cvxpy.Minimize(risk), constraints)

    if lmda_vals is not None:
        lmda.value = lmda_vals
    predictions = self._predictions.cum_final.stack(
        prediction=('chain', 'sample', 'sim_repl'))
    # +1 because we want the final wealth, when we start with
    # a unit of money.
    returns.value = predictions.values.T + 1

    self._lmda_p = lmda
    self._factor_weights_p = None
    self._weights_v = weights
    self._max_weights_v = max_weights
    return problem


def solve(self, lmda=None, factor_penalty=None, max_weights=None,
          exposure_limit=None, exposure_penalty=None, **kwargs):
    """Find the optimal weights for the portfolio.

    Any argument left as None keeps the value set at construction time.
    Extra keyword arguments are forwarded to `cvxpy.Problem.solve`.

    Raises
    ------
    ValueError
        If the solver does not reach an optimal solution.
    """
    if lmda is not None:
        self._lmda_p.value = lmda
    if exposure_penalty is not None:
        self._exposure_penalty_p.value = exposure_penalty
    if factor_penalty is not None:
        self._factor_penalty_p.value = factor_penalty
    if exposure_limit is not None:
        self._exposure_limit.value = exposure_limit
    if max_weights is not None:
        self._max_weights_v.value = max_weights
    else:
        self._max_weights_v.value = np.array(self._max_weights)
    self._problem.solve(**kwargs)
    if self._problem.status != 'optimal':
        raise ValueError('Optimization did not converge.')
    weights = self._weights_v.value.ravel().copy()
    algos = self._predictions.algo
    return xr.DataArray(weights, coords=[algos], name='weights')
773 | save_data : bool 774 | Whether to store the dataset in the result object. 775 | seed : int 776 | Seed for random number generation in PyMC3. 777 | """ 778 | if sampler_type not in {'mcmc', 'vi'}: 779 | raise ValueError("sampler_type not in {'mcmc', 'vi'}") 780 | _check_data(data) 781 | params_ = _PARAM_DEFAULTS.copy() 782 | params_.update(params) 783 | params = params_ 784 | 785 | if algos is None: 786 | algos = pd.DataFrame(columns=['created_at'], index=data.columns) 787 | algos['created_at'] = data.index[0] 788 | 789 | if sampler_args is None: 790 | sampler_args = {} 791 | else: 792 | sampler_args = sampler_args.copy() 793 | if seed is None: 794 | seed = int(random.getrandbits(31)) 795 | else: 796 | seed = int(seed) 797 | if 'random_seed' in sampler_args: 798 | raise ValueError('Can not specify `random_seed`.') 799 | sampler_args['random_seed'] = seed 800 | 801 | builder = ReturnsModelBuilder(data, algos, factors=factors, 802 | gains_factors=gains_factors, **params) 803 | model, coords, dims = builder.model, builder.coords, builder.dims 804 | 805 | timestamp = datetime.isoformat(datetime.now()) 806 | with model: 807 | args = {} if sampler_args is None else sampler_args 808 | with warnings.catch_warnings(record=True) as warns: 809 | if sampler_type == 'mcmc': 810 | trace = pm.sample(**args) 811 | else: 812 | trace = pm.fit(**args).sample(args.get('draws', 500)) 813 | if warns: 814 | warnings.warn('Problems during sampling. 
Inspect `result.warnings`.') 815 | trace = to_xarray(trace, coords, dims) 816 | trace.attrs['params'] = json.dumps(params) 817 | trace.attrs['timestamp'] = timestamp 818 | trace.attrs['warnings'] = json.dumps([str(warn) for warn in warns]) 819 | trace.attrs['seed'] = seed 820 | trace.attrs['model-version'] = get_versions()['version'] 821 | trace.attrs['model-type'] = RETURNS_MODEL_TYPE 822 | 823 | if save_data: 824 | trace.coords['algodata'] = algos.columns 825 | trace['_data'] = (('time', 'algo'), data) 826 | try: 827 | trace['_algos'] = (('algo', 'algodata'), algos.loc[data.columns]) 828 | except ValueError: 829 | warnings.warn('Could not save algo metadata, skipping.') 830 | try: 831 | trace['_factors'] = (('time', 'factor'), builder.factors) 832 | except ValueError: 833 | warnings.warn('Could not save algo metadata, skipping.') 834 | try: 835 | trace['_gains_factors'] = (('time', 'gains_factor'), 836 | builder.gains_factors) 837 | except ValueError: 838 | warnings.warn('Could not save algo metadata, skipping.') 839 | return ReturnsModelResult(trace) 840 | 841 | 842 | _TRACE_PARAM_NAMES = { 843 | 'normal': ( 844 | 'trace-normal', 845 | ['log_gains_sd', 'log_gains_time_sd_sd']), 846 | 'exponential': ( 847 | 'trace-exponential', 848 | ['log_gains_sd', 'log_gains_time_sd_sd'] 849 | ) 850 | } 851 | 852 | 853 | _DEFAULT_SHRINKAGE = { 854 | 'default': ( 855 | 'trace-exponential', 856 | { 857 | 'log_gains_sd': (-3.5, 0.2), 858 | 'log_gains_time_sd_sd': (-3, 1), 859 | } 860 | ) 861 | } 862 | 863 | 864 | def fit_returns_single(data, algos=None, population_fit=None, 865 | sampler_args=None, seed=None, factors=None, **params): 866 | """Fit the model to algorithms and use an earlier run for hyperparameters. 867 | 868 | Use a model fit with a large number of algorithms to get estimates 869 | for global parameters -- for example those that inform about the 870 | distribution of gain parameters. 
871 | 872 | Parameters 873 | ---------- 874 | data : pd.DataFrame 875 | The dataframe containing the returns. Columns are the different 876 | algos, rows the days. If an algorithm doesn't do anything on a day, 877 | that value can be NaN. 878 | algos : pd.DataFrame 879 | Dataframe containing metadata about the algorithms. It must contain 880 | a column 'created_at', with the dates when the algorithm was created. 881 | All later daily returns are interpreted as author-out-of-sample. 882 | population_fit : FitResult 883 | The result of a previous model fit using `fit_returns_population`. If 884 | this is not specified, all necessary parameters have to be specified as 885 | keyword arguments. 886 | sampler_args : dict 887 | Additional arguments for `pm.sample` 888 | seed : int 889 | Seed for random numbers during sampling. 890 | """ 891 | params = params.copy() 892 | 893 | if population_fit is None: 894 | trace_shrinkage = None 895 | trace_corr = None 896 | else: 897 | trace_shrinkage = population_fit.params['shrinkage'] 898 | trace_corr = population_fit.params['corr'] 899 | 900 | shrinkage = params.pop('shrinkage', trace_shrinkage) 901 | if shrinkage is None: 902 | raise ValueError('Either `shrinkage` or `population_fit` has to be ' 903 | 'specified.') 904 | if trace_shrinkage is not None and shrinkage != trace_shrinkage: 905 | raise ValueError('Can not use different shrinkage type in population ' 906 | 'and single algo fit.') 907 | 908 | if shrinkage in _TRACE_PARAM_NAMES: 909 | shrinkage, param_names = _TRACE_PARAM_NAMES[shrinkage] 910 | elif shrinkage in _DEFAULT_SHRINKAGE: 911 | warnings.warn('The default shrinkage is only a preview. 
The values ' 912 | '*will* change in the future.') 913 | shrinkage, param_defaults = _DEFAULT_SHRINKAGE[shrinkage] 914 | param_names = param_defaults.keys() 915 | for name in param_defaults: 916 | mu, sd = param_defaults[name] 917 | params.setdefault(name + '_trace_mu', float(mu)) 918 | params.setdefault(name + '_trace_sd', float(sd)) 919 | else: 920 | raise ValueError('Unknown shrinkage %s' % shrinkage) 921 | 922 | for name in param_names: 923 | name_mu = name + '_trace_mu' 924 | name_sd = name + '_trace_sd' 925 | if name_mu in params and name_sd in params: 926 | continue 927 | if population_fit is None: 928 | raise ValueError('population_fit or %s and %s must be specified.' 929 | % (name_mu, name_sd)) 930 | trace_vals = population_fit.trace[name] 931 | params.setdefault(name_mu, float(trace_vals.mean())) 932 | params.setdefault(name_sd, float(trace_vals.std())) 933 | 934 | fit = fit_returns_population(data, algos=algos, sampler_args=sampler_args, 935 | seed=seed, shrinkage=shrinkage, 936 | factors=factors, **params) 937 | if population_fit is not None: 938 | parent = population_fit.trace 939 | fit.trace.attrs['parent-params'] = parent.attrs['params'] 940 | fit.trace.attrs['parent-seed'] = parent.attrs['seed'] 941 | fit.trace.attrs['parent-version'] = parent.attrs['model-version'] 942 | fit.trace.attrs['parent-type'] = RETURNS_MODEL_TYPE 943 | fit.trace.attrs['parent-id'] = population_fit.id 944 | return fit 945 | 946 | 947 | def _check_data(data): 948 | if data.count().min() < 100: 949 | warnings.warn('The dataset contains algos with fewer than 100 ' 950 | 'observations.') 951 | if not data.index.dtype_str.startswith('datetime'): 952 | raise ValueError('Index of dataset must have a datetime dtype') 953 | if (np.abs(data) > 0.2).any().any(): 954 | raise ValueError('Dataset contains unrealistically large returns.') 955 | if (~np.isfinite(data.fillna(0.))).any().any(): 956 | raise ValueError('Dataset contains inf.') 957 | 
--------------------------------------------------------------------------------
/bayesalpha/serialize.py:
--------------------------------------------------------------------------------
import xarray as xr
import numpy as np


def to_xarray(trace, coords, dims):
    """Convert a pymc3 trace to an xarray dataset.

    Parameters
    ----------
    trace : pymc3 trace
    coords : dict
        A dictionary containing the values that are used as index. The key
        is the name of the dimension, the values are the index values.
    dims : dict[str, Tuple(str)]
        A mapping from pymc3 variables to a tuple corresponding to
        the shape of the variable, where the elements of the tuples are
        the names of the coordinate dimensions.

    Returns
    -------
    xr.Dataset
        One DataArray per (non-transformed) trace variable, each with
        leading dimensions ('chain', 'sample').

    Example
    -------
    ::

        coords = {
            'subject': ['Peter', 'Hans'],
            'time': [Timestamp('2017-01-20'), Timestamp('2017-01-21')],
            'treatment': ['sorafenib', 'whatever']
        }
        dims = {
            'subject_mu': ('subject',),
            'effect': ('treatment',),
            'interaction': ('time', 'treatment'),
        }
    """
    # Copy so the caller's dict is not mutated when sample/chain are added.
    coords = coords.copy()
    coords['sample'] = list(range(len(trace)))
    coords['chain'] = list(range(trace.nchains))

    # Wrap each coordinate in an IndexVariable keyed by its own name.
    coords_ = {}
    for key, vals in coords.items():
        coords_[key] = xr.IndexVariable((key,), data=vals)
    coords = coords_

    data = xr.Dataset(coords=coords)
    for key in trace.varnames:
        # Skip names ending in '_' (presumably pymc3's transformed
        # variables -- TODO confirm the intended convention).
        if key.endswith('_'):
            continue
        # Every variable gets (chain, sample) as leading dimensions; any
        # extra dimensions come from the caller-supplied `dims` mapping.
        dims_str = ('chain', 'sample')
        if key in dims:
            dims_str = dims_str + dims[key]
        # combine=False keeps one array per chain, so the stacked result
        # has shape (chain, sample, ...).
        vals = trace.get_values(key, combine=False, squeeze=False)
        vals = np.array(vals)
        data[key] = xr.DataArray(vals, {v: coords[v] for v in dims_str}, dims=dims_str)

    return data


def xarray_hash():
    # Placeholder -- not implemented yet.
    pass
--------------------------------------------------------------------------------
/docs/bayesalpha.gif:
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/quantopian/bayesalpha/676f4f194ad20211fd040d3b0c6e82969aafb87e/docs/bayesalpha.gif -------------------------------------------------------------------------------- /examples/author_model_example.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Author Model Demo\n", 8 | "\n", 9 | "This notebook demonstrates how to use the author model from the notebook. To demonstrate, we analyze contest author data.\n", 10 | "\n", 11 | "Theoretically, it is as simple as:\n", 12 | "\n", 13 | "```python\n", 14 | "import pandas as pd\n", 15 | "import bayesalpha as ba\n", 16 | "data = pd.read_csv('foo.csv')\n", 17 | "ba.fit_authors(data)\n", 18 | "```" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 1, 24 | "metadata": { 25 | "scrolled": true 26 | }, 27 | "outputs": [ 28 | { 29 | "name": "stderr", 30 | "output_type": "stream", 31 | "text": [ 32 | "/Users/georgeho/anaconda3/lib/python3.6/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", 33 | " from ._conv import register_converters as _register_converters\n", 34 | "/Users/georgeho/anaconda3/lib/python3.6/site-packages/empyrical/utils.py:32: UserWarning: Unable to import pandas_datareader. Suppressing import error and continuing. 
All data reading functionality will raise errors; but has been deprecated and will be removed in a later version.\n", 35 | " warnings.warn(msg)\n" 36 | ] 37 | } 38 | ], 39 | "source": [ 40 | "'''\n", 41 | "When importing bayesalpha, if you get this error message:\n", 42 | "\n", 43 | " WARNING (theano.configdefaults): install mkl with `conda install mkl-service`: No module named 'mkl'\n", 44 | " \n", 45 | "It means that you don't have the low-level mkl linear algebra package.\n", 46 | "This means PyMC3 will run (significantly) slower.\n", 47 | "Research team usually uses conda, since that ships with mkl.\n", 48 | "'''\n", 49 | "\n", 50 | "import pandas as pd\n", 51 | "import bayesalpha as ba" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 2, 57 | "metadata": {}, 58 | "outputs": [], 59 | "source": [ 60 | "# The data _must_ look like this!\n", 61 | "# Column names must match too. \n", 62 | "\n", 63 | "sharpes = pd.read_csv('../tests/test_data/author_model_test_sharpes.csv', index_col=0)\n", 64 | "returns = pd.read_csv('../tests/test_data/author_model_test_returns.csv', index_col=0)" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 3, 70 | "metadata": { 71 | "scrolled": false 72 | }, 73 | "outputs": [ 74 | { 75 | "data": { 76 | "text/html": [ 77 | "
\n", 78 | "\n", 91 | "\n", 92 | " \n", 93 | " \n", 94 | " \n", 95 | " \n", 96 | " \n", 97 | " \n", 98 | " \n", 99 | " \n", 100 | " \n", 101 | " \n", 102 | " \n", 103 | " \n", 104 | " \n", 105 | " \n", 106 | " \n", 107 | " \n", 108 | " \n", 109 | " \n", 110 | " \n", 111 | " \n", 112 | " \n", 113 | " \n", 114 | " \n", 115 | " \n", 116 | " \n", 117 | " \n", 118 | " \n", 119 | " \n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | " \n", 144 | "
meta_user_idmeta_algorithm_idmeta_code_idmeta_trading_dayssharpe_ratio
0aaaaaa111aaa111_0163-1.164508
1aaaaaa111aaa111_1960.593194
2aaaaaa111aaa111_2232-1.164254
3aaaaaa111aaa111_31180.278070
4aaaaaa111aaa111_42201.041695
\n", 145 | "
" 146 | ], 147 | "text/plain": [ 148 | " meta_user_id meta_algorithm_id meta_code_id meta_trading_days sharpe_ratio\n", 149 | "0 aaa aaa111 aaa111_0 163 -1.164508\n", 150 | "1 aaa aaa111 aaa111_1 96 0.593194\n", 151 | "2 aaa aaa111 aaa111_2 232 -1.164254\n", 152 | "3 aaa aaa111 aaa111_3 118 0.278070\n", 153 | "4 aaa aaa111 aaa111_4 220 1.041695" 154 | ] 155 | }, 156 | "execution_count": 3, 157 | "metadata": {}, 158 | "output_type": "execute_result" 159 | } 160 | ], 161 | "source": [ 162 | "sharpes.head()" 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": 4, 168 | "metadata": { 169 | "scrolled": false 170 | }, 171 | "outputs": [ 172 | { 173 | "data": { 174 | "text/html": [ 175 | "
\n", 176 | "\n", 189 | "\n", 190 | " \n", 191 | " \n", 192 | " \n", 193 | " \n", 194 | " \n", 195 | " \n", 196 | " \n", 197 | " \n", 198 | " \n", 199 | " \n", 200 | " \n", 201 | " \n", 202 | " \n", 203 | " \n", 204 | " \n", 205 | " \n", 206 | " \n", 207 | " \n", 208 | " \n", 209 | " \n", 210 | " \n", 211 | " \n", 212 | " \n", 213 | " \n", 214 | " \n", 215 | " \n", 216 | " \n", 217 | " \n", 218 | " \n", 219 | " \n", 220 | " \n", 221 | " \n", 222 | " \n", 223 | " \n", 224 | " \n", 225 | " \n", 226 | " \n", 227 | " \n", 228 | " \n", 229 | " \n", 230 | " \n", 231 | " \n", 232 | " \n", 233 | " \n", 234 | " \n", 235 | " \n", 236 | " \n", 237 | " \n", 238 | " \n", 239 | " \n", 240 | " \n", 241 | " \n", 242 | " \n", 243 | " \n", 244 | " \n", 245 | " \n", 246 | " \n", 247 | " \n", 248 | " \n", 249 | " \n", 250 | " \n", 251 | " \n", 252 | " \n", 253 | " \n", 254 | " \n", 255 | " \n", 256 | " \n", 257 | " \n", 258 | " \n", 259 | " \n", 260 | " \n", 261 | " \n", 262 | " \n", 263 | " \n", 264 | " \n", 265 | " \n", 266 | " \n", 267 | " \n", 268 | " \n", 269 | " \n", 270 | " \n", 271 | " \n", 272 | " \n", 273 | " \n", 274 | " \n", 275 | " \n", 276 | " \n", 277 | " \n", 278 | " \n", 279 | " \n", 280 | " \n", 281 | " \n", 282 | " \n", 283 | " \n", 284 | " \n", 285 | " \n", 286 | " \n", 287 | " \n", 288 | " \n", 289 | " \n", 290 | " \n", 291 | " \n", 292 | " \n", 293 | " \n", 294 | " \n", 295 | " \n", 296 | " \n", 297 | " \n", 298 | " \n", 299 | " \n", 300 | " \n", 301 | " \n", 302 | " \n", 303 | " \n", 304 | " \n", 305 | " \n", 306 | " \n", 307 | " \n", 308 | " \n", 309 | " \n", 310 | " \n", 311 | " \n", 312 | " \n", 313 | " \n", 314 | " \n", 315 | " \n", 316 | " \n", 317 | " \n", 318 | " \n", 319 | " \n", 320 | " \n", 321 | " \n", 322 | " \n", 323 | " \n", 324 | " \n", 325 | " \n", 326 | " \n", 327 | " \n", 328 | " \n", 329 | " \n", 330 | " \n", 331 | " \n", 332 | " \n", 333 | " \n", 334 | " \n", 335 | " \n", 336 | " \n", 337 | " \n", 338 | "
aaa111_0aaa111_1aaa111_2aaa111_3aaa111_4aaa111_5aaa111_6aaa111_7aaa111_8aaa111_9...ddd666_328ddd666_329ddd666_330ddd666_331ddd666_332ddd666_333ddd666_334ddd666_335ddd666_336ddd666_337
0-0.035051-0.272576-0.4741800.0641620.2929780.0234710.0259420.100582-0.1742770.126081...-0.111784-0.268968-0.0756520.189964-0.5566800.014488-0.124629-0.0484400.3817560.176951
1-0.3159090.069661-0.015446-0.101020-0.2259750.204022-0.079135-0.104554-0.347470-0.121504...-0.0083420.248996-0.101882-0.652522-0.007524-0.272381-0.0668790.0416630.039876-0.011800
2-0.037757-0.1738740.404682-0.037672-0.6422190.2742400.169435-0.0981560.479511-0.330409...0.3271300.0304260.0147590.1344760.156697-0.3138640.104577-0.1090290.296487-0.012178
3-0.058816-0.190401-0.3679550.452142-0.450456-0.102206-0.281244-0.0398530.004766-0.070515...-0.2270590.3222270.306905-0.0864980.5135510.017932-0.408598-0.098558-0.111911-0.292455
4-0.145389-0.1443610.2908430.260297-0.212217-0.093912-0.3638120.436243-0.4934360.160006...-0.298150-0.206716-0.082190-0.118802-0.201424-0.2989390.073852-0.2982090.314797-0.004759
\n", 339 | "

5 rows × 338 columns

\n", 340 | "
" 341 | ], 342 | "text/plain": [ 343 | " aaa111_0 aaa111_1 aaa111_2 aaa111_3 aaa111_4 aaa111_5 aaa111_6 \\\n", 344 | "0 -0.035051 -0.272576 -0.474180 0.064162 0.292978 0.023471 0.025942 \n", 345 | "1 -0.315909 0.069661 -0.015446 -0.101020 -0.225975 0.204022 -0.079135 \n", 346 | "2 -0.037757 -0.173874 0.404682 -0.037672 -0.642219 0.274240 0.169435 \n", 347 | "3 -0.058816 -0.190401 -0.367955 0.452142 -0.450456 -0.102206 -0.281244 \n", 348 | "4 -0.145389 -0.144361 0.290843 0.260297 -0.212217 -0.093912 -0.363812 \n", 349 | "\n", 350 | " aaa111_7 aaa111_8 aaa111_9 ... ddd666_328 ddd666_329 \\\n", 351 | "0 0.100582 -0.174277 0.126081 ... -0.111784 -0.268968 \n", 352 | "1 -0.104554 -0.347470 -0.121504 ... -0.008342 0.248996 \n", 353 | "2 -0.098156 0.479511 -0.330409 ... 0.327130 0.030426 \n", 354 | "3 -0.039853 0.004766 -0.070515 ... -0.227059 0.322227 \n", 355 | "4 0.436243 -0.493436 0.160006 ... -0.298150 -0.206716 \n", 356 | "\n", 357 | " ddd666_330 ddd666_331 ddd666_332 ddd666_333 ddd666_334 ddd666_335 \\\n", 358 | "0 -0.075652 0.189964 -0.556680 0.014488 -0.124629 -0.048440 \n", 359 | "1 -0.101882 -0.652522 -0.007524 -0.272381 -0.066879 0.041663 \n", 360 | "2 0.014759 0.134476 0.156697 -0.313864 0.104577 -0.109029 \n", 361 | "3 0.306905 -0.086498 0.513551 0.017932 -0.408598 -0.098558 \n", 362 | "4 -0.082190 -0.118802 -0.201424 -0.298939 0.073852 -0.298209 \n", 363 | "\n", 364 | " ddd666_336 ddd666_337 \n", 365 | "0 0.381756 0.176951 \n", 366 | "1 0.039876 -0.011800 \n", 367 | "2 0.296487 -0.012178 \n", 368 | "3 -0.111911 -0.292455 \n", 369 | "4 0.314797 -0.004759 \n", 370 | "\n", 371 | "[5 rows x 338 columns]" 372 | ] 373 | }, 374 | "execution_count": 4, 375 | "metadata": {}, 376 | "output_type": "execute_result" 377 | } 378 | ], 379 | "source": [ 380 | "returns.head()" 381 | ] 382 | }, 383 | { 384 | "cell_type": "code", 385 | "execution_count": 5, 386 | "metadata": {}, 387 | "outputs": [ 388 | { 389 | "name": "stdout", 390 | "output_type": "stream", 391 | "text": [ 
392 | "# authors:\t4\n", 393 | "# algos:\t15\n", 394 | "# backtests:\t338\n" 395 | ] 396 | } 397 | ], 398 | "source": [ 399 | "# Get some idea of how big our data set is\n", 400 | "num_authors = sharpes.meta_user_id.nunique()\n", 401 | "num_algos = sharpes.meta_algorithm_id.nunique()\n", 402 | "num_backtests = sharpes.meta_code_id.nunique()\n", 403 | "\n", 404 | "print('# authors:\\t{}'.format(num_authors),\n", 405 | " '# algos:\\t{}'.format(num_algos),\n", 406 | " '# backtests:\\t{}'.format(num_backtests),\n", 407 | " sep='\\n')" 408 | ] 409 | }, 410 | { 411 | "cell_type": "code", 412 | "execution_count": 6, 413 | "metadata": { 414 | "scrolled": false 415 | }, 416 | "outputs": [ 417 | { 418 | "name": "stderr", 419 | "output_type": "stream", 420 | "text": [ 421 | "/Users/georgeho/Desktop/bayesalpha/bayesalpha/author_model.py:288: UserWarning: Data set contains users with fewer than 5 algorithms.\n", 422 | " warnings.warn('Data set contains users with fewer than 5 algorithms.')\n", 423 | "Only 1 samples in chain.\n", 424 | "Auto-assigning NUTS sampler...\n", 425 | "Initializing NUTS using jitter+adapt_diag...\n", 426 | "Multiprocess sampling (2 chains in 2 jobs)\n", 427 | "NUTS: [mu_algo_raw, mu_algo_sd_log__, mu_author_raw, mu_author_sd_log__, mu_global]\n", 428 | "100%|██████████| 2/2 [00:01<00:00, 1.93it/s]\n", 429 | "The chain contains only diverging samples. The model is probably misspecified.\n", 430 | "The acceptance probability does not match the target. It is 5.250929150656001e-118, but should be close to 0.9. Try to increase the number of tuning steps.\n", 431 | "The acceptance probability does not match the target. It is 0.0, but should be close to 0.9. Try to increase the number of tuning steps.\n", 432 | "/Users/georgeho/Desktop/bayesalpha/bayesalpha/author_model.py:238: UserWarning: Problems during sampling. Inspect `result.warnings`.\n", 433 | " warnings.warn('Problems during sampling. 
Inspect `result.warnings`.')\n" 434 | ] 435 | } 436 | ], 437 | "source": [ 438 | "'''\n", 439 | "Try the default `sampler_args` and if necessary, change `sampler_args` to fine-tune the MCMC sampler.\n", 440 | "Talk to a Bayesian if you need help.\n", 441 | "\n", 442 | "Sampling usually takes a while.\n", 443 | "For reference: on QUACS, ingesting 30 authors, 900 algos, 40000 backtests,\n", 444 | "with PyMC3 running 4 chains in 4 jobs, takes around 15 minutes.\n", 445 | "'''\n", 446 | "\n", 447 | "trace = ba.fit_authors(sharpes,\n", 448 | " returns,\n", 449 | " sampler_args={\n", 450 | " # Setting 1 draw and 1 tune... this is an example, right?\n", 451 | " 'draws': 1,\n", 452 | " 'tune': 1,\n", 453 | " 'nuts_kwargs': {'target_accept': 0.90}\n", 454 | " },\n", 455 | " save_data=False \n", 456 | " )" 457 | ] 458 | }, 459 | { 460 | "cell_type": "code", 461 | "execution_count": 7, 462 | "metadata": {}, 463 | "outputs": [], 464 | "source": [ 465 | "# Save the resulting trace object as a netcdf file.\n", 466 | "trace.save('example.nc')" 467 | ] 468 | }, 469 | { 470 | "cell_type": "code", 471 | "execution_count": null, 472 | "metadata": {}, 473 | "outputs": [], 474 | "source": [] 475 | } 476 | ], 477 | "metadata": { 478 | "kernelspec": { 479 | "display_name": "Python 3", 480 | "language": "python", 481 | "name": "python3" 482 | }, 483 | "language_info": { 484 | "codemirror_mode": { 485 | "name": "ipython", 486 | "version": 3 487 | }, 488 | "file_extension": ".py", 489 | "mimetype": "text/x-python", 490 | "name": "python", 491 | "nbconvert_exporter": "python", 492 | "pygments_lexer": "ipython3", 493 | "version": "3.6.4" 494 | } 495 | }, 496 | "nbformat": 4, 497 | "nbformat_minor": 2 498 | } 499 | -------------------------------------------------------------------------------- /examples/factor_gains.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | 
"metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import pymc3 as pm\n", 11 | "import bayesalpha as ba\n", 12 | "import pandas as pd\n", 13 | "import seaborn as sns\n", 14 | "import matplotlib.pyplot as plt" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 21, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "n_time = 600\n", 24 | "n_algos = 10\n", 25 | "\n", 26 | "returns = np.random.randn(n_time, n_algos) / 1e3\n", 27 | "signal = (np.random.randn(n_time, 2) > 0) * 2 - 1" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 22, 33 | "metadata": {}, 34 | "outputs": [ 35 | { 36 | "name": "stderr", 37 | "output_type": "stream", 38 | "text": [ 39 | "Auto-assigning NUTS sampler...\n", 40 | "Initializing NUTS using jitter+adapt_diag...\n", 41 | "Multiprocess sampling (2 chains in 2 jobs)\n", 42 | "NUTS: [gains_factor_algo_raw, gains_factor_algo_sd_log__, gains_time_raw, gains_time_sd_raw_log__, gains_time_sd_sd_log__, gains_time_alpha_log__, author_is, gains_raw, gains_mu, gains_sd_log__, log_vlt_time_raw, log_vlt_time_sd_log__, log_vlt_time_alpha_log__, log_vlt_mu]\n", 43 | "100%|██████████| 1000/1000 [01:00<00:00, 16.54it/s]\n", 44 | "The estimated number of effective samples is smaller than 200 for some parameters.\n" 45 | ] 46 | } 47 | ], 48 | "source": [ 49 | "returns = pd.DataFrame(returns)\n", 50 | "# returns.colums = ['algo1', 'alg']....\n", 51 | "returns.index = pd.DatetimeIndex(\n", 52 | " pd.date_range('2018-01-01', periods=n_time, freq='1B'))\n", 53 | "signal = pd.DataFrame(signal)\n", 54 | "signal.columns = ['VIX', 'HMM']\n", 55 | "signal.index = returns.index\n", 56 | "\n", 57 | "fit = ba.fit_population(returns, gains_factors=signal)" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": 29, 63 | "metadata": {}, 64 | "outputs": [ 65 | { 66 | "name": "stderr", 67 | "output_type": "stream", 68 | "text": [ 69 | 
"/Users/adrianseyboldt/anaconda3/lib/python3.6/site-packages/matplotlib/axes/_axes.py:6462: UserWarning: The 'normed' kwarg is deprecated, and has been replaced by the 'density' kwarg.\n", 70 | " warnings.warn(\"The 'normed' kwarg is deprecated, and has been \"\n" 71 | ] 72 | }, 73 | { 74 | "data": { 75 | "text/plain": [ 76 | "" 77 | ] 78 | }, 79 | "execution_count": 29, 80 | "metadata": {}, 81 | "output_type": "execute_result" 82 | }, 83 | { 84 | "data": { 85 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAD8CAYAAACSCdTiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3Xt0XGd97vHvbzQajS6juyzJlh35RmInxE5q4oSESxMChFtyCqWQwgklNC0thR7aU6CctQotaxVoy6XtWaUp0LpACAmUkwTSEGISyNWJEztxEtvxXZItS7Lut5E0mvf8MXuMougy0sxotkfPZy2tmdmzZ/ZPo5lnXr373e825xwiIpKfArkuQEREskchLyKSxxTyIiJ5TCEvIpLHFPIiInlMIS8ikscU8iIieUwhLyKSxxTyIiJ5LLiUG6utrXXNzc1LuUkRkXPe008/fcY5V7eYxy5pyDc3N7N79+6l3KSIyDnPzE4s9rHqrhERyWMphbyZVZrZD83sgJntN7MrzKzazH5uZoe8y6psFysiIguTakv+68B9zrkLgC3AfuDTwE7n3EZgp3dbRER8ZN6QN7Ny4PXAtwCcc+POuT7gemCHt9oO4IZsFSkiIouTSkt+HdAF/LuZ7TGzb5pZKVDvnGsH8C5XZLFOERFZhFRCPghcCvyLc+4SYJgFdM2Y2S1mttvMdnd1dS2yTBERWYxUQr4NaHPO7fJu/5BE6HeYWSOAd9k504Odc7c657Y557bV1S1qmKeIiCzSvCHvnDsNtJrZ+d6ia4AXgbuBm7xlNwF3ZaVCERFZtFQPhvoT4HtmFgKOAr9H4gviDjO7GWgBfjs7JYqIyGKlFPLOub3Athnuuiaz5Yj4y227Wl6x7Mbta3JQicji6IhXEZE8ppAXEcljCnkRkTymkBcRyWMKeRGRPKaQFxHJYwp5EZE8ppAXEcljCnkRkTymkBcRyWMKeRGRPKaQFxHJYwp5EZE8ppAXEcljCnkRkTymkBcRyWMKeRGRPKaQFxHJYwp5EZE8ppAXEcljCnkRkTymkBcRyWMKeRGRPKaQFxHJYwp5EZE8ppAXEcljwVRWMrPjwCAwCcScc9vMrBr4AdAMHAfe65zrzU6ZIiKyGAtpyf+mc26rc26bd/vTwE7n3EZgp3dbRER8JJ3umuuBHd71HcAN6Zcj4m/3Pd/O5+5+IddliKQs1ZB3wP1m9rSZ3eItq3fOtQN4lyuyUaCIX8SdY/eJXr77xAl6hsdzXY5ISlIN+Sudc5cC1wF/bGavT3UDZnaLme02s91dXV2LKlLED7oGxxgZnyQWd9zz7KlclyOSkpR2vDrnTnmXnWb2Y+AyoMPMGp1z7WbWCHTO8thbgVsBtm3b5jJTtsjSO949DEB5OMi/PXyUwoKXt5Fu3L4mF2WJzGnelryZlZpZJHkdeDPwPHA3cJO32k3AXdkqUsQPjp0ZJhIOctWGWtp6R+kcjOa6JJF5pdJdUw88YmbPAk8
CP3XO3Qd8EbjWzA4B13q3RfKSc47jZ4Zprilly+pKDNjb0pfrskTmNW93jXPuKLBlhuXdwDXZKErEb3pHJhiIxmiuLSUSLmRjfRl7W/t40+Z6Ama5Lk9kVjriVSQFyf74tTWlALx6VSV9oxN0DozlsiyReSnkRVJw/MwwxYUFrCgvAmBFJHHZO6KhlOJvCnmRFBzvHua8mpKzXTOVJYUA9CnkxecU8iLzGBqLcWZonGavqwagrChIMGD0jkzksDKR+SnkRebRM5Tod6/3umoAzIzKkkK15MX3FPIi8+iPxgCoKA69bHlVSYi+UbXkxd8U8iLz6Pda6xXFhS9bXllSSK/msBGfU8iLzKN/dIJQQYBw4cs/LlUlIYbHJxmPxXNUmcj8FPIi8+gfnaC8uBCbdtDT2RE2o2rNi38p5EXm0T86QUXxKw8Or/T66Ps0wkZ8TCEvMo9EyIdesbyqNLFMB0SJnynkReYwGXcMRmMztuQj4SABU0te/E0hLzKHobEYjlcOnwQImFFRXKiWvPiaQl5kDr8ePjnzhK1VJSG15MXXFPIic5jtQKikypKQjnoVX1PIi8xhtgOhkqpKChmMxojFNVZe/EkhLzKH/tEJCgvsFQdCJVWWhHBAv7psxKcU8iJzSA6fnH4gVNKvD4hSyIs/KeRF5jDbgVBJVSXeWHnNYSM+pZAXmcNsB0IlVRQXYqglL/6lkBeZRWwyPuuBUEkFAaMsHGRAIS8+pZAXmUXX0NisB0JNFQkHGYgq5MWfFPIiszjVFwVmPxAqqTycGEYp4kcKeZFZnO5PhHz5LGPkkyLhoEJefEshLzKL9v5R4NdTCs8mEi5keCxGbFIHRIn/KORFZnG6PzrngVBJkXAQB3RrGKX4UMohb2YFZrbHzH7i3V5rZrvM7JCZ/cDM5m7uiJxjOgbHKA+/8oxQ00WKEt05nQNjS1GWyIIspCX/CWD/lNtfAr7qnNsI9AI3Z7IwkVzr6I8SCc/dHw+JljxA52A02yWJLFhKIW9mTcDbgW96tw24Gviht8oO4IZsFCiSKx2DUcrnGVkDU0NeLXnxn1Rb8l8D/gJI7lmqAfqcc8khBW3AqgzXJpIzzjk6BqKUp9CSL/NCvmNALXnxn3lD3szeAXQ6556euniGVd0sj7/FzHab2e6urq5FlimytAaiMaIT8bOt9LkEAwFKQgVqyYsvpdKSvxJ4l5kdB24n0U3zNaDSzJKfgCbg1EwPds7d6pzb5pzbVldXl4GSRbKv02uVp9KST66nHa/iR/OGvHPuM865JudcM/A+4BfOud8FHgTe4612E3BX1qoUWWIdXmBHUuiTh0S/fJd2vIoPpTNO/lPAJ83sMIk++m9lpiSR3OtYYEs+Ei5Ud434UmrNFI9z7iHgIe/6UeCyzJckknsdXqs8lT755HrPtY0RjzsCgbnH1YssJR3xKjKDzoExIkVBioIFKa0fCQeJxR29Oqm3+IxCXmQGHQNRVpQXpbx+8qApddmI3yjkRWbQMRClvjyc8vrlOiBKfEohLzKDjoGxBYV8siWvA6LEbxTyItM45+gcXGh3TaIl36WWvPiMQl5kmt6RCSYmHfWR1FvyhQUBIuHg2YOoRPxCIS8yTbLLZSHdNQArIkXqkxffUciLTPPrkE+9uwZgRSSskBffUciLTJOcg2ahLfn68iLNKS++o5AXmSbZkq+LLLAlXx6mY2AM52ackFUkJxTyItN0DEapLCkkXJja0a5J9eVhxmNx+kYmslSZyMIp5EWm6RgYW9DImqQGr3vntEbYiI8o5EWm6VzglAZJDRWJxyjkxU8U8iLTLPRo16TkYzr6FfLiHwp5kSnGY3E6B6OsrFh4yK+IqLtG/EchLzJFe/8ocQdN1SULfmwoGKC2LKT5a8RXFPIiU7T2jALQVFW8qMfXl4c5re4a8RGFvMgUbb0jAKyuWnhLHhIjbE7rhN7iIwp5kSlae0coCBiNi+iTB6ivCKu7RnxFIS8yRWv
PKCsrwwQLFvfRaCwP0zM8TnRiMsOViSyOQl5kirbeEZoqF9dVA4mWPPx6/huRXFPIi0zR2jvK6urF7XQFHfUq/qOQF/FEJybpGhxb9E5XgIYKhbz4i0JexNPWmxg+uXoRY+STdNSr+I1CXsTT6g2fXOwYeYDycJDiwgK15MU3FPIinrYeb4x8Gi15M6OhIqyQF99QyIt4WntHCQUD1JUtfAbKqerLi9RdI74xb8ibWdjMnjSzZ83sBTP7vLd8rZntMrNDZvYDMwtlv1yR7GnrHaGpqphAwBb1+Nt2tXDbrhaiE3GOdA1x266WDFcosnCptOTHgKudc1uArcBbzexy4EvAV51zG4Fe4ObslSmSfa09ozSlMbImqTwcZCAa02kAxRfmDXmXMOTdLPR+HHA18ENv+Q7ghqxUKLJEWntHWJ3GTtek8uJCJuOO4XEd9Sq5l1KfvJkVmNleoBP4OXAE6HPOxbxV2oBVszz2FjPbbWa7u7q6MlGzSMYNRifoG5lIa6drUnm4EICBUZ3rVXIvpZB3zk0657YCTcBlwKaZVpvlsbc657Y557bV1dUtvlKRLDo7Rj4D3TUVxV7IRxXyknsLGl3jnOsDHgIuByrNLOjd1QScymxpIkun5ezwyfS7aypKEiHfN6KQl9xLZXRNnZlVeteLgTcB+4EHgfd4q90E3JWtIkWy7aXTgwCsrytL+7kiRUGCAaNneDzt5xJJV3D+VWgEdphZAYkvhTuccz8xsxeB283sC8Ae4FtZrFMkq/afHuC8mhJKi1L5SMzNzKguDSnkxRfmfUc7554DLplh+VES/fMi57z97YNsaijP2PNVl4boHVHIS+7piFdZ9kbGYxzvHmZTY+ZCvspryWusvOSaQl6WvYOnB3EOLmiMZOw5q0tCjMXi9Grnq+SYQl6Wvf3tiZ2umzPYkq8uTczycaJ7OGPPKbIYCnlZ9g6cHqCsKJjWFMPTJUM+OTRTJFcU8rLs7W8f4IKGCGaLm5hsJlUliZBvVchLjinkZVlzznGgfTCjO10BQsEAkaKgWvKSc+kPChY5x0ydArhneJzBsVjGQx4SI2wU8pJrasnLsnbaO7lHJkfWJFWXhmjtGc3484oshEJelrX2gVEMuKAhOyF/qn+U8Vg8488tkiqFvCxrp/ujVJeGKAllvueyuiSEc3CyT615yR2FvCxbzjlaukcyOnRyKg2jFD/QjldZtpI7XZtrS7NyPlaFvPiBWvKybB3vToRvc01pVp6/LBykKBigRUe9Sg4p5GXZOtE9THFhAXWRoqw8f8CM1dUlnOhWS15yRyEvy9bx7mHOqykhkMEjXadbW1vKka6hrD2/yHwU8rIsDY3FODM0nrWumqRNjeUcOzNMdGIyq9sRmY1CXpal42cS/eTNNemfuHsumxoixB281DGY1e2IzEYhL8vSie5hggFjZZaGTyYlp0vY3z6Q1e2IzEYhL8vS8e4RVleXEAxk9yOwprqEklDB2TnrRZaaQl6WnbHYJO39o5yX5a4agEDAOL8hopa85IxCXpadE90jxB2szfJO16QLGso5cHpQ53uVnFDIy7JztGuIAjPOW6KQ39wYoX90gnZvxkuRpaSQl2Xn6JlhmqqLCQWX5u1/gbfz9cBpddnI0lPIy7IyEJ3gZO8o62rLlmybyWmMtfNVckEhL8vKU8d6cMC6uqXpqgGIhAtZXV2sna+SEwp5WVYeO9JNMGCsqc7+yJqpkjtfRZbavCFvZqvN7EEz229mL5jZJ7zl1Wb2czM75F1WZb9ckfQ8fqSbNdUlFBYsbftmU2M5R7uGNL2BLLlU3ukx4M+cc5uAy4E/NrPNwKeBnc65jcBO77aIb/WNjLP/9MCSdtUkXbSynLiDfSf7l3zbsrzNG/LOuXbn3DPe9UFgP7AKuB7Y4a22A7ghW0WKZMITR3twjiXd6Zp02dpqzGDX0e4l37Ysbwv6n9XMmoFLgF1AvXOuHRJfBMCKWR5zi5ntNrPdXV1
d6VUrkobHj5yhuLCApurszlczk8qSEBc0lPPE0Z4l37YsbymHvJmVAT8C/tQ5l/IwAefcrc65bc65bXV1dYupUSQjHj3SzWvWVmd9vprZXL6umt0nehiPxXOyfVmeUnq3m1khiYD/nnPuv7zFHWbW6N3fCHRmp0SR9HUMRDncOcSV62tyVsPl62qITsR5rq0vZzXI8pPK6BoDvgXsd859ZcpddwM3eddvAu7KfHkimfHYkTMAXLmhNmc1XNZcDcAT6peXJZRKS/5K4IPA1Wa21/t5G/BF4FozOwRc690W8aXHDndTUVx4dn73XKgqDXFBQ0T98rKkgvOt4Jx7BJjtJJjXZLYckcxzzvHYkW6uWFdDQSB753NNxeXrarj9qRbGY/ElmztHlje9yyTvnege4WTfKFduyF1/fJL65WWpKeQl7z3q9ce/Nof98Unb1yb65R87on55WRoKecl7jx3upqE8zLrapT/Sdbqq0hBbVleyc39HrkuRZWLePnmRc9Vtu1qIO8eDBzs5vz7C959szUkN09VHirj/xQ7a+0dprFj6A7NkeVFLXvJax0CUkfFJ1tct/VQGs9nsjfB54EW15iX7FPKS1450DgGwfoV/Qr4uUsS62lLuV8jLElDIS1470jVMbVmIiuLCXJdylplx7YX1PH6km/7RiVyXI3lOIS95azLuONY97KuumqQ3b24gFnc8dFCzgUh2KeQlb7X1jjAei/sy5C9ZXUldpIj7X1CXjWSXQl7y1uGuIQx8MXRyukDAuHZzPQ8e7GR0XGeLkuxRyEveOto1TGNFmJIif44UfserGxkZn+QXB9RlI9mjkJe8NDo+SUvPiC+7apK2r6uhtqyIe549letSJI8p5CUvPXm8h8m489XQyalu29XCD55qZeOKMh7Y38G3HzmW65IkTynkJS/9Yn8HhQVGc43/+uOn2tJUQSzu2N+e8snWRBZEIS95xznHA/s7WV9X5vvpfFdXl1BZUshzbf25LkXylL8/ASKLcLBjkJN9o2xqyN0JQlJlZly8qoJDnYP0Do/nuhzJQwp5yTs79ydGq5zfGMlxJam5uKmSuIOf7GvPdSmShxTyknce2N/BlqYKysP+mcpgLo0VYerLi/jR0225LkXykEJe8krX4Bh7W/u4ZlN9rktJmZlx6Zoq9rb2caRrKNflSJ5RyEteefBAJ87BNZtW5LqUBdmyupKAoda8ZJxCXvLK/S+eprEifHbO9nNFebiQ17+qjh/vOclk3OW6HMkjCnnJG52DUR482MW7tq7EzHJdzoK9+9Im2vujPK7zv0oGKeQlb/zXM4lW8Hu3rc51KYty7eZ6IuEgdz699KcplPylkJe84JzjjqdaeU1zla/nq5lLuLCAd1/axL372ukcjOa6HMkTCnnJC7tP9HL0zPA524pPuum1zcTiju8+8coTgIsshkJe8sLtT7ZSVhTk7Rc35rqUtKytLeWaC1bwvSdOEJ3QPPOSvnlD3sy+bWadZvb8lGXVZvZzMzvkXVZlt0yR2fUOj3PvvnbeuaWRkpA/545fiA9fuZbu4XHu1hTEkgGptOT/A3jrtGWfBnY65zYCO73bIjnxtQdeYiw2ye9duTbXpaTltl0t3LarhWNnhmkoD/OV+1/ie0+cyHVZco6bN+Sdc78CeqYtvh7Y4V3fAdyQ4bpEUvJSxyDf3dXC724/j1fVnxtz1czHzLhyQy2nB6I8f0pTEEt6FtsnX++cawfwLs+twwslLzjn+JufvEhZUZBPXvuqXJeTUZesqaSxIsx/72vXOWAlLVnf8Wpmt5jZbjPb3dXVle3NyTJy777TPHzoDH/6po1UlYZyXU5GBcx4x8Ur6Rud4F9+eSTX5cg5bLEh32FmjQDe5axnInbO3eqc2+ac21ZXV7fIzYm83DMtvfzZnXt59aoKPnD5ebkuJyvW1pZycVMF3/jlEVp7RnJdjpyjFjsU4W7gJuCL3uVdGatIxHPbrleOFb9x+xoOdw7
y4f94ivryMN/+0GsoLMjfkcDXXdTI4c4h/uT7e7j9lssJFxbkuiQ5x8wb8mb2feCNQK2ZtQF/RSLc7zCzm4EW4LezWaRI0uHOIT7wzScJBgL854cvoy5SBMz8hZAPKooL+cp7t/LR7z3NJ+/Yyz+//1ICgXNvXh7JnXlD3jn3/lnuuibDtYjM6WTvKO+9/3ECZnzn5ss4z+cn6c6UnuFxrruwgXv3neaDI7u47tWNedtFJZl37h85IstCa88I3370GCWhAm56bTN7WvrY09KX67KWzJUbaukZmeDRI92cGRrnba9upDrPdjZLdijkxfe6h8bY8fhxSouC/P7r1lFRfG6c1i+TzIx3XtzIikgRP93Xztv/8WH+9E0buX7rqpf108+2H0OWL4W8+NrwWIz/eOw4AB96bfOyDPgkM+PydTWsri7hwQOdfOpH+/jSfQd558WNXLa2hsvWVue6RPEhhbz4Vtw5bnuyhf7RCT5y1Vpqy4pyXZIvrKos5qcfv4onjvbw748e447dbex4PDH9weqqYjY3lvMbzdWUFenjLQp58bFfvdTFsTPDvPvSJtYsk52sqTIzrlhfwxXra5iYjPPCqQEeOdTF7U+18rMXO3jkSDfvubSJ8xvyY6oHWTyFvPjSnpZeHtjfwatXVXDpmspcl+M7M/W9V5cW8Udv3MDp/ih37G5lx+PHuWpDLe+/bPU5eTpEyYz8PYpEzllDYzE+cfteysOF3LB1lQJqgRoqwnz0jevZvraaRw6f4d8ePprrkiSHFPLiO3911wu09Y7w3m2rKQ7pCM/FKCwI8M4tK7loZTl/+98HePDArDOPSJ5TyIuv3LX3JD96po2PXb2R5lr1w6cjYMZ7fmM1mxrK+fj393DszHCuS5IcUMiLb7T2jPB/fvw8l66p5ONXb8h1OXkhFAzwbzdtIxAw/uyOvUzGXa5LkiWmkBdfONk3yu9+cxcYfP19lxDM40nHltqqymL++voLeaalT/3zy5A+SZJzJ/tGed+tj9M7PM5/fvgyVleX5LqkvPOuLSu57qIGvnL/Sxw8PZjrcmQJKeQlZ4bGYnzjl0d41z89Qt/IBN/5yHYuWaNzwmeDmfGFGy4iEg7yidv3EJ3Q2aaWC42Tl6yYPo67f3SCjoEoa6pLOHpmiKNdw+w72c9gNMbrNtbyl2/bxKbG8hxVm9+m/i3ecfFKdjx+nA98cxc//Ohrc1eULBmFvGRNdGKSPS297G3to7V39OzySDjIuroy3nphAzduX6PW+xI6vyHCG15Vxy9f6uLHe9r4H5c05bokyTKFvGScc449Lb3c9/xpBsdiNJSHecvmetbUlPIHb1hHTWlIBzjl0Js21XOie4S//K/nqSsLc9XG2lyXJFlkzi3dkKpt27a53bt3L9n2ZOkdPzPMX/zoOZ481kNTVTHvuHgla7Qj1XcGoxP8eM9JjnYN89Xf2crbL27MdUkyBzN72jm3bTGPVUteMiIed3x31wn+9t4DBAuM37pkFZeeV0VALXZfioQL+cEtV3Dzjqf42PefYU/LWv7gDevPnk5R8odCXtK2+3gPn7/nRfad7Of1r6rjS+9+NQ8e6Mp1WTKPipJCvnPzdv7q7uf59qPH+O6uE9ywdRXb11Vz6ZoqVleV6HyyeUAhL4syGXf86qUuvvvECXYe6KShPMzXfmcr129dqf72c0hxqIAvv2cLf/iG9fzfB4/wk+fauf2p1sR9hQWsX1HKlqZKXrexjivW1yzrk7acq9QnLymLxx27TyR2qN73fDun+qPUlhWxZXUFr9tQRyiowy7OdXHn6BwYo7VnhM7BKB2DY7T0jDAeixMqCHD91pXc/Lq1XNCg4a5LSX3yknFTx1Z3DY7xTEsvhzoGOdUfJRQM8LoNtXz27Zt584X13Lm7LYeVSiYFzGioCNNQET67bDLuaOkZ4bm2Pv7f3pPc+XQbFzRE+If3buHClRU5rFZSoZa8zOibDx/lubZ+nmnppa13FAM
21pdxyeoqLmiIUFSoKYCXo5HxGLuO9fDwoS6iE3HecmE9H3rtWi5fV61uuixSS14yYmIyzkMHu/jR0238/MUOJp2joTzMdRc1sHV1JZGw+mOXu5JQkN88fwWXr62hb3Sc/3z8BD97oYMNK8q4Yl0NF60qp6okRCzumJiME5t0TMYd5cVBasqKOK+6hBXl4fk3JBmjlvwyF4879rT2cs+z7dz97Cl6hsepKQ1xQUOES9ZUsbKyONclik/duH0No+OT3PPsKX685yT7TvYzNBab93Frqku4bG01V22o5aqNtTpBewrSackr5Jch5xz7Tvbzk+faueOpVvpGJwgGjAsaIly6poqN9REKNHROFijuHL3D40RjcQrMCARIXJoRjU0yFI3RMTjG8TPDHDszzKg3SdqFK8vPjt55VX0ZDeVhdf1Mk7OQN7O3Al8HCoBvOue+ONf6CvnUOec4dmaYJ4/1cLhziEcOn2Ey7ggGjHBhATVlId65ZSXNNaU015RSUTJzV8p4LE7nYJQT3SMc6Rpi9/FenjreQ3t/lGDAWF9XxsVNFWxqLCesfnZZInHnONU3yqHOIQ51DNHSM0zyfCaFBcbKymKqSkLUlIaoKg1RXRriRPcIpaECqstCNJYXnz015I3b16RVy47HjtM7Ms5k3OEclBYFiYSDfODy89L9NTMmJyFvZgXAS8C1QBvwFPB+59yLsz1GIT+34bEYX/zvA7zUMchLHYP0jkwAEAwYtWVFBAuMybhjeCzGQPTl/xYXBQNEwoWUhAoYjE4wGXeMxeKMxeIvW29FpIjXrK3m9RtrecuFDdy77/SS/X4isxmbmKStb5SuwTG6h8aoKSuid2ScnuFxeofH6R4ef8V7ubKkkMaKYt68uZ7NK8vZ3FhOU1XxnP8FjIzHOHh6kGda+njmRC8vnOrnRPcI01OwIGCsrS1lU2M5mxojbGos58LGcuoiRTn5LyNXO14vAw475456RdwOXA/MGvKZ4Jwj7iAWjxMdjzM0HmMoGmNoLMaw9zM45Xp0Ik5xqIDSUAGlRUHKioKJy3CQSFGQSLiQsnCQomCAAjPMSPmP6Jwj5oVuYvuTZ+sYGosxGJ1g0KstOhEn7hI7oSbj7uz1xL+4E7zUOciJ7hEm445QQYB1daW8bmMdG+rKqC4LvWJ6gInJOFesr+FE9wjHzwxzZmiMgegEw2OTtPaMEDAjXBigOFRApKiQ6rIQN1+1lsYK/Sss/lNUWMD6ujLW15UBr2ydO+fY8dgJhsdidA2N0d4fpb1/lPa+KP/0i0Nn/wsoCgZoqAhTHwlTUlRAOFjAxGScgegEHQNjtPaOkGzXNlUVc3FTBevqyqgtCxEMJI7zGB6P0TM8TlGwgGdO9HLPs6fO1lEeDrKmpoQ11SWsrk5c1pUVUVFcSEVJYeKyuJBQQYCCgPnis5ZOS/49wFudcx/xbn8Q2O6c+9hsj1lsS/4PvrObhw52MRlPhOqC6oRXfEvPJ2CJ8cIBr1/RMOIu8a9c3DnvZ2HPWRAwAt4XSMASz1kSKiAQMCJFQTasKOP8hggj45OcV12i09+JpGg8FqdjIMqp/lF6hsbpj05QXFhAdGKeN6tqAAAGVUlEQVSSkfFJQsEA5eFCaspCRCcmaSgP01RVQnmKR++Ojk/SPjDK6f4oVSUhWnpGaO0Zoa13lPHJ+JyPDRgEA4nA/8nHrzr7JbZQuWrJz/QV9YroM7NbgFu8m0NmdjCNbc6lFjiTpedOR0p1PbgEhczAr68Z+Lc2v9YFqm0xlqyuDV9Y8EOm1rboHQTphHwbsHrK7Sbg1PSVnHO3AremsZ2UmNnuxX7TZZNf6wLVthh+rQtU22L4tS7IXG3p9Ak8BWw0s7VmFgLeB9ydbkEiIpI5i27JO+diZvYx4GckhlB+2zn3QsYqExGRtKU1rYFz7l7g3gzVkq6sdwktkl/rAtW2GH6tC1TbYvi1LshQbUt6xKuIiCwtjdMTEclj51TIm1m1mf3
czA55l1WzrHeTt84hM7tpyvL3m9k+M3vOzO4zs4ycpj4DdYXM7FYze8nMDpjZuzNRVyZqm3L/3Wb2fKbqSrc2Mysxs596r9cLZjbnlBop1vNWMztoZofN7NMz3F9kZj/w7t9lZs1T7vuMt/ygmb0l3VoyVZuZXWtmT3vv+6fN7Go/1DXl/jVmNmRmf57JutKtzcwuNrPHvffWPjPL6NSZafw9C81sh1fTfjP7zLwbc86dMz/Al4FPe9c/DXxphnWqgaPeZZV3vYrE/odOoHbKc30u13V5930e+IJ3PZCs0Q+1eff/FnAb8LyP/p4lwG9664SAh4Hr0qilADgCrPOe71lg87R1/gj4hnf9fcAPvOubvfWLgLXe8xRk8HVKp7ZLgJXe9YuAk36oa8r9PwLuBP48w++tdF6zIPAcsMW7XeOjv+eNwO3e9RLgONA85/Yy+cJm+wc4CDR61xuBgzOs837gX6fc/ldvWSHQReKgAgO+AdyS67q8661Aqd9eM+96GfAIiSDLdMinVdu09b4O/H4atVwB/GzK7c8An5m2zs+AK7zrQRIHqtj0daeul6HXadG1TVvHgG6gyA91ATcAfwd8jsyHfDp/z7cB381kPRms7f3APd6yGhLzh1XPtb1zqrsGqHfOtQN4lytmWGcVidBMagNWOecmgI8C+0gctLUZ+Fau6zKzSu/235jZM2Z2p5nVZ6iutGpL1gX8AzCSwZoyVRsA3mv4TmBnGrXMu52p6zjnYkA/iQ9aKo9NRzq1TfVuYI9zbizXdZlZKfApEv/FZkM6r9mrAGdmP/M+k3/ho9p+CAwD7UAL8PfOuZ65Nua7M0OZ2QNAwwx3fTbVp5hhmTOzQhIhfwmJf/n/icQ3aEoHG2erLhJ/gybgUefcJ83sk8DfAx9M8Xmz+ZptBTY45/7X9L7UXNc25fmDwPeBf3TeZHmLlMo0HbOtk9IUH2lIp7bEnWYXAl8C3uyTuj4PfNU5N2TZmcQrndqCwFXAa0g0bnZaYu6YdBoRmartMmASWEmi2/JhM3tgrve+70LeOfem2e4zsw4za3TOtZtZI4k+9unagDdOud0EPARs9Z7/iPdcd5DoB851Xd0k3kg/9pbfCdycal1Zru0K4DfM7DiJ98oKM3vIOfdGUpTF2pJuBQ45576Wak2zSGWajuQ6bd6XSwXQk+Jjc1UbZtZE4v31P5Pvfx/UtR14j5l9GagE4mYWdc79sw9qawN+6Zw7A2Bm9wKXkt5/ipmq7UbgPq9notPMHgW2kWi4zixb/U5Z6sv6O16+o+7LM6xTDRwj8S1X5V2vJvHN1w7Ueev9DfAPua7Lu+924Grv+oeAO/3wmk1bp5nM98mn+7p9gcSOu0AGagl6H5S1/Hpn2IXT1vljXr4z7A7v+oW8fMfrUTK7oy6d2iq99d+dyb9dunVNW+dzZL5PPp3XrAp4hsSOzSDwAPB2n9T2KeDfSbT0S0lM7X7xnNvL9B8+mz8k+qR2Aoe8y+SHfRuJM1Ml1/swcNj7+b0py/8Q2E9iz/k9QI1P6joP+JVX105gjV9esyn3N5P5kF90bSRaP877e+71fj6SZj1vI7Ej6wjwWW/ZXwPv8q6HSfyndRh4Elg35bGf9R53kDRG+WS6NuD/kOjD3TvlZ0Wu65r2HJ8jwyGfgb/nB4AXgOeZofGRw79nmbf8BRIB/7/n25aOeBURyWPn2ugaERFZAIW8iEgeU8iLiOQxhbyISB5TyIuI5DGFvIhIHlPIi4jkMYW8iEge+/8i4TCbqw53pAAAAABJRU5ErkJggg==\n", 86 | "text/plain": [ 87 | "
" 88 | ] 89 | }, 90 | "metadata": {}, 91 | "output_type": "display_data" 92 | } 93 | ], 94 | "source": [ 95 | "# How predictive is the signal of local non-annualized sharpe ratio?\n", 96 | "sns.distplot(fit\n", 97 | " .trace\n", 98 | " .gains_factor_algo\n", 99 | " .sel(gains_factor='VIX', algo=2)\n", 100 | " .values.ravel())" 101 | ] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": 24, 106 | "metadata": {}, 107 | "outputs": [ 108 | { 109 | "data": { 110 | "text/html": [ 111 | "
\n", 112 | "\n", 125 | "\n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | " \n", 144 | " \n", 145 | " \n", 146 | " \n", 147 | " \n", 148 | " \n", 149 | " \n", 150 | " \n", 151 | " \n", 152 | " \n", 153 | " \n", 154 | " \n", 155 | " \n", 156 | " \n", 157 | " \n", 158 | " \n", 159 | " \n", 160 | " \n", 161 | " \n", 162 | " \n", 163 | " \n", 164 | " \n", 165 | " \n", 166 | " \n", 167 | "
gains_factor_algo
chainsamplegains_factoralgo
00VIX0-0.010585
10.002652
2-0.012670
30.008918
40.011703
\n", 168 | "
" 169 | ], 170 | "text/plain": [ 171 | " gains_factor_algo\n", 172 | "chain sample gains_factor algo \n", 173 | "0 0 VIX 0 -0.010585\n", 174 | " 1 0.002652\n", 175 | " 2 -0.012670\n", 176 | " 3 0.008918\n", 177 | " 4 0.011703" 178 | ] 179 | }, 180 | "execution_count": 24, 181 | "metadata": {}, 182 | "output_type": "execute_result" 183 | } 184 | ], 185 | "source": [ 186 | "fit.trace.gains_factor_algo.to_dataframe().head()" 187 | ] 188 | }, 189 | { 190 | "cell_type": "code", 191 | "execution_count": 27, 192 | "metadata": {}, 193 | "outputs": [ 194 | { 195 | "data": { 196 | "text/plain": [ 197 | "" 198 | ] 199 | }, 200 | "execution_count": 27, 201 | "metadata": {}, 202 | "output_type": "execute_result" 203 | }, 204 | { 205 | "data": { 206 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAVoAAAEYCAYAAAAdwT4RAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzsvXl8HNWV9/29Vb2rW5Ily9ZiWZYt73sMxhDAgFlM8JDEkISwhMwbhkkmwGTIJGQy8/IwzDMzGZ7JG0IShpA8ZIAkBAIkYQkwBOOFJU4M3sC7LUu2rH1v9VpV9/2jF7ekltw2arUk3+/nU5/uqrpV91R16/TRqXvPT0gpUSgUCkX20HJtgEKhUEx0lKNVKBSKLKMcrUKhUGQZ5WgVCoUiyyhHq1AoFFlGOVqFQqHIMsrRKhQKRZZRjlahUCiyjHK0CoVCkWVsuTYglbVr18pXX30112YoFIrMEbk2YDwwpiLatra2XJugyDLRaBTTNHNthkIxqoypiFYxMWlpaeHFF1/kzY2baDh+DCkl06ZVctFFF3LttddSVlaWaxMViqwixlJRmXPOOUdu27Yt12YoRoi6ujqeeuop/uf117EsC9NXhuGdAoDe14qt5wSaEKxZs4Ybb7yR6urqHFusOANU6iADlKNVjBhSSo4fP87777/Pm2++yY4dOxC6jXDxHCKli5BOb7/2IuzH0fwhzrb9SNNg+cc+xlVXXsmqVasoLCzM0VUoThPlaDNAOVpFxpimSVdXF21tbbS1tdHa2kpLSwvNzc0cb2ig7mgdoVAw1thdgKE5EYDUUh4FmBGEEUHaHKA7ABCWhYgG0KJBkLH8bWlZObNrZlFWVkZJSQmTJk0iPz8fn8+H1+vF4/Hg8XhwuVwIof7Wc4i6+RmgcrTjhIMHD/L973+f7u7uUe1XSolpSdraWolGIukbaTpS6EjNBs58pO5Aajb0YDvCjPZr6nK5WHftOl566SVCgY7B/Wk2pM3FidZ2mpqbwRr+wZkQAofDicPpxOFw4HDY0YQGIuYBRtsJJ/o73QBm3rx5fPWrX1WR/ARFOdpxwq9+9Ss++OCDXJsBgATQ7UjNgdTtMUer6YB2yvhm3bp13HHHHUgpee655wY3EALL6QUpkZYBlomwDLCM+KuJ4KQTk1ISDocIh0MjeYmjzrFjxzjnnHO46qqrcm2KIgtkNXUghFgLfB/QgZ9KKb8zXH
uVOhga0zQ5fvx4xu2llMmoKt1r4r1lWcl10zQxDINoNEooFCIYDNLX10dvby/d3d10dHTQ3t5OS2sb7W1tRKP9I1xhd2G4izC8UzEKK3HW/wmbv6lfG5fLxTXXXMPLL79MKDTYOVp2D8LhRgQ6YMB305PnxevNw+f14fXm4fF4cLvduFwuXC5XPKKNLXa7vd9r4r3NZkPX9eQihEBLSW0IIZLbEktiPZvRscPhGK+jL1TqIAOy5miFEDpwALgCOA78Gfi8lHLPUMcoRzt+kFLS09OTzNE2NjZy9OhR9u3bz5Ejh2PO2+ZE6k4suxsSTmpgjlZKhBFCi/QhLAMhBMuWLWPRokVUV1dTXl5OSUkJBQUF2GzqH7AxiHK0GZDNb+5K4JCU8giAEOJXwCeBIR2tYvwghKCgoICCggJmz57db19XVxebN2/mt7/7HUcOH0bDJFSyAGNyDdLujjUywtjbDuFq+RDCfmZUz+T669Zz6aWXkpeXl4MrUiiyRzYj2uuBtVLK2+LrtwDnSSnvGNDuduB2gOnTp6+oq6vLij2K0UdKybZt23jyyZ+za9fO2EZ3ASAg2AXAgoULueXmm1m1apUaPTA+UR9aBmQzok33AQzy6lLKR4FHIZY6yKI9ilFGCMG5557Lueeey5EjR9iyZQuHDx9GCEF1dTUf//jHB0XDCsVEJJuO9jhQmbI+DTiRxf4UOSYSidDS0kJTUxONjY00NjbS1NREa2sL3Z2d9AX6kJbEZrdxouE4+/bto6KigunTpzNjxgxmzJhBQUFBri9DoRhxsulo/wzMFkJUAw3ADcCNWexPMYKEw2H8fn+/pbe3l97eXvx+P5s3b6atrS05SmGoYjF2DQRW8r1NA0NCX2cr9bWH2PpH0e/fnEmF+VROn0FlZSXl5eWUlpZSUlJCcXExRUVFuN3uUboDCsXIkTVHK6U0hBB3AK8RG971mJTyw2z1dyoaGxvZvXv3iJ7zVPnt1Jzj6bRN18dw+cvE8CwpJZZl8cYbb9DS0jJo+8DXk4uJaVpYpokZXx+p1H3UApfLw7p1sUkKfWmGdCWwC4kV6OLghzv5cPcuzDQ2CCGw6Tp6yjCtoYZiDXw/depUrrzyyiHvYWofw62nbk93/MB9ucbn87Fy5Up0Xc+1KWctWR0vI6X8PfD7bPaRoR389V//NT09Pbk2ZVygIXFoYNckdl1i18ChxV51IdEF1Pt1gmZmVTZPOUkhjk2TlHlORsWWhIglCBqCkCkIm4KwKYkakqhhnPZ1HTt2jLN1+ODXvvY1PvWpT+XajLOWs2JgohCCf/mXf+Gll15KbpNSnjLiHLg/3Xoi2hn4mtom9djTaS+lRNO0ZFQ61LGZvhqGQTgcji2hEKFQkFAwSDAUik1QCIVj0S6CsAVhS8AAf+a0CfLsEmvwc80heemll5BS8vLLLw/brshlsagoSmNAozlooy1spyuUvh+3y4k3Lw9PngeX24Pb7cHpdOJ0OrHb7f0+q3T3Ot09GvjZplvP9PNNPTadHUP1NdRxQ5Gu3cBzlpeXDxnJK0YHVVRGkURKmZwNlsjJJmaGDczT7ty5k+7ubgzDwDCiWNbg75Fdk9g1iSAWnTr0WKQsAdOCqBRELUHE7J+nLSkuYlrldMpScrSTJ0+mqKiIwsJC8vPzsdvto3VbFMMzdnIkY5izIqJVZIYQIlkVq6Sk5LSODQQCtLa20tzcnBx1cOLECZqbm2htaaGnp4dg5GRaIM/jpqhoEtMqq6isrKS6upoZM2ZQVVWFx+MZ6UtTKHKKcrSKEcHj8VBVVUVVVVXa/VJKIvHqX7quq+m0irMK9W1XjApCCJxOZ67NUChywpgSZ1QoFIqJiHK0CoVCkWWUo1UoFIoso3K0iqwjpWTHjh288sorHDh4iI6OdiqnVbJkyWLWrl075AM0hWKioMbRKrKGaZps2rSJJ554kqNHaxF2J1HPFCy7Cz3cg+5vBWmxaPFirr/uOi644AIcDk
euzVacHmocbQYoR6sYUaSUHDp0iLfffpsXX3qZ9rZWpLuQ0NRFGMUzQTv5T5SIBrG1HcLVtg9Cvbg9Hi6+6CJWrFjBokWLKCsrG1M1AxRpUR9QBihHqzhtUmVsmpqaaGpqoqGhgaNHj3Lg4CECfX4ALEceUrMjba6Tf45p5MaREmFGENEgmhlOaoW53B6qqqZTOW0apaWlTJ06NVnFq6CggPz8fDwej3LGuUXd/AxQOdoh6Ozs5Be/+AVdXV25NmVEOFUth+HYt28f3d3dmKZJNBqbcjv4fCImFa7bkK4C0B1ooS60SF+/VqeSG0/aK3QChmTfwSPs338Q5NCy44nKXU6Xi9KppThdTpwpYowJQcaECGO6mgOnIpMqaqNJTU0N69evV6mWcYJytEPw9NNP8+yzz+bajDFNzLE6YtGp5kCKEZAbT6BpWK54EXApEZYJVhRhRmOy46aRrHObKPdo+P0c9h8aoasb2/zhD3+gurqa8847L9emKDIga45WCPEYsA5okVIuylY/2eJLX/oSCxYsIBQKDYqA0lVtGlgdKhukqzZ2KtsGRmKJbenaJeTGDcPo9z4hPd7b20tnZyctrW20trYS6PPHnF40EDu33YnhKsLwTsEsqMBx/P1BcuOZVvKSmh2Ehj3Siwz1Dtqv6zp53kK83pgEeV5cetzpdKaVF7fZbP0iXIfDkdyWqJA23P1NraI21Gdzqgphw32W6SpvDWfT5MmTWbZs2bD3UDF2yKY448WAH3giU0ercrTji0AgkJQab2hooL6+nn3793P40CEsywLdjqU7sRweEPEh22lztBZaNIiIhhBWFIg5k1mzapgxo4pp8RztlClTklW88vLyxsy/8Wc56kPIgGwqLGwWQszI1vkVucfj8VBdXU11dXW/7b29vWzdupUXXniRXbt2oltRQiVziZbMQzq9sUZSovW1YW/bj7PjCNI0qJ45k8suvZQVK1YwZ84cVXhGMWHI6qiDuKN9SUW0Zy/79+/nySef5O23347VnHUVYNqc2MK9yEgAu8PB5WvWsH79eqWIOz5REW0G5NzRCiFuB24HmD59+oq6urqs2aPIDdFolP3797Nhwwbq6+vp6e1lRlUVS5Ys4ZJLLsHr9ebaRMWZoxxtBuTc0aaiItrxi5SS9vZ2jh07Rl1dHUePHqWuro7j9XW0dXQOehik6xplpaXMmTuP+fPns2jRImpqapRywvhDOdoMUEkwRUZEo1E6Ozvp6OigtbWV5557joaGBiKRSFKHLNWZaiSkayQ+m8SSApdu4dDBlALDMulqOc7mxhNs2LABALvdxqxZs5g9ew4zZsygoqIiOUnB5/Oph1+KcUs2h3c9BVwCTBZCHAf+l5Ty/47EucPhME1NTf1E8lL6Tfs+m5H7qaSp0w2zGigFPlAOfKDA38C2lmVhmmbaJXWfYRhs3LiRtra2fucZuKSTIzdNMyZBbppYp3n/LCBkxtRrXS5XUm48FBgsN+7SLYqcFiHTpO7gPg7s308aCbK4zLiOrp+UGk8nOa5pGmVlZVx77bXJ4V2JNkIIdF1PSpGn7hspyfB030chBKWlpar4+VlKNkcdfD5b57777rv58MMPs3V6RQq6iEWlDg2cusShS+wCmoIaoRGSGxdAvkOSj4T4JATDIibcaBGXGRdETEk4bAKRU/ZZX1/P1q1bT+NKs8+CBQt4+OGHc22GIgeMy9TBbbfdxsMPP0w4HB52IP5AMplwMJBMB5CnbkuNjoQQWJaV0QD1gdcwsP1wUtmJ17SD5KWEuM6sZcUnIkQNDNOIT6k1MAwTwxw8zdWUAtMUhEwgCnYNCl0kZ2VlwqkmKVT5TL65rIc6v43aHp1jfhsnAjbaIza6QgwpbG7TdfI8bjx5Hux2B0Joybap9yLdvRoYwQ73OaT7bgz3GaX2ndjncDj4q7/6q4zvmWJioYrKKJJYlkU0GiUUChEOhwkEAknp8e7ubrq6uujo6KCtrY3t27fT09
1N1DD6nUMT4NRikW/i1ZQQMQV5NonbLrFkPFo1Y+kFSwr6DI1o3Hf78jzMqJ5JeUqOdtKkSclCMl6vF5/Ph9PpVHnb3KM+gAwYlxGtIjtomobT6TytPGI0GqWpqYkTJ05QX1/PsWPHOHr0KPVHa2np6T91tj08+Hi73cac2bNZuGgxCxYsYP78+UyZMkU5UMWEQkW0iqzR3d3NsWPHaG5upr29nVAohGVZ5OXlMXnyZCorK6mqqlJDusY36hcxA1REq8gaBQUFFBQUsGjRuKsppFCMKEqcUaFQKLKMcrQKhUKRZZSjVSgUiiyjcrSKrGNZFu+//z6bNm3i8OHDRKNRpk+fzkUXXcTHP/5x9TBMMeFRjlaRNfr6+nj11Vd59tnnaGw8gbA5iLqLQWgcqnuXN954g6LiYm743OdYt24dHo8n1yYrFFlBDe9SjCiBQCAZvW7ctIloJILlnUJ4ynyMSVUn5calhd7dgLP5A/SeRtweD5+4+mouu+wy5s2bh67rub0QRaao4V0ZoByt4rSRUhIMBmlvb6e5uTkpNb5nz14OHjqIZZqg6Vi6A2n3IPWU1EAaKRthRhCRAJoRKziTl+dl8eJY2cTKykqmTJlCYWFhclaYw+FQExrGDuqDyADlaEcBKSVbt26lvr4+42NSayEMd97UtgPbDyc2OPA8hmH0K3kYDoeJRCLs3buX3t7eWDUwy8KMCzemlRvXbUgtpoqrhf1J/a9U+lXyCvWv5CU1O5bDgzAiCCuudjsEmq7Hah3k5TFnzhzy8vLweDy4XC6cTid2uz0pxpiuTsHA+zPUPUpXz2BgHYN0dSjy8vJYs2YNbrd7yGuYIChHmwEqRzsK7Nmzh29961u5NmNEkUJH6jbQHUjdidRs/f/kIv60xw1byUuAtLuR9rhzkjImL25GY1LjlgnSREgLyzSJmCaRSGTMVelKcOzYMb7yla/k2gzFGCCb9WgrgSeAUmK17x6VUn4/W/2NZebNm8e3v/1tDh48OGwlLkgfwQ4lW53umIERV2L/UG1TK4tJKQdFtYmlLxBMFpgJBQMIaSIME4ww0Itw5hF1FWF6p2AUVOCs/9MgqXEYvpKX6S4iUroIW08jtr5W9GAn0hwcFUNCbtxHfn4++fk+8n0+3G53MqJNRLKZRKkD7/NQbYarNTzw/ubn5/PpT386re2Ks49syo2XAWVSyveFED7gPeBTUso9Qx0zUVMHE41oNEpXVxft7e20tLTQ0NBAbW0te/bs5fjxY7FGug1LS+RoU37P0+ZoDUS0D80Ig7Sw2+3MmzcvmaMtKSmhqKiI/Px8PB6PytOOLdSHkAHZLPzdCDTG3/cKIfYCFcCQjlYxPrDb7ZSUlFBSUsK8efP67evo6ODPf/4zGzduYuuftmIFApgFFYSnLMAsqAARnyMjJbq/BUfTbvTeJux2O2uuupLLLruMZcuW4XA4cnBlCkV2GJWHYXGRxs3AIillz1DtVEQ7sejo6OCll17i+d/8lq7ODoTDTcRTAkLDHuyAUA95Xh/XX7ee9evXU1BQkGuTFaePimgzIOuOVgjhBTYB/yqlfD7NfiU3PsGJRqO8/fbbbN68mUPxmWFV8Zlhl1122dnwZH4ioxxtBmRbbtwOvAS8JqX8/07VXkW0459IJEJXVxednZ10dXXh9/uJRCKYponD4cDr9VJcXEx5eTk+ny/X5io+OsrRZkA2Rx0I4P8CezNxsorc8YMf/IBDhw71U9dNfR2ovDtQhTcSCRONGkkF3UwpKixg5qwaambPZvbs2cycOZPKykpsNjXqUDGxyOaogwuBLcBuSCr5fVtK+fuhjslmRJsq4T2UnHfi/amOGU6yG0gr3Z26/OIXv0hOXsikj8R64vzp+h7qmFTbUt9blollxuwZqPt1ugw3CWEo7ELisUtCcYXbVBwOR3Kx2WyDJMMTsuEDX1PFFYUQzJo1i69+9auD9itGFHVTMyCbow7eYhQ+hB07dvC1r30t29
1MKAQSXYBNA4eQWEJgyjP/qE4lJ54OmyYp88RUd6WEsCUImzFp8T4jjN9/aknxU7Fr1y5+85vfZNT2X/7lX7jooos+cp8KRTrG/f9oM2bM4Pzzz+fdd9/NtSnjBonAkGCYMBK/haeSE09Hlc/kjkW91Ptt1PXq1PttHOuz0x3SMDNXMh8Rli5dyty5c0e3U8VZhap1MI6RUibzpIncqRGvRZBYotEohmEQjUaJRqNEIhFCoRCRSIRgMEgwGGTDhg20tLT0y7v2z8cmXvunXNIhiEXKupDYhEQTMQlyAAmYEgwrFkFHUxxqyeRiZtXMprq6msrKSsrKypITFdxut/q3f+yiPpgMGPcR7dmMECKZw/worF+/PuO2pmkSCoUIBAL09fXR29tLb28v3d3ddHZ20tHRQWdnZ2zUQUc7/j4/kXAE04rN+JqUn09R8WTKy8upqqpi5syZ1NTUqDG0igmNcrSK00KPV8zKy8ujpKQk1+YoFOMCpRmmUCgUWUY5WoVCocgyytEqFApFllGOVqFQKLKMehimyDpNTU1s2bIlKYszefJkFi9ezIUXXkh+fn6uzVMoso4aR6vIGrt27eJXTz/Nu++8E1MhcPkwdSd6tA8ZCWK327nqqqv4zGc+Q1VVVa7NVZwZahxtBihHqxhRuru72bJlC7974QUOHjiAsLsITZ5LdPJspCsevUqJFmjH3rofZ/thpGWwcuVKPvnJT7Jy5UrsdvvwnSjGEsrRZoBytIrTxrKsflI2J06coL6+nr1793Ho0EEApKYj7R4suwcSs7rSydhY1kkZG8sgL8/LeeetZNmyZcydO5fKyko8Hk+uLlVxapSjzQDlaD8CgUCAAwcOAEOLKqaSSZt0pBNlHFhlLN2+1Km4AxfDMJJLYnpuYopuYtm5cyfd3d3J6biGaSblxtMYidTsYBkImb5YwSmlxp1eRDSIZkYg5RyarmNPqeJVWFjI8uXL8Xg8uN1unE5nUmI8tdpX4n1ifeD9Hkp2PN09HUo8M7Hu9Xqpqak5G6cKn3UXfCaoh2Efgbvvvpt9+/bl2oxRRSJAs8cEFzU7UreD0JFCAwF6oJ2hqsKcUmrc5kTanFgShDQQRgSMMNKMYJkxNV6Anp6eZJnJscS3v/1trrzyylyboRiDKEf7Ebjzzjt57LHHiEajQ0ZFQ0lYp2s31L4EQ0W2A6O0dOsDX9P1I6UkHI9mw+EwoVBcbjwUTEaxAglWFGFFgSBCtyOdXqKOfCxPMcKIoAc70l7LsFLjnmKCNWuwddVj627A3teCDPsHtXO6XHg8HvLy8vB4PLiczn77M5EVT22bui/TY9Pdx5qaGs4///y0161QZLPwt4uYIKOTmEN/Vkr5v4Y7ZrylDs4mIpEIfr+fnp4eurq66OjooK2tjZaWFhobGzlSe5SmxhMxpyQEls2FtOedUmocCcKMgBVFj/aBZVFYOInly2M52unTpzN16lSKiorw+XxKfWHsoVIHGZDNb20YuExK6Y9rh70lhHhFSvnHLPapyBIOh4OioiKKioqGbNPb28uOHTvYuHEjmzZtxgi0YRRMI1K6CNNXdvKhGIBlYuusw9nyIVqwg7w8L1f/xXouv/xy5s6dezbmOhUTmNGSG/cAbwFfkVJuHaqdimgnDl1dXbzwwgs8++xz9PR0g9NL1FeGtLkQYT8O/wlkNExZeQU3fO6zXHXVVbhcrlybrTh91C9iBmRbBVcH3gNqgB9JKe9J00bJjU9gwuEwGzduZOPGTezZuxd/bw9FxZNZtnQJa9asYeXKlWiamgk+jlGONgOymvCSUprAMiFEIfAbIcQiKeUHA9o8CjwKsYg2m/Yosk8gEKCpqSm5tLS00NbWhmkazKiaDsSGQrlcLurr67HZbMydO1dJjysmNKPyZEFK2SWE2AisBT44RXPFGMMwDB588EEOHTqUHIObTiInGokpKQxEF2DXZGzEAmAhiFoCK+VndVpFOQsXLWbu3LnMmTOH6u
pq8vLyRusSFYqskjVHK4QoAaJxJ+sGLgf+I1v9TWRStcFSJxcknN2TTz5JfX39kBLlqa+maQ6SQE9dT7SxLBPTtLBME+sM00uDJygM/i/TqUnyHRZdzcf5w4kTvPbaa8l9Npsen4jgSE5GSMiOJ6THh5IdT51kUFNTw1133XVG16BQjATZjGjLgMfjeVoNeEZK+dJId+L3+/nCF75AR0f6sZuKzNCQ6Fos+owJK4KmgabHpMm7IxpR6/TScZnIkGtCUuxKRMEmUSsmOR42BRHTIhwy6OsLfKRr27VrF88///xHOkemCCF48sknmTZt2qj0pxgfZM3RSil3Acuzdf4ELpeLFStW8Prrr2e7qwmNhcCyIApoQuC2CRy6xK1LPLpByBCn7WgzkSGv8pl8+2M9WBJO9Okc7rFxzK9zzG+jKWQnFP2IFzbKrFixQglNKgahah1MYEzT7Fe7IBwOEwwGCYVChMPh5LbEa2JJyJD39fXR19eH3++n9sgRAsEAhmEOKTlu18CuWfHXWArDkII8myTPLpNV5k0JhhRETHDo4LFJjvQ6CBmx87qcDmZUV1NVNYOysjKmTp1KcXExhYWF+Hy+5KwwNXlhTKBGHWSA+qZOYHRdx+1243a7R/S8oVAoWb2ro6OD1tZWWltbaWpqormpicbGBtq6epLtuyNDn8um61RXV7N29SIWLFjAvHnzmDZtmhrypZhQKEerOG1cLhelpaWUlpYO2SYUCtHc3Jwc3tXT00MkEvO4Xq+XSZMmUVlZSWVlJQ6HY7RMVyhygnK0iqzgcrmoqqpSygkKBUqcUaFQKLKOcrQKhUKRZZSjVSgUiiyjcrSKUaGjo4N33nmHQ4cOEYlEqKioYNWqVcycOVOVRFRMeJSjVWSV48eP8/Of/5zXX38d0zQRdidoOjIc4Cc/+Qmzamq4/rrruOyyy3AOUEtQKCYKasKCYsSRUrJ//35+/etf8+abbyKFRrh4NtGSuVjuSSAEIhrE1lGLs20/ItCJ1+fjqiuvZPXq1SxcuDAppqgY86h/RzJAOVrFR0ZKSWtrK4cOHWL37t289fbbHKuvR+h2DGc+CA2pxR3nQDkbCcIMI6KBmOQ4oOs2Pvax5cyZM4eqqirKy8uZMmUKRUVFajbY2EM52gxQjnYcEQ6H00t9D0Nqla7UJbFvYKWvRIWwxFTcQCBAX18fr7zyCs3Nzck2qeUSo9Fof/lz3YG0ubDsLvRgJ8I8WbBgOMnx5PFCA6EhLGPQPk3T0G02bAOqeBUWFnLuueficrlwu91JKfKEHLnD4UhW/9I0LXaelPeJdegvLz4wf5y6PV3lsIHHp72+IaTMB+J0OsdDZK8cbQao8GCc8P7773P33Xfn2oxBSKEhdSfoDqRmi8uPD/23l0lFLzQd01MMUiIsE6QZe7VMpBXFjMZqOKT20tnZSW1t7cheXI6ZPXs2P/nJT3JthmIEUI52nFBTU8Nll13GO++8c1rHDZQoPxOklMj4azTSv3CBkBbCCCGkiemehJlXjJFfgZlfDpqOe9/vsfU2JdtnUtHLchUQnTwHPdCOFurBFvUjzBDSGKZoQhy73YHQRNIJZ2NEw0jc01Phcrm4+eabs9qHYvTIeuogXo92G9AgpVw3XFuVOhj7mKZJIBCgt7eX7u5uOjs7aW5u5vjx4xw4eJB9+/YRjUQQdifhohowI2jh3pQTDCE5bkUQ0RCaEQIZS23Y7XYqplUyraI8KTleVFREfn4+Xq+XvLy8ZHrA5XLhcrlUMZrRR6UOMmA0Itq/BfYC+aPQlyLL6LqOz+fD5/NRXl4+aH84HGb79u28+uqrbN68BUtKIpNriJQuQboGfAUsA1tHLa6WPYhAB3a7nZUXXMB5563KrWDNAAAgAElEQVRk8eLFTJ8+fTzkKBWKU5JtFdxpwOPAvwJ3q4j27KK5uZmnnnqKF196KfbwzFeO6ZuK1HT0QAeO7uNII8z0qiquW7+eNWvW4PV6c2224v
RQEW0GZNvRPgv8O+AD/j6do1Vy4xOftrY2fvvb37LhzTc50dAAgM+Xz/nnr2Lt2rUsX75czQ4bv6gPLgOyKc64DmiRUr4nhLhkqHZKbnxiEgwG2bdvH3v27OHAgQPU1x2lp6sTTQhsNh2Xw0ZTUyNbtmyhtbWV+fPnq4Lf45D33ntvis1m+ymwCFU7BcACPjAM47YVK1a0JDZmM0f7ceBaIcQnABeQL4T4uZRSPUqdQJimSXt7Ow0NDTz++OPU19cTCAT6jZG1a+DULAQSlybQMQj3tHGgq53du3aT+HXVNC05/nX69Ol88YtfpLy8nEmTJikHPEax2Ww/LS0tnV9SUtKpadpZHyhZliVaW1sXNDU1/RS4NrE9m+KM/wD8A0A8ov175WRzT+pEg0ceeYQjR46klR0fuCQmKaROVjCiUaLG4EkFA4laELW0U05WsCwLv9+P3++ntbWV9957L7nPZrMll8REg1O9JpaZM2fy5S9/GZvNht1uR9d1laoYORYpJ3sSTdNkSUlJd1NT06LU7Rk5WiFEAXAfcFF80ybgfill94haOcH54Q9/yLPPPptrM84YTZyUI9eFxC0g3ymxaeDQJK0hjZA5dOSZ0WSFOC7dYrLLImoJDAsMaWEYEcyoICLjqr0SrAz+vPfu3TvsuN2xyi233MKXvvSlXJtxKjTlZPsTvx/9/hAyjWgfAz4APhtfvwX4GbA+k4OllBuBjRn2NWHJy8vLtQkfCSnBYRPk2cFrsyhwmExyWhQ7Taa4LX5f76a2d2hHm8lkhQRT3RZrpwdpC+p0hDW6Ixq9UY2AqRMwNIIGSdXcicp4/74oTpLRqAMhxA4p5bJTbfuoqOFdo4thGIPkyBNLQpI8EAgkax4k/q3v7e2lq6uLjva2mPBir7/feV26xKVL3DYLt03iiPveoCEIGAKPTeK2xb53loSQKQgagpApCJkaUau/nQU+L0XFxRROmoTPd3Kygsfj6be43e7k9kSNA5fLhdPpxG63q3RBFti5c2fd0qVL23Jtx1hj586dk5cuXTojsZ5pRBsUQlwopXwLQAjxcSCYBfsUo0gi5+nxeD7SeQKBAA0NDdTV1XH48GH27d3Lvn176QrEqnF5HYJyd5SKPBOXTWJa0B3RaA7ZaQoIEr/1pVOnsGL+AmpqapgxYwaVlZWUlpaqOrUTnAceeKDE4/FYd9xxR/tInO9//+//PeWxxx4rWbRoUeCFF144rQIY999//5S/+7u/a/P5fNapW2dOphHtMmITDwqIjZvrAL4opdw5ksaoiHbiYJomtbW1J4d31dfR0txMIBDAZrNRWFBA+bRKZs2axdy5c5k/fz6TJk3KtdmK02QsRrTV1dULX3nllYPz5s07dXGMAVRUVCzetm3b3rKyslM/5Y1jGMag8p1nFNFKKXcAS4UQ+fH1nkyNUJyd6LpOTU0NNTU1uTZFkQO+8Y1vlD377LNFZWVlkeLiYmP58uWBgoIC82c/+1lJNBoVM2bMCD/77LO1Pp/Puvvuu8u9Xq95//33N69cuXLuihUr/G+99VZ+b2+v/sgjjxxdu3atf9u2ba6//Mu/rI5Go8KyLJ577rnDixcvDg/s98Ybb5x+/Phx57XXXltz0003tV188cX+u+++e3ooFNJcLpf13//937VLly4NG4bB3/zN30zbuHFjPsCtt97aJqWkpaXFvnr16jmTJk0ytm7deuDHP/5x0Xe/+91SKaW4/PLLu/7rv/6rAcDj8Sy//fbbmzds2JD/f/7P/zl+1VVX+Qfakkqmow7uHrAO0A28F3fCCoVCAcDmzZs9L7744qTdu3fviUajYtmyZQuWL18euOmmmzq//vWvtwHcdddd5Q899NDkf/zHf2wZeLxhGGL37t17n3766YL777+/fO3atQd+8IMflPzN3/xN81e+8pWOUCgkjCGGFf7yl7+s37RpU8GmTZsOlJWVGR0dHdqf/vSnfXa7nd/+9re+b37zm9Nee+21w9
/97ndL6urqnB9++OEeu91Oc3OzPnXqVPO//uu/piaOPXr0qP2+++6reO+99/aWlJQYF1100Zwnn3yy8JZbbukKBoPaokWLgg8++OCJTO5Jpjnac+LLi/H1a4A/A18WQvxaSvlAhudRKBQTnI0bN3qvvvrqLq/XKwF5xRVXdAG899577nvvvbeit7dX7+vr01evXp12eOhnPvOZToALLrig7xvf+IYD4Pzzz+/7z//8z7Ljx487brjhhs500Ww6Ojo69M997nPVR48edQkhZDQaFQAbNmzI//KXv9xqt9sBmDp16qCK+m+99VbeqlWresvLyw2Az33ucx2bNm3y3nLLLV26rvPFL36xM9N7kul0m2LgY1LKr0spv07M6ZYAFwNfzLQzhUIx8Rnquc/tt99e/cMf/rD+wIEDe+65554T4XA4rf9xuVwSYg9rTdMUAF/+8pc7fve73x1yu93W1VdfPeeFF17wZWLLPffcU7F69eregwcPfvjiiy8eikQiWsJGIcSwD6iGe37lcDis05FVytTRTgdSE8tRoEpKGQQy+mVRnN309vby+uuv89hjj/HUU0+xc+fOpKSOYmJxySWX+F977bWCQCAguru7tT/84Q+FAIFAQJs+fXo0HA6LX/3qV0Wnc849e/Y45s+fH/6nf/qnliuvvLJrx44d7kyO6+np0adNmxYB+PGPfzw5sf3yyy/veeSRR0qi0ZjMUnNzsw6Ql5dndnd3awAXX3xx39atW32NjY02wzD49a9/XXTJJZcMm4sdikxd8i+BPwohfhdf/wvgKSFEHrDnTDpWnB3U1tbyzDPP8D+vv445IK9WXlHBrV/4AmvWrFGiixOI1atXB9auXdu9YMGChRUVFeElS5b0FRQUmN/61rdOrFy5cn5FRUVk/vz5Ab/fn3Gx4SeffLLo17/+dbHNZpMlJSXRf//3f88oN3rPPfc03XbbbdUPPfRQ6UUXXZR8iP93f/d3rQcOHHDOmzdvoc1mk7feemvrt7/97dZbb7217eqrr549ZcqU6NatWw/ce++9DatXr54jpRRr1qzpvvnmm7vO5J5kXCZRCLECuJDY8K63pJQjPg5LDe+aGLS3t/PHP/6R119/nR07diB0G+GiGqKTZ2N5isGKYus6hqvlQ0RfO+UVFdx8001cdtlluFyuXJuvOA2GGt7V3d2tFRQUWL29vdr5558/95FHHqm78MILA7mwMRec1vAuIURqeF8bX5L7pJQdI26hYtwgpaSzs5MjR45w6NAhDh48yLvvvksgEP97EjqWw4vl8KCFunDWvd1PxsYUdoSrkIbGZh544AF+8IMfcsEF53POOeewcOFCVTZxHHPzzTdXHTx40B0Oh8UNN9zQfjY52XSc6v+19wDJyeK+ifBXxN/PzJJdigyRUvZbhtpuGMagKbepcuEDJcb/8Ic/0NLSMug8/SXJI1hWysNaocUeMiSNMNEifrRILK3lcrlYd228eleg/2+01Gz0RSVvbNjAG2+8ETudEDgcTpxOR7LqVqIyV2lpKZ/4xCdwOBw4nc7k4nA4cDhi7VMrfiWqfqnKXaPDiy++mFVJ4qamJv2SSy6ZO3D7xo0b95eWlg4aQZBrhnW0UsrqxPt4dDubWG1ZxWkipeT222/n4MGDuTblIyMBNBtSd4DdhtRsoNmRmoYeaAczmva4Yat3CYHlLsCSBQjLACuKMKOEjCihcM+gMv4NDQ39yiiOJt/4xje45pprctK3IkZpaam5b9++cfN8KNMJC7cRE1mcBuwAVgHvAGuyZ9rEQgjBpZdeOiEcrQCwDDRNw7R7MV2FWJ5iLE8xjuPbsPmb0x43XPUu01NMcM5V6L1N6L2N6H1t2MJdyGh6p50rJk+ezJw5c3JthmKckemj3r8FzgX+KKW8VAgxD/jnUx0khDgK9AImYEgpzzlTQycCN954IzfeeGOuzRgW0zQJhUIEg0GCwSDhcBjDMFLSBWH8fj8dHR3JQjIHDx3G33
byB0RqNqTuRNqdSM2RTDz5zQjPvPga0uYBX2GyvTANhBEmf+dTSCOCpmlUV89k9uwlycIyxcXFSZlxt9udTBGoNIBiPJCpow1JKUNCCIQQTinlPiHEoPzIEFwqpRxTRScUQ6PrOnl5eadVC1VKSWtrK4cOHWLv3r3s2rWb3R/sxgr0IT1FhKcsIFo8C7T+o3m0YCeOhu3Yeo9idzi49LJLuPTSS1m6dOlHriimUIwlMnW0x4UQhcBvgdeFEJ1ARuPYFBMfIQRTpkxhypQpXHDBBUBsgsKmTZt47vnnqT3yFu4T7xOeVI2ZNxlhRrB1HcPWfRyny8XnvvAF1q9fT2Fh4Sl6UoxH/uqrd81t6+xxjNT5Jk/Kj/zkRw/tH2r/ypUr595zzz2N1113XXLc7P333z/ljTfeyD9x4oTz4MGDHz7++OOFjzzyyJR33333AMBrr73m/du//dvpu3fv3pOYljuSZFq969Pxt/cJId4kVi7x1UwOBf4nPtXtx3HF234MkBvPyGjF2Mfn87Fu3TquueYa3nvvPX7zm9/wx61bMZs/BKCouJh1X/gC1113HQUFBTm2VpFN2jp7HPUVl42Yo6Vhw7C7P/OZz7Q/9dRTRamO9rnnniv6j//4j+N33nlnFcCtt97a9bOf/WzyI488UvSlL32p484775z+ox/9qC4bThbOQJxRSrnpNJp/XEp5QggxhVgkvE9KuXnA+ZTc+AQkGAxSV1dHbW0tx44dw+Px8LHlywmHw+Tl5VFVVUVFRQU9PT3k5+erXKtixLjllls6/+3f/q0iGAwKt9st9+/f72hpabHPmDGjX33ahx9+uP7KK6+c++GHH7qXLVvWd8UVV/Rly6asznuUUp6Iv7YIIX4DrAQ2D3+UYjwgpcTv99Pe3k5LSwtNTU2cOHGC+vp6jtYeobGpOTmuV9dgkhN89tjwxmZT589b38WIlzooLMhn6bLlLF68mIULFzJz5kylqqA4Y0pLS82lS5f2PffccwU333xz1+OPP1507bXXdg78MV+wYEHkk5/8ZMd///d/lxw4cOCDbNqUNUcbr4OgSSl74++vBO7PVn+KoTFNM6kDlhhRkNACS4wsCIVC/bTDEq+p7fv6/PT29NDU1Ew4Mrh4vQDsmsSpS4qdEmf8vWEJgqbAsmJaYU7NotAOkbhWmIh28MGfNrFpU+yfJU0IyspKmVY5nalTpzJ58mQKCgooKChIaoK5XK7kkpi0oHTBFAk++9nPdjz99NOTbr755q7nn3++6Kc//enRgW1M02TTpk35brfbOnTokON0VBVOl2xGtFOB38S/+Dbgl1LKTPK6GXPgwAHuuusuQqHQSJ5WQcxpChGTGBfEJMY1IdEFGIaAQVMIYgn5iCWIWILelOGvLpeLdeviM8L8gz8rt25R5YtSaIdQXKSxp7WBHc0niFoiI0nx8cx3vvMdVq1alWszJhQ33XRT1z/90z9VvvXWW55QKKRdeOGFgf379/fLE3/nO9+ZMm/evOB9993XcMcdd0zfvn37vmxN+c7aRHIp5REp5dL4slBK+a8j3Udvb69ysiOMBti0WGRq1yR2YeHQrPh6bPvpxoyJGWGnmk1lE2DXwaGf7N+hSbQJHqR2dZ1RQSjFMBQUFFirVq3qve2222asX79+UE2W+vp628MPPzz1oYceOn799df3lJaWRr/3ve9NTneukWBc16ZbsWIFGzduzLUZY4ZEHYLUCQbRaDSZEggGgxmlDlKlx0OhEIFAH8G+Pvx9ffT19WHI05utNdyMMACPLVZOo9ZvIxKfpW636ZSVljK1rJzJkydTWFiIz+dLTlhwu91JKfFE+iBR4yCxJGocqMI0uWXypPzIqUYKnPb5MuCGG27ouPXWW2c99dRTRwbu++pXv1p51113NSXUE370ox/Vr169et7NN9/cmU5t4aOScZnE0UCVSRwfPPjggxw4cKBfUZqkcw6FCIfDpH6rdAEaFnYt9mDMlBC1BFHrZK
g6Z3YNi5csZcGCBcyZM4fy8nJ0PeNypYocMRZVcMcCZ6SCq1Ck8rWvfW3Y/YZh0NDQwOHDh6mrq6O+vp7W1ha6uzoRCNweD2XlFcyYMYP58+ezcOFCvF7vKFmvUIw+ytEqRhybzUZVVRVVVVW5NkWhGBOo5JVCoVBkGeVoFQqFIssoR6tQKBRZRuVoFaNCe3s7u3fvpq2tDZ/Px5IlSygrK8u1WQrFqKAcrSJrSCnZtm0bzzzza7Zt+zMDhxIuW76cm2+6iRUrVqipsxOYv//qbXP9Xe0jVr3LW1gc+c8f/XTIMokAHo9neSAQ2J5Yf+ihh4q3bduW98QTT9Tffffd5d/73vfKdu/e/cGiRYvCAP/8z/885b777qvctGnT3osvvjhQUVGxuLS0NPLee+8l+5k3b94C0zTFwYMHPzxdm5WjVYw4gUCADRs28Oyzz3H0aC3C4SFUugRjUhWW04sWDWLrrGfHnn3s+Pu/Z+HCRXzhC7dw7rnnqskFExB/V7vj23MPjZij/bdhXWxmzJ49O/jEE08UPfDAA40Av/vd74pmzZrVb5ppX1+ffujQIXtNTU30/fff/0haicrRKj4y4XCYuro69u7dy9NPP01jY2NMNVezYbkKkDYXur8ZvftYP7lxw+5FQ+fDffu55557KC+v4IorLmflypXMmTOHbNUGVSg+8YlPdP3+978vfOCBBxr37Nnj8Pl8hs1m6/cv16c+9amOJ554ouj+++9vfuKJJ4quu+66jmeeeab4TPpTjnYCY1kWlmUNuS8hPx4IBPD7/fT09OD3+wkEAkQiETZu3EhraytwUr48McU3MSssEo1iGumLHgnLQA91A93A0HLjUrNjuQpoaG7l8ccf5/HHH0+2T0iIp8qFV1RUsH79+kFS4wPbK85ewuGwNm/evAWJ9e7ubv2KK67oTqzn5+eb5eXlkT//+c+uZ599tvD666/vfPLJJ/vVOrjxxhs7b7311ur777+/+bXXXiv8+c9/fkQ52nHEr371Kx555JFcm/GRkJoNhIYUdnC4kJoOuh0t2I2wTlNuXIC0uzHtboRlgRlBmFGC0SihsB9k/6nndXV1vPPOO9m8vDNm+fLlfPe731UpkBzjdDqtVDnyRI42tc1nP/vZjieffLJow4YNBZs3b94/0NGWlJSYBQUFxqOPPjqppqYm6PV600ctGaAcbQ4oKSnJtQkfGYFE2J0YNjeWw4flLsD0FONo3InN35L2mKGKy5ieYoLzPgFSovW1Yus5gdbXhj3cBaHe0bicEWPatGnqwd444YYbbui69957py1evDhQVFSU1olef/31nd/85jerHn744dqP0ldWHW1c0PGnwCJi5Ur/Hynlu9nsczywZs0a1qxZk2szkkgpCQaDyfRBoqKXZVlJp2GaJpFIhL6+Prq7u+nq6oqpK7S20tDQQFvD4ZPn0x1YdjfS5ooVtY0zlNy45czH0fA+zvZDEPYDUDGtkjnLV1BRUcGUKVOYNGkSPp8Pj8eTlBtPpAlUqkBxJni9XnnfffcdX7BgQXioNjfddFNnY2Ojff369T11dXVn/NAg29/O7wOvSimvF0I4AKUhPQYRQuDxePB4PEyZMuWMzhEIBNi3bx/btm3jzTc30th4AmEPEZo8l+iUBUjH4I9eRAI4GnfhbDuAlCbnrVzJ5ZdfzrnnnqsUcScQ3sLiyEiMFEg930id6/bbb+8cbv+kSZOsf/3Xf236qP1krUyiECIf2AnMlBl2osokTgyklOzatYvnnnuOLVu2IIUgWlCFMWk6ltOHFglg6z6Go+MIAslVV13FTTfdxLRp03JtuuI0UWUS0zOaZRJnAq3Az4QQS4H3gL+VUvZTmlRy4xMPIQRLly5l6dKlNDQ08Pzzz/Pa//wP/iMn01xOl4u1f7GOz372s1RUVOTQWoUi+2Qzoj0H+CMxyfGtQojvAz1Syv93qGNURDtxMQyDo0eP0t7ejtfrVeNkJwgqok3PaEa0x4
HjUsqt8fVngW9lsT/FGMPv91NbW0ttbS3Hjx+nqamJ9vY2+vx+opEImq7hdLrw+vLJz89n0qRJyaWoqCipfOv1evF6vTgcDvVEf+xhWZYlNE0bO1ItOcayLAH0G8WQNUcrpWwSQhwTQsyVUu4H1gB7TnWcYvwQjUbp6emhs7OT1tZWmpubOX78OPX19dQeOUxrW3uyrV0Hh4hNetC0kxq6lowtphQY8tSKt5qmoQmB0DSEEPH3AiHi65qGpp18X1xczKpVq5LS5KkTHNLpiyUWXdeHXVcOP8kHra2tC0pKSrqVs4052dbW1gLgg9Tt2R51cCfwi/iIgyPAX2a5vwnL1q1bueeee3JtxikRgEOXODVJiUvi1GOLTYP6Xp2gqUEG0nf9JMpTlI4ty4qFCmZm+nkdHR0cPHjwjK7lbOW6667jjjvuyOjHxDCM25qamn7a1NS0CFV2FWKR7AeGYdyWujGrjlZKuQM4J5t9nC3s3r071yacEpuQeGwy5mjjztauc9ry5DDMLLLTRI/bZFqJqDkWQZ++aPrZw6ZNm7j99ttxOp2nbLtixYoW4NrsWzW+USq4iiSJegYJ2fLUugapMuQJufLEBIeOjg7a2tpoamqk4dgxWttP1jFw6lCeZ1LmNtjfZaM9nJmyrcvl4pprruHll1/uF9GeLnabjdLSKbhcbtxuD86U9IHNZktKlJ9u6sBms2G325PbU98najIkpM5T0xmJlEbifSJqHLg/sS01DTJGUb9YGaCm0yiSpP6hf5SZVoFAgLq6Oo4cOcKRI0eora3l0LF62sPDP5zWdR27zYYtPhrhlVdewePxUFxcjK7rg5zWQIeVzinV1NRw5513nvG1KBQjgXK0ihHH4/Ewf/585s+f32+7YRh0d3fT19dHJBJB0zScTid5eXn4fD50PbNoV6EYbyhHqxg1bDYbxcXFFBefUaU5hWLcMmYTPwqFQjFRUI5WoVAosoxytAqFQpFlVI5WkXWi0SiHDx+mvb0dIQSlpaXMmDFjLA9ZUihGFOVoFVmhu7ubt956i02bNrF9+3ai0f7yNgWFhVx26aV8+tOfVlXbFBMeNWFBMSKYpkltbS07duzgnXfeZcfOHVimCa58IgXTML1TsZw+kBZaqBtb1zHs3cfAMrng4x/nxs9/noULF6oaAuMP9YFlgHK0ioywLIve3l7a29tpa2ujtbWVlpYWTpw4QV19PbW1tUQj8cL37kLCBZUYRdVYnuJ+cjapiGgQe8teXK17kdEws+fM4eq1a7n44ouZPHly2mMUYw7laDNAOdpxhpTypNR3fFpsYppsQl48MZU2IQ+eKi0eiUQIhULJqbShUCi5hMPh5PtgMIi/r4/e3l56enrp7e2JRagDETpS05GaDanbQXfEFHETmBGEEUHaHKA7+h1qeYoIT18FZhR7+yGcrfsRcRnyimnTmD9vHjNmzKC0tJTi4mIKCwvJz88nLy8Pp9Opot+xgfoQMkDlaIFHH32UX/7yl7k2I8eIeOQpYvLfaCAEUggQGuguhBVCDJD+RpoI00SYEUijMu5yuVh3bbwKV6Cj/6GBdrSUbabuQHgmI4wwxxtbaWg4AfKMFZ5zwiWXXMK9996rHvQp+qG+DUAwGMy1CTkm7mTjS9LJEne8yeX0SVThuuaaazI0RaQ4+PEXLAUCgVyboBiDZFPKZi7wdMqmmcC9UsoHhzpGpQ6GJpEGiEaj/dIGifVEiiDT1EHqkpAXT00dBAJB/H1+env9+P29hM+wgtZwVbgMXynBeZ8Ay8TWUYuzbR9abwsARUXFzJ8/j6qqKsrKygalDjwej1JcGBuoDyADRiVHK4TQgQbgPCll3VDtlKMdu4TDYb73ve9x+PDhpKOPRqMx5xwOEwmHk22lZkPaXLEFa+gcrasAy5mPu+VDZCTAtMpKrl67losuuojKykrlRMcH6kPKgNHK0a4BDg/nZBVjG6fTybe+NbTkWzgc5sCBA2zfvp133n2XfX
v3QsSP9BQRKZ6F6ZuKdHgBiRbsxtZ9DGfHEaQZZdnyj/H5z9/Aueeeq5yrYkIyWhHtY8D7UsofptmXKje+oq5O+eKJQGtrK5s2beLNjRv58IMPBu232ewsXryIz3/+86xcuTIHFipGCPXLmAFZj2jjemHXAv+Qbr+U8lHgUYilDrJtj2J0SAof6jqaEFjxH3SXLSYlEzGibN++ne3bt1NeVsqSpctYsmQJCxcupLKyUj21V0woRiN1cDWxaLZ5FPpS5IjUmWFvv/0WO3fuwrIsyr2Sv6gKsqgoygyfgTM+xDZoCOr9Okd6bBzoquetDc28+uqrAHjcLubMncusWTVUV1czbdo0ysvLKSoq+kjKDwpFrsh66kAI8SvgNSnlz07VVj0MG3sYhkFffOJCb28v3d3ddHZ20tXVRUdHR2x2WEMDdXV1ROL1DMrzJCsmB2kL6XSGB0emQUMQMAQem8Rti33/JBAxBUFDEDJjS9gUpH47NSEoKPBRWFCIr6AQn8+H1+tNLj6fLzkiISEv7nA4cDqdyVe3243b7VZqDiOHSh1kQFbDAyGEB7gC+Ots9jPWeOutt3jggQfGxPjcxA/pcA+ZTNNESovT/c0VgE2TODRJni4p9sQUZ+0aHOy2U5eQFx9APylx/6mHjTk1ySSnRdQCI9BFa18XTcfBQmBKgRVXtj0dNCHQ9IRtJ+9N4j4lhsel3reh7uGp7nEmn8FIsmrVKr7+9a9TWFg4Kv0pTk225cYDwFmnW/LMM8/Q09OTazOygi76S4nbtZhjdWgSLUM/crpS4pqQFDpjM8RMSVI63Iw72Ab+P1UAABgfSURBVMT7qCViztg6KSs+VMBlSYllpJlSPAHYsmUL69at47zzzsu1KYo4KuGVBR588EEMw+gXESXep4tu0qVvUtumtkndlipXndouIRmeTjY8saSOhR04CSJ1UkMgEEimDnp6uunq7KS7u5uu7h6syMnpsQIo80rm5odZUhxhUVGU7+7MZ1/X4Ij2pZdeQkrJyy+/nNH9FEB7xE5HKOZkh8Jht+P1enB7PLjd7ri0eCx9kEgdeOL7PB5Pv/cJCfKEDHlCgjyhvpuqsjvw3qeq8Cba6Lo+6JiBEfJwabt035vUbenaJvYlpM4VYwdVVEZxRliWRXd3N62trZw4cYLa2lr27dvH7l07CQRDOHWY5DDQNciz9Y92B+Zow/HcbMAQBE1B1DrZWNc1CgoK+djHPkZJSQnFxcUUFBSQn5/fLz/r9XpxOp05uBNnPSpHmwHK0SpGFMMw2LlzJ5s2bWLjmxvo6fXjsgnmFISp9hkUOCwsKeiOCOr9Nmr9Dnrik8oKC/JZuGgxCxYsYM6cOcyaNYuioqLcXpDiVChHmwHK0SqyhmEYbN++nS1btrBr5w6O1tUn92maRtX0SubMnceSJUtYsmQJ06ZNUzPDxh/qA8sA5WgVo4ZhGHR3d6PrOj6fTw2xmhgoR5sBKmOuGDVsNhvFxWfdIBSFQtWjVSgUimyjIlrFqNHX18fRo0dxu92Ul5fjcrlybZJCMSooR6vIKqZpsmXLFp577nk++GB3cgyo3eHg/FWr+PSnP82yZcvUQzDFhEY5WkVWCAaDvPHGG/zyqac40dAArnzCZUsxPZMR0iDS28yWd//E5s2bmVVTw/XXXcfq1avxeDy5Nl2hGHHUqAPFiBCNRqmvr2fv3r1s27aNP/5xK6FQEJlXTKh0CcakqpjIYyqWgb39MM6WPYhAJ3aHgxUfW8E556xg4cKFzJw5U01CGPuof0UyQDlaxSCklEQiEYLBYHJJaIsFAgF6e3vp7OyktbWVpqYmjh0/TmNjY1KOXDg8GJoDhIbUHSf/FIeSHpcgzAjCCPH/t3fuwXHd1R3/nH1Iu5IlW7KlWJKd+IFjlfg5BecBdYFkMJBC0knahAkNDYUOj0JLJzNAPZSEls6QaccNfZAmMJiWZ0gCOOAkPGKDGVoDCbbjkNiSLduys7Ielmw9V7
t7T/+4d+WVsiut5L3alXw+M3fu1b2/373n/u7q7G/P/f3ON+AkITUKuGNtm5YtY4WnG1ZXV0dNTQ1VVVVjGbqi0Sjl5eVj2zb1dNYxR5sH9qksMHv27OH+++8vthmzhKCBIBoIQjCChkMQDKMSIjjcg6TG649PJj2eRoNhnMo6SCVwnAQnz5yl/fQZcOZnAphsbN26le3bt1tvfh5hw7sKzNGjR4ttwqyggSBOsMztsWauJZSzj5Ov9LjruL1zhsouXmdi6GGecuDAgZJIsWkUDl9DByLyceD9uHmdXwDuUdWcCUgtdDB7OI5DIpEYl8krHS5IZ+3KDBukQwfpTF69vb10dnbRcbZjnBS5lFcyWlFPYPgcwZHz4645mfT4mF2hKIFQGEYupplcUFVFQ0MD9RNCB9Fo9FXhg3RIIZ2xK509K53VDNyQRDAYJBgMjsvSZSMfZoQ1Wh745mhFpAn4BfBaVR0WkUeB3aq6M1cdc7RzD1Wlt7eXkydPcvz4ce9l2HP09fUCghOOomWVbi81S4xWUkkkMUggMQIooVCI6667jnXr1rF27VpWrlxpCaxLG3O0eeB3jDYEREUkAVQAr/h8PWOWERFqa2upra1l8+bNgNtbPnz4MLt27eLZZ/fgDI+QqFlJYskmUpVLECdFsL+DcHcrwf4OQqEwN257K9u2bWPDhg32QsuYd/gdOvhr4HPAMPAjVb0rSxmTG5/HxGIxHn30UXY/9dS4EAPAwkWL+ONbb+WWW26hpqamSBYal4j1aPPAt66DiNQAtwArgT7gOyLyHlX9WmY5kxufv8TjcS5cuEBzczMVFRUcP36cc+fOEY1GaWpqYsuWLaxdu9ZCA8a8x8/faDcBbaraBSAiTwA3AF+btJYx5xgaGiIWi9He3s6pU6c4ceIEx1pbaG8/jZPxi6myTAgHIJ5y36ynpWwW19awYeMm1q9fzzXXXMPq1astfGDMK/z8NJ8CrvOUcIeBGwF70zUHyJywMDAwwMDAwJjMeE9PD93d3XR2dtJ59uw4mfE0rlijQ02ZJ+QYVFKOK1NTEVKiEcVRiHuy4kP9Pezbu4c9e/a49UNBrrzySpZf6U5UqK+vp7a2dmzEQVpmvLy83EYKGHMC3xytqu4XkceA54Ek8Fu8EEGp8fzzz7Nz505SqUsbFD/VP302gcXp1E+XmSquPplUdiwWGxtalSniqOqgjuJkDIPKRUAgJEo4oKRSwsQwXcKBhBNgMHlxXz4S45GgQ225w0gqRcepY5w60UZiEjFGgGAgQCAYJBgIIBlCiOlrNjQ0jGuXdFtMdY+5BBWznStbvVzlc11r69at3H777QQCl8dY4csNv+XGPwN8xs9rFIIdO3bQ3t5ebDNKlpAoZUEl4vVOywKuzHgwAKf6gwynpv6CyEdiXIDqMqUaRYGkk2LU6/XGvWXUAc1w7CnHIeU4JLKcb2hoiHPnss9AKzUOHz7M1q1bWbp0abFNMXzAAmHAF7/4Rdra2oDsMs+Z+3OR7+iNidLQaanqzF5otmtnk5jO7CGne6aZEuOZy8SJCaOjo8Tj8bElc78rNz7M8NAQg4MDDPQP0D84xFBynAkEA/lPLcxHYnxhmVIfTXFyoIwzgwGSE3qz4XCI2kULqK6qprKqigULqqisrBybvJCetJCerJCejJBe0hMVMteZ0uDTeRZTPZ9sZOslp+vV1dWZk53HWFIZIy/Sel89PT10dXXR1dVFZ2cn+/bto6enh3g8Pi70IjAWny0Puo4r4QjRkNsjTjoQd4SRpDCcCpDyPobVVQu4em0zq1evZvny5TQ2No7JjEejUYvJlh72QPLAHK1RMAYHB2lvb6etrY1jx47R2tLCsWOt9A8M5qyzrKmRdetdFdz169ebEu7cwx5WHljowCgYlZWVNDc309zcPLYvPUU3Fotx/vx5RkdHiUajLFq0iOXLl1uib+OywByt4SuZU3QN43LFxpIYhmH4jDlawzAMnzFHa8wafX
19dHZ2Mjo6WmxTDGNWsRit4Ss9PT08+eSTPP3MM3TEYoArNX7dtddy2223sXHjRhtlYMx7bHiX4QunT5/mscce44c//KGr5FDdRLK6EUJlBIbOUd57Ak0Ms279et53zz1s3rzZHO7cxB5aHpijNQpCIpGgra2NgwcP8vN9+3jh0CEIBBitfQ2jDevRyMLxFZwk4a6jRDsOoaNDrF79Gt7+9rdxww030NjYWJybMGaCOdo8MEdrAK4qQjKZHJuqm56um6kVll7S2bz6+vro6urilVgHHR0X5cYJhHBCETQcdSVsYBKpcQUJEHRGkcEeAGoXL6F57dVcddVVNDU1UVdXR21tLVVV7pTb8vJywuGw9YBLA3sIeWCOdobE43EefPBBdu/eXWxTiocEUAm4zjQQQhIjiGbPgDYue9cEpQUNhklVLEacJJKMQypBwEmCk8x6rlKjoaGB++67j7Vr1xbblGJgjjYPbNTBDOnp6bnsnKzrVMM4oQhOuBKnzF20rBINV8IkKf7ykRrXQAgnXIGWVeKEK9ztQOm/r43FYsyVDoJRHHz9FHuaYR/A/dZ7RFX/1c/rzSaNjY3s3bvXl3O7+WF1XHauiccys3Slf/Knl8xMXo7jpsBKpVJj4YFcmbtSqdQ4CfJ4PD5Oery/f4AL/f0MDAwwONBPKjGh9yq5He1k2btS0VoSS64m3NtGuL8DTblJD0PhMFc0NrD0Cjfxd3V19Tg58fLy8rElEomMbZeVlY1l7AoGg+MyeKX/DgQCY9m7LARh+I2fcuPrgG8BW4BR4GngQ6rakqvOXAodXO6oKgMDA3R3d9PR0UF7eztPPvkksViMZNL9ye+EImjZAjQYyh6jVYfA6BCB5DA4KZbU1fHGN7yBjRs3smbNGhoaGggGg0W8SyMP7FsqD/x0tH8CbFPV93t/fxqIq+oDueqYo537qCpHjx7lqaeeYvfupxgdjZNcdCWj9c2kqpa6sdyRC4R7Wol0vYQm4lx73XXceccdbNq0yXqXcw97YHngp6P9PeD7wPW4mmE/BX6jqh+dUM7kxucp58+f54knnuCJ736X/gsXIBBAAkE06YYGbrjhBu6+++5x2b6MOYc52jzwddSBiPwF8BFgAPgdMKyqH89V3nq08wvHcThz5gxHjhxh//79tLe3k0wmWbhwIZs2bWLTpk2sWbOGSCRSbFONmWOONg/81gz7MvBlABH5J+C0n9czikc8Huf06dOcOHGClpYWjhw5wpGXX2Jo+OJQrvKQEBJoSyrPPfccAMFggDVr1rBhw0bWrVtHc3MzdXV1FkIw5hV+92jrVbVTRK4EfgRcr6q9ucpbj7Y0icfjnD9/nnPnztHd3U13dzddXV2cPXuWsx0dxGJn6O4Z/1gjnphjeikLKvGUMJR05WzCAWUk5UrZDCUDjKSE9CdxYfUCrrpqJcuWL6e+vp4lS5awaNEiqqurx6TGKysrTdqmNLAHkAd+D1J8XEQWAwngI5M52blGS0vLtMfRZhPzmyjsN5W0db6CjVNx8OBB+vr6xoaRZQ4b07TIozeELNc5wwEIieOthaRetH3EU6/NZLJJCxfPqTBygZbfHeLFwy+MaYnlIhAQgoEggWDAWwfHhBdramrYsGHDlO2TTTAzG7mew2THsj0jgBUrVnDzzTcTCpX+OGHj0vE7dPAHfp6/mGzfvp3Ozs5imzFrhMUVWYyElKi3Dmb4o5P9QZJTyI7nIzkeEqWhwh2f60qOc1Fq3JMbTzhCUgEEx1EcJwlZJpH19vZy/Pjxmd3wLFBfX8/1119fbDOMWcC+TmfIjh072L9/P/Bq+ehcvdJcctP59KDyqTfVuSbmMpi4ZMqQj4wMMzQ4yMBAPxcu9DMSHyWRFAYyHFo0JCyOOCwuTxAJKsPZZ9+OkY/keEVIqQwpseEwncPyKsnxNOFQkIpolGhFhSs5XllJRYUrPZ5eIpHI2ESGwCSz1rJJi+eSGJ/YztN9Run9S5cu5dprr5
28wYx5g+U6MPJiZGRkXIy2s7OTrq4uOjo66Ii9womTp8bJjYcDEAk642K0iZQwlBIiQSUUgJGkG14YSgqjzkWHtHxZIytWrqapqWlcjHbhwoXjEssYJYHFaPPAerRGXkQiERobG3OmMFRVuru7aWtro7W1lSNHjvDySy9ytrN70vNWRCNs3rxhbNTB1VdfTTQa9eMWDKNomKM1CoKIUFdXR11dHVu2bBnb39fXR1tbG2fOnKG3txfHcYhEIlxxxRWsWrWKZcuWTfqz3jDmAxY6MAzjUrDQQR5YV8IwDMNnSqpHKyJdQDrZwRJg8gBfcSll+8y2mVPK9pWibd2q+rZiG1HqlJSjzUREfqOqryu2HbkoZfvMtplTyvaVsm3G5FjowDAMw2fM0RqGYfhMKTvah4ttwBSUsn1m28wpZftK2TZjEko2RmsYhjFfKOUerWEYxrzAHK1hGIbPFNXRikitiPxYRFq8dU2Ock+LSJ+I/GDC/p0i0iYiB7xlUwnZtlJE9nv1vy0iZYWybZr2vdcr0yIi783Yv1dEjmS0XX0BbHqbd85WEflkluPlXlu0em2zIuPYp7z9R0Rk26XaUijbRGSFiAxntNNDhbYtT/u2isjzIpIUkdsnHMv6jI0SIp34uRgL8ADwSW/7k8Dnc5S7EXgn8IMJ+3cCt5eobY8Cd3rbD+FKrc+qfUAtcNxb13jbNd6xvcDrCmhPEDgGrALKgIPAayeU+TDwkLd9J/Btb/u1XvlyYKV3nmCJ2LYCOOzHZ2ya9q0ANgD/nfmZn+wZ21I6S7FDB7cAX/W2vwrcmq2Qqv4U6J8tozxmbJu4SUjfAjw2VX2f7dsG/FhVz6mrbvFjwK9ZPFuAVlU9rqqjwLc8G3PZ/Bhwo9dWtwDfUtW4qrYBrd75SsG22WBK+1T1hKoeAiZm6J3NZ2zMkGI72itUNQbgrWfy8/VzInJIRHaISCGTlF6KbYuBPlVNp8k+DTQV0LZ87WsC2jP+nmjHV7yfw58ugFOZ6lrjynhtcx63rfKpWyzbAFaKyG9F5Gci4odqyKXcv99tZxQA39MkishPgKVZDm0vwOk/BXTg/tx6GPgE8NkSsC2b05r2OLoC2DeZHXep6hkRqQIeB/4M92fpTMnnnnOVKUh7TcKl2BYDrlTVHhH5feB7InKNql6YZfv8qGvMEr47WlW9KdcxETkrIg2qGhORBmBaIlzpHh0QF5GvAPeWiG3dwCIRCXm9o2XAK9OxrUD2nQbelPH3MtzYLKp6xlv3i8g3cH++XoqjPQ0sn3CtifecLnNaRELAQuBcnnUvhRnbpqoKxAFU9TkROQZcDRQyn+el3H/OZ2yUDsUOHewC0m9J3wt8fzqVPQeTjoneChwuBdu8f849QPrt8LTvLQ/yse8Z4K0iUuONSngr8IyIhERkCYCIhIE/4tLb7tfAGm+0RRnuC6Vdk9h8O/Cs11a7gDu9N/8rgTXAry7RnoLYJiJ1IhIEEJFVnm2FVnzMx75cZH3GBbbPuFSK+SYONwb2U6DFW9d6+18HfCmj3D6gCxjG/Qbf5u1/FngB10l8DVhQQratwnUWrcB3gPIitd37PBtagXu8fZXAc8Ah4EXgQQrwlh94B3AU9w36dm/fZ4F3edsRry1avbZZlVF3u1fvCPB2Hz5rM7INuM1ro4PA88A7ffpfmMq+13ufr0GgB3hxsmdsS2ktNgXXMAzDZ4odOjAMw5j3mKM1DMPwGXO0hmEYPmOO1jAMw2fM0RqGYfiMOVrDMAyfMUd7mSAiHxSRuwt4vo+JyEsi8vUZ1P0bEakolC05rrFzYjpBwygWNo7WmBEi8jLuxIK2GdQ9gZuisXsadYKqmppG+Z24qSsfm6qsYfiN9WjnMF7WrZfFTfz9TRG5V0Q+ICK/FpGDIvJ4uucoIveJyL3e9l4R+byI/EpEjqYzUonINd6+A15GtDU5rvsQ7sy3XSLycRHZIiK/9DJc/VJE1nrlgiLyzyLygne+j4
rIx4BGYI+I7PHKvdsrc1hEPp9xnQER+ayI7Aeuz2HL33v3e1hEHs6WhUxE3uG10y9E5AviJWkXN3n69zzb/k9ENsz4YRjGZBR7apotM1twp9oeAKJAFe5U3HuBxRll/hH4qLd9H3Cvt70X+Bdv+x3AT7ztf8PN6gVuRrToJNc/ASzxtquBkLd9E/C4t/0h3Mxg6WO1Weo2AqeAOtwkR88Ct3rHFPjTKdqhNmP7f/CmyOIlhcedWtsOrPT2fxMvSbt3v5/xtt8CHCj2c7Vlfi7Wo527vBH4vqoOq2o/8KS3f52I7BORF4C7gGty1H/CWz+Hm70f4H+BvxORTwBXqepwnrYsBL4jIoeBHRnXvAlXtSAJoKrnstR9PbBXVbu8cl8HtnrHUriOejLeLK70zAu4znLi/TYDx/ViiOObGcfeiOucUdVngcUisnCK6xnGtDFHO3fJlah7J/BXqroeuB+3R5eNuLdO4aXLVNVvAO/CTZDzjIi8JU9b/gHYo6rrcGV90tcUps6NOlnC8RGdJC4rIhHgP3GlXdYDj/Dq+53s/JbL1ZgVzNHOXX4BvFNEIiKyALjZ218FxLz0h3dN54ReGsDjqvoF3DR9+cYsFwJnvO0/z9j/I+CDXn5XRKTW29/v2QmwH/hDEVnipSN8N/CzPK+bdqrdXhtkG2XwMrBKLgpB3pFx7Od4bSQibwK6tbAJvQ0DmIXE34Y/qOqvRWQXbvq+k7iJqM8Dn8Z1XidxU0hW5TzJq7kDeI+IJHCVK/JVq3gA+KqI/C1ujDXNl3CTZB/yzvkI8O+4ahhPiUhMVd8sIp/Czd8rwG5VzSt3r6r2icgjuPd5Ajev68QywyLyYeBpEelmfJ7b+3DlfA4BQ1zMR2sYBcWGd81hRGSBqg54Iwt+Dvylqj5fbLtKjYx2EuA/gBZV3VFsu4zLBwsdzG0eFpEDuAmpHzcnm5MPeO30Im6Y47+KbI9xmWE9WiMnIpJWcZjIjaraM8u2fBdYOWH3J1TVZFuMksccrWEYhs9Y6MAwDMNnzNEahmH4jDlawzAMnzFHaxiG4TP/D8MR4gTCnHGOAAAAAElFTkSuQmCC\n", 207 | "text/plain": [ 208 | "
" 209 | ] 210 | }, 211 | "metadata": {}, 212 | "output_type": "display_data" 213 | } 214 | ], 215 | "source": [ 216 | "# Plot the posterior of the influence on the local non-annualized SR\n", 217 | "sns.factorplot(x='gains_factor_algo', y='algo', hue='gains_factor',\n", 218 | " data=fit.trace.gains_factor_algo.to_dataframe().reset_index(),\n", 219 | " kind='violin', orient='h')" 220 | ] 221 | }, 222 | { 223 | "cell_type": "code", 224 | "execution_count": 28, 225 | "metadata": {}, 226 | "outputs": [ 227 | { 228 | "data": { 229 | "text/plain": [ 230 | "algo\n", 231 | "0 0.597\n", 232 | "1 0.627\n", 233 | "2 0.773\n", 234 | "3 0.607\n", 235 | "4 0.538\n", 236 | "5 0.567\n", 237 | "6 0.227\n", 238 | "7 0.276\n", 239 | "8 0.538\n", 240 | "9 0.479\n", 241 | "dtype: float64" 242 | ] 243 | }, 244 | "execution_count": 28, 245 | "metadata": {}, 246 | "output_type": "execute_result" 247 | } 248 | ], 249 | "source": [ 250 | "# What is the probability that predictor 1 is better than predictor 2 for a particular algo?\n", 251 | "vix = fit.trace.gains_factor_algo.sel(gains_factor='VIX')\n", 252 | "hmm = fit.trace.gains_factor_algo.sel(gains_factor='HMM')\n", 253 | "(vix > hmm).mean(dim=('sample', 'chain')).to_pandas()" 254 | ] 255 | }, 256 | { 257 | "cell_type": "code", 258 | "execution_count": null, 259 | "metadata": {}, 260 | "outputs": [], 261 | "source": [] 262 | } 263 | ], 264 | "metadata": { 265 | "kernelspec": { 266 | "display_name": "Python [conda env:anaconda3]", 267 | "language": "python", 268 | "name": "conda-env-anaconda3-py" 269 | }, 270 | "language_info": { 271 | "codemirror_mode": { 272 | "name": "ipython", 273 | "version": 3 274 | }, 275 | "file_extension": ".py", 276 | "mimetype": "text/x-python", 277 | "name": "python", 278 | "nbconvert_exporter": "python", 279 | "pygments_lexer": "ipython3", 280 | "version": "3.6.5" 281 | } 282 | }, 283 | "nbformat": 4, 284 | "nbformat_minor": 2 285 | } 286 | 
-------------------------------------------------------------------------------- /jenkins.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | export VIRTUALENVWRAPPER_PYTHON=/usr/bin/python2.7 5 | export WORKON_HOME=/mnt/jenkins_backups/virtual_envs 6 | if [ ! -d $WORKON_HOME ]; then 7 | mkdir $WORKON_HOME 8 | fi 9 | source /usr/local/bin/virtualenvwrapper.sh 10 | 11 | # Create virtualenv and install necessary packages 12 | if ! workon bayesalpha; then 13 | mkvirtualenv bayesalpha 14 | fi 15 | 16 | pip install setuptools==36.6.0 17 | pip install tox==2.9.1 18 | 19 | # Args after the bare '--' are forwarded to py.test. 20 | tox --recreate -- --junitxml="$(pwd)/pytest.xml" 21 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [versioneer] 2 | VCS = git 3 | style = pep440 4 | versionfile_source = bayesalpha/_version.py 5 | versionfile_build = bayesalpha/_version.py 6 | tag_prefix = 7 | parentdir_prefix = bayesalpha- 8 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from os.path import realpath, dirname, join 3 | from setuptools import setup, find_packages 4 | import versioneer 5 | 6 | DISTNAME = 'bayesalpha' 7 | AUTHOR = 'Adrian Seyboldt, George Ho, Thomas Wiecki' 8 | AUTHOR_EMAIL = 'opensource@quantopian.com' 9 | 10 | requirements = [ 11 | 'Bottleneck>=1.1', 12 | 'pymc3>=3.4.1', 13 | 'scipy>=0.19.0', 14 | 'xarray>=0.9', 15 | 'sklearn', 16 | 'seaborn', 17 | 'empyrical>=0.5.0', 18 | 'netcdf4', 19 | 'pytest-cov', 20 | 'pytest-timeout', 21 | ] 22 | 23 | 24 | if __name__ == "__main__": 25 | setup( 26 | name=DISTNAME, 27 | version=versioneer.get_version(), 28 | cmdclass=versioneer.get_cmdclass(), 29 | 
packages=find_packages(), 30 | install_requires=requirements, 31 | test_requires=['pytest'] 32 | ) 33 | -------------------------------------------------------------------------------- /tests/test_cov.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pymc3 as pm 3 | import numpy as np 4 | import pandas as pd 5 | import scipy 6 | import scipy.stats 7 | import bayesalpha 8 | import bayesalpha.dists 9 | import pytest 10 | 11 | 12 | @pytest.fixture( 13 | 'module', 14 | [ 15 | 'diag', 16 | 'dense', 17 | #'time-varying' 18 | ]) 19 | def Sigma_type(request): 20 | return request.param 21 | 22 | 23 | @pytest.fixture('module', [1000]) 24 | def T(request): 25 | return request.param 26 | 27 | 28 | @pytest.fixture('module') 29 | def date_range(T): 30 | return pd.date_range('01-01-2000', periods=T, freq='B') 31 | 32 | 33 | @pytest.fixture 34 | def Sigma(Sigma_type, T): 35 | if Sigma_type == 'diag': 36 | return np.matrix([[0.000246, 0.], [0., 0.000093]], 'float32') 37 | elif Sigma_type == 'dense': 38 | return np.matrix([[0.000246, 0.000048], [0.000048, 0.000093]], 'float32') 39 | else: 40 | raise KeyError(Sigma_type) 41 | 42 | 43 | @pytest.fixture('module') 44 | def algo_gain(): 45 | return np.array([0.0001, 0.0004], 'float32') 46 | 47 | 48 | @pytest.fixture 49 | def observations(date_range, T, 50 | algo_gain, Sigma): 51 | if Sigma.shape[0] == T: 52 | t = () 53 | else: 54 | t = T 55 | r = scipy.stats.multivariate_normal.rvs(algo_gain, Sigma, size=t, random_state=42) 56 | return pd.DataFrame(r, date_range, columns=['1', '2']) 57 | 58 | 59 | @pytest.fixture 60 | def algo_meta(date_range, T): 61 | mid = date_range[T//2] 62 | return pd.DataFrame( 63 | {'created_at': [mid, mid]}, index=['1', '2'] 64 | ) 65 | 66 | 67 | @pytest.fixture 68 | def sharpes(): 69 | location = os.path.realpath(os.path.dirname(__file__)) 70 | return pd.read_csv( 71 | os.path.join(location, 'test_data/author_model_test_sharpes.csv'), 72 | index_col=0 
73 | ) 74 | 75 | 76 | @pytest.fixture 77 | def returns(): 78 | location = os.path.realpath(os.path.dirname(__file__)) 79 | return pd.read_csv( 80 | os.path.join(location, 'test_data/author_model_test_returns.csv'), 81 | index_col=0 82 | ) 83 | 84 | 85 | def test_fit_returns_population(observations, algo_meta, Sigma_type): 86 | trace = bayesalpha.fit_returns_population( 87 | observations, algo_meta, sampler_args={'draws': 10, 'tune': 0, 'chains': 1}, 88 | corr_type=Sigma_type 89 | ) 90 | 91 | 92 | def test_fit_returns_population_vi(observations, algo_meta, Sigma_type): 93 | trace = bayesalpha.fit_returns_population( 94 | observations, algo_meta, sampler_type='vi', 95 | sampler_args={'n': 1}, 96 | corr_type=Sigma_type 97 | ) 98 | 99 | 100 | def test_fit_authors(sharpes, returns): 101 | trace = bayesalpha.fit_authors(sharpes, 102 | returns, 103 | sampler_type='mcmc', 104 | sampler_args={ 105 | 'draws': 10, 106 | 'tune': 0, 107 | 'chains': 1 108 | } 109 | ) 110 | 111 | def test_scaled_mv_normal_logp_case1(): 112 | cov = np.array([[0.246, 0.048], [0.048, 0.93]], 'float32') 113 | mean = np.arange(2) 114 | obs = np.random.rand(10, 2) 115 | scale = np.arange(1, 21).reshape(10, 2) 116 | obs1 = obs * scale + mean 117 | with pm.Model() as model1: 118 | for i in range(10): 119 | pm.MvNormal('mv%d' % i, mu=mean, cov=cov * scale[i][None, :] * scale[i][:, None], observed=obs1[i]) 120 | 121 | with pm.Model() as model2: 122 | bayesalpha.dists.ScaledSdMvNormalNonZero('mv', mu=mean, cov=cov, scale_sd=scale, observed=obs1) 123 | logp1 = model1.logp({}) 124 | logp2 = model2.logp({}) 125 | np.testing.assert_allclose(logp1, logp2) 126 | 127 | 128 | def test_scaled_mv_normal_logp_case2(): 129 | cov = np.array([[0.246, 0.048], [0.048, 0.93]], 'float32') 130 | mean = np.arange(2) 131 | obs = np.random.rand(10, 2) 132 | scale = np.arange(1, 3) 133 | obs1 = obs * scale + mean 134 | with pm.Model() as model1: 135 | for i in range(10): 136 | pm.MvNormal('mv%d' % i, mu=mean, cov=cov * 
scale[None, :] * scale[:, None], observed=obs1[i]) 137 | 138 | with pm.Model() as model2: 139 | bayesalpha.dists.ScaledSdMvNormalNonZero('mv', mu=mean, cov=cov, scale_sd=scale, observed=obs1) 140 | logp1 = model1.logp({}) 141 | logp2 = model2.logp({}) 142 | np.testing.assert_allclose(logp1, logp2) 143 | 144 | 145 | def test_equicorr_mv_normal_logp_case1(): 146 | corrs = [] 147 | corrsm = [] 148 | for t in range(10): 149 | c1, c2 = .5*np.sin(t), .5*np.cos(t) 150 | corrsm.append(np.array( 151 | [[1., c1, .0, 0], 152 | [c1, 1., .0, 0.], 153 | [0., 0., 1, c2], 154 | [0., 0., c2, 1.]], 'float32')) 155 | corrs.append([c1, c2]) 156 | corrs = np.asarray(corrs) 157 | corrsm = np.asarray(corrsm) 158 | scale = np.exp(np.random.randn(10, 4)) 159 | mean = np.arange(4, dtype='float32') 160 | obs = np.random.rand(10, 4) 161 | obs1 = obs * scale + mean[None, :] 162 | with pm.Model() as model1: 163 | for i in range(10): 164 | pm.MvNormal('mv%d' % i, mu=mean, cov=corrsm[i] * scale[i][None, :] * scale[i][:, None], observed=obs1[i]) 165 | 166 | with pm.Model() as model2: 167 | eq = bayesalpha.dists.EQCorrMvNormal('mv', mu=mean, std=scale, corr=corrs, clust=[0, 0, 1, 1], observed=obs1) 168 | logp1 = model1.logp({}) 169 | logp2 = model2.logp({}) 170 | np.testing.assert_allclose(logp1, logp2) 171 | eq.distribution.random() 172 | 173 | 174 | def test_equicorr_mv_normal_logp_case2(): 175 | corrs = [] 176 | corrsm = [] 177 | for t in range(10): 178 | c1, c2 = .5*np.sin(t), .5*np.cos(t) 179 | corrsm.append(np.array( 180 | [[1., c1, .0, 0], 181 | [c1, 1., .0, 0.], 182 | [0., 0., 1, c2], 183 | [0., 0., c2, 1.]], 'float32')) 184 | corrs.append([c1, c2]) 185 | corrs = np.asarray(corrs) 186 | corrsm = np.asarray(corrsm) 187 | scale = np.exp(np.random.randn(10, 4)) 188 | mean = np.arange(4, dtype='float32') 189 | obs = np.random.rand(10, 4) 190 | obs1 = obs * scale + mean[None, :] 191 | with pm.Model() as model1: 192 | for i in range(10): 193 | pm.MvNormal('mv%d' % i, mu=mean, cov=corrsm[i] * 
scale[i][None, :] * scale[i][:, None], observed=obs1[i]) 194 | 195 | with pm.Model() as model2: 196 | eq = bayesalpha.dists.EQCorrMvNormal('mv', mu=mean, std=scale, 197 | corr=np.asarray([corrs[:, 0], np.zeros_like(corrs[:, 1]), corrs[:, 1]]).T, 198 | clust=[0, 0, 2, 2], observed=obs1) 199 | logp1 = model1.logp({}) 200 | logp2 = model2.logp({}) 201 | np.testing.assert_allclose(logp1, logp2) 202 | eq.distribution.random() 203 | 204 | 205 | def test_equicorr_mv_normal_logp_case3(): 206 | corrs = [] 207 | corrsm = [] 208 | for t in range(10): 209 | c1, c2 = .5*np.sin(t), .5*np.cos(t) 210 | corrsm.append(np.array( 211 | [[1., 0, c1, 0], 212 | [0, 1., .0, c2], 213 | [c1, 0., 1, 0.], 214 | [0., c2, 0, 1.]], 'float32')) 215 | corrs.append([c1, c2]) 216 | corrs = np.asarray(corrs) 217 | corrsm = np.asarray(corrsm) 218 | scale = np.exp(np.random.randn(10, 4)) 219 | mean = np.arange(4, dtype='float32') 220 | obs = np.random.rand(10, 4) 221 | obs1 = obs * scale + mean[None, :] 222 | with pm.Model() as model1: 223 | for i in range(10): 224 | pm.MvNormal('mv%d' % i, mu=mean, cov=corrsm[i] * scale[i][None, :] * scale[i][:, None], observed=obs1[i]) 225 | 226 | with pm.Model() as model2: 227 | eq = bayesalpha.dists.EQCorrMvNormal('mv', mu=mean, std=scale, 228 | corr=np.asarray([corrs[:, 0], np.zeros_like(corrs[:, 1]), corrs[:, 1]]).T, 229 | clust=[0, 2, 0, 2], observed=obs1) 230 | logp1 = model1.logp({}) 231 | logp2 = model2.logp({}) 232 | np.testing.assert_allclose(logp1, logp2) 233 | eq.distribution.random() 234 | 235 | 236 | def test_equicorr_mv_normal_logp_case4(): 237 | c1, c2 = .5*np.sin(1), .5*np.cos(1) 238 | corrsm = np.array( 239 | [[1., 0, c1, 0], 240 | [0, 1., .0, c2], 241 | [c1, 0., 1, 0.], 242 | [0., c2, 0, 1.]], 'float32') 243 | corrs = np.asarray([c1, c2]) 244 | 245 | scale = np.exp(np.random.randn(4)) 246 | mean = np.arange(4, dtype='float32') 247 | obs = np.random.rand(10, 4) 248 | obs1 = obs * scale + mean[None, :] 249 | with pm.Model() as model1: 250 | 
pm.MvNormal('mv', mu=mean, cov=corrsm * scale[None, :] * scale[:, None], observed=obs1) 251 | 252 | with pm.Model() as model2: 253 | eq = bayesalpha.dists.EQCorrMvNormal('mv', mu=mean, std=scale, 254 | corr=np.asarray([corrs[0], 0, corrs[1]]), 255 | clust=[0, 2, 0, 2], observed=obs1) 256 | logp1 = model1.logp({}) 257 | logp2 = model2.logp({}) 258 | np.testing.assert_allclose(logp1, logp2) 259 | eq.distribution.random() 260 | 261 | -------------------------------------------------------------------------------- /tests/test_data/author_model_test_sharpes.csv: -------------------------------------------------------------------------------- 1 | ,meta_user_id,meta_algorithm_id,meta_code_id,meta_trading_days,sharpe_ratio 2 | 0,aaa,aaa111,aaa111_0,163,-1.164508411282004 3 | 1,aaa,aaa111,aaa111_1,96,0.5931942359425223 4 | 2,aaa,aaa111,aaa111_2,232,-1.1642544425301051 5 | 3,aaa,aaa111,aaa111_3,118,0.27806966576107744 6 | 4,aaa,aaa111,aaa111_4,220,1.0416949628392107 7 | 5,aaa,aaa111,aaa111_5,117,-0.9156594663720532 8 | 6,aaa,aaa111,aaa111_6,110,-0.2878525676690856 9 | 7,aaa,aaa111,aaa111_7,141,0.35392525548002923 10 | 8,aaa,aaa111,aaa111_8,246,-0.8697354164438235 11 | 9,aaa,aaa111,aaa111_9,266,-1.321844498658501 12 | 10,aaa,aaa111,aaa111_10,286,2.3222049436143597 13 | 11,aaa,aaa111,aaa111_11,160,0.8910964542717676 14 | 12,aaa,aaa111,aaa111_12,205,-1.464612943473317 15 | 13,aaa,aaa111,aaa111_13,114,-1.196825294288831 16 | 14,aaa,aaa111,aaa111_14,201,0.6455401855029637 17 | 15,aaa,aaa111,aaa111_15,267,-0.07410571766882255 18 | 16,aaa,aaa111,aaa111_16,85,-0.8356842231833618 19 | 17,aaa,aaa111,aaa111_17,183,0.6555706418785069 20 | 18,aaa,aaa111,aaa111_18,153,0.6182262352977762 21 | 19,aaa,aaa111,aaa111_19,155,0.3968978964000288 22 | 20,aaa,aaa111,aaa111_20,218,-1.244556811928257 23 | 21,aaa,aaa111,aaa111_21,138,0.2860652921181968 24 | 22,aaa,aaa111,aaa111_22,194,0.11912373554805646 25 | 23,aaa,aaa111,aaa111_23,245,-0.5802383885112073 26 | 
24,aaa,aaa111,aaa111_24,147,1.025959908675357 27 | 25,aaa,aaa111,aaa111_25,100,0.3943745463149358 28 | 26,aaa,aaa111,aaa111_26,280,0.7494958915271149 29 | 27,aaa,aaa111,aaa111_27,292,0.19096909107209845 30 | 28,aaa,aaa111,aaa111_28,261,0.8790084828599252 31 | 29,aaa,aaa111,aaa111_29,107,0.026885166171520883 32 | 30,aaa,aaa111,aaa111_30,114,-1.3979369928780958 33 | 31,aaa,aaa111,aaa111_31,172,0.16285511082294066 34 | 32,aaa,aaa111,aaa111_32,260,0.8803113544019813 35 | 33,aaa,aaa111,aaa111_33,155,0.410042657248656 36 | 34,aaa,aaa222,aaa222_34,251,-0.05582463137270477 37 | 35,aaa,aaa222,aaa222_35,210,1.6494743649862105 38 | 36,aaa,aaa222,aaa222_36,114,-0.05765220961726554 39 | 37,aaa,aaa222,aaa222_37,263,-0.3793622974850309 40 | 38,aaa,aaa222,aaa222_38,150,-0.3670828774769657 41 | 39,aaa,aaa222,aaa222_39,93,0.5247800832597984 42 | 40,aaa,aaa222,aaa222_40,165,0.4213243538896384 43 | 41,aaa,aaa222,aaa222_41,234,1.8121219323553681 44 | 42,aaa,aaa222,aaa222_42,289,0.1879606287677472 45 | 43,aaa,aaa222,aaa222_43,271,0.07193195721818776 46 | 44,aaa,aaa222,aaa222_44,253,0.4422119630730312 47 | 45,aaa,aaa222,aaa222_45,232,-1.4062107581449423 48 | 46,aaa,aaa222,aaa222_46,163,-0.9028928200013678 49 | 47,aaa,aaa222,aaa222_47,85,-0.10871047739465584 50 | 48,aaa,aaa222,aaa222_48,276,0.8683047422719509 51 | 49,aaa,aaa222,aaa222_49,171,1.807803083993704 52 | 50,aaa,aaa222,aaa222_50,189,-0.17868056952779396 53 | 51,aaa,aaa222,aaa222_51,243,2.9088925573163364 54 | 52,aaa,aaa222,aaa222_52,254,-0.7678165626923038 55 | 53,aaa,aaa222,aaa222_53,233,-1.2711568828387447 56 | 54,aaa,aaa333,aaa333_54,252,-1.2248244249087212 57 | 55,aaa,aaa333,aaa333_55,218,0.6445020510736111 58 | 56,aaa,aaa333,aaa333_56,93,-1.069714355506049 59 | 57,aaa,aaa333,aaa333_57,101,0.9661974837243014 60 | 58,aaa,aaa333,aaa333_58,287,0.5432677819983897 61 | 59,aaa,aaa333,aaa333_59,295,-0.5982847470737492 62 | 60,aaa,aaa333,aaa333_60,166,0.5079192803459703 63 | 61,aaa,aaa333,aaa333_61,150,0.518064855945171 64 | 
62,aaa,aaa333,aaa333_62,203,1.5006041385255686 65 | 63,aaa,aaa333,aaa333_63,201,0.16115006238112847 66 | 64,aaa,aaa333,aaa333_64,146,0.07402753374202907 67 | 65,aaa,aaa333,aaa333_65,150,-0.8861620663815991 68 | 66,aaa,aaa444,aaa444_66,258,-1.3713192712682305 69 | 67,aaa,aaa444,aaa444_67,259,0.3880560019869987 70 | 68,aaa,aaa444,aaa444_68,183,0.5329520992575173 71 | 69,aaa,aaa444,aaa444_69,117,-1.8225939982341608 72 | 70,aaa,aaa444,aaa444_70,177,-1.695695250709946 73 | 71,aaa,aaa444,aaa444_71,286,-1.100390056181871 74 | 72,aaa,aaa444,aaa444_72,123,0.05364638125124956 75 | 73,aaa,aaa444,aaa444_73,200,-1.8559073661697891 76 | 74,aaa,aaa444,aaa444_74,146,0.9582455471647444 77 | 75,aaa,aaa444,aaa444_75,92,0.3735556436211373 78 | 76,aaa,aaa444,aaa444_76,273,-1.1357239600477995 79 | 77,aaa,aaa444,aaa444_77,239,-1.852858385758416 80 | 78,aaa,aaa444,aaa444_78,147,0.11271631062483055 81 | 79,aaa,aaa444,aaa444_79,215,-0.5580209895351321 82 | 80,aaa,aaa444,aaa444_80,199,-1.3829001835104262 83 | 81,aaa,aaa444,aaa444_81,277,-0.116214500801614 84 | 82,aaa,aaa444,aaa444_82,200,-1.0273330729755337 85 | 83,aaa,aaa444,aaa444_83,229,-0.027312648525198224 86 | 84,aaa,aaa444,aaa444_84,94,0.6253841183569171 87 | 85,aaa,aaa444,aaa444_85,165,-0.7533602072048058 88 | 86,aaa,aaa444,aaa444_86,122,0.5675873804244047 89 | 87,aaa,aaa444,aaa444_87,170,-0.16262236493862806 90 | 88,aaa,aaa444,aaa444_88,86,-1.2470568134648368 91 | 89,aaa,aaa444,aaa444_89,124,0.3496174750937113 92 | 90,aaa,aaa444,aaa444_90,200,-1.511447158951443 93 | 91,aaa,aaa444,aaa444_91,119,-0.7883548044268631 94 | 92,aaa,aaa444,aaa444_92,162,-0.3797451315709731 95 | 93,aaa,aaa444,aaa444_93,151,0.5550200225943599 96 | 94,aaa,aaa444,aaa444_94,134,-0.6922079744342597 97 | 95,aaa,aaa444,aaa444_95,174,1.7423885344221308 98 | 96,aaa,aaa444,aaa444_96,185,1.1167511643418333 99 | 97,aaa,aaa444,aaa444_97,87,-1.4240097602991042 100 | 98,aaa,aaa444,aaa444_98,261,-0.6393807747837205 101 | 99,aaa,aaa444,aaa444_99,298,0.17958971165191506 102 | 
100,aaa,aaa444,aaa444_100,249,-0.2871731537237504 103 | 101,aaa,aaa444,aaa444_101,160,-0.01779247659418805 104 | 102,aaa,aaa444,aaa444_102,255,-0.9223564525710664 105 | 103,aaa,aaa444,aaa444_103,122,0.099009625087935 106 | 104,aaa,aaa444,aaa444_104,142,-1.0483090107390909 107 | 105,aaa,aaa444,aaa444_105,144,1.4046947321702945 108 | 106,aaa,aaa444,aaa444_106,214,0.4054800409449211 109 | 107,aaa,aaa444,aaa444_107,232,-0.5899850848592633 110 | 108,aaa,aaa444,aaa444_108,80,0.5372472033145146 111 | 109,aaa,aaa444,aaa444_109,250,-0.7255983269419944 112 | 110,aaa,aaa444,aaa444_110,129,-0.2833422489433768 113 | 111,aaa,aaa444,aaa444_111,85,-0.18574166593200314 114 | 112,aaa,aaa444,aaa444_112,123,0.1747591656780057 115 | 113,aaa,aaa444,aaa444_113,276,1.5600648527646608 116 | 114,aaa,aaa444,aaa444_114,184,-1.347822066282374 117 | 115,aaa,aaa444,aaa444_115,160,1.1750578064254122 118 | 116,aaa,aaa444,aaa444_116,138,-1.5974007948677649 119 | 117,aaa,aaa444,aaa444_117,280,-0.8685913250772295 120 | 118,aaa,aaa444,aaa444_118,184,-0.2679888854667482 121 | 119,aaa,aaa444,aaa444_119,175,0.6801338658120393 122 | 120,aaa,aaa444,aaa444_120,262,0.20402977338618172 123 | 121,aaa,aaa444,aaa444_121,93,-1.0293746207724717 124 | 122,aaa,aaa444,aaa444_122,179,1.4121229150283798 125 | 123,bbb,bbb111,bbb111_123,129,0.8467727970237655 126 | 124,bbb,bbb111,bbb111_124,92,-0.3845443736864208 127 | 125,bbb,bbb111,bbb111_125,297,0.20155489821500616 128 | 126,bbb,bbb111,bbb111_126,228,1.1782168601689702 129 | 127,bbb,bbb111,bbb111_127,188,-2.618849334189852 130 | 128,bbb,bbb111,bbb111_128,277,-0.04141957301762777 131 | 129,bbb,bbb111,bbb111_129,290,0.663593561564681 132 | 130,bbb,bbb111,bbb111_130,88,1.4021361754507575 133 | 131,bbb,bbb111,bbb111_131,183,1.3097211364060184 134 | 132,bbb,bbb111,bbb111_132,107,1.4346725676793313 135 | 133,bbb,bbb222,bbb222_133,258,-0.8322222066238559 136 | 134,bbb,bbb222,bbb222_134,218,1.3167687640005952 137 | 135,bbb,bbb222,bbb222_135,246,0.7577957747151605 138 | 
136,bbb,bbb222,bbb222_136,188,0.4984563626125405 139 | 137,bbb,bbb222,bbb222_137,134,1.3218617005324735 140 | 138,bbb,bbb222,bbb222_138,215,-0.9111520387087026 141 | 139,bbb,bbb222,bbb222_139,94,1.036893095484406 142 | 140,bbb,bbb222,bbb222_140,204,-1.3731137769348016 143 | 141,bbb,bbb222,bbb222_141,216,-0.8618087245034214 144 | 142,bbb,bbb222,bbb222_142,212,-1.5627338391227894 145 | 143,bbb,bbb222,bbb222_143,282,1.3205967668773904 146 | 144,bbb,bbb222,bbb222_144,123,-0.5184685114144861 147 | 145,bbb,bbb333,bbb333_145,218,0.11874007981736705 148 | 146,bbb,bbb333,bbb333_146,237,-0.338254561040838 149 | 147,bbb,bbb333,bbb333_147,126,1.583834307614996 150 | 148,bbb,bbb333,bbb333_148,107,-0.5835809009959458 151 | 149,bbb,bbb333,bbb333_149,278,1.3610740328852151 152 | 150,bbb,bbb333,bbb333_150,205,-0.14140836664654532 153 | 151,bbb,bbb333,bbb333_151,86,-0.4127014771270915 154 | 152,bbb,bbb333,bbb333_152,293,0.2260884502401976 155 | 153,bbb,bbb333,bbb333_153,234,-1.2045071284040934 156 | 154,bbb,bbb333,bbb333_154,235,0.2122985470052242 157 | 155,bbb,bbb333,bbb333_155,234,1.0700252722278718 158 | 156,bbb,bbb333,bbb333_156,199,2.1580394752494807 159 | 157,bbb,bbb333,bbb333_157,92,0.018059778875268825 160 | 158,bbb,bbb333,bbb333_158,234,0.9623354989469968 161 | 159,bbb,bbb333,bbb333_159,167,-0.12307873029077672 162 | 160,bbb,bbb333,bbb333_160,124,-0.37166772315724783 163 | 161,bbb,bbb333,bbb333_161,118,2.0362129477902733 164 | 162,bbb,bbb333,bbb333_162,143,-0.7350677530255828 165 | 163,bbb,bbb333,bbb333_163,284,1.206090802665115 166 | 164,bbb,bbb333,bbb333_164,192,1.1022845052465868 167 | 165,bbb,bbb333,bbb333_165,182,1.0631747502484796 168 | 166,bbb,bbb333,bbb333_166,123,-0.5445662099838021 169 | 167,bbb,bbb333,bbb333_167,128,-1.170333956121108 170 | 168,ccc,ccc111,ccc111_168,187,-0.4507427798538105 171 | 169,ccc,ccc111,ccc111_169,218,1.3452727168328602 172 | 170,ccc,ccc111,ccc111_170,204,0.09169863542359108 173 | 171,ccc,ccc111,ccc111_171,267,1.889485030157908 174 | 
172,ccc,ccc111,ccc111_172,283,1.1556662928187391 175 | 173,ccc,ccc111,ccc111_173,189,-0.8086702570787249 176 | 174,ccc,ccc111,ccc111_174,171,1.8757497651907005 177 | 175,ccc,ccc111,ccc111_175,223,1.417513967279158 178 | 176,ccc,ccc111,ccc111_176,235,0.15599629041675253 179 | 177,ccc,ccc222,ccc222_177,289,0.050361905303922115 180 | 178,ccc,ccc222,ccc222_178,206,-0.9951558187274392 181 | 179,ccc,ccc222,ccc222_179,297,-0.3307217346355318 182 | 180,ccc,ccc222,ccc222_180,265,0.280245190144694 183 | 181,ccc,ccc222,ccc222_181,110,-2.1461674967904267 184 | 182,ccc,ccc222,ccc222_182,199,-0.12397727692227355 185 | 183,ccc,ccc222,ccc222_183,182,-0.21327926513885806 186 | 184,ccc,ccc222,ccc222_184,298,-0.4877419927109769 187 | 185,ccc,ccc222,ccc222_185,163,-1.7932046565053703 188 | 186,ccc,ccc222,ccc222_186,116,0.034268321932448685 189 | 187,ccc,ccc222,ccc222_187,166,1.4618360866294688 190 | 188,ccc,ccc222,ccc222_188,194,0.9565432203529286 191 | 189,ccc,ccc222,ccc222_189,240,-0.41349335359523176 192 | 190,ccc,ccc222,ccc222_190,207,-0.793959248226948 193 | 191,ccc,ccc222,ccc222_191,85,-1.2560024335388522 194 | 192,ccc,ccc222,ccc222_192,130,1.443031550668982 195 | 193,ccc,ccc222,ccc222_193,168,-1.7637488838669542 196 | 194,ccc,ccc222,ccc222_194,287,0.6991108026605882 197 | 195,ccc,ccc222,ccc222_195,299,0.16836410682215813 198 | 196,ccc,ccc222,ccc222_196,93,-0.4696545027239611 199 | 197,ccc,ccc222,ccc222_197,91,1.189449384454767 200 | 198,ccc,ccc222,ccc222_198,108,-1.052571861013626 201 | 199,ccc,ccc222,ccc222_199,104,-0.6607261915108233 202 | 200,ccc,ccc222,ccc222_200,158,1.435090757719796 203 | 201,ccc,ccc222,ccc222_201,156,-0.6444848302756367 204 | 202,ccc,ccc222,ccc222_202,154,0.3394357606405807 205 | 203,ccc,ccc222,ccc222_203,82,-1.1219931095316915 206 | 204,ccc,ccc222,ccc222_204,225,0.5796286830002114 207 | 205,ccc,ccc222,ccc222_205,119,-0.67825352641415 208 | 206,ccc,ccc222,ccc222_206,207,-0.01397497605703267 209 | 207,ccc,ccc222,ccc222_207,94,0.1351857214535033 210 | 
208,ccc,ccc222,ccc222_208,202,-0.8329636971599403 211 | 209,ccc,ccc222,ccc222_209,185,0.9529622443765604 212 | 210,ccc,ccc222,ccc222_210,276,0.05038009836275559 213 | 211,ccc,ccc222,ccc222_211,95,1.025106859784565 214 | 212,ccc,ccc222,ccc222_212,151,0.9751456018002502 215 | 213,ccc,ccc222,ccc222_213,141,-2.860132353802247 216 | 214,ccc,ccc222,ccc222_214,86,0.2370587475350101 217 | 215,ccc,ccc222,ccc222_215,93,-0.0063236361095078296 218 | 216,ccc,ccc222,ccc222_216,233,0.9147207326400604 219 | 217,ccc,ccc222,ccc222_217,233,1.328952471560663 220 | 218,ccc,ccc222,ccc222_218,175,-1.6964586003973006 221 | 219,ccc,ccc222,ccc222_219,123,-0.00463664616495109 222 | 220,ccc,ccc222,ccc222_220,123,-0.9572710335673648 223 | 221,ccc,ccc222,ccc222_221,121,-0.12004102740522372 224 | 222,ccc,ccc222,ccc222_222,273,1.2852458929397996 225 | 223,ccc,ccc222,ccc222_223,243,0.6031745394754108 226 | 224,ccc,ccc222,ccc222_224,233,0.33028592179713817 227 | 225,ccc,ccc222,ccc222_225,230,-0.12699693156861766 228 | 226,ccc,ccc222,ccc222_226,207,-0.07094328277793513 229 | 227,ccc,ccc222,ccc222_227,215,-0.5764057063298231 230 | 228,ccc,ccc222,ccc222_228,286,1.7295056220247025 231 | 229,ccc,ccc222,ccc222_229,236,-0.8118672490650529 232 | 230,ccc,ccc222,ccc222_230,232,-0.6036612190216478 233 | 231,ccc,ccc222,ccc222_231,285,1.287001499704057 234 | 232,ccc,ccc222,ccc222_232,261,-0.12269699334286024 235 | 233,ccc,ccc222,ccc222_233,188,0.18743270099587395 236 | 234,ccc,ccc222,ccc222_234,121,-1.2750106161959909 237 | 235,ddd,ddd111,ddd111_235,258,0.3384193390368857 238 | 236,ddd,ddd111,ddd111_236,171,-0.860503612347237 239 | 237,ddd,ddd111,ddd111_237,134,-0.3142364231154375 240 | 238,ddd,ddd111,ddd111_238,273,-0.1710868832572082 241 | 239,ddd,ddd111,ddd111_239,212,-1.8091332291038544 242 | 240,ddd,ddd222,ddd222_240,87,-0.4991867592935249 243 | 241,ddd,ddd222,ddd222_241,217,0.3276069493924944 244 | 242,ddd,ddd222,ddd222_242,230,1.4096881011332048 245 | 243,ddd,ddd222,ddd222_243,254,0.5191285948274418 246 
| 244,ddd,ddd222,ddd222_244,97,0.04016804231610247 247 | 245,ddd,ddd222,ddd222_245,202,1.6709926370589736 248 | 246,ddd,ddd222,ddd222_246,169,-1.9763313805810363 249 | 247,ddd,ddd222,ddd222_247,118,-1.211918229805001 250 | 248,ddd,ddd222,ddd222_248,289,-1.0897468763983806 251 | 249,ddd,ddd222,ddd222_249,242,-2.195575400791592 252 | 250,ddd,ddd333,ddd333_250,216,1.6693047174834044 253 | 251,ddd,ddd333,ddd333_251,83,0.018219204279761782 254 | 252,ddd,ddd333,ddd333_252,100,-1.7542528791194143 255 | 253,ddd,ddd333,ddd333_253,97,0.4597451369822426 256 | 254,ddd,ddd333,ddd333_254,218,-0.21747958137299048 257 | 255,ddd,ddd333,ddd333_255,150,0.07237932021862323 258 | 256,ddd,ddd333,ddd333_256,103,-0.4687826733556692 259 | 257,ddd,ddd333,ddd333_257,127,0.5677410149815973 260 | 258,ddd,ddd333,ddd333_258,124,-0.028588663465029925 261 | 259,ddd,ddd333,ddd333_259,103,-0.5017720570527 262 | 260,ddd,ddd333,ddd333_260,116,1.828935845422379 263 | 261,ddd,ddd333,ddd333_261,186,0.3665686111738001 264 | 262,ddd,ddd444,ddd444_262,123,-1.2250790683811488 265 | 263,ddd,ddd444,ddd444_263,111,1.1359867557426735 266 | 264,ddd,ddd444,ddd444_264,165,-0.2944982896214636 267 | 265,ddd,ddd444,ddd444_265,233,0.3788521492179484 268 | 266,ddd,ddd444,ddd444_266,165,0.7723108891007048 269 | 267,ddd,ddd444,ddd444_267,141,-0.07191454635240975 270 | 268,ddd,ddd444,ddd444_268,129,0.4869025529893606 271 | 269,ddd,ddd444,ddd444_269,87,-0.2632888400468586 272 | 270,ddd,ddd444,ddd444_270,255,-0.3847304545682836 273 | 271,ddd,ddd444,ddd444_271,86,-0.22065541656897208 274 | 272,ddd,ddd444,ddd444_272,92,0.3951147440367725 275 | 273,ddd,ddd444,ddd444_273,168,-0.05176079473976713 276 | 274,ddd,ddd444,ddd444_274,202,-1.310099186603148 277 | 275,ddd,ddd555,ddd555_275,294,-0.7154979154957495 278 | 276,ddd,ddd555,ddd555_276,218,0.09981445367663838 279 | 277,ddd,ddd555,ddd555_277,294,-1.5338865891504725 280 | 278,ddd,ddd555,ddd555_278,232,-0.4654901296268529 281 | 279,ddd,ddd555,ddd555_279,104,2.207213158886021 282 | 
280,ddd,ddd555,ddd555_280,148,0.006061404397173846 283 | 281,ddd,ddd555,ddd555_281,118,1.5341849354839088 284 | 282,ddd,ddd555,ddd555_282,139,0.5914922736598471 285 | 283,ddd,ddd555,ddd555_283,115,-1.3351245789420492 286 | 284,ddd,ddd555,ddd555_284,260,-0.9741942060892792 287 | 285,ddd,ddd555,ddd555_285,185,-0.27970755966137106 288 | 286,ddd,ddd555,ddd555_286,248,-1.4558536432131892 289 | 287,ddd,ddd555,ddd555_287,92,0.8910517169740189 290 | 288,ddd,ddd555,ddd555_288,261,-0.8749529191856044 291 | 289,ddd,ddd555,ddd555_289,209,0.4685465386505869 292 | 290,ddd,ddd555,ddd555_290,112,-0.34730862856770794 293 | 291,ddd,ddd555,ddd555_291,161,-0.4510030257327216 294 | 292,ddd,ddd555,ddd555_292,104,-0.3688803686283914 295 | 293,ddd,ddd555,ddd555_293,273,-1.0840541252346465 296 | 294,ddd,ddd555,ddd555_294,199,-0.08954049916759506 297 | 295,ddd,ddd555,ddd555_295,256,0.9239630448178752 298 | 296,ddd,ddd555,ddd555_296,282,-0.3328270712305276 299 | 297,ddd,ddd555,ddd555_297,158,-0.15892857331529908 300 | 298,ddd,ddd555,ddd555_298,194,1.0108969003235646 301 | 299,ddd,ddd666,ddd666_299,188,-0.02940000907767332 302 | 300,ddd,ddd666,ddd666_300,285,-0.01593070969203049 303 | 301,ddd,ddd666,ddd666_301,292,-0.5174215760980043 304 | 302,ddd,ddd666,ddd666_302,207,0.15408233044993996 305 | 303,ddd,ddd666,ddd666_303,150,-0.2510723745294756 306 | 304,ddd,ddd666,ddd666_304,282,0.06357661914481062 307 | 305,ddd,ddd666,ddd666_305,205,-0.9189190466808912 308 | 306,ddd,ddd666,ddd666_306,235,0.4411483154613501 309 | 307,ddd,ddd666,ddd666_307,114,0.16845055754484453 310 | 308,ddd,ddd666,ddd666_308,139,-2.4018822772324158 311 | 309,ddd,ddd666,ddd666_309,99,0.4426737085534173 312 | 310,ddd,ddd666,ddd666_310,215,-0.9203752630076566 313 | 311,ddd,ddd666,ddd666_311,265,0.6793211291412263 314 | 312,ddd,ddd666,ddd666_312,85,-0.6727827185365257 315 | 313,ddd,ddd666,ddd666_313,112,0.3866386412678371 316 | 314,ddd,ddd666,ddd666_314,277,2.4798875915757983 317 | 
315,ddd,ddd666,ddd666_315,99,0.2411521759278262 318 | 316,ddd,ddd666,ddd666_316,120,0.366016578354661 319 | 317,ddd,ddd666,ddd666_317,202,-0.06008950888279601 320 | 318,ddd,ddd666,ddd666_318,156,-0.2514165976396704 321 | 319,ddd,ddd666,ddd666_319,272,0.9317877059328946 322 | 320,ddd,ddd666,ddd666_320,133,-0.21813456525556799 323 | 321,ddd,ddd666,ddd666_321,265,-0.6407645864837097 324 | 322,ddd,ddd666,ddd666_322,265,-0.9843966460407936 325 | 323,ddd,ddd666,ddd666_323,137,1.4938751748963195 326 | 310,ddd,ddd666,ddd666_324,132,-0.9203752630076566 327 | 311,ddd,ddd666,ddd666_325,231,0.6793211291412263 328 | 312,ddd,ddd666,ddd666_326,135,-0.6727827185365257 329 | 313,ddd,ddd666,ddd666_327,212,0.3866386412678371 330 | 314,ddd,ddd666,ddd666_328,97,2.4798875915757983 331 | 315,ddd,ddd666,ddd666_329,134,0.2411521759278262 332 | 316,ddd,ddd666,ddd666_330,204,0.366016578354661 333 | 317,ddd,ddd666,ddd666_331,193,-0.06008950888279601 334 | 318,ddd,ddd666,ddd666_332,267,-0.2514165976396704 335 | 319,ddd,ddd666,ddd666_333,121,0.9317877059328946 336 | 320,ddd,ddd666,ddd666_334,103,-0.218134565255568 337 | 321,ddd,ddd666,ddd666_335,119,-0.6407645864837097 338 | 322,ddd,ddd666,ddd666_336,293,-0.9843966460407935 339 | 323,ddd,ddd666,ddd666_337,95,1.4938751748963197 340 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py27,py35 3 | skip_missing_interpreters = true 4 | 5 | [testenv] 6 | deps = numpy 7 | pytest 8 | pytest-cov 9 | pytest-timeout 10 | commands = 11 | pip install -e . 12 | pytest {posargs} 13 | 14 | [pytest] 15 | addopts = --cov=bayesalpha --cov-report=term-missing --pyargs --timeout 300 16 | 17 | --------------------------------------------------------------------------------