├── .github └── workflows │ └── Build.yml ├── .gitignore ├── .vscode └── settings.json ├── CHANGES.md ├── LICENSE ├── README.md ├── examples ├── ex1.ipynb ├── ex1.md ├── ex1_files │ ├── ex1_4_1.png │ ├── ex1_6_0.png │ └── ex1_8_0.png ├── ex2.ipynb ├── ex2.md └── ex2_files │ ├── ex2_11_0.png │ ├── ex2_13_0.png │ ├── ex2_14_0.png │ ├── ex2_15_0.png │ ├── ex2_3_0.png │ ├── ex2_5_0.png │ ├── ex2_7_1.png │ └── ex2_9_0.png ├── pyBASS ├── BASS.py ├── __init__.py ├── sobol.py └── utils.py ├── pyproject.toml ├── requirements.txt └── tests ├── __init__.py ├── test_bassPCA_fit.py ├── test_bass_fit.py ├── test_sobolBasis.py └── util.py /.github/workflows/Build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: 4 | push: 5 | branches: [main, master] 6 | pull_request: 7 | branches: [main, master] 8 | 9 | jobs: 10 | build: 11 | 12 | runs-on: ${{ matrix.os }} 13 | continue-on-error: ${{ matrix.os == 'windows-latest' }} 14 | strategy: 15 | matrix: 16 | os: [ubuntu-22.04, macos-latest, windows-latest] 17 | python-version: [3.9, '3.10', 3.11] 18 | 19 | steps: 20 | - uses: actions/checkout@v3 21 | - name: Set up Python ${{ matrix.python-version }} 22 | uses: actions/setup-python@v4 23 | with: 24 | python-version: ${{ matrix.python-version }} 25 | - name: Install dependencies 26 | run: | 27 | python -m pip install --upgrade pip 28 | pip install -r requirements.txt 29 | pip install . 30 | pip install pytest 31 | - name: Test with pytest 32 | run: | 33 | python -m pytest -s 34 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .idea/ 3 | 4 | # local test scripts for debugging 5 | *debug_local.py 6 | 7 | ### VisualStudioCode ### 8 | .vscode/* 9 | !.vscode/settings.json 10 | !.vscode/tasks.json 11 | !.vscode/launch.json 12 | !.vscode/extensions.json 13 | 14 | # Byte-compiled / optimized / DLL files 15 | __pycache__/ 16 | *.py[cod] 17 | *$py.class 18 | 19 | # C extensions 20 | *.so 21 | 22 | # Distribution / packaging 23 | .Python 24 | build/ 25 | develop-eggs/ 26 | dist/ 27 | downloads/ 28 | eggs/ 29 | .eggs/ 30 | lib/ 31 | lib64/ 32 | parts/ 33 | sdist/ 34 | var/ 35 | wheels/ 36 | pip-wheel-metadata/ 37 | share/python-wheels/ 38 | *.egg-info/ 39 | .installed.cfg 40 | *.egg 41 | MANIFEST 42 | 43 | # PyInstaller 44 | # Usually these files are written by a python script from a template 45 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 46 | *.manifest 47 | *.spec 48 | 49 | # Installer logs 50 | pip-log.txt 51 | pip-delete-this-directory.txt 52 | 53 | # Unit test / coverage reports 54 | htmlcov/ 55 | .tox/ 56 | .nox/ 57 | .coverage 58 | .coverage.* 59 | .cache 60 | nosetests.xml 61 | coverage.xml 62 | *.cover 63 | *.py,cover 64 | .hypothesis/ 65 | .pytest_cache/ 66 | 67 | # Translations 68 | *.mo 69 | *.pot 70 | 71 | # Django stuff: 72 | *.log 73 | local_settings.py 74 | db.sqlite3 75 | db.sqlite3-journal 76 | 77 | # Flask stuff: 78 | instance/ 79 | .webassets-cache 80 | 81 | # Scrapy stuff: 82 | .scrapy 83 | 84 | # Sphinx documentation 85 | docs/_build/ 86 | 87 | # PyBuilder 88 | target/ 89 | 90 | # Jupyter Notebook 91 | .ipynb_checkpoints 92 | 93 | # IPython 94 | profile_default/ 95 | ipython_config.py 96 | 97 | # pyenv 98 | .python-version 99 | 100 | # pipenv 101 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
102 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 103 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 104 | # install all needed dependencies. 105 | #Pipfile.lock 106 | 107 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 108 | __pypackages__/ 109 | 110 | # Celery stuff 111 | celerybeat-schedule 112 | celerybeat.pid 113 | 114 | # SageMath parsed files 115 | *.sage.py 116 | 117 | # Environments 118 | .env 119 | .venv 120 | env/ 121 | venv/ 122 | ENV/ 123 | env.bak/ 124 | venv.bak/ 125 | 126 | # Spyder project settings 127 | .spyderproject 128 | .spyproject 129 | 130 | # Rope project settings 131 | .ropeproject 132 | 133 | # mkdocs documentation 134 | /site 135 | 136 | # mypy 137 | .mypy_cache/ 138 | .dmypy.json 139 | dmypy.json 140 | 141 | # Pyre type checker 142 | .pyre/ 143 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestArgs": [ 3 | "tests" 4 | ], 5 | "python.testing.unittestEnabled": false, 6 | "python.testing.pytestEnabled": true 7 | } -------------------------------------------------------------------------------- /CHANGES.md: -------------------------------------------------------------------------------- 1 | # pyBASS 0.3.1.9999 2 | * added sobol for Basis 3 | 4 | # pyBASS 0.3.1 5 | 6 | # pyBASS 0.3.2 7 | * initial version of package 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2021, Los Alamos National Laboratory 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pyBASS 2 | [![Build Status][build-status-img]](https://github.com/lanl/pyBASS/actions) 3 | 4 | A python implementation of Bayesian adaptive spline surfaces (BASS). Similar 5 | to Bayesian multivariate adaptive regression splines (Bayesian MARS) introduced 6 | in Denison _et al_. (1998). 7 | 8 | ## Installation 9 | Use 10 | ```bash 11 | pip install git+https://github.com/lanl/pyBASS.git 12 | ``` 13 | 14 | ## Examples 15 | * [Example 1](examples/ex1.md) - univariate response 16 | * [Example 2](examples/ex2.md) - multivariate/functional response 17 | 18 | 19 | ## References 20 | 1. Friedman, J.H., 1991. Multivariate adaptive regression splines. _The annals of statistics_, pp.1-67. 21 | 22 | 2. Denison, D.G., Mallick, B.K. and Smith, A.F., 1998. Bayesian MARS. _Statistics and Computing_, 8(4), pp.337-346. 23 | 24 | 3. Francom, D., Sansó, B., Kupresanin, A. and Johannesson, G., 2018. Sensitivity analysis and emulation for functional data using Bayesian adaptive splines. _Statistica Sinica_, pp.791-816. 25 | 26 | 4. Francom, D., Sansó, B., Bulaevskaya, V., Lucas, D. and Simpson, M., 2019. Inferring atmospheric release characteristics in a large computer experiment using Bayesian adaptive splines. _Journal of the American Statistical Association_, 114(528), pp.1450-1465. 27 | 28 | 5. Francom, D. and Sansó, B., 2020. BASS: An R package for fitting and performing sensitivity analysis of Bayesian adaptive spline surfaces. _Journal of Statistical Software_, 94(1), pp.1-36. 29 | 30 | 31 | 32 | ************ 33 | 34 | Copyright 2020. Triad National Security, LLC. All rights reserved. 35 | This program was produced under U.S. Government contract 89233218CNA000001 for Los Alamos 36 | National Laboratory (LANL), which is operated by Triad National Security, LLC for the U.S. 37 | Department of Energy/National Nuclear Security Administration. All rights in the program are 38 | reserved by Triad National Security, LLC, and the U.S. Department of Energy/National Nuclear 39 | Security Administration. The Government is granted for itself and others acting on its behalf a 40 | nonexclusive, paid-up, irrevocable worldwide license in this material to reproduce, prepare 41 | derivative works, distribute copies to the public, perform publicly and display publicly, and to permit 42 | others to do so. 43 | 44 | LANL software release C19112 45 | 46 | Author: Devin Francom 47 | 48 | [build-status-img]: https://github.com/lanl/pyBASS/workflows/Build/badge.svg 49 | 50 | -------------------------------------------------------------------------------- /examples/ex1.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "In this example, we generate data from the Friedman function at fit a model with `pyBASS`." 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import pyBASS as pb\n", 17 | "import numpy as np\n", 18 | "import matplotlib.pyplot as plt" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 2, 24 | "metadata": {}, 25 | "outputs": [], 26 | "source": [ 27 | "\n", 28 | "# Friedman function (Friedman, 1991, Multivariate Adaptive Regression Splines)\n", 29 | "def f(x):\n", 30 | " return (10. 
* np.sin(np.pi * x[:, 0] * x[:, 1]) + 20. * (x[:, 2] - .5) ** 2 \n", 31 | " + 10 * x[:, 3] + 5. * x[:, 4])\n", 32 | "\n", 33 | "\n", 34 | "n = 500 # sample size\n", 35 | "p = 10 # number of predictors (only 5 are used)\n", 36 | "x = np.random.rand(n, p) # training inputs\n", 37 | "xx = np.random.rand(1000, p) # test inputs\n", 38 | "y = f(x) + np.random.normal(size=n) * 0.1 # noisy training outputs\n", 39 | "ftest = f(xx)\n", 40 | "ytest = ftest + np.random.normal(size=1000) * 0.1 # noisy test outputs\n" 41 | ] 42 | }, 43 | { 44 | "cell_type": "markdown", 45 | "metadata": {}, 46 | "source": [ 47 | "Fit the BMARS model with and see the results with" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 3, 53 | "metadata": {}, 54 | "outputs": [ 55 | { 56 | "name": "stdout", 57 | "output_type": "stream", 58 | "text": [ 59 | "BASS MCMC Complete. Time: 3.236959 seconds.\n" 60 | ] 61 | }, 62 | { 63 | "data": { 64 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAagAAAEpCAYAAADGXra9AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAABe3UlEQVR4nO2dd3gUVdfAfyehdxBEehARBKWjKKCIoqAoNrAXrKioWF8sKOqnYu/lRRGxvFYUUEFQigUVpCOIClKkCIjSW0LO98fMhk2yZZLs7uwm5/c88+zMnXtnzuzunXPLueeIqmIYhmEYyUaa3wIYhmEYRihMQRmGYRhJiSkowzAMIykxBWUYhmEkJaagDMMwjKTEFJRhGIaRlJiCMgzDMJKSqApKRCqKSJq7f6iInC4ipeMvmmEYhlGSkWgLdUVkNtAVqA78CMwCdqrqhfEXzzAMwyipeBniE1XdCZwFPK+qZwIt4iuWYRiGUdLxpKBE5GjgQuBzN61U/EQyDMMwDG8K6ibgTuATVV0kIgcDU+MrlmEYhlHSiToHZRiGYRh+EHWoTkQOBW4DMoLzq2r3+IllGIZhlHS8WPHNB14BZgP7AumqOju+ohmGYRglGU9m5qraPkHyGIZhGAbgTUENBTYAnwB7Aumq+k9cJTMMwzBKNF4U1PIQyaqqB8dHJMMwDMMwKz7DMAwjSfFixVcauBY41k2aBvxXVTPjKJdhGIZRwvEyxPcaUBoY5SZdDOxT1SvjLJthGIZRgvFkZq6qraOlGYZhGEYs8eJTb5+INFHVZQCuq6N9UcrElJo1a2pGRkYib2mUIGbPnv23qtbyW45kwOqaEU8KWte8KKjbgaki8gcgQCOgfyHlKxQZGRnMmjUrkbc0ShAistJvGZIFq2tGPCloXYuqoFR1sog0BZrhKKglqronSjHDMAzDKBJhFZSIdFfVKSJyVp5TTUQEVf04zrIZhmEYJZhIPajjgCnAaSHOKWAKykgY785cxUOf/4KqUio9jVcuas/RTQ4Im3/lph2c88oP7NyTBUD96hWYePOxYfMb8eOBTxczf/VmRl97jN+iGClGWAWlqve5uw+oai5vEiLSOK5SGUYefl6zhazsbM5uV593Zqxi6YZtERXUqn92snHbHnq3qsNBVcpRrULpBEpbvJm06C86H1KTimW9xS19fXooZzSGER0vAQtHh0j7KNaCGEYkFKhUtjS39Dg05zhifjdD/86Nuad3CwZ2bxpX+UoKv6/fxtVvzWbwxwv9FsUoAUSag2oOtASq5pmHqgKUi7dghhGMqiICaSIAZGdHVlHZroZysxsxYrs7ZLrqn50+S2KUBCL10ZsBvYFq5J6H2gZcFUeZDCMf2dmQJvsVThT9lNODSjMNFVMCX/uC1ZvZsiuTquVt6NSIH5HmoMYCY0XkaFX9IYEyGUY+FCVNBHEVTtQhPjdHmumnuKAKl4yYwdiBXXKlZ+7LZvvuLKpXLFOg681a8Q/tG1XP+X0NA7zNQQ0QkWqBAxGpLiKvx08kw8hPtjqL8ALvr2guurKznU/BXnjxYv7qLfnS7vhoAW0f/DLqECzArr37WP3vTqYsWc85r/zAG9+viIOURirjRUG1UtXNgQNV/RdoGzeJDCMEqiAiOUN20aLE2BxUYli5aQcfzPoz53jMvDUAPDHpV3ZnRvaI9vjEX+ny6FSWbdgBwNIN2+MnqJGSeFFQaSJSPXAgIjXw5iLJMGLGfiMJ5zg7ioYKnDUFFTvGL1zHWS99nyut9/PfccdHCwDYl605DYeXpi2Lal4+8nvn/C5XkVlkOiMvXhTNk8D3IhIwLe8LPBQ/kQwjP9nqzkG5Q3bRjSQCc1CmoWLFde/MyZe2bXdWzv6xj03NdS4zK/KPlCbCPt2v1Cx2qpGXqD0oVX0TOAdYD2wAzlLVt+ItmGEEo+S24tMo7e1ss+JLKKrKms27cqWlp8G/O/bmHM9c/k/Ea7w7cxWbtpubT2M/Xob4AJbguDYaC2wXkYbxE8kw8pNdwDmowPlE6icRqS0iI0RkgnvcQkSuSJwE/hGqR5uWJtz1yf4FvdN+3RD1Or/bPJQRhJeQ7zcA9+H0oPbhGFMp0Cq+ohnGfgJzUJ6t+NQXM/M3gJHA3e7xb8D7wIiESuED8/78N19amgjrtuzOOX7rh5V0yKhOk1qV+H7ZppDXsWE+Ixgvc1A3Ac1UNfQ/yjASgLpm5jmeJDxa8ZFYM/OaqvqBiNwJoKpZIpLQ4J5+cfbL+ZdKpovkWsi7bU8Wl78xixoVy/DPjr05jYdow7VGycXLEN+fQP4FD4aRQAJGEl6t+AIkuAe1Q0QOwDVIE5FOlOC6oyi79ubXz/+481KhGhmmrIxgvPSg/gCmicjnQM4Mpqo+FTepDCMPquT2JOGxB5VgI4lbgHE4MdOmA7VwDIxKJA+PX1LwQqafjCC89KBWAV8CZYDKQZthJIxsdw4KnHkoz54kEqifVHUOThy1Y4BrgJaquiBaORHpKSK/ishSERkc4ryIyHPu+QUi0s5NLyciM0VkvogsEpH7g8rUEJEvReR397N63usmI6afjGC8hHy/P1oew4g3Cjm9pzQRD774
yMmbKETkeuAdVV3kHlcXkfNV9aUIZdKBF4EewGrgJxEZp6qLg7L1Apq621HAy+7nHqC7qm4XkdLAdyIyQVV/BAYDk1V1mKv0BgP/ifUzxwIzjDDCEbUHJSJTRWRK3i0RwhlGAFXNMXcQos9BeZ2jijFXhXALFs3z/5HAUlX9Q1X3Au8BffLk6QO8qQ4/AtVEpI57HLDLLu1uGlRmlLs/CjijkM+UUExZGcF4mYO6LWi/HHA2kBUmbw4i0gB4EzgIyAaGq+qzIvI4TviOvcAyoH9wpTaMUGQrpLnNqTSRqFZ8gdd0WmKtJNJERNQdf3R7R9HcetfDMUQKsBqndxQtTz1gnXuP2cAhwIuqOsPNU1tV1wGo6joRObAwD5QI8v6Uf/6zk7+27qZjRg1f5DGSBy+eJGYHbdNV9RbyV6BQZAG3quphQCfgehFpgTOfdbiqtsJZJ3JnEeQ3SgjqWvFBYA4qcn6f1kFNBD4QkRNEpDvwLvBFlDKhJMz7dGHzqOo+VW0D1AeOFJHDCyYyiMjVIjJLRGZt3LixoMWLTtCPqShdH5tK31cswo/hbYivRtBWU0ROxukVRURV17mTxqjqNuAXoJ6qTlLVQA/sR5yKZRgRCYTbAI9GEgFPEoldB/UfYApwLXA9MBm4I0qZ1UCDoOP6wNqC5nFHIaYBPd2k9SJSB8D9DOvGQVWHq2oHVe1Qq1atKOImjt2Z+1BV2j34Jbd+MN9vcQwf8DLENxt3jhqnV7QcKJD7FhHJwAnRMSPPqctxVtobScrC1Vu4dORM9kQJnRBvdmXuo3WDagCUSktjxHfLefvHlWHzZ+5ze1BenXnFAFXNxjFgeLkAxX4CmopIY2ANcB5wQZ4844CBIvIezujFFnfYrhaQqaqbRaQ8cCLwaFCZS4Fh7ufYQj5W/AkyZAludzQf8gUntajNPzv2MnrOap7s19oH4Qw/CaugRKSvqn4InKCqfxT2BiJSCRgNDFLVrUHpd+MovHfClLsauBqgYUNz/ecXyzZu558de+nbvr7v4b2PPdRp3Q89vSVL1m2NkhtqVylHrUpl4y1WDiLSGRgKNMKpWwKoqh4crozrbWIgzvBgOvC6qi4SkQHu+VeA8cApwFJgJ9DfLV4HGOXOQ6UBH6jqZ+65YTjDjVfgLBXpW5RnW7dlV/RMMWDhmtzrmictXp+Q+xaUjdv28NYPKxh04qGJnucsUUTqQd0JfAh8BLQrzMVd09fROKa3HwelXwr0xlF+IcdqVHU4MBygQ4cOZtvjE4GV/dcffwgZNSv6LI3DOe2TdlR4BHAzzqiD5y6nqo7HUULBaa8E7SvOkGHecgsIEzzUdU12glcZonH0I/Ez3H0jKG7U4xN/jdt98nL3JwvZlbmPp/q1KXDZwaMXMHnJBo5uUpOjmxwQe+EMILKC2iQiU4HGIjIu70lVPT3ShcVZtDIC+CXY64SI9MQZqz9OVXcWTmwjUfix4DWF2aKqE/wWItbMWhE5TEZR2bo7qlFwXHhnxiqAQimoQJBFn5Yz5GLLzkwGvD2bp89tw0FVy/ktTkyJpKBOxek5vYUTtLCgdAYuBhaKyDw37S7gOaAs8KW78PJHVR1QiOsbCcAnl0GpylR3GcXH5HYLlj/SXwqxbY8/CsTwxug5q/nhj0288vUyhp7eslDX+HnNFq4cNYsJN3WlesVoKyMSR1gF5S4a/FFEjlHVAtuequp3hDaPHR8izUhSLHR6gQgsv+gQlKZAdx9kiRn20xd/Xpq2lL+27mb6sr/p3aqu3+Lk4MXVkQ8LI4xkITBFKKahoqKqx/stQzyw3nN+Yjmyt2VnJkcPm8zrl3Wk08EFn8/yGiMt4jXw5oQ50XgxMzdKMIE/rBkqeUNETgVa4nhdAUBVH/BPoqKTjArqg5/+ZNz8tbx1xZEp33iav3ozO/fu44UpSwunoOIgU7KQwFUiRiri04LXlEREXgHOBW7AeW/0xTE5T2mK0jg5t0OD6JkKwR2jF/Dd0r9z1rsF8+c/O9m4bU+IUkVHVVn+946YDnnHahg9yTo/McGLJ4nHRKSKiJQWkcki8reIXJQI4Qz/8cllUKpyjKpeAvzrRgE4mtweIFKSovRQbjyxaQwlyU+oAIddH5tKx4e+isn1l27YxuMTl+QMn70780+Of2Ias1bkD3EfiilL1tPsngnsiGBokl3AYfQ9WftyDefFpAeZE904ufDSgzrJXWDbG8flyqHA7XGVykga9rfuTEN5ILCadaeI1AUygcY+yhMTivLTF2VexAvN7vmC+X9ujtv1L3h1Bi9OXZYTBXjuKkcx7d2X7an8ExN/Y09WNsv/3hE+U84oRXS278mi2T1f8PRXv+e/TJivesR3y3n1m8i+FgL3jvfvVVC8KKiA+4BTgHdVNb6LIoykYr+RhM+CpAafiUg14HFgDrACJ3xGSlOYOagmtSry5uVHxkGa/Iz/eV3crp3ljnF7eW1v3rmXjMGfM37hfnkifXUZgz/nqjdnsdtdUxWcN3NfNnuy8q/13rzTUZSjZ68OeY/te7LYujszV5kHP1vMQ+N/iSh7sjZAvSioT0VkCY7p7GTX/9fu+IplJAv7jSSS8w+cTKjqg6q6WVVH48w9NVfVIX7LVVSiDe/ef3rLfC/iybd2y3FNFW/CzY8+PrEQIefzXdshXMci+M7LNjqhuV77Nn9vJVz5Lxev59p3nGVywXWs2+PTaHZPNEf4ee6BcsTQibQaOqlA5ZIZL+E2BuOMpXdQ1UxgB/kDqhnFlJzxcZ/lSGbc0BqIyFmBDWeh+wnufkoTrXXdt0N9PhpwTNzleODTxSHTw4n34tRlRb5nQdplAYOiOas2s2VXZq7ywXNl/+7Yyy8hfEkG32rNZu++D4PLFXaELpoi9ouwCiq40gHHA33c/Z5A/P+NRlKQbT0oLxznfp4WYuvtl1CxIrgHddtJh+Y7L0jYF3ksX3ivT19OVoi5n0T8MwMKJu/jXPDa/gAN2UFRND+c5cSXzLu+aMR3y2n74Jf0evbbfPfws4ola/WOtA7qOJzYNqeFOKc47lyMYk7OHJQtSAiLqt4nImnABFX9wG95Yk2gB1WpbCkGdm/KE5N+y5cnUQ0YJbcigNi+XLfvyWLZhu05oV28mLed9PTXTLr5uJBRniWo+KwV//DgZ6F7gU7eoj1ILBoDoawi/SSSq6P73M/+4fIYxZ/Anz5JG1hJg6pmu2Ezip2CCvSgwv0HFE3Y/0MVDr47t7e0SGv0xsxdQ9emNTnAY9iVa9+ezbe//829vVuwcfse/t6eez1VYOgumN/Wb3dly/9yD5bsnChRgr9cvJ7dmfsoVzrdk6wAF4+Ywbe//+05fzhSbogvgIjc5K6DEhF5TUTmiMhJiRDO8J9Ai8qG+DzxpYjcJiINgiNR+y1UUYn225crle65F9O4iCFbQrXwI9170PvzGPD2bKesao7F3OK1oeOJzV21GYAHPlvMy9P2z2H9uzOTdVt28WWE+FSR3u1/eYynFTBnD3uPPDcJVk5F0S3hem+L126NaqIeT7y4OrpcVZ91Q70fiBM
sbSRQfExFjLDkeJIw/eSFy93P4NhNCoQNWJgKBH770qXyt2dPOeIg0tLEcwOmWoWiBb0M1cLP3Kds2ZUZNqDmui2O0fGDn/3C69OX88Wgrpzy3P45oH3ZSnoUU8WTn/km4vnMfdmhQ2+438uAt2Pj0D5WPZyde7PYtXdf1J5l4Hu66lh//sJeFFTglzsFGKmq8yVZjeaNmGPhNryjqim/KDcUgZhgtas47gVrVirD39sjt/QD1Koc/4jGr3y9jFe+XsaKYafmWoMUIPBSf90NjLhsQ+5Fs58vXMcJzQ/kuSm/szfL2wLcvGzZlZlrDmrTjr38sXF7zBcRR4o/VRDl1evZb1m5aSdl0tP47aFehbpGIvAy9T1bRCbhKKiJIlIZKNyvaKQcaj2oAiEih4tIPxG5JLD5LVNR2bvPGRYrV9p5XYy/qSuXd86ti8M1YMqVTmfCTV1zjov6N4r0As0Y/HlYU/Rg8s4j7cncx0vTlvLfr//w7CEiL6c+922uOaiXpy2j+5NfF+pakdi5N3yg5ndnrsqXdsO7c0PmXbnJiRUbeN6cOaiiiRdzvCioK4DBQEc3Am4ZnGE+owSQY8VnZhJREZH7gOfd7XjgMSBi5OlUYG+W8x8one68Lg6sXI5zOzbgwMpl6dOmHhC5ARPoecWCaFZmf23N70Mgr/HCXZ8szHU8bv5admcWrc29fuueuPc+3vpxZa6hSS98On9tvrT38iiyP//Zud9YMcm6UF7iQWWLyHLgUBEpXvGEC8DerGxOfe5b1hZgAV1xINDCMmexnjgHaA3MVdX+IlIbeM1nmYpMljvGVyZ9f3u22UGVmXn3iZ7K16hYhvev7sS5w38ssiyhTLmjEa3It7//zQExiCIbj/Dvf2/fQ013nmjImJ89l/vxj018v2xTyHODP86toNds3hW1Afrc5N+58YT4Ov4NRVQFJSJXAjcB9YF5QCfgB1I8SmhB2b4ni983bOfogw+gZd0qfouTUBrXqkipdFsI5YFdboMuS0SqABtIcQOJYMIZIUD0IeA0t4VT1Onr7wphUr1uy+6QC3yDGTMvf0+joFwxalaRr5H367nr44UMv6RDgXs25xWgMRB86Y9mr+bv7Xu5tluTXHme+vK35FRQOMqpI/Cjqh4vIs2B++MrVvIR+IP0OuIgLjk6w19hjGRlluss9lVgNrAdmOmrRDGgc5Oa3HzioVx6jLfQVp/d0CXsuaJ2xAMm4wUlYCCRauxyzeJXuHNG8UBVcxTjjOX/MGP5P/kUlF94UVC7VXW3iCAiZVV1iYg0i7tkScb+wH2GERpVvc7dfUVEvgCqqOoCP2WKBWlpwk1R4jqlBzX9D69XNd95v6c2Pl8QP4/nsUQVfl6zJed4xvJ/uPn9eSxauyVf3oKuT5q14h86ZORflqck73vNy7jNardVOAZnIeJYoOj94RRjf9iJZP0pDb8RkbEicoGIVFTVFcVBOXnl4FqVIp73O2zL/NX5X/DJyDHDpuRaILw3K5tP5q7J8VYRoGypNN76cWWBrr15Z34vGFC4xsPKTTv4a8tuuj42hYmL/ir4BTzixZv5mW4IgaHAEGAEcEbcJEpSAr+hrQcyIvAU0AVYLCIfisg5JcWwKNpC1wBmDRqdz0Os5crLQVUL/rcqXya0CyVFPTUcXpq2NGf/uMen0emRyfz5z64CGW8UFE8z3yLSTkRuBFoBq1XV2yq9YkS2zy1AI/lR1a/dYb6DgeFAPxxDiYiISE8R+VVElorI4BDnRUSec88vEJF2bnoDEZkqIr+IyCIRuSmozFARWSMi89ztlNg9aWgOqlKOGmGs4UI10r8Y1DXX8YKhJ3Fdksx9JDv/7sxk1T8Fm5cKrGPLi1fLyMe++DVkejzfiV588d0LjAIOAGoCI0XknviJlJzsD9znrxxGciMi5YGzgQE4xkWjouRPB14EegEtgPNFpEWebL2Apu52NfCym54F3Kqqh+FY116fp+zTqtrG3XJ7WI0D3w/uzqwwpuc5w0hB9af5QfutYRcOPYkq5UpzR8/mcZSw+BAqnlQ0wo3+qGrInm00y8cA8ewVezGSOB9oq6q7AURkGE446/+Lm1RJSLYtWDWiICLvA0cBX+AonWmqGq2WHwksVdU/3Gu8hxMQNNglQh/gTXUmcn4UkWoiUkdV1wHrAFR1m4j8AtTLUzZhpHlovYXLUbncfhP2+05rwf0ePEIYBSPcOq1wc1BtHvgyX9rZL3+fr5cczx6UFwW1AijH/jDvZYGih6pMMczlj+GBkcAFqhreH01+6gF/Bh2vxlFy0fLUw1VOACKSAbQFZgTlG+i6WpqF09P6twBy+Ua9auX9FqFYsi87t4VggHBzUNv3ZOVLm70y/18onq/ESBF1nxeR54A9wCIReUNERgI/46zvKFHsH+IzDWWERlW/KKBygtD1O2+bNmIeEakEjAYGqWpg7OdloAnQBkeRPRlWAJGrRWSWiMzauHFjAUT3Tql05xEqhJmoD6ZHi9pxkaGksy9b6f38d/nSs7OL1vBeu2U3GYM/Z+6q2Ld/IvWgAsuiZwOfBKVPi7kUKYAZSRhxYjXQIOi4PvmXcYTNIyKlcZTTO6qaE+VaVXMCF4nIq8Bn4QRQ1eE4Rh106NAhLiuWOjSqzqATm3JRp0Z0+L+vctKH9G7BwbVyx4iypRzxIZybtlA/+IZt+X0aRmParxtp27B6gctFIlJE3YiTuyUNMzM3IuGGoKmvqn9GzZybn4CmItIYWAOcB1yQJ884nOG693CG/7ao6jr3niOAX1T1qTzyBOaoAM7EGfnwDRFh0ImH5ku/okvoCCX39m7BAxHCoxsF54NZof+a/5uxMp9D3yMfmlzg68ejZWMO1jxiPSgjEq4Bw5hClMsCBgITgV+AD1R1kYgMEJEBbrbxwB/AUhw3SgGPFZ2Bi4HuIczJHxORhSKyAMez+s2FfLS4EM2f5eVhFJdReGYs/ydk+tRfN/LTitDnCkQc3IV4MZIwME8Shid+FJGOqvpTQQq5JuDj86S9ErSv5I7SG0j/jjBz1Kp6cUFkSCQ/3X0ilcraqyeZWLZxR/RMUSiMp/loRDKSeMv9vClcnpKErYMyPHA88IOILHMX1AZ6MEYQtSqXDevVwEhdosXqKgyRmjHtRaQRcLmIvEmelpqqxqBPmDrsdxZrGsoIS6/oWQyjePLi1GVc1KkRdarGbplApDmoV3AWHDbHseQL3ooe+CTFCLQOrAdlhENVVwLVgNPcrZqbZhglgjemr4jp9cIqKFV9znWh8rqqHqyqjYO2YhOEzStuUFEzkjDC4g6HvwMc6G5vi8gN/kplxJqalUL7Gxx4/CGeymccUCGW4iQV/y1gCJBoePFmfq2ItBaRge7WysuFwzmyFJEaIvKliPzufsbWcD5OZJuRhBGdK4CjVPVeVb0Xxz/eVT7LZMSYMmGiS3v16N63Q4PomQzAm7PYG8ndKnzHY6swnCPLwcBkVW0KTHaPUwZbB2VEQIBgTxL7SN5YcEYhCedzMFzPKpiq5UuzLx7mbhFI5Csr1lMgXtZBXU
khWoWquk5V57j723DWeNTDcXwZWAQ8ihSJLbXfWaxhhOV1YIYb6mIo8CPOQlojgXRoVPhBmQHHRQ/3USrMW/jcjg2jlj25ZW0yPXoJjxWJjGYcazdVXhYjFLlVmMeRZe3ACnd3NfyBBblWLPl+6d9c+84cT27l97m/stduvFGyEJE0nP/31zhBCwXor6pzfRWsBNLogIrMcp2aLhh6Eq2GTvJctnHN6PNDO/aGdrdYplT09v5lxzRm3Pz8Aclb1a/Kaa3q8tD4X6ILmcTEeoTJi4IaidMqDPjjO4MCtArzOrL0OocjIlfjxL6hYcPoLZPC8Nv6bWzZlclFnRpSrlT0dRkVyqTTsXGNuMhipDaqmi0iT6rq0TjhaAyfCO7hVAkK4+EFL72Njdv2FOiaT/RtzW0fzgegRd0qjJ6zOtf5//RszoDjDkZE+N/MVSz/u+iLZv3Ci5IuCFEVlKo+JSLTKESrMIwjy/UBP2EiUocwEUcT4cAycNHbTmpGtQrRx48NIwqTRORs4GPVRA6sFE9a1KnC4iiB+bo2rUndquV5P8jPXHp6+Ebw0od6ccjdE3KOLz26EaN+iP1KgDTZv3ayTp7w7Mc0OYAR3y3POb68S0aO8dXU27qRMfjzmMuTKLJiPL/mSd2p6hzX7PzZAiincI4sxwGXuvuXAmMLInAsyVl8a4YPRmy4BfgQ2CMiW0Vkm4gUPPSpAcBnN3Rh1j2hI/QGOLdjAx49J7dhcbg5IoBSeSzw6lQrn8vt0qEHVS6EpPnpdUSdsOdOOKw2P99/cs5xOKvAWBGLaYm8SjYcmVmxnV+L5zcTzpHlMKCHiPwO9HCPfUHNAawRI9w5qJ6qmqaqZVS1iqpWVtXIXlGNsKSlCTUrlY2YJ5Rnl7Pa1fd0/db1q9KvQ4NcyqJdw+pc3KlRzvEXg7qy5MGeXBnkvPbklrW5oXvkNU95O9CPnHUEfdrUzTmuVLYUM+46gZl3nRC3BvLA4w9hcK/mfHZDl6h5D6wc+XuuWt7bUOneGBuAxM1jYyRHlsAJ8bpvQbAghEascOegngCO9luW4sZ9p7Vg5aadvPH9inzn8lbdEw87kDYNqnm67tiBoV/cBwX1Fpof5LQvgkPSq8K13Zrw/JSlYa+dd4D3/CMbcv6RuefS84a4iCUDjz+E205u5jn/5p2ZEc97VaLTft3IezNXcd6RsbEbiNiDEpF0EfkqUp5UxkzHjRgzSUTOFhszjin9Ozdm6Oktc6VVr+AojFp5Wv5epkBqVCzafHO2alSfnMVtBrIgo4RbdkVWdgUhYg9KVfeJyE4Rqaqq+YPZpzgWhNCIMbcAFYF9IrILp+2jNswXex7oczgHVCpDx4zcVrUHRZgrmXZbNwDGDezM4rWFnxrMytaI0wJXdGlMxTLpfLGo0LcoMrH2LF6QV2QsFZSXOajdwEIRGSEizwW2mEngIxaE0Igl7pxTmqqWtjmo+FI6XTimSc2c48fOdgwlhpzaImyZjJpOaPn61StwUsuDCn3vU4+oE/adsWLYqQzp3YIbT2hK6QjWhJEIZzRxVdfIQRyb1S68gUcohfbJdcfk7BekEf/N7xsLLUdevCioz4EhwDfk9mie8miOFZ+/chjFA3G4SESGuMcNRORIv+UqCfTr2IAVw04tdJwprwEUrz++CX07NIj6wi6VnkaHRk7vrqCvl7yGBgHjilAjx+0bVefJvq1Z/MDJPH9B25z0vEOMY67vzOc35p9zm+r2KkMNSTYLsmg8obl3DxFlPawp9YqXdVCjRKQ80FBVf43ZnZOAgKWNDfEZMeIlIBvoDjwIbAdeBDr6KVTxJHKd/XDA0VEtAIP54c7uZO3L/Za+tlt+t0fp7rsinu+MquVL5xoma35QFcayNmRDWoCz2ztWi5HMycMZjlQu56iAUAOCwc94Q/dDePqr36LKDrE1m/fiLPY0YB5ObChEpI2IjIuZBD6SbVZ8Rmw5SlWvxxkWR1X/BWwFeByIVmU7ZtSgsTuk54XK5UpTPY/xRCRDh3i+MT657hieObdNzvF+Yy5hxbBTc8sRJEj50vt7Ll5noALvvlDryoPXk4VzkAtw+8nNGH9j15zjsqUTqKCAocCRwGYAVZ0HRB4MTRFyhvj8FcMoPmSKSDru+0FEauH0qIwYU9j5HS94aa8G5/l+cPdc8zUBCmuocHCtSpzRtl6+9GiWdHWrladr05qRM0W45pVdGnPTCU1zjtPThF6HH0TdKIt0Lz66ES3qVqHpgZUAqOhxuNSTfB7yZIWw4CsWRpRmJGHEmOeAT4ADReQh4DvgYX9FKn7c0P2QXAYS8SKSggmeD6pbrTxtG0bwoF7E90t2du731OBezYMunfvinQ4+oEDXDvSMFLindwtu7nHo/muL8PJF7fn+zvDLVp/s2zrH3+H1bsDG0jF0qO1F1f0sIhcA6SLSFLgR+D5mEviIqmMuastWjFigqu+IyGychegCnKGqqe2eOgm59STvC1ALQ6g1TofXc4wxW9ar6vk6nQ4+gB//+IeDirggN6AmA3Jdc+zB7M7cxzNf/V6k60LwEF/ByjU9sBLntK/PWe3y9/Ri2XvxoqBuAO4G9gDvAhNxJoBTHsWG94zYoqpLgCV+y1Ec6d2qDnNXbU7cDYPetCccVptv7zieBjW8h2u/sXtTzm5Xv0BlQnHpMRn8un4bV7pm5iLCxZ0a8cxXv3PpMRkhy3hVOOmFaJw/0Kclp7Wqm2/OLh54seLbCdwtIo86h7ot7lIliGxVM5AwjBThhQvaJeQ+4V4JBVU0aWlSZOUEjlXfi3me/YBKZfMZTMB+2cM50599z4lkZStHPTw5V/6C0K9DA8qVDm9KHksvGl6s+DqKyEJgAc6C3fki0j52IviHqlnwGYYRmlScaA+YeIdz2npApbK5fAAW5P1XzXUvldAQ8h7yjACuU9UMVc0ArscJYpjyZNsYn5EEiEhPEflVRJaKyOAQ58X14LJURBaISDs3vYGITBWRX0RkkYjcFFSmhoh8KSK/u5+Fj4Oe4vTrUJ+OGd4fP5VfCWXdns1ej2EvQq2dqlmpDFXK5R9cG3t9Z4ae1iLsQtx4KC4vc1DbVPXbwIGqficixWKYT1UL5ATRMGKNa5b+Ik7omdXATyIyTlUXB2XrBTR1t6OAl93PLOBWVZ0jIpWB2SLypVt2MDBZVYe5Sm8w8J+EPVgS8dg5rQuU/4y29Xh35qpcYTdShbJuD2qPRwUV6v03654eIfM2OqAil3UOv8IoYM2X14FvUQiroAKtNGCmiPwXx0BCgXOBaTGTwEcUG+IzfOdIYKmq/gEgIu8BfYBgBdUHeNON0vujiFQLRKUG1gGo6jYR+QWo55btA3Rzy4/CqbMlUkEVlNpVyjHt9uOj5rvvtBb5nNX6TeNazuLkwJqkcDzRtzVND6yUY8Fc0PVToejWrBZP9G1N71bhgzUWlEg9qCfzHN8XtJ+Kw7P5yM7WlO7OG8WCesCfQcercXpH0fLUw1VOACKSAbQFZrhJtV0FhqquE5EDYyu20T9Cb8IvOmbU4NOBXWhZN7KP4
nPa7w/q+PXt3TiwctFjU4lIruvGgrAKSlWjNyF8pvuT01i/ZXehy+/Jyi60c0nDiBGh2kh5G4AR84hIJWA0MEhVCxxHQkSuBq4GaNgwNoHmDP84or73tVrgDN0lK1HnoESkGnAJkBGcX1VvjJtUHunTuh7bdhct9kiLKC0Nw4gzq4EGQcf1gbVe84hIaRzl9I6qfhyUZ31gGFBE6gAbwgmgqsOB4QAdOnQoFqMjxYGR/TtSt2p5v8XwFS9GEuOBH4GFJJlfsZtObBo9k2EkNz8BTUWkMbAGOA+4IE+eccBAd37qKGCLq3gEx8r2F1V9KkSZS4Fh7ufYOD6DEQeOb2ajsl4UVDlVvSXukhhGCURVs0RkII6HlnTgdVVdJCID3POv4DQSTwGWAjuB/m7xzsDFOOsT57lpd6nqeBzF9IGIXAGsAvom6JEMI2Z4UVBvichVwGc47o4AUNV/4iaVYZQgXIUyPk/aK0H7irP+MG+57wizbEdVN+H4BDSMlEXCucTIySByPfAQTriNQGZV1YPjK1ouGTYCK8Ocrgn8nShZIpAscoDJEopIcjRS1VqJFCZZiVDXkuV3BJMlFMkiB8SwrnlRUMtwArEly8PnQkRmqWoHk2M/JkvyypGqJNP3Z7IkrxwQW1m8uDpahDPubRiGYRgJw8sc1D5gnohMJfcclO9m5oZhGEbxxYuCGuNuycpwvwVwSRY5wGQJRbLIkaok0/dnsuQnWeSAGMoSdQ7KMAzDMPzAiyeJ5YTwvZdIKz7DMAyj5OFliC/YGqMczoK/5HLhaxiGYRQ7olrxqeqmoG2Nqj4DdI+/aJGJFuQtDvcLGRxORIaKyBoRmedupwSVudOV71cROTmGsqwQkYXu/Wa5aWED1MVRjmZBzz1PRLaKyKBEfSci8rqIbBCRn4PSCvw9iEh79/tc6gYGNCf3QSSyriVTPXOvXeLrmq/1TFUjbkC7oK0DMACYH61cPDcclzDLgIOBMsB8oEWc71kHaOfuVwZ+A1oAQ4HbQuRv4cpVFmjsypseI1lWADXzpD0GDHb3BwOPxluOEL/JX0CjRH0nwLHu//LnonwPwEzgaByvDBOAXn7+v5NpS3RdS6Z65l6/xNc1P+uZl3VQTwZtjwDtgX4eysWTnCBvqroXCAR5ixuquk5V57j724BAcLhw9AHeU9U9qrocx4/akXEUsQ9OYDrczzMSLMcJwDJVDefxI+ayqOo3QF6XWwX6HsTx9F1FVX9Qpxa9GVTGSHBdS4F6FrhnialrftYzL0N8xwdtPVT1KlX91cuDxZFwAdwSguQPDjdQRBa4XeFAVzeeMiowSURmixPLB/IEqAMCrpAT9V2dhxN1OUCiv5MABf0e6rn78ZQplfGtriVBPQOra+FISD2LqqBEpKyIXCAid4nIvYHN40PECy9B3uJz4/zB4V4GmgBtcCKcBiIRx1PGzqraDugFXC8ix0YSOY5yODcQKQOcDnzoJvnxnUQj3L39lCkV8OX7SZJ6BlbXCkpM65mXIb6xON22LGBH0OYnXoK8xRwJERxOVder6j5VzQZeZX83Om4yqupa93MD8Il7z/VuNxrJHaAuEd9VL2COqq535Ur4dxJEQb+H1e5+PGVKZRJe15Klnrn3tboWmsTUMw8TZD9Hy5PoDcc8/g+cSbjAxG3LON9TcMZNn8mTXido/2ac8VeAluSeLPyDGEyYAhWBykH73wM9gcfJPWn5WDzlyCPTe0B/P74TnEjPwZO3Bf4ecIIGdmL/5O0pfv/Hk2VLdF1LlnrmXtvq2v5r+1LPvAg2HDgiXn/IInxhp+BY+CwD7k7A/brgdEkXAPPc7RTgLZxowwtwopgG/2HuduX7lRhZhuFYU813t0WBZwcOACYDv7ufNeIpR9C1KwCbgKpBaQn5TnDG4dcBmTgttCsK8z3gWKf+7J57AdfDim0530/C6lqy1DP3ulbX1N965iXcxmLgEGA5jrNYwYkH1SpiQcMwDMMoAl4UVKNQ6RrZxNEwDMMwioQ5izUMwzCSEi9WfIZhGIaRcExBGYZhGEmJKSjDMAwjKSk2CkpEVETeCjouJSIbReSzoLReIjLL9ZS8RESecNOHuuUPCcp7s5vWwT2uJCL/FZFlrpflb0TkqBByjBeRau52XYyfcZCIVMh7rxhe+5JYXMu9XoaIXBDh/LTAdxtrROQJEfHd435xxepaTK5tdc0DxUZB4Xi3OFxEyrvHPYA1gZMicjiO7f1FqnoYcDjOIrIAC3F8XAU4B1gcdPwajsPEpqraErgMqJlXCFU9RVU3A9WAAlUacYj0mwzCWQuR915FQkRKAZcD/yvqtYLIAMJWmqLg4Xt6HmfxoBEfrK4VEqtrBaM4KShwVief6u6fT26HincAD6nqEgBVzVLVl4LOj8H10iwiBwNbgI3ucRPgKOAedVyKoI5358/zCiBO/JiawDCgiTgxWh53z90uIj+J49jxfjctw21lvgTMARqIyMtu63NRUL4bgbrAVBGZmudeiMgtIvKzuw3Kc+1X3WtNCnqpBNMdx31Klltumog8IyLfu9c70k2vISJjXPl/FJFWbvpxsj8ezVwRqew+f1c37eYwv9dFIe4xVERuC/o+f3afI+/31DXcs7lLIA4QkYPC3NcoOlbXrK7Fv67FYzW4HxuwHWgFfIQT+Xce0A34zD0/B2gdpuxQ4DbgY5zW3t3ApcA0nNXPpwOfeJRjBU5rL4PcrkFOwvHKITgNg89w4qxkANlAp6C8NdzPdFeGVsHXDnGv9jit0opAJZxV723da2cBbdz8H+C0avPKfD9wQ9DxNOBVd//YwHPgtJbuc/e7A/Pc/U9xnGri3r9U8Hcf5nsKd4+hBMW3wVl5npH3e4r2bDh+yc72+39ZHDera1bXElXXilUPSlUX4HyZ5wPjC3GJ93CGHs7AcQwZS05yt7k4Fbg50NQ9t1JVfwzK209E5rh5W+IEAYtEF5xKvUNVt+NU/q7uueWqOs/dn43z/eSlDm4LNoh3IScWTBVxxt+74LhWQVWn4LScqgLTgafclmc1dVuHHgh1j0jk/Z4iPdsGnFawEQesrlldCzoXt7pWrBSUyzjgCXIPOYDT0mkfpeynwMXAKnVc/AeXbS2Rx2KjIcAjqtrG3Q5R1RHuuRzv8CLSGKeFeYI67qQ+x2mlRrt2OPYE7e/DaXHlZVeIe+Rdwa1h7qOqOgy4EigP/CgizfMJKDLSHYIIfpmFukcWuf+XwXLl9aIf6dnK4TyXET+sruXG6lqMKY4K6nXgAVVdmCf9ceAuETkUQETSROSW4Ayqugv4D/BQnvRlwCzgfhERt3xTEekTQY5tOCGrA0wELhcnzg0iUk9EDgxRrgrOn2OLiNTGca8f7poBvgHOEJEKIlIROBP4NoJsefkFx99iMOe6cnYBtqjqFvc+F7rp3YC/VXWriDRR1YWq+ijO99Q8r6yq2t99WZwS5R4rcMJLIyLtcDwiF4ZDcYYsjPhhdc3qGsSxroXS8CmNqq4Gng2RvsCd0HxXHPNRxWkx5c33XphLX4kTDGypiOzE8Sp8
ewQ5NonIdBH5GZigqreLyGHAD2692w5chNMaCS43X0Tm4rQk/8Dp0gcYDkwQkXWqenxQmTki8gYw0016TVXnihOR1AsTcIcTgvhXRL7HqcSXu2lDgZEisgDYiTN3ADBIRI53n2Wxe71sIEtE5gNvqOrTIe4b6h6jgUtEZB6Oe/7fPD5DDuLEEzoEpwIbccLqGmB1La51zXzxGQCIyCfAHar6u4hMw5k8TckXvIicCbRT1SF+y2IYebG65p3iOMRnFI7BOBO4xYFS7A99bRjJhtU1j1gPyjAMw0hKrAdlGIZhJCWmoAzDMIykxBSUYRiGkZSYgjIMwzCSElNQhmEYRlJiCsowDMNISkxBGYZhGEmJKSjDMAwjKTEFZRiGYSQlKeEstmbNmpqRkeG3GEYxZfbs2X+rai2/5UgGrK4Z8aSgdS0lFFRGRgazZqWkL0UjBRCRlX7LkCxYXTPiSUHrWtQhPhE5S0R+F5EtIrJVRLaJyNZo5QzDMAyjKHiZg3oMOF1Vq6pqFVWtrKpV4i2YYcSMzZv9lsAwjELgRUGtV9Vf4i6JYcSazEx49FFo0ADmzPFbGqMksXs3rFwJW22wqSh4UVCzROR9ETnfHe47S0TOirtkhlEUZs2Cjh1h8GDo0QMOOshviYxiSFZ2ULgiVRg3Do47DipXhowMqFoV2reHV16BrCwyswse3iirEGWKC16MJKrghBw+KShNgY/jIpFhFJAxc9dw/6eL+HdnJgBHblnF+8MHIrVrw8cfw5ln+iyhUVwplSYMm/s35bb8S68HbqbZ1M/ZXLchv1x8Hf/Wz6Dipo0cOnU8da69lr+efYmDPvuEYVurFugeg9vWjJP0yU9UBaWq/RMhiGEUhjFz13D7R/PJ3KfU2bqRdVVqMbNKA+4/8Ro63H0DvY89zG8RjWJOxY1/cf41Z1F99Qqm3Hw/P51/NVpq/6v1hytuptlX4+j58O3QqRN1nnqHdYe381Hi1MGLFV99EflERDaIyHoRGS0i9RMhnGFEYszcNdz6wXyqbP2XZz59nCmvDqD+5r9AhDfansoj36/zW0SjuLNpExdcfQaVN6zlvZc/YubF1+VSTgCI8GuPPrz5xgSoUoV+A8/lgD9+80feFMPLHNRIYBxQF6gHfOqmGYYvjJm7hjb3T2LQe3M5Y8FXfPXatZyyZDr/PeosNlQ6ICff2s27fJTSKPZkZkK/flRdt5oPn3+PP9sfEzH7v42awFdfsa9MGfoNPJdym/9JkKCpixcFVUtVR6pqlru9AdiqeyPhjJm7hhZDJjDo/Xls27GbNz+4lyfHP82yA+pzSv/neKbLhewtVTonf91q5X2U1ij23HsvTJnCF/c8yeq2nbyVadyYj555h4qbNnDq/Tc5hhVGWLwYSfwtIhcB77rH5wOb4ieSYexnzNw13PXxAnZmZjsJqiDCvrR0FtVuwqRDj+adNj1Ryd3WKp0m3H5yMx8kNkoE06fDY4/BlVfyc+9zC1T0rxZtmDroPno8fjdtP3qDuX1tmj8cXhTU5cALwNM41nvfu2mGERfGzF3D4xN/ZU2eIboj1v3OwxNf4L4eA5hT7zAe7XZZyPLVypdm6OktOaNtvQRIa5Q4du+G/v2hUSN46ilYuqfAl5h93lUc8u2XdHvuAZYeezLbateNg6CpjxcrvlXA6QmQxTAYM3cNd368kF2Z+3LSKuzdxS3fvk3/2Z/yd8VqVNwbem6pfOl0HjnrCFNMRnx54gn4/XeYNMlZ70TBFRQifHHX41zZ71h6PHYnHz85KuZiFgfCKigRuUNVHxOR53F6TrlQ1RvjKplRogjXazr2j9k8PPFF6m/dwNttevFot8vYVrZivvLVK5TmvtOs12TEmZUr4aGHoF8/ZwF4EdhSP4PpV95CtxceotHMb1l5ZNcYCVl8iNSDCrg3MtfGRlwJ1WsK0HzjCnaXKsM5Fz7KrPotQ5a/qFND/u+MI+ItpmHAkCHO5xNPxORyP104gLaj3+SEJ4cw8n+T0fT0mFy3uBBWQanqp+7uTlX9MPiciPSNq1RGieKWD+aR481FlbN/nsK2shWYdOjRjOh4Bm+0Pz2XdV4Am2syEsr8+fD22/Cf/zj+HWPAvrLlmHrjEM6482paTBjNot79YnLd4oIXM/M7PaYZRoEYM3cNGYM/z1FODf9dx9vv38OT45/mzEVTAdiXlp5POXVuUoMVw05l3n0nmXIyEsc990C1ao6CiiFLevRhfbPD6TL8cdIyM2N67VQn0hxUL+AUoJ6IPBd0qgqQFW/BjOLNPWMW8vaPqwAotS+LK38aw6Dp/2NvWinuOek63mnTM2S5zk1q8M5VRydSVMNwnA9/9hn83/85SiqWpKXxzbV30nfQhRz+2XssOPPi2F4/hYk0B7UWZ/7pdGB2UPo24OZ4CmUUTy589QemL8u/ev7Y5XMY/PUbfHHo0dx34jWsr5zfOWa6COcf1cDmmgx/eOABqF4dbrghLpdf1rUHa1u25eiRz7HwtPPzu0sqoUSag5oPzBeRT4AdqroPQETSgbIJks8oJhz10Jes37Y357j83t20WfcbPzRqxZQmHcMaQaSnCU/2bW1DeYVARBoAbwIHAdnAcFV91l+pUpAFC+DTT+H++6FKnGK1ivDD5YM4+9ZLOezLsSzudXZ87pNieJmDmgQE+4wpD3wVH3GM4saYuWtods+EXMrpuD9m8+WI6xgx+n6q7doKIiGVU/UKpU05FY0s4FZVPQzoBFwvIi18lin1eOIJqFgRBg6M621+P64nGw9uRqeRz5oLJBcv/chyqro9cKCq20WkQhxlMooBY+au4T+jF7AnKzsnrcbOLdw7eThnLP6apTXqc2nf+9lcPnSL1Oaaio6qrgPWufvbROQXHIfPi30VLJVYtQrefddRTjVqxPdeaWnMuHQgve+7gcY/TGX5Md3je78UwEsPaoeI5AQvEZH2gLmJNsIyZu4aBr0/L5dyqrJ7O5NGXMcpS6bzTOfzOaX/8/zU4PB8ZQVnXZMpp9giIhlAW2CGz6KkFs895/Rmbk7MtPvinmexrWZtOr79ckLul+x46UENAj4UkbXucR2gYN4RjRLBPWMW8s6Pq3K5Ham6axtbyldma7lKPH/MeUxv1JqlNRvmK2s9pvghIpWA0cAgVd0a4vzVwNUADRvm/21KClnZSqk02Z+wfTu89hqccw4k6HvJLl2G2eddSbcXHqLW74vZ2LRkj8h68cX3k4g0B5rhNHCXqKoZ6xu56PHUNH7fsCPnOD17H1f+9Ak3Tn+Pi/s9yJz6hzGq/Wkhy5pyih8iUhpHOb2jqh+HyqOqw4HhAB06dCixkx+B8O0B2r0/gpO2bOHNXpexNig9mHiEY5939qV0fu0p2r/3Kl8MeTrm108lIq2D6q6qU0TkrDynmooI4f7sRslizNw1uT1BAIf/tZRHJzxHyw1/MLFpJ9ZUDR0+rGypNB49u5UZQcQJERFgBPCLqj7ltzwpRXY27d97lbWHt2Ntqw4JvfXuqtVZ1OtsWk4YzbQb7wVirwRThUg9qOOAKUCoZq8
CERVUOBNXEakBvA9kACuAfqr6b4ElN3wneLFtgFu+eYvrf/yQvytW45oz7mJis/xRRgW40PznJYLOwMXAQhGZ56bdparj/RMpNciY+Q0HrFzGpw++5Mv9Z597JW0+eZtWY/8H3e7zRYZkINI6qPvcz8JG0wqYuM4RkcrAbBH5ErgMmKyqw0RkMDAYiK3vECOujJm7hlven0d2iHNby1XkvdYn8ehxl7G1XKVc52pXLsOMu4vmAdrwjqp+h9MeMApIuw9eZ0f1mizp4U+koY2HtmRVu6Np9+FIeHIIpHmxZyt+RBriuyVSwWhDBhFMXPsA3dxso4BpmIJKesKFw6ixcwtDJr/KV4ccxeeHdeW1I/OOCDuUTsOUk5ESVFm3mkO+mciPl97AvjL++SSY07c/Z9x5NUycCL16+SaHn0Qa4qvsfjYDOgLj3OPTgG8KcpM8Jq61XeWFqq4TkQPDlDHLoiRhzNw13PrhfPYFTzSpctaiKdwzZQSV9uzk54MOCVs+DXi8b5u4y2kYsaD1J28BjrGCn/zW/VS2H1CLSi+/bAoqL6p6P4CITALaqeo293go8GG4cnnJa+LqzNtGxyyLkodbP5jHvqBfoOG/63ho4ot0XTmP2XWbM7jnDfxeq1HY8k+d28YMIYyUIC0zk9Zj3uaPzieytW5sQmoUluzSZZh/xkV0fv0ZJ1Bio/B1rLjiZWCzIbA36HgvjoFDVMKYuK4XkTru+TrABs/SGgnlnjELyRj8eS7lBHDEX0tps+5X7ulxLedc9FhY5dT0wIqsGHaqKacYICK9RaRkTkQkkKbffEGlvzcw1+feU4D5Ac/mI0b4K4hPeFmo+xYw03Uaq8CZONZ5EYlg4joOuBQY5n6OLajQRnwJeIII5vC/lnLIpj8Z0/J4Pm/ehR8ateKfClVDlrcIt3HhPOBZERkNjFTVX6IVMApOm49GseWgevzR+QS/RQFwenE9ezoK6t57oYR5OfeyUPchEZkAdHWT+qvqXA/XDmniiqOYPhCRK4BVgEXnTSJCeR2/+bt3uGLWWFZXPZDPmnclK72UKacEo6oXiUgV4HxgpIgoMBJ4NzD8bhSRP/6g8Yyv+XbAf5Ir9Po118AZZ8Dnn0OfPn5Lk1C8quMKwFZVHSkitUSksaouj1QgiolrcjRPDMDpMd3/6SL+3ZnbQUjX5XN4eOKLNNiynv+17smwbpeRlR76L5Mu8GQ/m2uKJ+4c7miciAKDcEYzbheR51T1eV+FKw6MGEF2WhoL+lzgtyS5OfVUqFsXhg83BZUXEbkP6IBjzTcSKA28jdNDMlKcUIttwTGEGPXBfSyvUY9+FwxjZgjHrgGeMSOIuCMipwP9gSY4w+5HquoGN7LAL4ApqKKQmQmvv84fnU9kW+26fkuTm1KloH9/eOQR+PNPaOCv8UYi8dKDOhPHRHwOgKqudRfeGilMyMW2qrRZ9xvz6jZjVfU6XHHOvXzfqDV7SpUJeQ1TTAnlHOBpVc21xENVd4rI5T7JVHz4/HP46y/m/edxvyUJzRVXwEMPwciRzlxUCcGLVdBeVVUcAwlEpGJ8RTLizYWv/sCgPMqpwea/ePODe/n4rdto+ddSAKY26RhSOaVhyskH1uVVTiLyKICqTvZHpGLEa69B3bos63yi35KEpnFj6NHDMZbYt89vaRKGFwX1gYj8F6gmIlfhRNN9Nb5iGfGi8eDPmb7sn5zj9Ox9XD1jNJNGXE/btUu4t8cAFtc+OGz5etXK27omfwjlhqNkrt6MNatXw4QJ0L8/msxWcldd5QRQ/KrkBDSP+Gu4puLvA82BrTjzUPeq6pcJkM2IEWPmrmHouEVs3pUnSooq7757J0euXsykpp2498QB/FUltOdks87zBxG5FrgOaCIiC4JOVQam+yNVMWPkSMjOdobRNvstTAROPx1q1oRXX4WTT/ZbmoQQUUGpqorIGFVtD5hSSkFCGUGUzdzjDN2JMPrwE3i9Qx++OPQYCOPlw4bzfOV/wATgERzHygG2qeo/oYsYnsnOdobNTjzRGUYLE/cpKShbFi65xInyu2EDHBjSS1yxwssQ348i0jHukhgxp8dT0/Ipp2P/mM1XI66j95JvAXi/9cl80axzSOVUtlSaKSf/UVVdAVwPbAvacEPXGEXhq68cN0JXXum3JN648krIyoJRo/yWJCF4GXA9HhggIiuAHThrm1RVW8VTMKPwhPIEEfA6fubiaSytUZ91lSMHQbMhvaThf0BvYDaOoVJwS0KB8BOGRnRefRUOOMBZCJsKHHYYdO7sGHXcdlvYUY/ighcFZROxKcKYuWu446P57M3jPK/nr9N5eOKLVNqzk2ePOZ8Xj+7H3lKlQ16j6YEV+fKWbgmQ1vCCqvZ2Pxv7LUuxY8MGGDsWrr/eGT5LFa680lkX9d130LVr9PwpjBdXRytFpB3QBafFNl1V58RdMqNAhOo1BRBVllevG9HreJWy6Sy4v2ccJTSKgoh0Buap6g4RuQhoBzyjqvlXWRveePNNZ4HuVVf5LUnB6NsXbrrJ6f0VcwUVdQ5KRO7FCSx4AFATxw/YPfEWzPBOj6em5VJOAdPx/rMcP7wTmnWO6HX8mXPbmHJKfl4GdopIa+AOYCWORwmjMKg6w2THHAMtWvgtTcGoWBEuvBA+/BD+/ddvaeKKFyOJ84GOqnqfGwa+E3BhfMUyvDBm7hoyBn/O7xt25KS1/GspY9+8hbumjaTdmiVORRRBw0RqMCOIlCHLXTDfB3hWVZ9lf1BRo6B8+y38+mvq9Z4CXH017N4Nb7/ttyRxxcsc1AqgHLDbPS4LLIuXQIY3Lnz1h1wLboO9jm+qUJUBZ9wZ0XTc5ppSjm0icidwEXCsiKTj+MU0CsPw4VC1KvTr57ckUcnKVkql5anHbdpAhw7OcwwcmKueh8yfonhRUHuARSLyJc4cVA/gOxF5DkBVb4yjfEYe8obDCHDo3yu5fNZY3m91EsO6XcbWcpVCljfrvJTlXOAC4ApV/UtEGgJJ6jguydm0CT76yOk9VajgtzRRKZUmDAuxPqt1zwvo9X+38OaoL1jbev9KoMFtI1vophJeFNQn7hZgWnxEMaKRMfjzXMc1dm6h2x+z+PjwE5hftxnHXz2cP6sdFLKsKabURlX/Ap4KOl6Fh8ChRgjefBP27HGGyVKYX04+k+5PDaHtx6NyKajihBcrvpKxIiyJyWehp8qZi6YyZMprVNy7k+mNWrO+cs2wysnmmVIfETkLeBQ4EGctVGA9YhVfBUs1VOG//4VOneCI1G6w7a1YiUWn9OWIT9/jq1v/jz1VqvktUsxJYs+IBkDzu8ezO2hdU4PNf/HQxBc5dsVc5tRtxuCeN7A+zKLbzk1q8M5VRydKVCO+PAacZqHei8i0aY5xRDHxxDDv7Eto99EbHPHZ+8y64Bq/xYk5pqCSkDFz13D7h/PIzM6dXi5zN2PevIUy+zIZ0mMA77TpRXZa/tDUFUqn8fBZrazXVLxYb8opBrzyClSv7qwlKgZsaHYEaw5vT5uPRjHr/KuLnWeJaN7M04Fhqnp7gu
Qp8eS1zgNo/M8allevy+7S5fhPr5v4uXaTsF7HrddUbJklIu8DY3AMlwBQ1Y99kyjVWLcOPv4YbrgBypf3W5qYMbdff3rfO5CGs6azqmMXv8WJKRHXQanqPqC9G3bDiCP3jFlIRp5YTeUyd3Pn1Nf58rVrOXXJdwB81fSoiCExTDkVW6oAO4GTgNPcrbevEqUar77qOFq99lq/JYkpv/Tow66q1Wn34et+ixJzvAzxzQXGisiHOM5iAWu5xZJQITG6LJ/LwxNfoOGW9fyv9cl827ht2PK2pqn4o6r9/ZYhpcnMdIb3Tj4Zmjb1W5qYsq9sOeb3uZAj33mZyuvX4jj8KR548SRRA9gEdMdabjFnzNw1+ZTTXVNG8PYHQ8hML8W55z/CXT1vCLuuqXOTGqacSgAicqiITBaRn93jVtFcjonI6yKyIVCmRDNmjDPEd/31fksSF+b26w+qtP1wpN+ixBQvZuaFarmJyOs4imyDqh7uptXAidCbgeOhop+qFm9nUiEYM3cNd368gF3BVhCqpGk22WnpzK53GM+VKsOLx5zrBBYMQb1q5bn95GZmCFFyeBW4HfgvgKouEJH/Af8XocwbwAvYeil49lknIOEpp/gtSVzYUrchS489mTYfvwUvPgLlyvktUkzw4iy2voh84rbE1ovIaBGp7+HabwB5PZAOBiaralNgMrkjhJYIAmuagpVT/c1/MerD+7hmpjNqOrHZMTx17MUhlVPTAyuyYtipTB/c3ZRTyaKCqs7Mk5YVqYCqfgOUyKi7WdlBIWdmz4bp0x3jiPT8Vq/FhdnnXUWFzZvg3Xf9FiVmeJmDGokTNC1gl3mRm9YjUiFV/UZEMvIk9wG6ufujcLxS/MebqKlP3rmm9Ox99J81llu+e4dsSXN850XALPRKNH+LSBMcd2OIyDnAulhcWESuBq4GaNiwYSwu6TvB7oF63/cYTStU5KX2fdgTIaR7qrsIWtmxCxsOacGBTz8Nl11WLEzOvSioWqoaPLD5hogMKuT9aqvqOgBVXSciB4bLWNwqTV7z8WYbV/DE509zxPplfHnIkdzb41rWVamVr1ztymWYcXfEtoBRMrgeGA40F5E1wHJiFFVAVYe716ZDhw4aJXtKUWnDOg6b+DFz+/ZnT+Vi7nRDhFkXXM0pDwyCKVPghBP8lqjIeFFQf7sB0gL9xvNxjCbiSnGoNGPmruGWD+aRHUL68pl7qLXjX67rM5jxzTqHbO1Yj8kQkVuCDscDU3GG5ncAZxPkn8/IT/v3XkWys/mpGHpZCMWiXmdzyisPw5NPFgsF5cWK73KgH/AXzpDCOW5aYVgvInUA3M8NhbxO0hOYawpWTl2Wz+WG6Y6en1e3GcdeM4LxzbuEVE7PnNvGlJMBTsynykAH4FqgOlANGACkWKS9xFJmx3bafjSKX084jS31QgfrLG7sK1vOmWubMAEWLvRbnCITVUGp6ipVPV1Va6nqgap6hqquLOT9xgGXuvuXAmMLeZ2kJq9z1+o7t/DkZ0/y9gdD6LP4a8plOqG19pbKH84nYARhBhAGgKrer6r34yxuaaeqt6nqrUB7IKKxkoi8C/wANBOR1SJyRfwlTh7ajB5Fue1bmXnxdX6Lkliuu86JuvvYY35LUmTCDvGJyB2q+piIPI87MRtMtDhQbuXoBtQUkdXAfcAw4AO3oqxiv+FFsWDM3DX8Z/QC9mS5FnqqnLF4GkMmv0qVPTt47uhzw5qOpwFPmddxIzwNgeBAYHtxlmuERVXPj6dASc3u3XR8+2VWHHks6w5v57c0iaVGDSfW1fPPw4MPQkaG3xIVmkhzUAHHlLMKc+EIlSP1B0bzMGbuGu76eAE783h3PXD7Pzw88QWW1MpgcM8b+K1WRsjyNtdkeOAtYKaIfILTYDwTxxLWCMWoUVT+ez2fPfiS35L4w623wosvwqOPwssv+y1NoQmroFT1U9dZ7OHmLDY8ea3z0rP3cfJvPzC+WWc2VD6Acy58nCW1GoX0Og4Wq8nwhqo+JCITgK5uUn9VneunTEnL3r3wyCOsPbwdK4/sGj1/caR+fbj8cnj9dbjnHqiXmu8YT85iEyRLShHKuWvLv5Yy5s1beGnsMI5e5UxQLq59cEjl1LlJDZtrMgqEqs5R1WfdzZRTOEaNgpUr+e6a24vFWqBCM3gwZGfDsGF+S1JoPDmLFZFxgDmLJXSspnKZu7n5u/9xxU9j+KdCVa7tM5gfGoaO1mmh1w0jjuzZAw89BEceyR/HFLvZhIKRkQH9+8Pw4XD77ZCC60m9KKhgZ7EBFChRCiqk/zwAVd56/146rlnM/1qfzLBu/UM6di2dBo/3teE8w4grw4fDypVOWPeS3HsKcM89To/ywQedcCMpRtycxRYnQoXDqL5zC9vKViQrvRTPu5Z5M8L0mswIwjASwLZtzov4+OPhpJNgXtz9CSQ/DRvCNdfASy85hhPNm/stUYHw4iy2wG7+ixP5lJMqZyyaylevXcvVrnPXbw5uH1Y5WRBBw0gQTzwBGzfCww9b7ymYe+6BChWcOakUw4sniVeBO4FMcNz8A+fFU6hkIa9yCngdf+azJ1lZvQ5fHXJk2LKl0xwLPZtvMowEsHo1PP449OsHnTr5LY2vZOX1rXbggfCf/8DYsfD1197KJAle5qAqqOrMPFHfI7r5T3XGzF3D3Z8sZMfefTlpfRZN5ZGJL5Atadx74jW83faUkNZ51SuU5r7TWtpck2EkkjvvdCzWHn3Ub0l8J9iTe07a8Rdz1UGvsOeq6xn5zldoqdyv/mT15O7VWWxc3PwnI6GG9BBhRfW6fJfRlvtOvCak13EBnrY1TYaReL7+Gt5+G+66K6W9JsSTrPIVmHLL/Zx5xxW0HT2KOeemhtcrLwoqbm7+k41g5VQuczeDvvsf5bL2MrTHAObXbcbVZ4WeejPTccPwicxMJ4x7o0Zw991+S5PU/HrCaazo2JVjX3yYX7ufyo5aB/ktUlS8zEGpqp4I1AKaq2oXj+VShjFz19Dy3i9ylFOX5XOZNOJ6Bsz8mDL7shDNDlkusNjWlJNh+MSjj8KiRY7fuQoV/JYmuRFh4t2PU2rvHno8fpff0njCSw9qNI4X5R1BaR9RDDxM5F3bVG3XVoZMeY2zf57CH9Xrcu75j4S0ziuTLjx2TmsbzjMMP1m0CB54AM47D047zW9pUoJ/GzZh+lW3ctyLD9Psq3H8euLpfosUkUjezJsDLYGqInJW0KkqQLl4CxZvQgUTrLR3Fz1+n8HzR5/LCyG8jgtwoQ3nGYb/7NkDF18MVavCc8/5LU1KMeOSgRw6dTwnP3w7q9scxY6atf0WKSyRelDNgN44wdGCmyfbgKviKFNCuP/TRWQr1N+ynnMWfsUznS9gddXadL72dbaVrZgvv80zGUYSMWQIzJ3rmE7Xym+0ZIQnu3RpPnvgRS678AROvXcgH7zwvt8ihSWSN/OxwFgROVpVf0igTAlh6/bdXDFrHLd+9zbZksaYFt1YUaNePuVUvnQaj5zVyobzDMNHsrKVUmnuUpfPPnPWPA0YAKcn9
xBVsrLp4EOZfOv/0fPh2+j0xnPQ/v/8FikkXuagzhSRRcAu4AugNTBIVd+Oq2TxZN48PnnrVlr9tZSvmnRkyEnXhTQdt16TYSQHgbU91Vb9wWUXXcjm5q14+5K7yMqz3idAsq7rSSbmnX0JDWdP59iXHoHeXeHkk/0WKR9erPFOUtWtOMN9q4FDgdSND5WZCX36UHfbJq4//T9cefa9+ZRThdJp5gXCMJKMsls303fQhWh6Op88/jpZ5cr7LVJqI8KEIU/xd5PD4NxzYckSvyXKhxcFVdr9PAV4V1X/iZQ5afn2W0c5lS4NH33EjM+/ZVLLY/P57LqoU0MWP9jLhvQMI5nYtYuzb72UaqtXMvrJUWyp18hviYoFmRUqMfrpN6FsWejZE9au9VukXHhRUJ+KyBKgAzBZRGoBu+MrVgzZtAkuvRSOPdZxwQ/QsSOnHteSx/u2pl618ghQr1p56zUZRjKydy/07UuDOT/w2QMvsLqdOV+OJVvqNoTx45135UknwYYNfouUg5dwG4NF5FFgq6ruE5EdQJ/4i1Y4xsxdw+MTf2Xtvzu5bMX3DJ70X8pu3+qsMr/yylx5z2hbz3pKhpHM7NoF55wD48cz8a4n+KXnWdHLGAWnfXsYNw5OPRVOOAEmTYI6dfyWKrqCEpHSwMXAsa7D2K+BV+IsV6FwFt4uZFfmPoZMeY0rZo1lfr1mbPrv+3Tvd6Lf4hmGURD+/hvOPBOmT4fhw5nX4Uy/JSreHH+8YyF5+ulwzDHwxRfQrJmvInkZ4nsZx2vES+7Wzk1LOp4avxjd6Ti8GHfYsQw94WrOvOAxhvxRrDwzGUbxZ/58J2zGTz/Be+/BVSm/9DI16N4dpk6FHTvgqKOcoT8f8fLm7qiql6rqFHfrD3Qsyk1FpKeI/CoiS0UkNlG05s3jhReuY8iU1wCYX7cZb3Q4ney0dNZu3hWTWxiGEWdUneivnTo5w3tTpzoxnozE0bGj0zDIyHCG/G6/3fHc4QNeFNQ+N9wGACJyMLAvQv6IiEg68CLQC2gBnC8iLQp7PXbudIJxdehAve2b+L5h63xZ6lYzc1TDSHqWLIETT3S8kx93nOMp4mgziPCFRo3g++/h2mudSMVt2oQNdhhPvCio24GpIjJNRL4GpgC3FuGeRwJLVfUPVd0LvEdhjS5mzYIjjoDHHoP+/fnxs2+Z0qpbrizlS6dz+8n+jqMahrGffNFb1651lNIRR8CcOfDKKzBhghMJ1vCPChWc3uyECU5vtls3OOss+PnnhIngxYpvsog0xfHNJ8ASVS1Kf68e8GfQ8WrgqLyZRORq4GqAhg0bhr5SrVqOs8ipU6FbN04FMqtUc6z4Nu+ibrXy3H5yM7PUM4wkolSaMGzORg5aPI92H46kxYTRiGYz/4yL+G7AHeysUQvmbcpVxjxDxJdcrqTy0rMnLF4MTz7pdAY++YTsU3uTdsNAp8ebnj+yeKzwYsVXDrgO6IITVfdbEXlFVQu7FirUt6D5ElSH4wRKpEOHDvnOA043dPbsXIttzXTcMPYjIj2BZ4F04DVVHeabMNnZjvHDuHFcOep/1Fz+G3vLVWD+GRcy8+Lr2FI/wzfRSjqhwsTno/e1lOt6Hh3efZUuY0Y5iqt+fWcZQJ8+0Lmz4wghlnJ5yPMmjgfz593j84G3gL6FvOdqoEHQcX2g8MuXJYzWN4wSTtB8bw+ceveTiIxT1cVxv7kqrF7txGyaNw9mzHC8uWzaBCLsanMUX9z1OL+cfBZ7KleJuzhGbNhdtTrfDbiDLs896KybeustZxjwmWegUiVnznDgwJg58fWioJqparDlwVQRmV+Ee/4ENBWRxsAa4DzggiJczzCM0OTM9wKISGC+t2AKavdux7vAnj3OXMTOnY4Z8rZtsHkz/Puvs2bpr7+c+aQ//4Tly518AZo2hd69nUWgJ5/MO2ts6UdKU7Ys9O3rbNu2wZQp8OWXTiPk7yg9sQLgRUHNFZFOqvojgIgcBUwv7A1VNUtEBgITcYYdXlfVRYW9nmEYYfE03xuVyZMd5RKJ9HTHqKFOHTjkEMdlTtOmcNhh0Lo1VK+eO/+a2L3EDJ+pXNkZ4uvj2rpp6BmZwiAa5WIi8guOgcQqN6kh8AuQDaiqtoqZNOFl2AisDHO6JpDq//ZUf4ZUl7+Rqha7qHci0hc4WVWvdI8vBo5U1Rvy5MsxSMKp678W4bbJ8l8wOXKTLHIUqK556UH1LIIwMSHSA4nILFXtkEh5Yk2qP0Oqy1+M8TTfG2yQVFSS5b9gciSnHAXFi5l5uJ6LYRjJjc33GimNlx6UYRgpiM33GqlOcVBQMRma8JlUf4ZUl7/YoqrjgUR6/EyW/4LJkZtkkaNARDWSMAzDMAw/sMUIhmEYRlKS0goqLmE74oiIvC4iG0Tk56C0GiLypYj87n5Wj3QNvxGRBiIyVUR+EZFFInKTm55Sz2EUDq+/s4isEJGFIjJPRGYVtHws5Aj3X3XPDRWRNa5880TklALeP+K7Rxyec88vEJF2XsvGWI4L3fsvEJHvRaR10LmQv1FSoaopueFM+i4DDgbKAPOBFn7LFUXmY3ECPv4clPYYMNjdHww86recUZ6hDtDO3a8M/IYTNiWlnsO2Qv/+nn5nYAVQs7DlYyFHuP+qezwUuK2Q94767gFOASbg+B7tBMzwWjbGchwDVHf3ewXkiPQbJdOWyj2o2IXtSBCq+g3wT57kPsAod38UcEYiZSooqrpOVee4+9twFm3XI8Wewyg0Rf2dY/U/iXqdCP/VouLl3dMHeFMdfgSqiUgdj2VjJoeqfq+q/7qHP+KshUsZUllBhXLjkopuzGur6jpwKhSQMkFwRCQDaAvMIIWfwygQXn9nBSaJyGzXU0VBy8dKDiDffzXAQHfo6/UCDjV6efeEyxPL91ZBr3UFTq8uQLjfKGlIZTNzT2E7jPggIpWA0cAgVd0q5lW+2CAiXwEHhTh1dwEu01lV14rIgcCXIrLEHUFItBz5/qtu8svAgzjvjAeBJ4HLvV4yRFred0+4PLF8b3m+logcj6OgugQlF/k3ijeprKBiG7bDP9aLSB1VXecOAWzwW6BoiEhpnAr/jqp+7Can3HMYoVHVE8OdExFPv7OqrnU/N4jIJzjDUd9QgP9JLOQI819FVdcH5XkV+CzcvULg5d0TLk8ZD2VjKQci0gp4DeilqjmRICP8RklDKg/x5bhxEZEyOG5cxvksU2EYB1zq7l8KjPVRlqiI01UaAfyiqk8FnUqp5zAKTdTfWUQqikjlwD5wEvCz1/IxlCPcfxVXqQU4M0g+L3h594wDLnGt+ToBW9yhyFi+t6JeS0QaAh8DF6vqb0HpkX6j5MFvK42ibDiWMr/hWLLc7bc8HuR9F1gHZOK0fq4ADgAmA7+7nzX8ljPKMwQiKy8A5rnbKan2HLYV+vcP+TsDdYHx7v7BOBZl84FFwXUzVv8Tj3KE/K+6594CFrrnxgF1Cnj/fO8eYAAwwN0XnGCRy9z7dIhUtgi/RzQ5XgP+DXr+WdF+o2TazJOEYRiGkZSk8hCfYRiG
UYwxBWUYhmEkJaagDMMwjKTEFJRhGIaRlJiCMgzDMJISU1BJhIhkSJCn82RBRKaJSAe/5TAMPxCR10SkRYj0y0TkhSJcd3vRJCv+pLInCcMDIlJKVbP8lsMwkgV3Aa+oaraX/Kp6ZZxFMsJgPSgfEZFbRORndxvkJpcSkVGuE8uPRKSCm3eYiCx2059w02qJyGgR+cndOrvpQ0VkuIhMAt4UkRki0jLovtNEpL27mvx1t+xcEenjni8vIu+593ofKJ/I78UwYo07OvGLiLwEzAGGuP/7BSJyv5unooh8LiLz3Tp5rpueM4IgIv1F5DcR+RroHHT9N0TknKDj7e5nJRGZLCJzxIm9lM9zuYjUEZFvxInL9LOIdI3rl5FCWA/KJ0SkPdAfOApn1fkM4GugGXCFqk4XkdeB69zPM4HmqqoiUs29zLPA06r6nevSZCJwmHuuPdBFVXeJyM1AP+A+18VLXVWdLSIPA1NU9XL3mjPFcdB5DbBTVVu5frzmxPv7MIwE0Aynzo0BzsHxPSfAOBE5FqgFrFXVUwFEpGpwYbfu3I9Tt7YAU4G5Ue65GzhTHYfKNYEfRWSc5vaQcAEwUVUfEpF0oELRHrP4YD0o/+gCfKKqO1R1O46/rK7An6o63c3ztptvK84f/TUROQvY6Z4/EXhBRObhuGupEvCvBYxT1V3u/gdAX3e/H/Chu38SMNgtPw0oBzTECaz4NoCqLsBxB2MYqc5KdWIzneRuc3EaX82BpjguiU4UkUdFpKuqbslT/ihgmqpuVCf+0vse7inAwyKyAPgKJxxG7Tx5fgL6i8hQ4Ah1YlcZWA/KT8LFp8jre0pVNUtEjgROwHEIORDojtPAODpIETkXdkJf7Ai6wBoR2eT2hs7F6SEFZDhbVX8NUd58YBnFjUCdEOARVf1v3gzuyMYpwCMiMklVH8iTJVy9yMJt8LtzXGXc9AtxembtVTVTRFbgNAT3X1D1G7cHdyrwlog8rqpvFvjpiiHWg/KPb4AzRKSC6034TOBboKGIHO3mOR/4Tpx4NlVVdTwwCGjjnp+Eo6wAEJFAeijeA+5wr7PQTZsI3OBWKESkbZBsF7pphwOtCv+YhpF0TAQud+sVIlJPRA4Ukbo4Q9tvA08A7fKUmwF0E5EDxAnj0Tfo3AqcoT9wotqWdverAhtc5XQ80CivMCLSyM3zKo739bz3LbFYD8onVHWOiLwBzHSTAl6HfwEuFZH/4nhqfhnnTz5WRMrhtP5udsvcCLzoDh+UwlEsA8Lc8iOcOasHg9IeBJ4BFrhKagXQ273nSPe684JkNIyUR1UnichhwA9u22w7cBFwCPC4iGTjRBy4Nk+5de4w3A84UQnmAOnu6Vdx6uhMHO/qgd7aO8CnIjILpy4tCSFSN+B2Ecl0ZbkkJg9aDDBv5oZhGEZSYkN8hmEYRlJiCsowDMNISkxBGYZhGEmJKSjDMAwjKTEFZRiGYSQlpqAMwzCMpMQUlGEYhpGUmIIyDMMwkpL/B06Bc6Sv4Pt9AAAAAElFTkSuQmCC", 65 | "text/plain": [ 66 | "
" 67 | ] 68 | }, 69 | "metadata": { 70 | "needs_background": "light" 71 | }, 72 | "output_type": "display_data" 73 | } 74 | ], 75 | "source": [ 76 | "mod = pb.bass(x, y)\n", 77 | "mod.plot()" 78 | ] 79 | }, 80 | { 81 | "cell_type": "markdown", 82 | "metadata": {}, 83 | "source": [ 84 | "The following gives the posterior predictions of the BMARS mean model." 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 4, 90 | "metadata": {}, 91 | "outputs": [ 92 | { 93 | "data": { 94 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAWn0lEQVR4nO3df4zcdZ3H8dd7xsGbVrwtgfbKHns9CWnuEGgvG1uDufTOSFFDKYTC1XLHRWP9QxNBsidgc10iv3ILiImXy7UHEWLlCljGoperjdFDCRC3zrZLrT3E1MK0aSHQE2WTru37/pjZst3u7sx8v5+Z+X6/83wkZne/OzP7Hie8+PD+fn6YuwsAkD65ThcAAIiGAAeAlCLAASClCHAASCkCHABS6j3t/GPnnnuuL1q0qJ1/EgBSb9euXW+4+3lTr7c1wBctWqTh4eF2/kkASD0z+81012mhAEBKEeAAkFIEOACkFAEOAClFgANASrV1FgoAdJtSuaKhHft16NiYzu8pamDlYq1e2hvktQlwAGiRUrmi27eNamz8hCSpcmxMt28blaQgIU4LBQBaZGjH/lPhPWFs/ISGduwP8voEOAC0yKFjY01dbxYBDgAtcn5PsanrzSLAAaBFBlYuVrGQP+1asZDXwMrFQV6fm5gA0CITNyqZhQIAKbR6aW+wwJ6KFgoApBQBDgApRQsFABrUylWVURDgANCAVq+qjIIWCgA0oNWrKqMgwAGgAa1eVRkFLRQAmGSmPvf5PUVVpgnrUKsqo2AEDgA1E33uyrExud7tc5fKlZavqoyCETiArjLbTJLZ+tzP3fa3px7DLBQAaLN6M0nq9blbuaoyClooALpGvZkkrd49MDQCHEBXKJUr096ElN4dYSexzz0bWigAMm+idTKTnjkFSa3fPTA0AhxA5k3XOpnM/d3vk9bnng0BDiCTJs828TqP/b+x8bbUFBoBDiBzps42qSepNynrIcABpF6pXNHg9r06VhtJ50w6WW/YXZPkm5T11J2FYmYXmNmPzGyfme01sy/Wrg+aWcXMRmr/+0TrywWA05XKFQ08uftUeEv1wztvJknq7Snq3msvSU3Pe6pGRuB/kHSru//czM6WtMvMdtZ+9zV3v7915QHA7IZ27Nd4g8Pt3p7iqRWVWVA3wN39sKTDte/fNrN9ktL5rysAmdPoboBpbpXMpKmFPGa2SNJSSS/WLn3BzPaY2SNmNm+G56w3s2EzG3799dfjVQsAU9S7AWlKf6tkJube2H96mNn7JP2PpLvdfZuZLZD0hiSX9FVJC93907O9Rn9/vw8PD8csGQDeVSpXdPPWkWl/11MsaGTjFe0tqAXMbJe790+93tAsFDMrSPqOpC3uvk2S3P3IpN9vlvS9QLUCwCn1zqFcvbRXw795U9964eBpzyvkTIOrLm53uW3VyCwUk/SwpH3u/uCk6wsnPewaSS+FLw9AN5sYXU/en/vmrSMqlSunPe6u1ZfooRuWqLeneKplMrTmssy1TKaq20Ixs49I+omkUUkna5fvkLRW0hJVWygHJH2udsNzRrRQADTjoju+r/GTZ14v5KSX7/lk+wvqkMgtFHf/qar3Aab6rxCFAcBMpgvv2a53G7aTBYCUYik9gI6afJPyj4sFmUnH3hlP7f4k7USAA+iYDaVRbXnh4KndAicvh5/p8AW8iwAH0BEbSqNnTP1rlE13V64LEeAA2maiXRJ3dL1uWV+gitKNAAfQUqVyRXc+s1dvvRP/0IS8mdYuu0B3rb4kQGXpR4ADaInqoQp7NBZozl/WdhIMgQAHENzEHt2NbvNaTyFnmdtJMAQCHEAQk6cD5sx0osGN8urpKRY0uOrizC+Lj4IABxDb1DMo44R33kwPXJ/9fUxCYCUmgNiGduxv+ADh2RQLecK7CYzAAcQWYtHNvDkFbbyKVkkzCHAAsUzd2rVZvdPs8Y3GEOAAmhZibnchbxq6jnZJHAQ4gKbEWQI/YU4hp3uuvZTwjokABzCrydMD55yV1++PR79ZSbskLAIcwIymTg+MGt43Lu9j+XsLEOAAplUqV3TLEyOKux7noRuWMOJuEeaBAzhDqVzRrU/ujhXePcUC4d1ijMABnKZUruiWrSOKmt2XX3iOtnz2w0FrwvQYgQM4ZWITKsI7HRiBA5BUa5s8sTvyPiYLzj6L8G4zAhzoUhtKo3r8xVd1wl0mRR51S4y8O4UAB7rQxx78sV4++vtTP0cJb0K78whwoEuEOo9Sqs4wIbw7jwAHusDUBTlxFAt5Da66OEBViIsAB7rA4Pa9QcKbpfDJUjfAzewCSY9J+hNJJyVtcvevm9k5krZKWiTpgKTr3f2t1pUKIIoNpVEdG4t3IjwLcpKpkXngf5B0q7v/haTlkj5vZn8p6TZJP3T3iyT9sPYzgAQplSuxdg7M54zwTrC6I3B3PyzpcO37t81sn6ReSVdLWlF72KOSfizpyy2pEkBTSuWKBrfvjTXypl2SfE31wM1skaSlkl6UtKAW7nL3w2Y2f4bnrJe0XpL6+vpiFQtgdqVyRV95ejTWlq+MuNOj4QA3s/dJ+o6km939t2bW0PPcfZOkTZLU398fc18zAFOFnB544/I+wjtFGgpwMyuoGt5b3H1b7fIRM1tYG30vlHS0VUUCmN6G0qi2vHAw1ipKScqZ9Kll7NmdNo3MQjFJD0va5+4PTvrVdkk3Sbqv9vW7LakQwLRCHG2WN+mB62mZpFUjI/DLJf29pFEzG6ldu0PV4H7CzD4j6aCkNS2pEMBpQhwoLFVH3a/c+8lAVaETGpmF8lNJMzW8Pxq2HACzmTho4cTJ+LeTPrWMSQVpx0pMIAUm7xwYV95Ma5ddQL87AwhwIMGqe5js0dj4yVivM29OQRuvuphed8YQ4EBCvbsBVbzw5kT47CLAgYQa2rE/1gZUxUJO9157KaPuDCPAgQQqlSuRF+YwNbB7EOBAgqzb/Lyee+XNyM9//3vz2nPnlQErQpIR4ECHhVoKf9H8udr5pRVhikIqEOBAh4RakFPIm4auu4yW
SRciwIEOCHXE2dyz8rr7mksI7y5FgANtFKpdYiZ9jRuVXY8AB9ogVLtEkgo509AaWiYgwIGWK5UrGnhyt8YD7F/SUyxocBUrKlFFgAMtNvDkiGIuptTlF56jLZ/9cJiCkBmNHGoMIKJ1m5+PHd43Lu8jvDEtRuBAC5TKFd2ydSTWSTmcTYl6CHAgsLgn5bD5FBpFCwUIqFSuxArvyy88h/BGwxiBAwGUyhXdvHUk1mtwoxLNIsCBCEItyJGk9+RM9zOvGxEQ4EATQi7IYU434iLAgQaF2r/kj/KmX979iUBVoZtxExNo0J3P7I0d3hfNn0t4IxhG4EAD1m1+PlbbhOPN0AoEOFDHsrt36sjbxyM/f96cgsr/fEXAioAqAhyYxobSqB5/8VWd8HgbUBXypo1XXRyoKuB0BDgwSalc0VeeHtXvj8frdUtSb09RAysX0zZByxDgQE2pXNHAU7s1fiLeqJul8GiXurNQzOwRMztqZi9NujZoZhUzG6n9j9vqSLWJlZRxwvu978npoRuWEN5om0ZG4N+U9A1Jj025/jV3vz94RUCbxd18Sqquptx/18cDVQQ0pm6Au/uzZraoDbUAbRUiuCXJJN2/5rL4BQFNitMD/4KZ/YOkYUm3uvtb0z3IzNZLWi9JfX19Mf4cEMaG0qi2vHAw1l7dE+YUcrqH+d3oEPMGpknVRuDfc/cP1n5eIOkNSS7pq5IWuvun671Of3+/Dw8PxyoYiKp6NmX8480mcLMS7WJmu9y9f+r1SCNwdz8y6YU3S/pejNqAlgo5NVBiEyokR6QAN7OF7n649uM1kl6a7fFAp4TqczPaRhLVDXAze1zSCknnmtlrkjZKWmFmS1RtoRyQ9LnWlQhEs27z83rulTdjvw7hjaRqZBbK2mkuP9yCWoBgQoR3IScNreFgYSQXKzGROXHDmyXwSAsCHJlQKlc0uH2vjo3FOymHdgnShABHqoUKbokT4ZE+BDhSqzqve7fGT8ZbkjNvTkEbr2JaINKHAEcqlcoV3bJ1JNZqyp5iQSMbOWgB6UWAIzVCzemWpGIhr8FVHLSAdCPAkQohw5uWCbKCAEcqhApvZpkgSwhwJFKpXNHQjv2qHBsL8nrsX4IsIsCROKVyRbc+uVsnYs4ukapTA7d89sMBqgKShwBHIkyMuA8dGwuyTzetEnQDAhwdVypXdPu2UY2Nx9/udcHZZ+nFr3wsQFVA8tU91BhotaEd+4OE943L+whvdBVG4OiYkDcqH7qBXQPRfQhwdESpXNHAU7s1fiJex9skrVveR3ijKxHg6Ig7tu2JHd4syEG3I8DRdsvu3ql3YpwsnDPpwetpmQAEONoi1LavxUJe9157CeENiABHC4VeTclJOcDpCHC0RMi53cwwAabHPHC0RIi53fmcEd7ALBiBoyXitk1olwD1EeAIIu5J8BPe/9689tx5ZYCKgOwjwBFLiKPNJrABFdAcAhyRlcoV3bx1JMhr9fYUCW+gSQQ4IgnVMpGqc7sHVi4O8lpAN6k7C8XMHjGzo2b20qRr55jZTjN7ufZ1XmvLRFKUyhV94Pbvxw7vnmJBpurIm4U5QDSNjMC/Kekbkh6bdO02ST909/vM7Lbaz18OXx6SpFSu6EtPjCjuQTn0uoEw6ga4uz9rZoumXL5a0ora949K+rEI8EwLEd5MDQTCitoDX+DuhyXJ3Q+b2fyZHmhm6yWtl6S+vr6Ifw6dsqE0qi0vHIw8y2ROIad7rr2U0AZaoOU3Md19k6RNktTf3x9ithnaIMQMEw4UBlor6lL6I2a2UJJqX4+GKwmdFje8c1btcxPeQGtFHYFvl3STpPtqX78brCJ0RKhT4dm7BGifugFuZo+resPyXDN7TdJGVYP7CTP7jKSDkta0ski0VqidAwlvoL0amYWydoZffTRwLeiQEDsH3si5lEDbsRKzi4U4cCFvprXLLmBeN9ABBHgXmdznLuRNx2McKky7BOg8ArxLTO1zxwlv2iVAMhDgXWJw+97Yfe6cSZ9axjJ4ICkI8C6woTQa+zR4WiZA8nAmZsaVyhV964WDkZ/fUywQ3kBCMQLPqFK5osHte2ONvAluINkI8AzaUBqNNeqWuFEJpAEBnhEh5nRL7B4IpAkBngGlckUDT+3WeMSpgYWcNLSGdgmQNgR4Btz5zN5I4X3R/Lna+aUV4QsC0BYEeIrFOWyhkBPhDaQcAZ5Sy+7eqSNvH4/03JxVWyYA0o0AT5lSuaJbnxhR1JXwnEsJZAcBngIhZphwvBmQPQR4woU4bOHG5exfAmQRAZ5gpXJFt2wdiXzE2YH7Phm0HgDJQoAnUKlc0Z3P7NVb70RfBt/bUwxYEYAkIsATJsQy+GIhr4GViwNVBCCpCPAEiRreF82fq3eOn9ShY2M6n1kmQNcgwBMianhzgxLoXgR4h5XKFQ08OaLxk809j2mBAAjwDomzX3dPsUB4AyDAOyHOjcpiIa/BVRcHrghAGhHgbRbliLM5hZzGxk9ygxLAaQjwNhvasb/hx3KDEsBsCPA2WLf5eT33yptNPaeQE+ENYFaxAtzMDkh6W9IJSX9w9/4QRWVF1J0Dc2K7VwD1hRiB/427vxHgdTKjugHVHo01OzdQUt5MD1x/GX1uAHXRQgks7gyTe6+9hPAG0JC4Ae6SfmBmLunf3X3T1AeY2XpJ6yWpr68v5p9Lpqj7dT90wxIN7djPEngAkcQN8Mvd/ZCZzZe008x+6e7PTn5ALdQ3SVJ/f3/UnVETJ8QhC6uX9hLYACLLxXmyux+qfT0q6WlJHwpRVNJNHLIQJ7zZ7hVAXJED3MzmmtnZE99LukLSS6EKS7J/emp3rBNy2O4VQAhxWigLJD1tZhOv8213/+8gVSXYus3P63iEE4XNJHcOFQYQTuQAd/dfS7osYC2JtaE0qsdffFUnvPngzkl68IYlBDaA4JhGWEe8aYE53XvtpYQ3gJYgwOvYEiG8580paONVFxPcAFqKAJ/BxMHCzTZNOAkeQLsQ4NMolSsaeGq3xpu8WdlTLLSoIgA4U6x54Fk1tGN/0+FdyBkHLQBoK0bg0zjUxAIdk1gGD6AjCPBpnN9TrLvKspA3DV3HroEAOocWyjQGVi5WIW8z/j5vhDeAziPAp7F6aa+GrrtMcwpn/t9TLOTZrxtAIhDgM1i9tFe/+OrH9dANS9TbU5Spugye/boBJEXX9cAntoFtdA9utnwFkFRdEeCT9+426dTinMqxMd2+bVSSCGkAqZPpAJ9YTfnWO+Onrk2d3T02fkJDO/YT4ABSJ7MBvqE0qi0vHGxoKXwz874BICkyeROzVK40HN5Sdd43AKRNpkbgUc6p5HQcAGmVmQCfOKeykaPOJm5kcjoOgDTLTIAP7djfUHj3FAsaXMVe3QDSLzMBXu9GpElat7xPd62+pD0FAUCLZSbAZ9uAilYJgCxKVYDPtopyYOXiM3rgxUKepe8AMivxAd7oKsqJkG5mmTwApFmiA3zqzJJ6qyjZtwRAN0n0Qp5GZpawihJAt0p0gDcSzqyiBNCtEh3g9cKZVZQAulmiA3x
g5WIVC/nTrk0cdMbhCgC6XaybmGZ2paSvS8pL+g93vy9IVTXMLAGAmUUOcDPLS/pXSR+T9Jqkn5nZdnf/RajiJGaWAMBM4rRQPiTpV+7+a3c/Luk/JV0dpiwAQD1xArxX0quTfn6tdu00ZrbezIbNbPj111+P8ecAAJPFCXCb5toZZyi4+yZ373f3/vPOOy/GnwMATBYnwF+TdMGkn/9U0qF45QAAGhUnwH8m6SIz+3MzO0vS30naHqYsAEA95t7oyZHTPNnsE5IeUnUa4SPufnedx78u6TezPORcSW9ELii5svq+pOy+t6y+L4n3lkZ/5u5n9KBjBXhoZjbs7v2driO0rL4vKbvvLavvS+K9ZUmiV2ICAGZGgANASiUtwDd1uoAWyer7krL73rL6viTeW2YkqgcOAGhc0kbgAIAGEeAAkFKJCHAzu9LM9pvZr8zstk7XE5KZHTCzUTMbMbPhTtcTh5k9YmZHzeylSdfOMbOdZvZy7eu8TtYYxQzva9DMKrXPbaS25iFVzOwCM/uRme0zs71m9sXa9Sx8ZjO9t9R/bs3oeA+8ti3t/2rStrSS1obelrZTzOyApH53T/3iAjP7a0m/k/SYu3+wdu1fJL3p7vfV/uU7z92/3Mk6mzXD+xqU9Dt3v7+TtcVhZgslLXT3n5vZ2ZJ2SVot6R+V/s9spvd2vVL+uTUjCSNwtqVNCXd/VtKbUy5fLenR2vePqvoPUarM8L5Sz90Pu/vPa9+/LWmfqjuGZuEzm+m9dZUkBHhD29KmmEv6gZntMrP1nS6mBRa4+2Gp+g+VpPkdriekL5jZnlqLJXVthsnMbJGkpZJeVMY+synvTcrQ51ZPEgK8oW1pU+xyd/8rSR+X9Pnaf64j+f5N0oWSlkg6LOmBjlYTg5m9T9J3JN3s7r/tdD0hTfPeMvO5NSIJAZ7pbWnd/VDt61FJT6vaMsqSI7V+5ERf8miH6wnC3Y+4+wl3Pylps1L6uZlZQdWA2+Lu22qXM/GZTffesvK5NSoJAZ7ZbWnNbG7tBovMbK6kKyS9NPuzUme7pJtq398k6bsdrCWYiYCruUYp/NzMzCQ9LGmfuz846Vep/8xmem9Z+Nya0fFZKFLz29KmhZl9QNVRt1Q9QPrbaX5vZva4pBWqbtl5RNJGSSVJT0jqk3RQ0hp3T9UNwRne1wpV/zPcJR2Q9LmJvnFamNlHJP1E0qikk7XLd6jaK077ZzbTe1urlH9uzUhEgAMAmpeEFgoAIAICHABSigAHgJQiwAEgpQhwAEgpAhwAUooAB4CU+n8gvFxCcLFFqQAAAABJRU5ErkJggg==", 95 | "text/plain": [ 96 | "
" 97 | ] 98 | }, 99 | "metadata": { 100 | "needs_background": "light" 101 | }, 102 | "output_type": "display_data" 103 | } 104 | ], 105 | "source": [ 106 | "fpred = mod.predict(xx)\n", 107 | "plt.scatter(ftest, fpred.mean(axis=0)) # posterior mean prediction vs noisless test outputs\n", 108 | "plt.show()" 109 | ] 110 | }, 111 | { 112 | "cell_type": "markdown", 113 | "metadata": {}, 114 | "source": [ 115 | "To get full prediction uncertainty, use the `nugget=True` option. For instance, below we predict at just one new input setting. The distribution of predictions represents both uncertainty in the BMARS mean as well as predictive variance (e.g., measurement error)." 116 | ] 117 | }, 118 | { 119 | "cell_type": "code", 120 | "execution_count": 5, 121 | "metadata": {}, 122 | "outputs": [ 123 | { 124 | "data": { 125 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD4CAYAAAAXUaZHAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAANWklEQVR4nO3dYazd9V3H8fdHOtExzSAtBNrGi6biYHFgbpCJMVWMIJAVH2BK1DSRpD5gkZklrrgH0wdNuqhTE91MHVjUCTYbCFnnBHEL8YFjl0mwpdRVqNC10ruhDjVhK/v64P6rx3Ju77nn3HNO76/vV3Jzzvmf/7nn+8u5fXP433POTVUhSWrLt017AEnSyjPuktQg4y5JDTLuktQg4y5JDVoz7QEA1q5dWzMzM9MeQ+eaQ4cWTq+4YrpzSEN6+umnv1pV6/pdd1bEfWZmhrm5uWmPoXPN5s0Lp5///DSnkIaW5F8Wu87DMpLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUoLPiHarS2Wxmx76p3O+RXbdM5X7VBp+5S1KDjLskNcjDMloVxnFo5MEXvgbA1ikddpHGyWfuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktSgJeOeZGOSzyU5mORAkru77RcleTzJl7vTC3tuc0+Sw0kOJblxnAuQJL3ZIM/cTwLvr6p3ANcBdyW5EtgBPFFVm4Anust0120FrgJuAj6a5LxxDC9J6m/JuFfV8ar6Unf+NeAgsB7YAtzf7XY/cFt3fgvwYFW9XlUvAoeBa1d4bknSGSzrmHuSGeAa4AvAJVV1HBb+AwBc3O22Hni552ZHu22nf6/tSeaSzM3Pzw8xuiRpMQPHPcnbgE8B76uqr59p1z7b6k0bqnZX1WxVza5bt27QMSRJAxgo7knewkLYP1FVD3WbX0lyaXf9pcCJbvtRYGPPzTcAx1ZmXEnSIAZ5tUyAe4GDVfWRnqseBbZ157cBj/Rs35rk/CSXA5uAp1ZuZEnSUgb5Yx3XA78A/GOSZ7ptvwbsAvYmuRN4CbgdoKoOJNkLPMfCK23uqqo3VnpwSdLilox7Vf0d/Y+jA9ywyG12AjtHmEuSNALfoSpJDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktSgNdMeQKvLzI590x5B0gB85i5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDTLuktQg4y5JDVoy7knuS3Iiyf6ebb+e5CtJnum+bu657p4kh5McSnLjuAaXJC1ukGfue4Cb+mz/naq6uvv6DECSK4GtwFXdbT6a5LyVGlaSNJgl415VTwKvDvj9tgAPVtXrVfUicBi4doT5JElDGOWY+3uTPNsdtrmw27YeeLlnn6PdNknSBA0b948B3wdcDRwHfrvbnj77Vr9vkGR7krkkc/Pz80OOIUnqZ6i4V9UrVfVGVX0L+CP+79DLUWBjz64bgGOLfI/dVTVbVbPr1q0bZgxJ0iKGinuSS3su/gxw6pU0jwJbk5yf5HJgE/DUaCNKkpZrzVI7JHkA2AysTXIU+BCwOcnVLBxyOQL8EkBVHUiyF3gOOAncVVVvjGVySdKilox7Vd3RZ/O9Z9h/J7BzlKEkSaPxHaqS1CDjLkkNMu6S1CDjLkkNMu6S1CDjLkkNMu6S1CDjLkkNMu6S1KAl36EqaTpmduyb2n0f2XXL1O5bK8Nn7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUIOMuSQ0y7pLUoCXjnuS+JCeS7O/ZdlGSx5N8uTu9sOe6e5IcTnIoyY3jGlyStLg1A+yzB/h94E96tu0AnqiqXUl2dJc/kORKYCtwFXAZ8DdJvr+q3ljZsc9tMzv2TXsESWe5JZ+5V9WTwKunbd4C3N+dvx+4rWf7g1X1elW9CBwGrl2ZUSVJgxr2mPslVXUcoDu9uNu+Hni5Z7+j3bY3SbI9yVySufn5+SHHkCT1s9K/UE2fbdVvx6raXVWzVTW7bt26FR5Dks5tw8b9lSSXAnSnJ7rtR4GNPfttAI4NP54kaRjDxv1RYFt3fhvwSM/2rUnOT3I5sAl4arQRJUnLteSrZZI8AGwG1iY5CnwI2AXsTXIn8BJwO0BVHUiyF3gOOAnc5StlJGnylox7Vd2xyFU3LLL/TmDnKENJkkbjO1QlqUHGXZIaZNwlqUHGXZIaZNwlqUHGXZIaZNwlqUHGXZIaZNwlqUHGXZIaZNwlqUHGXZIaZNwlqUHGXZIaZNwlqUHGXZIaZNwlqUHGXZIaZNwlqUFL/g1VSeeemR37pnK/R3bdMpX7bZHP3CWpQcZdkhpk3CWpQcZdkhpk3CWpQcZdkhpk3CWpQcZdkhpk3CWpQcZdkhpk3CWpQcZdkhpk3CWpQcZdkhpk3CWpQcZdkhpk3CWpQcZdkhpk3CWpQSP9DdU
kR4DXgDeAk1U1m+Qi4C+AGeAI8LNV9W+jjSlJWo6VeOb+41V1dVXNdpd3AE9U1Sbgie6yJGmCxnFYZgtwf3f+fuC2MdyHJOkMRo17AY8leTrJ9m7bJVV1HKA7vbjfDZNsTzKXZG5+fn7EMSRJvUY65g5cX1XHklwMPJ7k+UFvWFW7gd0As7OzNeIckqQeIz1zr6pj3ekJ4GHgWuCVJJcCdKcnRh1SkrQ8Q8c9yQVJvuvUeeCngP3Ao8C2brdtwCOjDilJWp5RDstcAjyc5NT3+fOq+mySLwJ7k9wJvATcPvqYkqTlGDruVfUC8K4+278G3DDKUJKk0Yz6C9Vz2syOfdMeQZL68uMHJKlBxl2SGmTcJalBxl2SGmTcJalBxl2SGmTcJalBxl2SGmTcJalBxl2SGmTcJalBxl2SGmTcJalBfiqkpLPGtD5p9ciuW6Zyv+PkM3dJapBxl6QGGXdJapBxl6QGGXdJapBxl6QGGXdJapBxl6QGGXdJapBxl6QGGXdJapBxl6QGGXdJapBxl6QGGXdJapBxl6QGGXdJapBxl6QGGXdJapBxl6QG+QeyJZ3zpvWHuWF8f5y7ibhP84GRpLORh2UkqUHGXZIaNLa4J7kpyaEkh5PsGNf9SJLebCxxT3Ie8AfATwNXAnckuXIc9yVJerNxPXO/FjhcVS9U1TeAB4EtY7ovSdJpxvVqmfXAyz2XjwI/3LtDku3A9u7ifyY5NKZZxm0t8NVpDzElq3rt7z515sO3Lvemq3rdIzhX1w1jXHs+PNLNv2exK8YV9/TZVv/vQtVuYPeY7n9iksxV1ey055iGc3XtrvvcsxrXPq7DMkeBjT2XNwDHxnRfkqTTjCvuXwQ2Jbk8ybcDW4FHx3RfkqTTjOWwTFWdTPJe4K+B84D7qurAOO7rLLDqDy2N4Fxdu+s+96y6taeqlt5LkrSq+A5VSWqQcZekBhn3RSS5L8mJJPt7tv1mkueTPJvk4SRvX+S2q/qjF4Zde5KNST6X5GCSA0nunujgIxrlMe/2PS/JPyT59EQGXiEj/qy/Pcknu30PJnl3v/3OViOu/Ve6n/P9SR5I8h0TG3wAxn1xe4CbTtv2OPDOqvpB4J+Ae06/USMfvbCHIdYOnATeX1XvAK4D7lpla9/DcOs+5W7g4HhGG6s9DL/u3wM+W1U/ALyL1bf+PQz373w98MvAbFW9k4UXjmwd76jLY9wXUVVPAq+etu2xqjrZXfx7Fl6/f7pV/9ELw669qo5X1Ze686+x8A99/ZjHXTEjPOYk2QDcAnx8rEOOwbDrTvLdwI8B93a3+UZV/ft4p11ZozzmLLza8DuTrAHeyln2Xh7jPrxfBP6qz/Z+H72wagI3oMXW/r+SzADXAF+YxEATcqZ1/y7wq8C3JjbN5Cy27u8F5oE/7g5HfTzJBZMdbez6rr2qvgL8FvAScBz4j6p6bMKznZFxH0KSD7JwCOIT/a7us62Z15susfZT+7wN+BTwvqr6+qRmG6czrTvJrcCJqnp64oON2RKP9xrgh4CPVdU1wH8Bq+53TItZ4jG/kIX/I78cuAy4IMnPT3bCMzPuy5RkG3Ar8HPV/00CzX70wgBrJ8lbWAj7J6rqoUnONy4DrPt64D1JjrBwGO4nkvzZBEcciwF/1o9W1an/O/skC7Ff9QZY+08CL1bVfFV9E3gI+JFJzrgU474MSW4CPgC8p6r+e5HdmvzohUHWniQsHH89WFUfmeR84zLIuqvqnqraUFUzLDzef1tVZ9WzuOUacN3/Cryc5Ipu0w3AcxMacWwG/Hf+EnBdkrd2P/c3cLb9Mrmq/OrzBTzAwrG0b7LwDOVO4DALx9Of6b7+sNv3MuAzPbe9mYXfsv8z8MFpr2VSawd+lIVDUM/27HfztNczice853tsBj497bVMat3A1cBc95j/JXDhtNczwbX/BvA8sB/4U+D8aa+n98uPH5CkBnlYRpIaZNwlqUHGXZIaZNwlqUHGXZIaZNwlqUHGXZIa9D/rgAbI+/yWxwAAAABJRU5ErkJggg==", 126 | "text/plain": [ 127 | "
" 128 | ] 129 | }, 130 | "metadata": { 131 | "needs_background": "light" 132 | }, 133 | "output_type": "display_data" 134 | } 135 | ], 136 | "source": [ 137 | "ypred = mod.predict(xx, nugget=True)\n", 138 | "plt.hist(ypred[:, 40])\n", 139 | "plt.axvline(ytest[40], color='red')\n", 140 | "plt.show()" 141 | ] 142 | }, 143 | { 144 | "cell_type": "markdown", 145 | "metadata": {}, 146 | "source": [ 147 | "We can calculate the empirical coverage of the uncertainty from our test set predictions by forming a probability interval for each prediction (in this case, 95%), and counting how many intervals capture the corresponding true values." 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": 6, 153 | "metadata": {}, 154 | "outputs": [ 155 | { 156 | "data": { 157 | "text/plain": [ 158 | "0.947" 159 | ] 160 | }, 161 | "execution_count": 6, 162 | "metadata": {}, 163 | "output_type": "execute_result" 164 | } 165 | ], 166 | "source": [ 167 | "quantiles = np.quantile(ypred, [0.025, .975], axis=0)\n", 168 | "np.mean((quantiles[0] < ytest) * (quantiles[1] > ytest))" 169 | ] 170 | } 171 | ], 172 | "metadata": { 173 | "interpreter": { 174 | "hash": "9fd4f69baad59115719fc9456dc42058eb22be0176fed4417f58753af44012d7" 175 | }, 176 | "kernelspec": { 177 | "display_name": "Python 3.8.8 64-bit ('pyBASS': conda)", 178 | "language": "python", 179 | "name": "python3" 180 | }, 181 | "language_info": { 182 | "codemirror_mode": { 183 | "name": "ipython", 184 | "version": 3 185 | }, 186 | "file_extension": ".py", 187 | "mimetype": "text/x-python", 188 | "name": "python", 189 | "nbconvert_exporter": "python", 190 | "pygments_lexer": "ipython3", 191 | "version": "3.8.13" 192 | }, 193 | "orig_nbformat": 4 194 | }, 195 | "nbformat": 4, 196 | "nbformat_minor": 2 197 | } 198 | -------------------------------------------------------------------------------- /examples/ex1.md: -------------------------------------------------------------------------------- 1 | In this example, we generate data from the Friedman function at fit a model with `pyBASS`. 2 | 3 | 4 | ```python 5 | import pyBASS as pb 6 | import numpy as np 7 | import matplotlib.pyplot as plt 8 | ``` 9 | 10 | 11 | ```python 12 | 13 | # Friedman function (Friedman, 1991, Multivariate Adaptive Regression Splines) 14 | def f(x): 15 | return (10. * np.sin(np.pi * x[:, 0] * x[:, 1]) + 20. * (x[:, 2] - .5) ** 2 16 | + 10 * x[:, 3] + 5. * x[:, 4]) 17 | 18 | 19 | n = 500 # sample size 20 | p = 10 # number of predictors (only 5 are used) 21 | x = np.random.rand(n, p) # training inputs 22 | xx = np.random.rand(1000, p) # test inputs 23 | y = f(x) + np.random.normal(size=n) * 0.1 # noisy training outputs 24 | ftest = f(xx) 25 | ytest = ftest + np.random.normal(size=1000) * 0.1 # noisy test outputs 26 | 27 | ``` 28 | 29 | Fit the BMARS model with and see the results with 30 | 31 | 32 | ```python 33 | mod = pb.bass(x, y) 34 | mod.plot() 35 | ``` 36 | 37 | BASS MCMC Complete. Time: 3.236959 seconds. 38 | 39 | 40 | 41 | 42 | ![png](ex1_files/ex1_4_1.png) 43 | 44 | 45 | 46 | The following gives the posterior predictions of the BMARS mean model. 47 | 48 | 49 | ```python 50 | fpred = mod.predict(xx) 51 | plt.scatter(ftest, fpred.mean(axis=0)) # posterior mean prediction vs noisless test outputs 52 | plt.show() 53 | ``` 54 | 55 | 56 | 57 | ![png](ex1_files/ex1_6_0.png) 58 | 59 | 60 | 61 | To get full prediction uncertainty, use the `nugget=True` option. For instance, below we predict at just one new input setting. 
The distribution of predictions represents both uncertainty in the BMARS mean as well as predictive variance (e.g., measurement error). 62 | 63 | 64 | ```python 65 | ypred = mod.predict(xx, nugget=True) 66 | plt.hist(ypred[:, 40]) 67 | plt.axvline(ytest[40], color='red') 68 | plt.show() 69 | ``` 70 | 71 | 72 | 73 | ![png](ex1_files/ex1_8_0.png) 74 | 75 | 76 | 77 | We can calculate the empirical coverage of the uncertainty from our test set predictions by forming a probability interval for each prediction (in this case, 95%), and counting how many intervals capture the corresponding true values. 78 | 79 | 80 | ```python 81 | quantiles = np.quantile(ypred, [0.025, .975], axis=0) 82 | np.mean((quantiles[0] < ytest) * (quantiles[1] > ytest)) 83 | ``` 84 | 85 | 86 | 87 | 88 | 0.947 89 | 90 | 91 | -------------------------------------------------------------------------------- /examples/ex1_files/ex1_4_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/pyBASS/d8ecc174288239951fde394ac83ffdc6f701c8b2/examples/ex1_files/ex1_4_1.png -------------------------------------------------------------------------------- /examples/ex1_files/ex1_6_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/pyBASS/d8ecc174288239951fde394ac83ffdc6f701c8b2/examples/ex1_files/ex1_6_0.png -------------------------------------------------------------------------------- /examples/ex1_files/ex1_8_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/pyBASS/d8ecc174288239951fde394ac83ffdc6f701c8b2/examples/ex1_files/ex1_8_0.png -------------------------------------------------------------------------------- /examples/ex2.md: -------------------------------------------------------------------------------- 1 | In this example, we generate data from the Friedman function but treat one of the variables (the first) as a functional variable. 2 | 3 | 4 | ```python 5 | import pyBASS as pb 6 | import numpy as np 7 | import matplotlib.pyplot as plt 8 | ``` 9 | 10 | This generates 500 curves of length 50. 11 | 12 | 13 | ```python 14 | # Friedman function where first variable is the functional variable 15 | def f2(x): 16 | out = 10. * np.sin(np.pi * tt * x[1]) + 20. * (x[2] - .5) ** 2 + 10 * x[3] + 5. * x[4] 17 | return out 18 | 19 | tt = np.linspace(0, 1, 50) # functional variable grid 20 | n = 500 # sample size 21 | p = 9 # number of predictors other (only 4 are used) 22 | x = np.random.rand(n, p) # training inputs 23 | xx = np.random.rand(1000, p) 24 | e = np.random.normal(size=[n, len(tt)]) * .1 # noise 25 | y = np.apply_along_axis(f2, 1, x) + e # training response 26 | ftest = np.apply_along_axis(f2, 1, xx) 27 | ytest = ftest + np.random.normal(size=[1000, len(tt)]) * .1 28 | 29 | plt.plot(y.T) 30 | plt.show() 31 | ``` 32 | 33 | 34 | 35 | ![png](ex2_files/ex2_3_0.png) 36 | 37 | 38 | 39 | Now take the principal component decomposition of y to see that most of the variation is captured with 3 principal components. 40 | 41 | 42 | ```python 43 | pca = pb.BassPCAsetup(y) 44 | pca.plot() 45 | ``` 46 | 47 | 48 | 49 | ![png](ex2_files/ex2_5_0.png) 50 | 51 | 52 | 53 | Now fit a BASS model in PCA space. 54 | 55 | 56 | ```python 57 | mod2 = pb.bassPCA(x, y, npc=3, ncores=3) 58 | mod2.plot() 59 | ``` 60 | 61 | Starting bassPCA with 3 components, using 3 cores. 62 | BASS MCMC Complete. Time: 3.399235 seconds. 
63 | BASS MCMC Complete. Time: 3.402881 seconds. 64 | BASS MCMC Complete. Time: 3.504653 seconds. 65 | 66 | 67 | 68 | 69 | ![png](ex2_files/ex2_7_1.png) 70 | 71 | 72 | 73 | Here are posterior mean predictions of our test data: 74 | 75 | 76 | ```python 77 | fpred = mod2.predict(xx) 78 | plt.plot(fpred.mean(0).T) 79 | plt.show() 80 | ``` 81 | 82 | 83 | 84 | ![png](ex2_files/ex2_9_0.png) 85 | 86 | 87 | 88 | Test data posterior mean vs noiseless observed: 89 | 90 | 91 | ```python 92 | plt.plot(fpred.mean(0).T, ftest.T) 93 | plt.show() 94 | ``` 95 | 96 | 97 | 98 | ![png](ex2_files/ex2_11_0.png) 99 | 100 | 101 | 102 | Functional residuals: 103 | 104 | 105 | ```python 106 | plt.plot(fpred.mean(0).T - ftest.T) 107 | plt.show() 108 | ``` 109 | 110 | 111 | 112 | ![png](ex2_files/ex2_13_0.png) 113 | 114 | 115 | 116 | Prediction uncertainty for one of the test samples: 117 | 118 | 119 | ```python 120 | ypred = mod2.predict(xx, nugget=True, trunc_error=False) 121 | plt.plot(ypred[:,70,:].T, color='grey') 122 | plt.plot(ytest[70]) 123 | plt.show() 124 | ``` 125 | 126 | 127 | 128 | ![png](ex2_files/ex2_15_0.png) 129 | 130 | 131 | -------------------------------------------------------------------------------- /examples/ex2_files/ex2_11_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/pyBASS/d8ecc174288239951fde394ac83ffdc6f701c8b2/examples/ex2_files/ex2_11_0.png -------------------------------------------------------------------------------- /examples/ex2_files/ex2_13_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/pyBASS/d8ecc174288239951fde394ac83ffdc6f701c8b2/examples/ex2_files/ex2_13_0.png -------------------------------------------------------------------------------- /examples/ex2_files/ex2_14_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/pyBASS/d8ecc174288239951fde394ac83ffdc6f701c8b2/examples/ex2_files/ex2_14_0.png -------------------------------------------------------------------------------- /examples/ex2_files/ex2_15_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/pyBASS/d8ecc174288239951fde394ac83ffdc6f701c8b2/examples/ex2_files/ex2_15_0.png -------------------------------------------------------------------------------- /examples/ex2_files/ex2_3_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/pyBASS/d8ecc174288239951fde394ac83ffdc6f701c8b2/examples/ex2_files/ex2_3_0.png -------------------------------------------------------------------------------- /examples/ex2_files/ex2_5_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/pyBASS/d8ecc174288239951fde394ac83ffdc6f701c8b2/examples/ex2_files/ex2_5_0.png -------------------------------------------------------------------------------- /examples/ex2_files/ex2_7_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/pyBASS/d8ecc174288239951fde394ac83ffdc6f701c8b2/examples/ex2_files/ex2_7_1.png -------------------------------------------------------------------------------- /examples/ex2_files/ex2_9_0.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/lanl/pyBASS/d8ecc174288239951fde394ac83ffdc6f701c8b2/examples/ex2_files/ex2_9_0.png -------------------------------------------------------------------------------- /pyBASS/BASS.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Copyright 2020. Triad National Security, LLC. All rights reserved. This 5 | program was produced under U.S. Government contract 89233218CNA000001 for 6 | Los Alamos National Laboratory (LANL), which is operated by Triad National 7 | Security, LLC for the U.S. Department of Energy/National Nuclear Security 8 | Administration. All rights in the program are reserved by Triad National 9 | 10 | Security, LLC, and the U.S. Department of Energy/National Nuclear Security 11 | Administration. The Government is granted for itself and others acting on 12 | its behalf a nonexclusive, paid-up, irrevocable worldwide license in this 13 | material to reproduce, prepare derivative works, distribute copies to the 14 | public, perform publicly and display publicly,and to permit others to do so. 15 | 16 | LANL software release C19112 17 | Author: Devin Francom 18 | """ 19 | 20 | import numpy as np 21 | import scipy as sp 22 | import matplotlib.pyplot as plt 23 | import pyBASS.utils as uf 24 | from collections import namedtuple 25 | from multiprocessing import Pool 26 | import time 27 | 28 | 29 | class BassPrior: 30 | """Structure to store prior""" 31 | 32 | def __init__( 33 | self, maxInt, maxBasis, npart, g1, g2, s2_lower, h1, h2, a_tau, b_tau, 34 | w1, w2 35 | ): 36 | self.maxInt = maxInt 37 | self.maxBasis = maxBasis 38 | self.npart = npart 39 | self.g1 = g1 40 | self.g2 = g2 41 | self.s2_lower = s2_lower 42 | self.h1 = h1 43 | self.h2 = h2 44 | self.a_tau = a_tau 45 | self.b_tau = b_tau 46 | self.w1 = w1 47 | self.w2 = w2 48 | return 49 | 50 | 51 | class BassData: 52 | """Structure to store data""" 53 | 54 | def __init__(self, xx, y): 55 | self.xx_orig = xx 56 | self.y = y 57 | self.ssy = sum(y * y) 58 | self.n, self.p = xx.shape 59 | self.bounds = np.column_stack([xx.min(0), xx.max(0)]) 60 | self.xx = uf.normalize(self.xx_orig, self.bounds) 61 | return 62 | 63 | 64 | Samples = namedtuple( 65 | "Samples", "s2 lam tau nbasis nbasis_models n_int signs vs knots beta" 66 | ) 67 | Sample = namedtuple( 68 | "Sample", "s2 lam tau nbasis nbasis_models n_int signs vs knots beta" 69 | ) 70 | 71 | 72 | class BassState: 73 | """ 74 | The current state of the RJMCMC chain, with methods for getting the log 75 | posterior and for updating the state 76 | """ 77 | 78 | def __init__(self, data, prior): 79 | self.data = data 80 | self.prior = prior 81 | self.s2 = 1.0 82 | self.nbasis = 0 83 | self.tau = 1.0 84 | self.s2_rate = 1.0 85 | self.R = 1 86 | self.lam = 1 87 | self.I_star = np.ones(prior.maxInt) * prior.w1 88 | self.I_vec = self.I_star / np.sum(self.I_star) 89 | self.z_star = np.ones(data.p) * prior.w2 90 | self.z_vec = self.z_star / np.sum(self.z_star) 91 | self.basis = np.ones([data.n, 1]) 92 | self.nc = 1 93 | self.knots = np.zeros([prior.maxBasis, prior.maxInt]) 94 | self.signs = np.zeros( 95 | [prior.maxBasis, prior.maxInt], dtype=int 96 | ) # could do "bool_", but would have to transform 0 to -1 97 | self.vs = np.zeros([prior.maxBasis, prior.maxInt], dtype=int) 98 | self.n_int = np.zeros([prior.maxBasis], dtype=int) 99 | self.Xty = np.zeros(prior.maxBasis + 2) 100 | self.Xty[0] = np.sum(data.y) 101 | self.XtX = np.zeros([prior.maxBasis + 2, 
prior.maxBasis + 2]) 102 | self.XtX[0, 0] = data.n 103 | self.R = np.array([[np.sqrt(data.n)]]) # np.linalg.cholesky(self.XtX[0, 0]) 104 | self.R_inv_t = np.array([[1 / np.sqrt(data.n)]]) 105 | self.bhat = np.mean(data.y) 106 | self.qf = pow(np.sqrt(data.n) * np.mean(data.y), 2) 107 | self.count = np.zeros(3) 108 | self.cmod = False # has the state changed since the last write (i.e., has a birth, death, or change been accepted)? 109 | return 110 | 111 | def log_post(self): # needs updating 112 | """get current log posterior""" 113 | lp = ( 114 | -(self.s2_rate + self.prior.g2) / self.s2 115 | - (self.data.n / 2 + 1 + (self.nbasis + 1) / 2 + self.prior.g1) 116 | * np.log(self.s2) 117 | + np.sum(np.log(abs(np.diag(self.R)))) # .5*determinant of XtX 118 | + (self.prior.a_tau + (self.nbasis + 1) / 2 - 1) * np.log(self.tau) 119 | - self.prior.a_tau * self.tau 120 | - (self.nbasis + 1) / 2 * np.log(2 * np.pi) 121 | + (self.prior.h1 + self.nbasis - 1) * np.log(self.lam) 122 | - self.lam * (self.prior.h2 + 1) 123 | ) # curr$nbasis-1 because poisson prior is excluding intercept (for curr$nbasis instead of curr$nbasis+1) 124 | # -lfactorial(curr$nbasis) # added, but maybe cancels with prior 125 | self.lp = lp 126 | return 127 | 128 | def update(self): 129 | """ 130 | Update the current state using a RJMCMC step (and Gibbs steps at 131 | the end of this function) 132 | """ 133 | 134 | move_type = np.random.choice([1, 2, 3]) 135 | 136 | if self.nbasis == 0: 137 | move_type = 1 138 | 139 | if self.nbasis == self.prior.maxBasis: 140 | move_type = np.random.choice(np.array([2, 3])) 141 | 142 | if move_type == 1: 143 | # BIRTH step 144 | 145 | cand = uf.genCandBasis( 146 | self.prior.maxInt, self.I_vec, self.z_vec, self.data.p, 147 | self.data.xx 148 | ) 149 | 150 | # if proposed basis function has too few non-zero entries, 151 | # dont change the state 152 | if ( 153 | cand.basis > 0 154 | ).sum() < self.prior.npart: 155 | return 156 | 157 | ata = np.dot(cand.basis, cand.basis) 158 | Xta = np.dot(self.basis.T, cand.basis) 159 | aty = np.dot(cand.basis, self.data.y) 160 | 161 | self.Xty[self.nc] = aty 162 | self.XtX[0 : self.nc, self.nc] = Xta 163 | self.XtX[self.nc, 0 : (self.nc)] = Xta 164 | self.XtX[self.nc, self.nc] = ata 165 | 166 | qf_cand = uf.getQf( 167 | self.XtX[0: (self.nc + 1), 0: (self.nc + 1)], 168 | self.Xty[0: (self.nc + 1)], 169 | ) 170 | 171 | fullRank = qf_cand is not None 172 | if not fullRank: 173 | return 174 | 175 | alpha = ( 176 | 0.5 / self.s2 * (qf_cand.qf - self.qf) / (1 + self.tau) 177 | + np.log(self.lam) 178 | - np.log(self.nc) 179 | + np.log(1 / 3) 180 | - np.log(1 / 3) 181 | - cand.lbmcmp 182 | + 0.5 * np.log(self.tau) 183 | - 0.5 * np.log(1 + self.tau) 184 | ) 185 | 186 | if np.log(np.random.rand()) < alpha: 187 | self.cmod = True 188 | # note, XtX and Xty are already updated 189 | self.nbasis = self.nbasis + 1 190 | self.nc = self.nbasis + 1 191 | self.qf = qf_cand.qf 192 | self.bhat = qf_cand.bhat 193 | self.R = qf_cand.R 194 | self.R_inv_t = sp.linalg.solve_triangular(self.R, 195 | np.identity(self.nc)) 196 | self.count[0] = self.count[0] + 1 197 | self.n_int[self.nbasis - 1] = cand.n_int 198 | self.knots[self.nbasis - 1, 0: (cand.n_int)] = cand.knots 199 | self.signs[self.nbasis - 1, 0: (cand.n_int)] = cand.signs 200 | self.vs[self.nbasis - 1, 0: (cand.n_int)] = cand.vs 201 | 202 | self.I_star[cand.n_int - 1] = self.I_star[cand.n_int - 1] + 1 203 | self.I_vec = self.I_star / sum(self.I_star) 204 | self.z_star[cand.vs] = self.z_star[cand.vs] + 1 205 | self.z_vec = 
self.z_star / sum(self.z_star) 206 | 207 | self.basis = np.append( 208 | self.basis, cand.basis.reshape(self.data.n, 1), axis=1 209 | ) 210 | 211 | elif move_type == 2: 212 | # DEATH step 213 | 214 | tokill_ind = np.random.choice(self.nbasis) 215 | ind = list(range(self.nc)) 216 | del ind[tokill_ind + 1] 217 | 218 | qf_cand = uf.getQf(self.XtX[np.ix_(ind, ind)], self.Xty[ind]) 219 | 220 | fullRank = qf_cand is not None 221 | if not fullRank: 222 | return 223 | 224 | I_star = self.I_star.copy() 225 | I_star[self.n_int[tokill_ind] - 1] = I_star[self.n_int[tokill_ind] - 1] - 1 226 | I_vec = I_star / sum(I_star) 227 | z_star = self.z_star.copy() 228 | z_star[self.vs[tokill_ind, 0: self.n_int[tokill_ind]]] = ( 229 | z_star[self.vs[tokill_ind, 0: self.n_int[tokill_ind]]] - 1 230 | ) 231 | 232 | z_vec = z_star / sum(z_star) 233 | 234 | lbmcmp = uf.logProbChangeMod( 235 | self.n_int[tokill_ind], 236 | self.vs[tokill_ind, 0: self.n_int[tokill_ind]], 237 | I_vec, 238 | z_vec, 239 | self.data.p, 240 | self.prior.maxInt, 241 | ) 242 | 243 | alpha = ( 244 | 0.5 / self.s2 * (qf_cand.qf - self.qf) / (1 + self.tau) 245 | - np.log(self.lam) 246 | + np.log(self.nbasis) 247 | + np.log(1 / 3) 248 | - np.log(1 / 3) 249 | + lbmcmp 250 | - 0.5 * np.log(self.tau) 251 | + 0.5 * np.log(1 + self.tau) 252 | ) 253 | 254 | if np.log(np.random.rand()) < alpha: 255 | self.cmod = True 256 | self.nbasis = self.nbasis - 1 257 | self.nc = self.nbasis + 1 258 | self.qf = qf_cand.qf 259 | self.bhat = qf_cand.bhat 260 | self.R = qf_cand.R 261 | self.R_inv_t = sp.linalg.solve_triangular(self.R, 262 | np.identity(self.nc)) 263 | self.count[1] = self.count[1] + 1 264 | 265 | self.Xty[0: self.nc] = self.Xty[ind] 266 | self.XtX[0: self.nc, 0: self.nc] = self.XtX[np.ix_(ind, ind)] 267 | 268 | temp = self.n_int[0: (self.nbasis + 1)] 269 | temp = np.delete(temp, tokill_ind) 270 | self.n_int = self.n_int * 0 271 | self.n_int[0: (self.nbasis)] = temp[:] 272 | 273 | temp = self.knots[0: (self.nbasis + 1), :] 274 | temp = np.delete(temp, tokill_ind, 0) 275 | self.knots = self.knots * 0 276 | self.knots[0: (self.nbasis), :] = temp[:] 277 | 278 | temp = self.signs[0: (self.nbasis + 1), :] 279 | temp = np.delete(temp, tokill_ind, 0) 280 | self.signs = self.signs * 0 281 | self.signs[0: (self.nbasis), :] = temp[:] 282 | 283 | temp = self.vs[0: (self.nbasis + 1), :] 284 | temp = np.delete(temp, tokill_ind, 0) 285 | self.vs = self.vs * 0 286 | self.vs[0: (self.nbasis), :] = temp[:] 287 | 288 | self.I_star = I_star[:] 289 | self.I_vec = I_vec[:] 290 | self.z_star = z_star[:] 291 | self.z_vec = z_vec[:] 292 | 293 | self.basis = np.delete(self.basis, tokill_ind + 1, 1) 294 | 295 | else: 296 | # CHANGE step 297 | 298 | tochange_basis = np.random.choice(self.nbasis) 299 | tochange_int = np.random.choice(self.n_int[tochange_basis]) 300 | 301 | cand = uf.genBasisChange( 302 | self.knots[tochange_basis, 0: self.n_int[tochange_basis]], 303 | self.signs[tochange_basis, 0: self.n_int[tochange_basis]], 304 | self.vs[tochange_basis, 0: self.n_int[tochange_basis]], 305 | tochange_int, 306 | self.data.xx, 307 | ) 308 | 309 | # if proposed basis function has too few non-zero entries, 310 | # dont change the state 311 | if ( 312 | cand.basis > 0 313 | ).sum() < self.prior.npart: 314 | return 315 | 316 | ata = np.dot(cand.basis.T, cand.basis) 317 | Xta = np.dot(self.basis.T, cand.basis).reshape(self.nc) 318 | aty = np.dot(cand.basis.T, self.data.y) 319 | 320 | ind = list(range(self.nc)) 321 | XtX_cand = self.XtX[np.ix_(ind, ind)].copy() 322 | 
XtX_cand[tochange_basis + 1, :] = Xta 323 | XtX_cand[:, tochange_basis + 1] = Xta 324 | XtX_cand[tochange_basis + 1, tochange_basis + 1] = ata 325 | 326 | Xty_cand = self.Xty[0: self.nc].copy() 327 | Xty_cand[tochange_basis + 1] = aty 328 | 329 | qf_cand = uf.getQf(XtX_cand, Xty_cand) 330 | 331 | fullRank = qf_cand is not None 332 | if not fullRank: 333 | return 334 | 335 | alpha = 0.5 / self.s2 * (qf_cand.qf - self.qf) / (1 + self.tau) 336 | 337 | if np.log(np.random.rand()) < alpha: 338 | self.cmod = True 339 | self.qf = qf_cand.qf 340 | self.bhat = qf_cand.bhat 341 | self.R = qf_cand.R 342 | self.R_inv_t = sp.linalg.solve_triangular( 343 | self.R, np.identity(self.nc) 344 | ) # check this 345 | self.count[2] = self.count[2] + 1 346 | 347 | self.Xty[0: self.nc] = Xty_cand 348 | self.XtX[0: self.nc, 0: self.nc] = XtX_cand 349 | 350 | self.knots[tochange_basis, 0: self.n_int[tochange_basis]] = cand.knots 351 | self.signs[tochange_basis, 0: self.n_int[tochange_basis]] = cand.signs 352 | 353 | self.basis[:, tochange_basis + 1] = cand.basis.reshape(self.data.n) 354 | 355 | a_s2 = self.prior.g1 + self.data.n / 2 356 | b_s2 = self.prior.g2 + 0.5 * ( 357 | self.data.ssy - np.dot(self.bhat.T, self.Xty[0 : self.nc]) / (1 + self.tau) 358 | ) 359 | if b_s2 < 0: 360 | self.prior.g2 = self.prior.g2 + 1.0e-10 361 | b_s2 = self.prior.g2 + 0.5 * ( 362 | self.data.ssy 363 | - np.dot(self.bhat.T, self.Xty[0 : self.nc]) / (1 + self.tau) 364 | ) 365 | self.s2 = 1 / np.random.gamma(a_s2, 1 / b_s2, size=1) 366 | 367 | self.beta = self.bhat / (1 + self.tau) + np.dot( 368 | self.R_inv_t, np.random.normal(size=self.nc) 369 | ) * np.sqrt(self.s2 / (1 + self.tau)) 370 | 371 | a_lam = self.prior.h1 + self.nbasis 372 | b_lam = self.prior.h2 + 1 373 | self.lam = np.random.gamma(a_lam, 1 / b_lam, size=1) 374 | 375 | temp = np.dot(self.R, self.beta) 376 | qf2 = np.dot(temp, temp) 377 | a_tau = self.prior.a_tau + (self.nbasis + 1) / 2 378 | b_tau = self.prior.b_tau + 0.5 * qf2 / self.s2 379 | self.tau = np.random.gamma(a_tau, 1 / b_tau, size=1) 380 | 381 | 382 | class BassModel: 383 | """ 384 | The model structure, including the current RJMCMC state and previous 385 | saved states; with methods for saving the 386 | state, plotting MCMC traces, and predicting 387 | """ 388 | 389 | def __init__(self, data, prior, nstore): 390 | """Get starting state, build storage structures""" 391 | self.data = data 392 | self.prior = prior 393 | self.state = BassState(self.data, self.prior) 394 | self.nstore = nstore 395 | s2 = np.zeros(nstore) 396 | lam = np.zeros(nstore) 397 | tau = np.zeros(nstore) 398 | nbasis = np.zeros(nstore, dtype=int) 399 | nbasis_models = np.zeros(nstore, dtype=int) 400 | n_int = np.zeros([nstore, self.prior.maxBasis], dtype=int) 401 | signs = np.zeros([nstore, self.prior.maxBasis, self.prior.maxInt], 402 | dtype=int) 403 | vs = np.zeros([nstore, self.prior.maxBasis, self.prior.maxInt], 404 | dtype=int) 405 | knots = np.zeros([nstore, self.prior.maxBasis, self.prior.maxInt]) 406 | beta = np.zeros([nstore, self.prior.maxBasis + 1]) 407 | self.samples = Samples( 408 | s2, lam, tau, nbasis, nbasis_models, n_int, signs, vs, knots, beta 409 | ) 410 | self.k = 0 411 | self.k_mod = -1 412 | self.model_lookup = np.zeros(nstore, dtype=int) 413 | return 414 | 415 | def writeState(self): 416 | """ 417 | Take relevant parts of state and write to storage (only manipulates 418 | storage vectors created in init) 419 | """ 420 | 421 | self.samples.s2[self.k] = self.state.s2 422 | self.samples.lam[self.k] = self.state.lam 423 | 
self.samples.tau[self.k] = self.state.tau 424 | self.samples.beta[self.k, 0: (self.state.nbasis + 1)] = self.state.beta 425 | self.samples.nbasis[self.k] = self.state.nbasis 426 | 427 | if self.state.cmod: # basis part of state was changed 428 | self.k_mod = self.k_mod + 1 429 | self.samples.nbasis_models[self.k_mod] = self.state.nbasis 430 | self.samples.n_int[self.k_mod, 0: self.state.nbasis] = self.state.n_int[ 431 | 0: self.state.nbasis 432 | ] 433 | self.samples.signs[self.k_mod, 0: self.state.nbasis, :] = self.state.signs[ 434 | 0: self.state.nbasis, : 435 | ] 436 | self.samples.vs[self.k_mod, 0: self.state.nbasis, :] = self.state.vs[ 437 | 0: self.state.nbasis, : 438 | ] 439 | self.samples.knots[self.k_mod, 0: self.state.nbasis, :] = self.state.knots[ 440 | 0: self.state.nbasis, : 441 | ] 442 | self.state.cmod = False 443 | 444 | self.model_lookup[self.k] = self.k_mod 445 | self.k = self.k + 1 446 | 447 | def plot(self): 448 | """ 449 | Trace plots and predictions/residuals 450 | 451 | * top left - trace plot of number of basis functions 452 | (excluding burn-in and thinning) 453 | * top right - trace plot of residual variance 454 | * bottom left - training data against predictions 455 | * bottom right - histogram of residuals (posterior mean) with 456 | assumed Gaussian overlaid. 457 | """ 458 | fig = plt.figure() 459 | 460 | ax = fig.add_subplot(2, 2, 1) 461 | plt.plot(self.samples.nbasis) 462 | plt.ylabel("number of basis functions") 463 | plt.xlabel("MCMC iteration (post-burn)") 464 | 465 | ax = fig.add_subplot(2, 2, 2) 466 | plt.plot(self.samples.s2) 467 | plt.ylabel("error variance") 468 | plt.xlabel("MCMC iteration (post-burn)") 469 | 470 | ax = fig.add_subplot(2, 2, 3) 471 | # posterior predictive mean 472 | yhat = self.predict(self.data.xx_orig).mean(axis=0) 473 | plt.scatter(self.data.y, yhat) 474 | uf.abline(1, 0) 475 | plt.xlabel("observed") 476 | plt.ylabel("posterior prediction") 477 | 478 | ax = fig.add_subplot(2, 2, 4) 479 | plt.hist(self.data.y - yhat, color="skyblue", ec="white", density=True) 480 | axes = plt.gca() 481 | x = np.linspace(axes.get_xlim()[0], axes.get_xlim()[1], 100) 482 | plt.plot( 483 | x, sp.stats.norm.pdf(x, scale=np.sqrt(self.samples.s2.mean())), 484 | color="red" 485 | ) 486 | plt.xlabel("residuals") 487 | plt.ylabel("density") 488 | 489 | fig.tight_layout() 490 | 491 | plt.show() 492 | 493 | def makeBasisMatrix(self, model_ind, X): 494 | """Make basis matrix for model""" 495 | nb = self.samples.nbasis_models[model_ind] 496 | 497 | n = len(X) 498 | mat = np.zeros([n, nb + 1]) 499 | mat[:, 0] = 1 500 | for m in range(nb): 501 | ind = list(range(self.samples.n_int[model_ind, m])) 502 | mat[:, m + 1] = uf.makeBasis( 503 | self.samples.signs[model_ind, m, ind], 504 | self.samples.vs[model_ind, m, ind], 505 | self.samples.knots[model_ind, m, ind], 506 | X, 507 | ).reshape(n) 508 | return mat 509 | 510 | def predict(self, X, mcmc_use=None, nugget=False): 511 | """ 512 | BASS prediction using new inputs (after training). 513 | 514 | :param X: matrix (numpy array) of predictors with dimension nxp, where 515 | n is the number of prediction points and 516 | p is the number of inputs (features). p must match the 517 | number of training inputs, and the order of the columns must 518 | also match. 519 | :param mcmc_use: which MCMC samples to use (list of integers of length 520 | m). Defaults to all MCMC samples. 521 | :param nugget: whether to use the error variance when predicting. 522 | If False, predictions are for mean function. 
523 | :return: a matrix (numpy array) of predictions with dimension mxn, 524 | with rows corresponding to MCMC samples and columns 525 | corresponding to prediction points. 526 | """ 527 | if X.ndim == 1: 528 | X = X[None, :] 529 | 530 | Xs = uf.normalize(X, self.data.bounds) 531 | if np.any(mcmc_use is None): 532 | mcmc_use = np.array(range(self.nstore)) 533 | out = np.zeros([len(mcmc_use), len(Xs)]) 534 | models = self.model_lookup[mcmc_use] 535 | umodels = set(models) 536 | k = 0 537 | for j in umodels: 538 | mcmc_use_j = mcmc_use[np.ix_(models == j)] 539 | nn = len(mcmc_use_j) 540 | out[range(k, nn + k), :] = np.dot( 541 | self.samples.beta[mcmc_use_j, 0: (self.samples.nbasis_models[j] + 1)], 542 | self.makeBasisMatrix(j, Xs).T, 543 | ) 544 | k += nn 545 | if nugget: 546 | out += np.random.normal( 547 | scale=np.sqrt(self.samples.s2[mcmc_use]), size=[len(Xs), 548 | len(mcmc_use)] 549 | ).T 550 | return out 551 | 552 | 553 | def bass( 554 | xx, 555 | y, 556 | nmcmc=10000, 557 | nburn=9000, 558 | thin=1, 559 | w1=5, 560 | w2=5, 561 | maxInt=3, 562 | maxBasis=1000, 563 | npart=None, 564 | g1=0, 565 | g2=0, 566 | s2_lower=0, 567 | h1=10, 568 | h2=10, 569 | a_tau=0.5, 570 | b_tau=None, 571 | verbose=True, 572 | ): 573 | """ 574 | **Bayesian Adaptive Spline Surfaces - model fitting** 575 | 576 | This function takes training data, priors, and algorithmic constants and 577 | fits a BASS model. The result is a set of posterior samples of the model. 578 | The resulting object has a predict function to generate posterior 579 | predictive samples. Default settings of priors and algorithmic parameters 580 | should only be changed by users who understand the model. 581 | 582 | :param xx: matrix (numpy array) of predictors of dimension nxp, where n is 583 | the number of training examples and p is the number of inputs 584 | (features). 585 | :param y: response vector (numpy array) of length n. 586 | :param nmcmc: total number of MCMC iterations (integer) 587 | :param nburn: number of MCMC iterations to throw away as burn-in (integer, 588 | less than nmcmc). 589 | :param thin: number of MCMC iterations to thin (integer). 590 | :param w1: nominal weight for degree of interaction, used in generating 591 | candidate basis functions. Should be greater than 0. 592 | :param w2: nominal weight for variables, used in generating candidate 593 | basis functions. Should be greater than 0. 594 | :param maxInt: maximum degree of interaction for spline basis functions 595 | (integer, less than p) 596 | :param maxBasis: maximum number of tensor product spline basis functions 597 | (integer) 598 | :param npart: minimum number of non-zero points in a basis function. If 599 | the response is functional, this refers only to the portion 600 | of the basis function coming from the non-functional 601 | predictors. Defaults to 20 or 0.1 times the number of 602 | observations, whichever is smaller. 603 | :param g1: shape for IG prior on residual variance. 604 | :param g2: scale for IG prior on residual variance. 605 | :param s2_lower: lower bound for residual variance. 606 | :param h1: shape for gamma prior on mean number of basis functions. 607 | :param h2: scale for gamma prior on mean number of basis functions. 608 | :param a_tau: shape for gamma prior on 1/g in g-prior. 609 | :param b_tau: scale for gamma prior on 1/g in g-prior. 610 | :param verbose: boolean for printing progress 611 | :return: an object of class BassModel, which includes predict and plot 612 | functions. 
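    Example (a minimal sketch, following examples/ex1.md; the data arrays
    below are illustrative and not part of this module)::

        import numpy as np
        import pyBASS as pb

        x = np.random.rand(500, 10)                      # training inputs (n x p)
        y = (10. * np.sin(np.pi * x[:, 0] * x[:, 1])
             + 20. * (x[:, 2] - .5) ** 2 + 10. * x[:, 3] + 5. * x[:, 4]
             + np.random.normal(size=500) * 0.1)         # noisy Friedman response
        mod = pb.bass(x, y)                              # fit with default priors and MCMC settings
        mod.plot()                                       # trace plots and residual diagnostics
        fpred = mod.predict(np.random.rand(10, 10))      # posterior draws of the mean at new inputs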
613 | """ 614 | 615 | t0 = time.time() 616 | if b_tau is None: 617 | b_tau = len(y) / 2 618 | if npart is None: 619 | npart = min(20, 0.1 * len(y)) 620 | bd = BassData(xx, y) 621 | if bd.p < maxInt: 622 | maxInt = bd.p 623 | bp = BassPrior( 624 | maxInt, maxBasis, npart, g1, g2, s2_lower, h1, h2, a_tau, b_tau, w1, w2 625 | ) 626 | nstore = int((nmcmc - nburn) / thin) 627 | bm = BassModel( 628 | bd, bp, nstore 629 | ) # if we add tempering, bm should have as many states as temperatures 630 | for i in range(nmcmc): # rjmcmc loop 631 | bm.state.update() 632 | if i > (nburn - 1) and ((i - nburn + 1) % thin) == 0: 633 | bm.writeState() 634 | if verbose and i % 500 == 0: 635 | print("\rBASS MCMC {:.1%} Complete".format(i / nmcmc), end="") 636 | # print(str(datetime.now()) + ', nbasis: ' + str(bm.state.nbasis)) 637 | t1 = time.time() 638 | print("\rBASS MCMC Complete. Time: {:f} seconds.".format(t1 - t0)) 639 | # del bm.writeState # the user should not have access to this 640 | return bm 641 | 642 | 643 | class PoolBass(object): 644 | # adapted from https://stackoverflow.com/questions/1816958/cant-pickle-type-instancemethod-when-using-multiprocessing-pool-map/41959862#41959862 answer by parisjohn 645 | # somewhat slow collection of results 646 | def __init__(self, x, y, **kwargs): 647 | self.x = x 648 | self.y = y 649 | self.kw = kwargs 650 | 651 | def rowbass(self, i): 652 | return bass(self.x, self.y[i, :], **self.kw) 653 | 654 | def fit(self, ncores, nrow_y): 655 | pool = Pool(ncores) 656 | out = pool.map(self, range(nrow_y)) 657 | return out 658 | 659 | def __call__(self, i): 660 | return self.rowbass(i) 661 | 662 | 663 | class PoolBassPredict(object): 664 | def __init__(self, X, mcmc_use, nugget, bm_list): 665 | self.X = X 666 | self.mcmc_use = mcmc_use 667 | self.nugget = nugget 668 | self.bm_list = bm_list 669 | 670 | def listpredict(self, i): 671 | return self.bm_list[i].predict(self.X, self.mcmc_use, self.nugget) 672 | 673 | def predict(self, ncores, nlist): 674 | pool = Pool(ncores) 675 | out = pool.map(self, range(nlist)) 676 | return out 677 | 678 | def __call__(self, i): 679 | return self.listpredict(i) 680 | 681 | 682 | class BassBasis: 683 | """ 684 | Structure for functional response BASS model using a basis 685 | decomposition, gets a list of BASS models 686 | """ 687 | 688 | def __init__( 689 | self, xx, y, basis, newy, y_mean, y_sd, trunc_error, ncores=1, **kwargs 690 | ): 691 | """ 692 | Fit BASS model with multivariate/functional response by projecting 693 | onto user specified basis. 694 | 695 | :param xx: matrix (numpy array) of predictors of dimension nxp, where 696 | n is the number of training examples and p is the number of 697 | inputs (features). 698 | :param y: response matrix (numpy array) of dimension nxq, where q is 699 | the number of multivariate/functional responses. 700 | :param basis: matrix (numpy array) of basis functions of dimension qxk. 701 | :param newy: matrix (numpy array) of y projected onto basis, dimension 702 | kxn. 703 | :param y_mean: vector (numpy array) of length q with the mean if y was 704 | centered before obtaining newy. 705 | :param y_sd: vector (numpy array) of length q with the standard 706 | deviation if y was scaled before obtaining newy. 707 | :param trunc_error: numpy array of projection truncation errors 708 | (dimension qxn) 709 | :param ncores: number of threads to use when fitting independent BASS 710 | models (integer less than or equal to npc). 711 | :param kwargs: optional arguments to bass function. 
712 | """ 713 | self.basis = basis 714 | self.xx = xx 715 | self.y = y 716 | self.newy = newy 717 | self.y_mean = y_mean 718 | self.y_sd = y_sd 719 | self.trunc_error = trunc_error 720 | self.nbasis = len(basis[0]) 721 | 722 | if ncores == 1: 723 | self.bm_list = list( 724 | map( 725 | lambda ii: bass(self.xx, self.newy[ii, :], **kwargs), 726 | list(range(self.nbasis)), 727 | ) 728 | ) 729 | else: 730 | temp = PoolBass(self.xx, self.newy, **kwargs) 731 | self.bm_list = temp.fit(ncores, self.nbasis) 732 | return 733 | 734 | def predict(self, X, mcmc_use=None, nugget=False, trunc_error=False, 735 | ncores=1): 736 | """ 737 | Predict the functional response at new inputs. 738 | 739 | :param X: matrix (numpy array) of predictors with dimension nxp, where 740 | n is the number of prediction points and p is the number of 741 | inputs (features). p must match the number of training 742 | inputs, and the order of the columns must also match. 743 | :param mcmc_use: which MCMC samples to use (list of integers of length 744 | m). Defaults to all MCMC samples. 745 | :param nugget: whether to use the error variance when predicting. 746 | If False, predictions are for mean function. 747 | :param trunc_error: whether to use truncation error when predicting. 748 | :param ncores: number of cores to use while predicting (integer). 749 | In almost all cases, use ncores=1. 750 | :return: a numpy array of predictions with dimension mxnxq, with first 751 | dimension corresponding to MCMC samples, second dimension 752 | corresponding to prediction points, and third dimension 753 | corresponding to multivariate/functional response. 754 | """ 755 | if ncores == 1: 756 | pred_coefs = list( 757 | map( 758 | lambda ii: self.bm_list[ii].predict(X, mcmc_use, nugget), 759 | list(range(self.nbasis)), 760 | ) 761 | ) 762 | else: 763 | temp = PoolBassPredict(X, mcmc_use, nugget, self.bm_list) 764 | pred_coefs = temp.predict(ncores, self.nbasis) 765 | out = np.dot(np.dstack(pred_coefs), self.basis.T) 766 | out2 = out * self.y_sd + self.y_mean 767 | if trunc_error: 768 | out2 += self.trunc_error[ 769 | :, 770 | np.random.choice( 771 | np.arange(self.trunc_error.shape[1]), 772 | size=np.prod(out.shape[:2]), 773 | replace=True, 774 | ), 775 | ].reshape(out.shape) 776 | return out2 777 | 778 | def plot(self): 779 | """ 780 | Trace plots and predictions/residuals 781 | 782 | * top left - trace plot of number of basis functions 783 | (excluding burn-in and thinning) for each BASS model 784 | * top right - trace plot of residual variance for each BASS model 785 | * bottom left - training data against predictions 786 | * bottom right - histogram of residuals (posterior mean). 
787 | """ 788 | 789 | fig = plt.figure() 790 | 791 | ax = fig.add_subplot(2, 2, 1) 792 | for i in range(self.nbasis): 793 | plt.plot(self.bm_list[i].samples.nbasis) 794 | plt.ylabel("number of basis functions") 795 | plt.xlabel("MCMC iteration (post-burn)") 796 | 797 | ax = fig.add_subplot(2, 2, 2) 798 | for i in range(self.nbasis): 799 | plt.plot(self.bm_list[i].samples.s2) 800 | plt.ylabel("error variance") 801 | plt.xlabel("MCMC iteration (post-burn)") 802 | 803 | ax = fig.add_subplot(2, 2, 3) 804 | yhat = self.predict(self.bm_list[0].data.xx_orig).mean( 805 | axis=0 806 | ) # posterior predictive mean 807 | plt.scatter(self.y, yhat) 808 | uf.abline(1, 0) 809 | plt.xlabel("observed") 810 | plt.ylabel("posterior prediction") 811 | 812 | ax = fig.add_subplot(2, 2, 4) 813 | plt.hist( 814 | (self.y - yhat).reshape(np.prod(yhat.shape)), 815 | color="skyblue", 816 | ec="white", 817 | density=True, 818 | ) 819 | plt.xlabel("residuals") 820 | plt.ylabel("density") 821 | 822 | fig.tight_layout() 823 | 824 | plt.show() 825 | 826 | 827 | class BassPCAsetup: 828 | """ 829 | Wrapper to get principal components that would be used for bassPCA. 830 | Mainly used for checking how many PCs should be used. 831 | 832 | :param y: response matrix (numpy array) of dimension nxq, where n is the 833 | number of training examples and q is the number of 834 | multivariate/functional responses. 835 | :param npc: number of principal components to use (integer, optional if 836 | percVar is specified). 837 | :param percVar: percent (between 0 and 100) of variation to explain when 838 | choosing number of principal components (if npc=None). 839 | :param center: whether to center the responses before principal component 840 | decomposition (boolean). 841 | :param scale: whether to scale the responses before principal component 842 | decomposition (boolean). 843 | :return: object with plot method. 
844 | """ 845 | 846 | def __init__(self, y, center=True, scale=False): 847 | self.y = y 848 | self.y_mean = 0 849 | self.y_sd = 1 850 | if center: 851 | self.y_mean = np.mean(y, axis=0) 852 | if scale: 853 | self.y_sd = np.std(y, axis=0) 854 | self.y_sd[self.y_sd == 0] = 1 855 | self.y_scale = np.apply_along_axis( 856 | lambda row: (row - self.y_mean) / self.y_sd, 1, y 857 | ) 858 | # decomp = np.linalg.svd(y_scale.T) 859 | U, s, V = np.linalg.svd(self.y_scale.T, full_matrices=False) 860 | self.evals = s**2 861 | self.basis = np.dot(U, np.diag(s)) 862 | self.newy = V 863 | return 864 | 865 | def plot(self, npc=None, percVar=None): 866 | """ 867 | Plot of principal components, eigenvalues 868 | 869 | * left - principal components; grey are excluded by setting of npc or 870 | percVar 871 | * right - eigenvalues (squared singular values), colored according to 872 | principal components 873 | """ 874 | 875 | cs = np.cumsum(self.evals) / np.sum(self.evals) * 100.0 876 | 877 | if npc == None and percVar == 100: 878 | npc = len(self.evals) 879 | if npc == None and percVar is not None: 880 | npc = np.where(cs >= percVar)[0][0] + 1 881 | if npc == None or npc > len(self.evals): 882 | npc = len(self.evals) 883 | 884 | fig = plt.figure() 885 | 886 | cmap = plt.get_cmap("tab10") 887 | 888 | ax = fig.add_subplot(1, 2, 1) 889 | if npc < len(self.evals): 890 | plt.plot(self.basis[:, npc:], color="grey") 891 | for i in range(npc): 892 | plt.plot(self.basis[:, i], color=cmap(i % 10)) 893 | plt.ylabel("principal components") 894 | plt.xlabel("multivariate/functional index") 895 | 896 | ax = fig.add_subplot(1, 2, 2) 897 | x = np.arange(len(self.evals)) + 1 898 | if npc < len(self.evals): 899 | plt.scatter(x[npc:], cs[npc:], facecolors="none", color="grey") 900 | for i in range(npc): 901 | plt.scatter(x[i], cs[i], facecolors="none", color=cmap(i % 10)) 902 | plt.axvline(npc) 903 | # if percVar is not None: 904 | # plt.axhline(percVar) 905 | plt.ylabel("cumulative eigenvalues (percent variance)") 906 | plt.xlabel("index") 907 | 908 | fig.tight_layout() 909 | 910 | plt.show() 911 | 912 | 913 | def bassPCA( 914 | xx, y, npc=None, percVar=99.9, ncores=1, center=True, scale=False, **kwargs 915 | ): 916 | """ 917 | Wrapper to get principal components and call BassBasis, which then calls 918 | bass function to fit the BASS model for functional (or multivariate) 919 | response data. 920 | 921 | :param xx: matrix (numpy array) of predictors of dimension nxp, where n is 922 | the number of training examples and p is the number of inputs 923 | (features). 924 | :param y: response matrix (numpy array) of dimension nxq, where q is the 925 | number of multivariate/functional responses. 926 | :param npc: number of principal components to use (integer, optional if 927 | percVar is specified). 928 | :param percVar: percent (between 0 and 100) of variation to explain when 929 | choosing number of principal components(if npc=None). 930 | :param ncores: number of threads to use when fitting independent BASS 931 | models (integer less than or equal to npc). 932 | :param center: whether to center the responses before principal component 933 | decomposition (boolean). 934 | :param scale: whether to scale the responses before principal component 935 | decomposition (boolean). 936 | :param kwargs: optional arguments to bass function. 937 | :return: object of class BassBasis, with predict and plot functions. 
938 | """ 939 | 940 | setup = BassPCAsetup(y, center, scale) 941 | 942 | if npc is None: 943 | cs = np.cumsum(setup.evals) / np.sum(setup.evals) * 100.0 944 | npc = np.where(cs > percVar)[0][0] + 1 945 | 946 | if ncores > npc: 947 | ncores = npc 948 | 949 | basis = setup.basis[:, :npc] 950 | newy = setup.newy[:npc, :] 951 | trunc_error = np.dot(basis, newy) - setup.y_scale.T 952 | 953 | print( 954 | "\rStarting bassPCA with {:d} components, using {:d} cores.".format(npc, ncores) 955 | ) 956 | 957 | return BassBasis( 958 | xx, y, basis, newy, setup.y_mean, setup.y_sd, trunc_error, ncores, **kwargs 959 | ) 960 | -------------------------------------------------------------------------------- /pyBASS/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | A python package for Bayesian Adaptive Spline Surfaces 3 | 4 | """ 5 | 6 | __all__ = ["utils", "sobol", "BASS"] 7 | 8 | __version__ = "0.3.2" 9 | 10 | import sys 11 | 12 | if sys.version_info[0] == 3 and sys.version_info[1] < 6: 13 | raise ImportError("Python Version 3.6 or above is required for pyBASS.") 14 | else: # Python 3 15 | pass 16 | # Here we can also check for specific Python 3 versions, if needed 17 | 18 | del sys 19 | 20 | from .BASS import * 21 | from .utils import * 22 | from .sobol import * 23 | -------------------------------------------------------------------------------- /pyBASS/sobol.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Copyright 2020. Triad National Security, LLC. All rights reserved. This 5 | program was produced under U.S. Government contract 89233218CNA000001 for 6 | Los Alamos National Laboratory (LANL), which is operated by Triad National 7 | Security, LLC for the U.S. Department of Energy/National Nuclear Security 8 | Administration. All rights in the program are reserved by Triad National 9 | 10 | Security, LLC, and the U.S. Department of Energy/National Nuclear Security 11 | Administration. The Government is granted for itself and others acting on 12 | its behalf a nonexclusive, paid-up, irrevocable worldwide license in this 13 | material to reproduce, prepare derivative works, distribute copies to the 14 | public, perform publicly and display publicly,and to permit others to do so. 15 | 16 | LANL software release C19112 17 | Author: J. Derek Tucker 18 | """ 19 | 20 | from pyBASS import BassBasis 21 | import numpy as np 22 | import scipy as sp 23 | import matplotlib.pyplot as plt 24 | import matplotlib.cm as cm 25 | import pyBASS.utils as uf 26 | import itertools 27 | from scipy import stats 28 | import re 29 | import time 30 | import math 31 | 32 | 33 | class sobolBasis: 34 | """ 35 | Decomposes the variance of the BASS model into variance due to the main 36 | effects, two way interactions, and so on, similar to the ANOVA 37 | decoposition for linear models. 38 | 39 | Uses the Sobol' decomposition, which can be done analytically for 40 | MARS-type models. This is for the Basis class 41 | 42 | :param mod: BassBasis model 43 | 44 | :return: object with plot method. 45 | """ 46 | 47 | def __init__(self, mod: BassBasis): 48 | self.mod = mod 49 | return 50 | 51 | def decomp(self, int_order, prior=None, mcmc_use=None, nind=None, ncores=1): 52 | """ 53 | Perform Sobol Decomp 54 | 55 | :param int_order: an integer indicating the highest order of 56 | interactions to include in the Sobol decomposition. 
57 | :param prior: a list with the same number of elements as there are 58 | inputs to mod. Each element specifies the prior for the 59 | particular input. Each prior is specified as a 60 | dictionary with elements (one of "normal", "student", 61 | or "uniform"), "trunc" (a vector of dimension 2 62 | indicating the lower and upper truncation bounds, 63 | taken to be the data bounds if omitted), and for 64 | "normal" or "student" priors, "mean" (scalar mean of 65 | the Normal/Student, or a vector of means for a mixture 66 | of Normals or Students), "sd" (scalar standard deviation 67 | of the Normal/Student, or a vector of standard 68 | deviations for a mixture of Normals or Students), "df" 69 | (scalar degrees of freedom of the Student, or a vector 70 | of degrees of freedom for a mixture of Students), and 71 | "weights" (a vector of weights that sum to one for the 72 | mixture components, or the scalar 1). If unspecified, 73 | a uniform is assumed with the same bounds as are 74 | represented in the input to mod. 75 | :param mcmc_use: an integer indicating which MCMC iteration to use for 76 | sensitivity analysis. Defaults to the last iteration. 77 | :param nind: number of Sobol indices to keep 78 | (will keep the largest nind). 79 | :param ncores: number of cores to use (default = 1) 80 | """ 81 | self.int_order = int_order 82 | if mcmc_use is None: 83 | self.mcmc_use = self.mod.bm_list[0].nstore - 1 84 | else: 85 | self.mcmc_use = mcmc_use 86 | self.nind = nind 87 | self.ncores = ncores 88 | 89 | bassDat = self.mod.bm_list[0].data 90 | 91 | if prior is None: 92 | self.prior = [] 93 | else: 94 | self.prior = prior 95 | 96 | p = bassDat.p 97 | 98 | if len(self.prior) < p: 99 | for i in range(len(self.prior), p): 100 | tmp = {"dist": "uniform", "trunc": None} 101 | self.prior.append(tmp) 102 | 103 | for i in range(len(self.prior)): 104 | if self.prior[i]["trunc"] is None: 105 | self.prior[i]["trunc"] = np.array([0, 1]) 106 | else: 107 | self.prior[i]["trunc"] = uf.normalize( 108 | self.prior[i]["trunc"], bassDat.bounds[:, i] 109 | ) 110 | 111 | if self.prior[i]["dist"] == "normal" or self.prior[i]["dist"] == "student": 112 | self.prior[i]["mean"] = uf.normalize( 113 | self.prior[i]["mean"], bassDat.bounds[:, i] 114 | ) 115 | self.prior[i]["sd"] = prior[i]["sd"] / ( 116 | bassDat.bounds[1, i] - bassDat.bounds[0, i] 117 | ) 118 | if self.prior[i]["dist"] == "normal": 119 | self.prior[i]["z"] = stats.norm.pdf( 120 | (self.prior[i]["trunc"][1] - self.prior[i]["mean"]) 121 | / self.prior[i]["sd"] 122 | ) - stats.norm.pdf( 123 | (self.prior[i]["trunc"][0] - self.prior[i]["mean"]) 124 | / self.prior[i]["sd"] 125 | ) 126 | else: 127 | self.prior[i]["z"] = stats.t.pdf( 128 | (self.prior[i]["trunc"][1] - self.prior[i]["mean"]) 129 | / self.prior[i]["sd"], 130 | self.prior[i]["df"], 131 | ) - stats.t.pdf( 132 | (self.prior[i]["trunc"][0] - self.prior[i]["mean"]) 133 | / self.prior[i]["sd"], 134 | self.prior[i]["df"], 135 | ) 136 | 137 | cc = (self.prior[i]["weights"] * self.prior[i]["z"]).sum() 138 | self.prior[i]["weights"] = self.prior[i]["weights"] / cc 139 | 140 | pc_mod = self.mod.bm_list 141 | pcs = self.mod.basis 142 | 143 | tic = time.perf_counter() 144 | print("Start\n") 145 | 146 | if int_order > p: 147 | self.int_order = p 148 | print( 149 | "int_order > number of inputs, change to int_order = number of input\n" 150 | ) 151 | 152 | u_list = [ 153 | list(itertools.combinations(range(0, p), x)) 154 | for x in range(1, int_order + 1) 155 | ] 156 | ncombs_vec = [len(x) for x in u_list] 157 | 
ncombs = sum(ncombs_vec) 158 | nxfunc = pcs.shape[0] 159 | 160 | n_pc = self.mod.nbasis 161 | 162 | w0 = np.zeros(n_pc) 163 | for i in range(n_pc): 164 | w0[i] = self.get_f0(pc_mod, i) 165 | 166 | f0r2 = (pcs @ w0) ** 2 167 | 168 | tmp = [pc_mod[x].samples.nbasis[self.mcmc_use] for x in range(n_pc)] 169 | max_nbasis = max(tmp) 170 | 171 | C1Basis_array = np.zeros((n_pc, p, max_nbasis)) 172 | for i in range(n_pc): 173 | nb = pc_mod[i].samples.nbasis[self.mcmc_use] 174 | mcmc_mod_usei = pc_mod[i].model_lookup[self.mcmc_use] 175 | for j in range(p): 176 | for k in range(nb): 177 | C1Basis_array[i, j, k] = self.C1Basis( 178 | pc_mod, j, k, i, mcmc_mod_usei 179 | ) 180 | 181 | u_list1 = [] 182 | for i in range(int_order): 183 | u_list1.extend(u_list[i]) 184 | 185 | toc = time.perf_counter() 186 | print("Integrating: %0.2fs\n" % (toc - tic)) 187 | 188 | u_list_temp = u_list1 189 | u_list_temp.insert(0, list(np.arange(0, p))) 190 | 191 | if ncores > 1: 192 | # @todo write parallel version 193 | NameError("Parallel not Implemented\n") 194 | else: 195 | ints1_temp = [ 196 | self.func_hat(x, pc_mod, pcs, mcmc_use, f0r2, C1Basis_array) 197 | for x in u_list_temp 198 | ] 199 | 200 | V_tot = ints1_temp[0] 201 | ints1 = ints1_temp[1:] 202 | 203 | ints = [] 204 | ints.append(np.zeros((ints1[0].shape[0], len(u_list[0])))) 205 | for i in range(len(u_list[0])): 206 | ints[0][:, i] = ints1[i] 207 | 208 | if int_order > 1: 209 | for i in range(2, int_order + 1): 210 | idx = np.sum(ncombs_vec[0 : (i - 1)]) + np.arange(0, len(u_list[i - 1])) 211 | ints.append(np.zeros((ints1[0].shape[0], idx.shape[0]))) 212 | cnt = 0 213 | for j in idx: 214 | ints[i - 1][:, cnt] = ints1[j] 215 | cnt += 1 216 | 217 | sob = [] 218 | sob.append(ints[0]) 219 | toc = time.perf_counter() 220 | print("Shuffling: %0.2fs\n" % (toc - tic)) 221 | 222 | if len(u_list) > 1: 223 | for i in range(1, len(u_list)): 224 | sob.append(np.zeros((nxfunc, ints[i].shape[1]))) 225 | for j in range(len(u_list[i])): 226 | cc = np.zeros(nxfunc) 227 | for k in range(i): 228 | ind = [np.all(np.in1d(x, u_list[i][j])) for x in u_list[k]] 229 | cc += (-1) ** (i - k) * np.sum(ints[k][:, ind], axis=1) 230 | sob[i][:, j] = ints[i][:, j] + cc 231 | 232 | if nind is None: 233 | nind = ncombs 234 | 235 | sob_comb_var = np.concatenate(sob, axis=1) 236 | 237 | vv = np.mean(sob_comb_var, axis=0) 238 | ord = vv.argsort()[::-1] 239 | cutoff = vv[ord[nind - 1]] 240 | if nind > ord.shape[0]: 241 | cutoff = vv.min() 242 | 243 | use = np.sort(np.where(vv >= cutoff)[0]) 244 | 245 | V_other = V_tot - np.sum(sob_comb_var[:, use], axis=1) 246 | 247 | use = np.append(use, ncombs) 248 | 249 | sob_comb_var = np.hstack((sob_comb_var, V_other[:, np.newaxis])).T 250 | sob_comb = sob_comb_var / V_tot 251 | 252 | sob_comb_var = sob_comb_var[use, :] 253 | sob_comb = sob_comb[use, :] 254 | 255 | # Calculate "Total Sobol' Index" 256 | sob_comb_tot = np.zeros((p, nxfunc)) 257 | idx = 0 258 | for i in range(int_order): 259 | for j in range(len(u_list[i])): 260 | sob_comb_tot[u_list[i][j], :] += sob_comb_var[idx] 261 | idx += 1 262 | 263 | names_ind1 = [] 264 | for i in range(len(u_list)): 265 | for j in range(len(u_list[i])): 266 | tmp = u_list[i][j] 267 | tmp1 = [x + 1 for x in tmp] 268 | tmp1 = re.findall(r"\d+", str(tmp1)) 269 | if len(tmp1) == 1: 270 | names_ind1.append(tmp1[0]) 271 | else: 272 | separator = "x" 273 | names_ind1.append(separator.join(tmp1)) 274 | 275 | names_ind1.append("other") 276 | names_ind2 = [names_ind1[x] for x in use] 277 | 278 | toc = time.perf_counter() 279 | 
print("Finish: %0.2fs\n" % (toc - tic)) 280 | 281 | self.S = sob_comb 282 | self.S_var = sob_comb_var 283 | self.T_var = sob_comb_tot 284 | self.Var_tot = V_tot 285 | self.names_ind = names_ind2 286 | self.xx = np.linspace(0, 1, nxfunc) 287 | 288 | return 289 | 290 | def plot( 291 | self, 292 | int_order=1, 293 | total_sobol=True, 294 | text=False, 295 | labels=[], 296 | col="Paired", 297 | time=[], 298 | ): 299 | if len(time) == 0: 300 | time = self.xx 301 | 302 | p = np.shape(self.mod.xx)[1] 303 | ncomb = np.sum([math.comb(p, k) for k in range(1, int_order + 1)]) 304 | 305 | if len(labels) == 0: 306 | labels = self.names_ind[:ncomb] + [self.names_ind[-1]] 307 | 308 | map = cm.Paired(np.linspace(0, 1, 12)) 309 | map = np.resize(map, (len(labels), 4)) 310 | rgb = np.ones((map.shape[0] + 1, 4)) 311 | rgb[0 : map.shape[0], :] = map 312 | rgb[-1, 0:3] = np.array([153, 153, 153]) / 255 313 | 314 | ord = time.argsort() 315 | x_mean = np.vstack( 316 | [self.S[:ncomb, :], np.sum(self.S[ncomb:, :], axis=0, keepdims=True)] 317 | ) 318 | sens = np.cumsum(x_mean, axis=0).T 319 | fig, axs = plt.subplots(1, 2 + total_sobol) 320 | cnt = 0 321 | for i in range(ncomb + 1): 322 | x2 = np.concatenate((time[ord], np.flip(time[ord]))) 323 | if i == 0: 324 | inBetween = np.concatenate( 325 | (np.zeros(time[ord].shape[0]), np.flip(sens[ord, i])) 326 | ) 327 | else: 328 | inBetween = np.concatenate((sens[ord, i - 1], np.flip(sens[ord, i]))) 329 | if (cnt % rgb.shape[0] + 1) == 0: 330 | cnt = 0 331 | 332 | axs[0].fill(x2, inBetween, color=rgb[cnt, :]) 333 | cnt += 1 334 | 335 | axs[0].set( 336 | xlabel="x", 337 | ylabel="proportion variance", 338 | title="Sensitivity", 339 | ylim=[0, 1], 340 | xlim=[time.min(), time.max()], 341 | ) 342 | 343 | if text: 344 | lab_x = np.argmax(x_mean, axis=1) 345 | cs = np.zeros((sens.shape[1] + 1, sens.shape[0])) 346 | cs[1:, :] = np.cumsum(x_mean, axis=0) 347 | cs_diff = np.zeros((x_mean.shape[0], x_mean.shape[1])) 348 | for i in range(x_mean.shape[1]): 349 | cs_diff[:, i] = np.diff(np.cumsum(np.concatenate((0, x_mean[:, 0])))) 350 | tmp = np.concatenate((np.arange(0, lab_x.shape[0]), lab_x)) 351 | ind = np.ravel_multi_index( 352 | np.concatenate((tmp[:, 0], tmp[:, 1])), dims=cs.shape, order="F" 353 | ) 354 | ind1 = np.ravel_multi_index( 355 | np.concatenate((tmp[:, 0], tmp[:, 1])), dims=cs_diff.shape, order="F" 356 | ) 357 | cs_diff2 = cs_diff / 2 358 | plt.text(time[lab_x], cs[ind] + cs_diff2[ind1], self.names_ind) 359 | 360 | x_mean_var = np.vstack( 361 | [ 362 | self.S_var[:ncomb, :], 363 | np.sum(self.S_var[ncomb:, :], axis=0, keepdims=True), 364 | ] 365 | ) 366 | sens_var = np.cumsum(x_mean_var, axis=0).T 367 | cnt = 0 368 | for i in range(ncomb + 1): 369 | x2 = np.concatenate((time[ord], np.flip(time[ord]))) 370 | if i == 0: 371 | inBetween = np.concatenate( 372 | (np.zeros(time[ord].shape[0]), np.flip(sens_var[ord, i])) 373 | ) 374 | else: 375 | inBetween = np.concatenate( 376 | (sens_var[ord, i - 1], np.flip(sens_var[ord, i])) 377 | ) 378 | if (cnt % rgb.shape[0] + 1) == 0: 379 | cnt = 0 380 | 381 | axs[1].fill(x2, inBetween, color=rgb[cnt, :]) 382 | cnt += 1 383 | 384 | axs[1].set( 385 | xlabel="x", 386 | ylabel="variance", 387 | title="Variance Decomposition", 388 | xlim=[time.min(), time.max()], 389 | ylim=[0, inBetween.max() + 3], 390 | ) 391 | 392 | if not text: 393 | axs[1].legend(labels, loc="upper left") 394 | 395 | if total_sobol: 396 | x_mean_tot = self.T_var 397 | sens_tot = np.cumsum(x_mean_tot, axis=0).T 398 | cnt = 0 399 | for i in range(p): 400 | x2 = 
np.concatenate((time[ord], np.flip(time[ord]))) 401 | if i == 0: 402 | inBetween = np.concatenate( 403 | (np.zeros(time[ord].shape[0]), np.flip(sens_tot[ord, i])) 404 | ) 405 | else: 406 | inBetween = np.concatenate( 407 | (sens_tot[ord, i - 1], np.flip(sens_tot[ord, i])) 408 | ) 409 | if (cnt % rgb.shape[0] + 1) == 0: 410 | cnt = 0 411 | 412 | axs[2].fill(x2, inBetween, color=rgb[cnt, :]) 413 | cnt += 1 414 | 415 | axs[2].set( 416 | xlabel="x", 417 | ylabel="total variance", 418 | title="Total Sobol'", 419 | xlim=[time.min(), time.max()], 420 | ylim=[0, inBetween.max() + 3], 421 | ) 422 | 423 | fig.tight_layout() 424 | return 425 | 426 | def get_f0(self, pc_mod, pc): 427 | mcmc_mod_use = pc_mod[pc].model_lookup[self.mcmc_use] 428 | out = pc_mod[pc].samples.beta[self.mcmc_use, 0] 429 | if pc_mod[pc].samples.nbasis[self.mcmc_use] > 0: 430 | for m in range(pc_mod[pc].samples.nbasis[self.mcmc_use]): 431 | out1 = pc_mod[pc].samples.beta[self.mcmc_use, 1 + m] 432 | for l in range(pc_mod[pc].data.p): 433 | out1 = out1 * self.C1Basis(pc_mod, l, m, pc, mcmc_mod_use) 434 | out += out1 435 | return out 436 | 437 | def C1Basis(self, pc_mod, l, m, pc, mcmc_mod_use): 438 | n_int = pc_mod[pc].samples.n_int[mcmc_mod_use, m] 439 | int_use_l = np.where(pc_mod[pc].samples.vs[mcmc_mod_use, m, :][:n_int] == l)[0] 440 | 441 | if len(int_use_l) == 0: 442 | out = 1 443 | return out 444 | 445 | s = pc_mod[pc].samples.signs[mcmc_mod_use, m, int_use_l] 446 | t = pc_mod[pc].samples.knots[mcmc_mod_use, m, int_use_l] 447 | q = 1 448 | 449 | if s == 0: 450 | out = 0 451 | return out 452 | 453 | cc = uf.const(s, t) 454 | 455 | if s == 1: 456 | a = np.maximum(self.prior[l]["trunc"][0], t) 457 | b = self.prior[l]["trunc"][1] 458 | if b < t: 459 | out = 0 460 | return out 461 | out = self.intabq1(self.prior[l], a, b, t, q) / cc 462 | else: 463 | a = self.prior[l]["trunc"][0] 464 | b = np.minimum(self.prior[l]["trunc"][1], t) 465 | if t < a: 466 | out = 0 467 | return out 468 | out = self.intabq1(self.prior[l], a, b, t, q) * (-1) ** q / cc 469 | 470 | return out 471 | 472 | def intabq1(self, prior, a, b, t, q): 473 | if prior["dist"] == "normal": 474 | if q != 1: 475 | raise ValueError("degree other than 1 not supported for normal priors") 476 | 477 | out = 0 478 | for k in range(len(prior["weights"])): 479 | zk = stats.norm.pdf( 480 | b, prior["mean"][k], prior["sd"][k] 481 | ) - stats.norm.pdf(a, prior["mean"][k], prior["sd"][k]) 482 | ast = (a - prior["mean"][k]) / prior["sd"][k] 483 | bst = (b - prior["mean"][k]) / prior["sd"][k] 484 | dnb = stats.norm.cdf(bst) 485 | dna = stats.norm.cdf(ast) 486 | tnorm_mean_zk = prior["mean"][k] * zk - prior["sd"][k] * (dnb - dna) 487 | out += prior["weights"][k] * (tnorm_mean_zk - t * zk) 488 | 489 | if prior["dist"] == "student": 490 | if q != 1: 491 | raise ValueError("degree other than 1 not supported for student priors") 492 | 493 | out = 0 494 | for k in range(len(prior["weights"])): 495 | val = self.intx1Student( 496 | b, prior["mean"][k], prior["sd"][k], prior["df"][k], t 497 | ) - self.intx1Student( 498 | a, prior["mean"][k], prior["sd"][k], prior["df"][k], t 499 | ) 500 | out += prior["weights"][k] * val 501 | 502 | if prior["dist"] == "uniform": 503 | out = ( 504 | 1 505 | / (q + 1) 506 | * ((b - t) ** (q + 1) - (a - t) ** (q + 1)) 507 | * 1 508 | / (prior["trunc"][1] - prior["trunc"][0]) 509 | ) 510 | 511 | return out 512 | 513 | def intx1Student(self, x, m, s, v, t): 514 | temp = (s**2 * v) / (m**2 + s**2 * v - 2 * m * x + x**2) 515 | out = -( 516 | (v / (v + (m - x) ** 2 / s**2)) ** 
(v / 2) 517 | * np.sqrt(temp) 518 | * np.sqrt(1 / temp) 519 | * ( 520 | s**2 * v * (np.sqrt(1 / temp) - (1 / temp) ** (v / 2)) 521 | + (t - m) 522 | * (-1 + v) 523 | * (-m + x) 524 | * (1 / temp) ** (v / 2) 525 | * self.robust2f1( 526 | 1 / 2, (1 + v) / 2, 3 / 2, -((m - x) ** 2) / (s**2 * v) 527 | ) 528 | ) 529 | ) / (s * (-1 + v) * np.sqrt(v) * sp.special.beta(v / 2, 1 / 2)) 530 | 531 | return out 532 | 533 | def robust2f1(sself, a, b, c, x): 534 | if np.abs(x) < 1: 535 | z = sp.special.hyp2f1(a, b, c, np.array([0, x])) 536 | out = z[-1] 537 | else: 538 | z = sp.special.hyp2f1(a, c - b, c, 0) 539 | out = z[-1] 540 | 541 | return out 542 | 543 | def func_hat(self, u, pc_mod, pcs, mcmc_use, f0r2, C1Basis_array): 544 | res = np.zeros(pcs.shape[0]) 545 | n_pc = len(pc_mod) 546 | for i in range(n_pc): 547 | res += pcs[:, i] ** 2 * self.Ccross(pc_mod, i, i, u, C1Basis_array) 548 | 549 | if (i + 1) < n_pc: 550 | for j in range(i + 1, n_pc): 551 | res = res + 2 * pcs[:, i] * pcs[:, j] * self.Ccross( 552 | pc_mod, i, j, u, C1Basis_array 553 | ) 554 | 555 | out = res - f0r2 556 | 557 | return out 558 | 559 | def Ccross(self, pc_mod, i, j, u, C1Basis_array): 560 | p = pc_mod[0].data.p 561 | mcmc_mod_usei = pc_mod[i].model_lookup[self.mcmc_use] 562 | mcmc_mod_usej = pc_mod[j].model_lookup[self.mcmc_use] 563 | 564 | Mi = pc_mod[i].samples.nbasis[self.mcmc_use] 565 | Mj = pc_mod[j].samples.nbasis[self.mcmc_use] 566 | 567 | a0i = pc_mod[i].samples.beta[self.mcmc_use, 0] 568 | a0j = pc_mod[j].samples.beta[self.mcmc_use, 0] 569 | f0i = self.get_f0(pc_mod, i) 570 | f0j = self.get_f0(pc_mod, j) 571 | 572 | out = a0i * a0j + a0i * (f0j - a0j) + a0j * (f0i - a0i) 573 | 574 | if Mi > 0 and Mj > 0: 575 | ai = pc_mod[i].samples.beta[self.mcmc_use, 1 : (Mi + 1)] 576 | aj = pc_mod[j].samples.beta[self.mcmc_use, 1 : (Mj + 1)] 577 | 578 | for mi in range(Mi): 579 | for mj in range(Mj): 580 | temp1 = ai[mi] * aj[mj] 581 | temp2 = 1 582 | temp3 = 1 583 | idx = np.arange(0, p) 584 | idx2 = u 585 | idx = np.delete(idx, idx2) 586 | 587 | for l in idx: 588 | temp2 = temp2 * C1Basis_array[i, l, mi] * C1Basis_array[j, l, mj] 589 | 590 | for l in idx2: 591 | temp3 = temp3 * self.C2Basis( 592 | pc_mod, l, mi, mj, i, j, mcmc_mod_usei, mcmc_mod_usej 593 | ) 594 | 595 | out += temp1 * temp2 * temp3 596 | 597 | return out 598 | 599 | def C2Basis(self, pc_mod, l, m1, m2, pc1, pc2, mcmc_mod_use1, mcmc_mod_use2): 600 | 601 | if l < pc_mod[pc1].data.p: 602 | n_int1 = pc_mod[pc1].samples.n_int[mcmc_mod_use1, m1] 603 | int_use_l1 = np.where( 604 | pc_mod[pc1].samples.vs[mcmc_mod_use1, m1, :][:n_int1] == l 605 | )[0] 606 | n_int2 = pc_mod[pc2].samples.n_int[mcmc_mod_use2, m2] 607 | int_use_l2 = np.where( 608 | pc_mod[pc2].samples.vs[mcmc_mod_use2, m2, :][:n_int2] == l 609 | )[0] 610 | 611 | if int_use_l1.size == 0 and int_use_l2.size == 0: 612 | out = 1 613 | return out 614 | 615 | if int_use_l1.size == 0: 616 | out = self.C1Basis(pc_mod, l, m2, pc2, mcmc_mod_use2) 617 | return out 618 | 619 | if int_use_l2.size == 0: 620 | out = self.C1Basis(pc_mod, l, m1, pc1, mcmc_mod_use1) 621 | return out 622 | 623 | q = 1 624 | s1 = pc_mod[pc1].samples.signs[mcmc_mod_use1, m1, int_use_l1] 625 | s2 = pc_mod[pc2].samples.signs[mcmc_mod_use2, m2, int_use_l2] 626 | t1 = pc_mod[pc1].samples.knots[mcmc_mod_use1, m1, int_use_l1] 627 | t2 = pc_mod[pc2].samples.knots[mcmc_mod_use2, m2, int_use_l2] 628 | 629 | if t2 < t1: 630 | temp = t1 631 | t1 = t2 632 | t2 = temp 633 | temp = s1 634 | s1 = s2 635 | s2 = temp 636 | 637 | out = 
self.C22Basis(self.prior[l], t1, t2, s1, s2, q) 638 | 639 | return out 640 | 641 | def C22Basis(self, prior, t1, t2, s1, s2, q): 642 | cc = uf.const(np.array([s1, s2]), np.array([t1, t2])) 643 | out = 0 644 | if (s1 * s2) == 0: 645 | out = 0 646 | return out 647 | 648 | if s1 == 1: 649 | if s2 == 1: 650 | out = self.intabq2(prior, t2, 1, t1, t2, q) / cc 651 | return out 652 | else: 653 | out = self.intabq2(prior, t1, t2, t1, t2, q) * (-1) ** q / cc 654 | return out 655 | else: 656 | if s2 == 1: 657 | out = 0 658 | return out 659 | else: 660 | out = self.intabq2(prior, 0, t1, t1, t2, q) / cc 661 | return out 662 | 663 | return out 664 | 665 | def intabq2(self, prior, a, b, t1, t2, q): 666 | if prior["dist"] == "normal": 667 | if q != 1: 668 | raise ValueError("degree other than 1 not supported for normal priors") 669 | 670 | out = 0 671 | for k in range(len(prior["weights"])): 672 | zk = stats.norm.pdf( 673 | b, prior["mean"][k], prior["sd"][k] 674 | ) - stats.norm.pdf(a, prior["mean"][k], prior["sd"][k]) 675 | if zk < np.finfo(float).eps: 676 | continue 677 | ast = (a - prior["mean"][k]) / prior["sd"][k] 678 | bst = (b - prior["mean"][k]) / prior["sd"][k] 679 | dnb = stats.norm.cdf(bst) 680 | dna = stats.norm.cdf(ast) 681 | tnorm_mean_zk = prior["mean"][k] * zk - prior["sd"][k] * (dnb - dna) 682 | tnorm_var_zk = ( 683 | zk 684 | * prior["sd"][k] ** 2 685 | * (1 + (ast * dna - bst * dnb) / zk - ((dna - dnb) / zk) ** 2) 686 | + tnorm_mean_zk**2 / zk 687 | ) 688 | out += prior["weights"][k] * ( 689 | tnorm_var_zk - (t1 + t2) * tnorm_mean_zk + t1 * t2 * zk 690 | ) 691 | if out < 0 and np.abs(out) < 1e-12: 692 | out = 0 693 | 694 | if prior["dist"] == "student": 695 | if q != 1: 696 | raise ValueError("degree other than 1 not supported for student priors") 697 | 698 | out = 0 699 | for k in range(len(prior["weights"])): 700 | val = self.intx2Student( 701 | b, prior["mean"][k], prior["sd"][k], prior["df"][k], t1, t2 702 | ) - self.intx2Student( 703 | a, prior["mean"][k], prior["sd"][k], prior["df"][k], t1, t2 704 | ) 705 | out += prior["weights"][k] * val 706 | 707 | if prior["dist"] == "uniform": 708 | out = ( 709 | ( 710 | np.sum( 711 | self.pCoef(np.arange(0, q + 1), q) 712 | * (b - t1) ** (q - np.arange(0, q + 1)) 713 | * (b - t2) ** (q + 1 + np.arange(0, q + 1)) 714 | ) 715 | - np.sum( 716 | self.pCoef(np.arange(0, q + 1), q) 717 | * (a - t1) ** (q - np.arange(0, q + 1)) 718 | * (a - t2) ** (q + 1 + np.arange(0, q + 1)) 719 | ) 720 | ) 721 | * 1 722 | / (prior["trunc"][1] - prior["trunc"][0]) 723 | ) 724 | 725 | return out 726 | 727 | def intx2Student(self, x, m, s, v, t1, t2): 728 | temp = (s**2 * v) / (m**2 + s**2 * v - 2 * m * x + x**2) 729 | out = ( 730 | (v / (v + (m - x) ** 2 / s**2)) ** (v / 2) 731 | * np.sqrt(temp) 732 | * np.sqrt(1 / temp) 733 | * ( 734 | -3 735 | * (-t1 - t2 + 2 * m) 736 | * s**2 737 | * v 738 | * (np.sqrt(1 / temp) - (1 / temp) ** (v / 2)) 739 | + 3 740 | * (-t1 + m) 741 | * (-t2 + m) 742 | * (-1 + v) 743 | * (-m + x) 744 | * (1 / temp) ** (v / 2) 745 | * self.robust2f1( 746 | 1 / 2, (1 + v) / 2, 3 / 2, -((m - x) ** 2) / (s**2 * v) 747 | ) 748 | + (-1 + v) 749 | * (-m + x) ** 3 750 | * (1 / temp) ** (v / 2) 751 | * self.robust2f1( 752 | 3 / 2, (1 + v) / 2, 5 / 2, -((m - x) ** 2) / (s**2 * v) 753 | ) 754 | ) 755 | ) / (3 * s * (-1 + v) * np.sqrt(v) * sp.special.beta(v / 2, 1 / 2)) 756 | 757 | return out 758 | 759 | def pCoef(self, i, q): 760 | out = ( 761 | sp.special.factorial(q) ** 2 762 | * (-1) ** i 763 | / (sp.special.factorial(q - i) * sp.special.factorial(q + 1 + i)) 764 
| ) 765 | return out 766 | -------------------------------------------------------------------------------- /pyBASS/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Copyright 2020. Triad National Security, LLC. All rights reserved. This 5 | program was produced under U.S. Government contract 89233218CNA000001 for 6 | Los Alamos National Laboratory (LANL), which is operated by Triad National 7 | Security, LLC for the U.S. Department of Energy/National Nuclear Security 8 | Administration. All rights in the program are reserved by Triad National 9 | 10 | Security, LLC, and the U.S. Department of Energy/National Nuclear Security 11 | Administration. The Government is granted for itself and others acting on 12 | its behalf a nonexclusive, paid-up, irrevocable worldwide license in this 13 | material to reproduce, prepare derivative works, distribute copies to the 14 | public, perform publicly and display publicly,and to permit others to do so. 15 | 16 | LANL software release C19112 17 | Author: Devin Francom 18 | """ 19 | 20 | import matplotlib.pyplot as plt 21 | import numpy as np 22 | import scipy as sp 23 | from scipy.special import comb 24 | from itertools import combinations, chain 25 | from collections import namedtuple 26 | 27 | 28 | def abline(slope, intercept): 29 | """Plot a line from slope and intercept""" 30 | axes = plt.gca() 31 | x_vals = np.array(axes.get_xlim()) 32 | y_vals = intercept + slope * x_vals 33 | plt.plot(x_vals, y_vals, "--", color="red") 34 | 35 | 36 | def ismember(a, b): 37 | bind = {} 38 | for i, elt in enumerate(b): 39 | if elt not in bind: 40 | bind[elt] = i 41 | return [ 42 | bind.get(itm, None) for itm in a 43 | ] # None can be replaced by any other "not in b" value 44 | 45 | 46 | pos = lambda a: (abs(a) + a) / 2 # same as max(0,a) 47 | 48 | 49 | def const(signs, knots): 50 | """Get max value of BASS basis function, assuming 0-1 range of inputs""" 51 | cc = np.prod(((signs + 1) / 2 - signs * knots)) 52 | if cc == 0: 53 | return 1 54 | return cc 55 | 56 | 57 | def makeBasis(signs, vs, knots, xdata): 58 | """Make basis function using continuous variables""" 59 | cc = const(signs, knots) 60 | temp1 = pos(signs * (xdata[:, vs] - knots)) 61 | if len(signs) == 1: 62 | return temp1 / cc 63 | temp2 = np.prod(temp1, axis=1) / cc 64 | return temp2 65 | 66 | 67 | def normalize(x, bounds): 68 | """Normalize to 0-1 scale""" 69 | return (x - bounds[:, 0]) / (bounds[:, 1] - bounds[:, 0]) 70 | 71 | 72 | def unnormalize(z, bounds): 73 | """Inverse of normalize""" 74 | return z * (bounds[:, 1] - bounds[:, 0]) + bounds[:, 0] 75 | 76 | 77 | def comb_index(n, k): 78 | """Get all combinations of indices from 0:n of length k""" 79 | # https://stackoverflow.com/questions/16003217/n-d-version-of-itertools-combinations-in-numpy 80 | count = comb(n, k, exact=True) 81 | index = np.fromiter( 82 | chain.from_iterable(combinations(range(n), k)), int, count=count * k 83 | ) 84 | return index.reshape(-1, k) 85 | 86 | 87 | def dmwnchBass(z_vec, vars_use): 88 | """ 89 | Multivariate Walenius' noncentral hypergeometric density function with 90 | some variables fixed 91 | """ 92 | with np.errstate(divide="ignore"): 93 | alpha = z_vec[vars_use - 1] / sum(np.delete(z_vec, vars_use)) 94 | j = len(alpha) 95 | ss = 1 + (-1) ** j * 1 / (sum(alpha) + 1) 96 | for i in range(j - 1): 97 | idx = comb_index(j, i + 1) 98 | temp = alpha[idx] 99 | ss = ss + (-1) ** (i + 1) * sum(1 / (temp.sum(axis=1) + 
1)) 100 | return ss 101 | 102 | 103 | Qf = namedtuple("Qf", "R bhat qf") 104 | 105 | 106 | def getQf(XtX, Xty): 107 | """ 108 | Get the quadratic form y'X solve(X'X) X'y, as well as least squares 109 | beta and cholesky of X'X 110 | """ 111 | try: 112 | R = sp.linalg.cholesky( 113 | XtX, lower=False 114 | ) # might be a better way to do this with sp.linalg.cho_factor 115 | except np.linalg.LinAlgError as e: 116 | return None 117 | dr = np.diag(R) 118 | if len(dr) > 1: 119 | if max(dr[1:]) / min(dr) > 1e3: 120 | return None 121 | bhat = sp.linalg.solve_triangular(R, sp.linalg.solve_triangular(R, Xty, trans=1)) 122 | qf = np.dot(bhat, Xty) 123 | return Qf(R, bhat, qf) 124 | 125 | 126 | def logProbChangeMod(n_int, vars_use, I_vec, z_vec, p, maxInt): 127 | """Get reversibility factor for RJMCMC acceptance ratio, and also prior""" 128 | if n_int == 1: 129 | out = ( 130 | np.log(I_vec[n_int - 1]) 131 | - np.log(2 * p) # proposal 132 | + np.log(2 * p) 133 | + np.log(maxInt) 134 | ) 135 | else: 136 | x = np.zeros(p) 137 | x[vars_use] = 1 138 | lprob_vars_noReplace = np.log(dmwnchBass(z_vec, vars_use)) 139 | out = ( 140 | np.log(I_vec[n_int - 1]) 141 | + lprob_vars_noReplace 142 | - n_int * np.log(2) # proposal 143 | + n_int * np.log(2) 144 | + np.log(comb(p, n_int)) 145 | + np.log(maxInt) 146 | ) # prior 147 | return out 148 | 149 | 150 | CandidateBasis = namedtuple("CandidateBasis", "basis n_int signs vs knots lbmcmp") 151 | 152 | 153 | def genCandBasis(maxInt, I_vec, z_vec, p, xdata): 154 | """ 155 | Generate a candidate basis for birth step, as well as the RJMCMC 156 | reversibility factor and prior 157 | """ 158 | n_int = int(np.random.choice(range(maxInt), p=I_vec) + 1) 159 | signs = np.random.choice([-1, 1], size=n_int, replace=True) 160 | # knots = np.random.rand(n_int) 161 | knots = np.zeros(n_int) 162 | if n_int == 1: 163 | vs = np.random.choice(p) 164 | knots = np.random.choice(xdata[:, vs], size=1) 165 | else: 166 | vs = np.sort(np.random.choice(p, size=n_int, p=z_vec, replace=False)) 167 | for i in range(n_int): 168 | knots[i] = np.random.choice(xdata[:, vs[i]], size=1) 169 | 170 | basis = makeBasis(signs, vs, knots, xdata) 171 | lbmcmp = logProbChangeMod(n_int, vs, I_vec, z_vec, p, maxInt) 172 | return CandidateBasis(basis, n_int, signs, vs, knots, lbmcmp) 173 | 174 | 175 | BasisChange = namedtuple("BasisChange", "basis signs vs knots") 176 | 177 | 178 | def genBasisChange(knots, signs, vs, tochange_int, xdata): 179 | """Generate a condidate basis for change step""" 180 | knots_cand = knots.copy() 181 | signs_cand = signs.copy() 182 | signs_cand[tochange_int] = np.random.choice([-1, 1], size=1) 183 | knots_cand[tochange_int] = np.random.choice( 184 | xdata[:, vs[tochange_int]], size=1 185 | ) # np.random.rand(1) 186 | basis = makeBasis(signs_cand, vs, knots_cand, xdata) 187 | return BasisChange(basis, signs_cand, vs, knots_cand) 188 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "pyBASS" 3 | version = "0.3.2" 4 | description = "Bayesian Adaptive Spline Surfaces" 5 | authors = [ 6 | {name = "Devin Francom", email = "dfrancom@lanl.gov"}, 7 | {name = "Arthur Lui", email = "luiarthur@gmail.com"}, 8 | {name = "J. 
Derek Tucker", email = "jdtuck@sandia.gov"} 9 | ] 10 | license = {text = "BSD 3-Clause"} 11 | readme = "README.md" 12 | requires-python = ">=3.6" 13 | 14 | keywords = ["emulator"] 15 | 16 | dependencies=[ 17 | "numpy", 18 | "matplotlib", 19 | "scipy", 20 | ] 21 | 22 | classifiers = [ 23 | 'License :: OSI Approved :: BSD License', 24 | 'Operating System :: OS Independent', 25 | 'Programming Language :: Python', 26 | 'Topic :: Scientific/Engineering', 27 | 'Topic :: Scientific/Engineering :: Mathematics', 28 | 'Programming Language :: Python :: 3', 29 | 'Programming Language :: Python :: 3.6', 30 | ] 31 | 32 | [project.urls] 33 | repository = "http://www.github.com/lanl/pyBASS" 34 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | matplotlib 3 | scipy -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/pyBASS/d8ecc174288239951fde394ac83ffdc6f701c8b2/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_bassPCA_fit.py: -------------------------------------------------------------------------------- 1 | import pyBASS as pb 2 | import numpy as np 3 | from .util import rootmeansqerror 4 | 5 | def test_bassPCA_fit(): 6 | # Friedman function with functional response 7 | def f2(x): 8 | out = 10. * np.sin(np.pi * np.linspace(0, 1, 50) * x[1]) + 20. * (x[2] - .5) ** 2 + 10 * x[3] + 5. * x[4] 9 | return out 10 | 11 | np.random.seed(0) 12 | tt = np.linspace(0, 1, 50) # functional variable grid 13 | n = 500 # sample size 14 | p = 9 # number of predictors other (only 4 are used) 15 | x = np.random.rand(n, p) # training inputs 16 | xx = np.random.rand(1000, p) 17 | e = np.random.normal(size=[n, len(tt)]) * .1 # noise 18 | y = np.apply_along_axis(f2, 1, x) + e # training response 19 | ftest = np.apply_along_axis(f2, 1, xx) 20 | 21 | # fit BASS model with RJMCMC 22 | mod = pb.bassPCA(x, y, maxInt=5) 23 | 24 | # predict at new inputs (xnew) 25 | pred = mod.predict(xx, nugget=True) 26 | 27 | # Root mean squred error 28 | rmse = rootmeansqerror(pred.mean(0), ftest) 29 | print("RMSE: ", rmse) 30 | 31 | # Test that RMSE is less than 0.05 for this model, which should be the case 32 | # from previous tests. 33 | assert rmse < 0.05 34 | -------------------------------------------------------------------------------- /tests/test_bass_fit.py: -------------------------------------------------------------------------------- 1 | import pyBASS as pb 2 | import numpy as np 3 | from .util import rootmeansqerror 4 | 5 | def test_bass_fit(): 6 | # Friedman function (Friedman, 1991, Multivariate Adaptive Regression Splines) 7 | def f(x): 8 | return ( 9 | 10. * np.sin(np.pi * x[:, 0] * x[:, 1]) + 20. * 10 | (x[:, 2] - .5) ** 2 + 10 * x[:, 3] + 5. * x[:, 4] 11 | ) 12 | 13 | # Set random seed for reproducibility. 14 | np.random.seed(0) 15 | 16 | # Generate data. 17 | n = 500 # sample size 18 | p = 10 # number of predictors (only 5 are used) 19 | x = np.random.rand(n, p) # predictors (training set) 20 | y = np.random.normal(f(x), 0.1) # response (training set) with noise. 
21 | 22 | # fit BASS model with RJMCMC 23 | mod = pb.bass(x, y, nmcmc=10000, nburn=9000) 24 | 25 | # predict at new inputs (xnew) 26 | xnew = np.random.rand(1000, p) 27 | pred = mod.predict(xnew, nugget=True) 28 | 29 | # True values at new inputs. 30 | ynew = f(xnew) 31 | 32 | # Root mean squared error 33 | rmse = rootmeansqerror(pred.mean(0), ynew) 34 | print("RMSE: ", rmse) 35 | 36 | # Test that RMSE is less than 0.1 for this model, which should be the case 37 | # from previous tests. 38 | assert rmse < 0.1 39 | -------------------------------------------------------------------------------- /tests/test_sobolBasis.py: -------------------------------------------------------------------------------- 1 | import pyBASS as pb 2 | import numpy as np 3 | 4 | tt = np.linspace(0, 1, 50) # functional variable grid 5 | 6 | 7 | # Get dataset 8 | # Friedman function with functional response 9 | def f2(x): 10 | out = ( 11 | 10.0 * np.sin(np.pi * tt * x[0]) 12 | + 20.0 * (x[1] - 0.5) ** 2 13 | + 10.0 * x[2] 14 | + 5.0 * x[3] 15 | ) 16 | return out 17 | 18 | 19 | np.random.seed(0) 20 | n = 500 # sample size 21 | p = 9 # number of predictors (only 4 are used) 22 | x = np.random.rand(n, p) # training inputs 23 | e = np.random.normal(size=[n, len(tt)]) * 0.1 # noise 24 | y = np.apply_along_axis(f2, 1, x) + e # training response 25 | 26 | # fit BASS model with RJMCMC 27 | mod = pb.bassPCA(x, y) 28 | 29 | sob = pb.sobolBasis(mod) 30 | sob.decomp(int_order=1) 31 | sob.plot() 32 | 33 | # Check that T_var is computed correctly 34 | S_var = sob.S_var 35 | T_var = sob.T_var 36 | assert np.all( 37 | T_var[0] 38 | == np.sum( 39 | [S_var[i] for i in range(len(S_var)) if str(1) in sob.names_ind[i]], axis=0 40 | ) 41 | ) 42 | assert np.all( 43 | T_var[1] 44 | == np.sum( 45 | [S_var[i] for i in range(len(S_var)) if str(2) in sob.names_ind[i]], axis=0 46 | ) 47 | ) 48 | assert np.all( 49 | T_var[2] 50 | == np.sum( 51 | [S_var[i] for i in range(len(S_var)) if str(3) in sob.names_ind[i]], axis=0 52 | ) 53 | ) 54 | assert np.all( 55 | T_var[3] 56 | == np.sum( 57 | [S_var[i] for i in range(len(S_var)) if str(4) in sob.names_ind[i]], axis=0 58 | ) 59 | ) 60 | ### etc 61 | assert np.all( 62 | T_var[8] 63 | == np.sum( 64 | [S_var[i] for i in range(len(S_var)) if str(9) in sob.names_ind[i]], axis=0 65 | ) 66 | ) 67 | -------------------------------------------------------------------------------- /tests/util.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | def rootmeansqerror(predictions, targets): 4 | return np.sqrt(((predictions - targets) ** 2).mean()) 5 | --------------------------------------------------------------------------------
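The test files above double as the quickest guide to how the pieces fit together: pb.bass and pb.bassPCA fit the emulator, predict() evaluates posterior draws at new inputs, and sobolBasis(mod).decomp() followed by .plot() produces the functional Sobol' decomposition implemented in pyBASS/sobol.py, storing the results in S, S_var, T_var, Var_tot, and names_ind. A minimal end-to-end sketch, condensed from tests/test_sobolBasis.py; the toy data-generating function and sample sizes here are illustrative choices, not anything fixed by the package.

import numpy as np
import pyBASS as pb

# toy functional-response data, the same shape of problem as in the tests
tt = np.linspace(0, 1, 50)                        # functional grid
x = np.random.rand(300, 5)                        # training inputs
y = np.array([10.0 * np.sin(np.pi * tt * xi[0]) + 5.0 * xi[1] for xi in x])
y += np.random.normal(scale=0.1, size=y.shape)    # observation noise

mod = pb.bassPCA(x, y)                            # fit functional BASS emulator with RJMCMC
pred = mod.predict(np.random.rand(10, 5), nugget=True)  # posterior predictive draws

sob = pb.sobolBasis(mod)                          # Sobol' decomposition of the emulator
sob.decomp(int_order=2)                           # main effects plus two-way interactions
sob.plot()                                        # sensitivity, variance, and total Sobol' panels
print(sob.names_ind)                              # labels of retained effects, e.g. "1", "1x2", ..., "other"

The proportions of variance land in sob.S, the unnormalized variances in sob.S_var, and the total indices in sob.T_var, which is exactly the relationship tests/test_sobolBasis.py checks.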