├── .coveragerc ├── .flake8 ├── .gitattributes ├── .github └── workflows │ ├── publish-pypi.yml │ └── run-CI.yml ├── .gitignore ├── LICENSE ├── MANIFEST.in ├── README.md ├── causalimpact ├── __init__.py ├── __version__.py ├── data.py ├── inferences.py ├── main.py ├── misc.py ├── model.py ├── plot.py ├── summary.py └── summary │ └── templates │ ├── report │ └── summary ├── notebooks ├── R │ ├── arma │ │ ├── Rplot.png │ │ └── summary.txt.txt │ ├── arma_data.csv │ ├── basque.csv │ ├── basque │ │ ├── Rplot.png │ │ └── summary.txt.txt │ ├── comparison │ │ ├── Pythonplot.png │ │ ├── Rplot.png │ │ └── summary.txt.txt │ ├── comparison_data.csv │ ├── google │ │ ├── Rplot.png │ │ └── summary.txt.txt │ ├── google_data.csv │ ├── volks │ │ ├── Rplot.png │ │ └── summary.txt.txt │ └── volks_data.csv ├── getting_started.ipynb ├── tfcausal_plot_example.png └── tfcausal_plot_original_example.png ├── pytest.ini ├── scripts └── build_wheels.sh ├── setup.cfg ├── setup.py ├── stubs └── tensorflow_probability │ ├── __init__.pyi │ └── sts │ └── __init__.pyi ├── test-requirements.txt ├── tests ├── conftest.py ├── fixtures │ ├── arma_data.csv │ ├── arma_sparse_reg.csv │ ├── basque.csv │ ├── btc.csv │ ├── comparison_data.csv │ ├── google_data.csv │ ├── season_data.csv │ ├── test_output_summary_1 │ ├── test_output_summary_single_digit │ ├── test_report_summary_1 │ ├── test_report_summary_2 │ ├── test_report_summary_3 │ ├── test_report_summary_4 │ ├── test_report_summary_single_digit │ └── volks_data.csv ├── test_data.py ├── test_inferences.py ├── test_main.py ├── test_misc.py ├── test_model.py ├── test_plot.py └── test_summary.py └── tox.ini /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | include = 4 | causalimpact/* 5 | omit = 6 | tests/* 7 | causalimpact/__version__.py 8 | 9 | 10 | [report] 11 | exclude_lines = 12 | pragma: no cover 13 | 
-------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length=90 3 | exclude = 4 | setup.py 5 | causalimpact/__init__.py 6 | /*per-file-ignores =*/ 7 | /*causalimpact/model.py: E501*/ 8 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | notebooks/* linguist-documentation 2 | -------------------------------------------------------------------------------- /.github/workflows/publish-pypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish tfcausalimpct to PyPI 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | 8 | jobs: 9 | build-n-publish: 10 | name: Build and publish to PyPI 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v2 14 | 15 | - name: Set up Python 3.12 16 | uses: actions/setup-python@v2 17 | with: 18 | python-version: '3.12' 19 | 20 | - name: Install dependencies 21 | run: | 22 | python -m pip install -U pip 23 | python -m pip install -U setuptools wheel twine 24 | - name: Build dist 25 | run: | 26 | python setup.py sdist bdist_wheel 27 | - name: Upload to PyPI 28 | uses: pypa/gh-action-pypi-publish@release/v1 29 | with: 30 | user: __token__ 31 | password: ${{ secrets.PYPI_API_TOKEN }} 32 | -------------------------------------------------------------------------------- /.github/workflows/run-CI.yml: -------------------------------------------------------------------------------- 1 | name: Run Unit Tests 2 | on: [push, pull_request] 3 | jobs: 4 | run-CI: 5 | runs-on: ${{ matrix.os }} 6 | strategy: 7 | matrix: 8 | os: [ubuntu-latest, macos-latest, windows-latest] 9 | python: ['3.8', '3.9', '3.10', '3.11', '3.12'] 10 | exclude: 11 | - os: macos-latest 12 | python: '3.8' 13 | - os: macos-latest 14 | python: 
'3.9' 15 | - os: macos-latest 16 | python: '3.10' 17 | - os: macos-latest 18 | python: '3.11' 19 | - os: windows-latest 20 | python: '3.8' 21 | - os: windows-latest 22 | python: '3.9' 23 | - os: windows-latest 24 | python: '3.10' 25 | - os: windows-latest 26 | python: '3.11' 27 | steps: 28 | - uses: actions/checkout@v2 29 | 30 | - name: Python Version ${{ matrix.python }} 31 | uses: actions/setup-python@v2 32 | with: 33 | python-version: ${{ matrix.python }} 34 | 35 | - name: Cache pip 36 | uses: actions/cache@v3 37 | with: 38 | path: ~/.cache/pip 39 | key: ${{ runner.os }} 40 | restore-keys: | 41 | ${{ runner.os }} 42 | 43 | - name: Install dependencies 44 | run: | 45 | python -m pip install --upgrade pip 46 | pip install setuptools tox tox-gh-actions 47 | 48 | - name: Test with tox 49 | run: tox 50 | env: 51 | TOX_SKIP_ENV: coverage,GHA-coverage 52 | 53 | - name: Lint 54 | if: ${{ matrix.python == '3.12' && runner.os == 'Linux'}} 55 | run: tox -e lint 56 | 57 | - name: isort 58 | if: ${{ matrix.python == '3.12' && runner.os == 'Linux'}} 59 | run: tox -e isort-check 60 | 61 | 62 | - name: Build Coverage 63 | if: ${{ matrix.python == '3.12' && runner.os == 'Linux'}} 64 | run: tox -e GHA-coverage 65 | 66 | - name: Upload Coveralls 67 | if: ${{ matrix.python == '3.12' && runner.os == 'Linux'}} 68 | uses: coverallsapp/github-action@master 69 | with: 70 | github-token: ${{ secrets.GITHUB_TOKEN }} 71 | path-to-lcov: coverage.lcov 72 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 
25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | *.lcov 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | .python-version 87 | 88 | # pipenv 89 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 90 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 91 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 92 | # install all needed dependencies. 93 | #Pipfile.lock 94 | 95 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 96 | __pypackages__/ 97 | 98 | # Celery stuff 99 | celerybeat-schedule 100 | celerybeat.pid 101 | 102 | # SageMath parsed files 103 | *.sage.py 104 | 105 | # Environments 106 | .env 107 | .env* 108 | .venv 109 | env/ 110 | venv/ 111 | ENV/ 112 | env.bak/ 113 | venv.bak/ 114 | 115 | # Spyder project settings 116 | .spyderproject 117 | .spyproject 118 | 119 | # Rope project settings 120 | .ropeproject 121 | 122 | # mkdocs documentation 123 | /site 124 | 125 | # mypy 126 | .mypy_cache/ 127 | .dmypy.json 128 | dmypy.json 129 | 130 | # Pyre type checker 131 | .pyre/ 132 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include LICENSE 3 | recursive-include causalimpact/summary/templates * 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # tfcausalimpact 2 | [![Build Status](https://travis-ci.com/WillianFuks/tfcausalimpact.svg?branch=master)](https://travis-ci.com/WillianFuks/tfcausalimpact) [![Coverage Status](https://coveralls.io/repos/github/WillianFuks/tfcausalimpact/badge.svg?branch=master)](https://coveralls.io/github/WillianFuks/tfcausalimpact?branch=master) [![GitHub license](https://img.shields.io/github/license/WillianFuks/tfcausalimpact.svg)](https://github.com/WillianFuks/tfcausalimpact/blob/master/LICENSE) [![PyPI 
version](https://badge.fury.io/py/tfcausalimpact.svg)](https://badge.fury.io/py/tfcausalimpact) [![Pyversions](https://img.shields.io/pypi/pyversions/tfcausalimpact.svg)](https://pypi.python.org/pypi/tfcausalimpact) 3 | 4 | Google's [Causal Impact](https://github.com/google/CausalImpact) Algorithm Implemented on Top of [TensorFlow Probability](https://github.com/tensorflow/probability). 5 | 6 | ## How It Works 7 | The algorithm basically fits a [Bayesian structural](https://en.wikipedia.org/wiki/Bayesian_structural_time_series) model on past observed data to make predictions on what future data would look like. Past data comprises everything that happened before an intervention (which usually is the changing of a variable as being present or not, such as a marketing campaign that starts to run at a given point). It then compares the counter-factual (predicted) series against what was really observed in order to extract statistical conclusions. 8 | 9 | Running the model is quite straightforward, it requires the observed data `y`, covariates `X` that helps the model through a linear regression, a `pre-period` interval that selects everything that happened before the intervention and a `post-period` with data after the "impact" happened. 10 | 11 | Please refer to this medium [post](https://towardsdatascience.com/implementing-causal-impact-on-top-of-tensorflow-probability-c837ea18b126) for more on this subject. 12 | 13 | ## Installation 14 | 15 | pip install tfcausalimpact 16 | 17 | ## Requirements 18 | 19 | - python{3.7, 3.8, 3.9, 3.10, 3.11} 20 | - matplotlib 21 | - jinja2 22 | - tensorflow>=2.10.0 23 | - tensorflow_probability>=0.18.0 24 | - pandas >= 1.3.5 25 | 26 | 27 | ## Getting Started 28 | 29 | We recommend this [presentation](https://www.youtube.com/watch?v=GTgZfCltMm8) by Kay Brodersen (one of the creators of the Causal Impact in R). 
30 | 31 | We also created this introductory [ipython notebook](https://github.com/WillianFuks/tfcausalimpact/blob/master/notebooks/getting_started.ipynb) with examples of how to use this package. 32 | 33 | This medium [article](https://towardsdatascience.com/implementing-causal-impact-on-top-of-tensorflow-probability-c837ea18b126) also offers some ideas and concepts behind the library. 34 | 35 | ### Example 36 | 37 | Here's a simple example (which can also be found in the original Google's R implementation) running in Python: 38 | 39 | ```python 40 | import pandas as pd 41 | from causalimpact import CausalImpact 42 | 43 | 44 | data = pd.read_csv('https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/master/tests/fixtures/arma_data.csv')[['y', 'X']] 45 | data.iloc[70:, 0] += 5 46 | 47 | pre_period = [0, 69] 48 | post_period = [70, 99] 49 | 50 | ci = CausalImpact(data, pre_period, post_period) 51 | print(ci.summary()) 52 | print(ci.summary(output='report')) 53 | ci.plot() 54 | ``` 55 | 56 | Summary should look like this: 57 | 58 | ``` 59 | Posterior Inference {Causal Impact} 60 | Average Cumulative 61 | Actual 125.23 3756.86 62 | Prediction (s.d.) 120.34 (0.31) 3610.28 (9.28) 63 | 95% CI [119.76, 120.97] [3592.67, 3629.06] 64 | 65 | Absolute effect (s.d.) 4.89 (0.31) 146.58 (9.28) 66 | 95% CI [4.26, 5.47] [127.8, 164.19] 67 | 68 | Relative effect (s.d.) 4.06% (0.26%) 4.06% (0.26%) 69 | 95% CI [3.54%, 4.55%] [3.54%, 4.55%] 70 | 71 | Posterior tail-area probability p: 0.0 72 | Posterior prob. of a causal effect: 100.0% 73 | 74 | For more details run the command: print(impact.summary('report')) 75 | ``` 76 | 77 | And here's the plot graphic: 78 | 79 | ![alt text](https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/master/notebooks/tfcausal_plot_example.png) 80 | 81 | ## Google R Package vs TensorFlow Python 82 | 83 | Both packages should give equivalent results. Here's an example using the `comparison_data.csv` dataset available in the `fixtures` folder. 
When running CausalImpact in the original R package, this is the result: 84 | 85 | ### R 86 | 87 | ```{r} 88 | data = read.csv.zoo('comparison_data.csv', header=TRUE) 89 | pre.period <- c(as.Date("2019-04-16"), as.Date("2019-07-14")) 90 | post.period <- c(as.Date("2019-07-15"), as.Date("2019-08-01")) 91 | ci = CausalImpact(data, pre.period, post.period) 92 | ``` 93 | 94 | Summary results: 95 | 96 | ``` 97 | Posterior inference {CausalImpact} 98 | 99 | Average Cumulative 100 | Actual 78574 1414340 101 | Prediction (s.d.) 79232 (736) 1426171 (13253) 102 | 95% CI [77743, 80651] [1399368, 1451711] 103 | 104 | Absolute effect (s.d.) -657 (736) -11831 (13253) 105 | 95% CI [-2076, 832] [-37371, 14971] 106 | 107 | Relative effect (s.d.) -0.83% (0.93%) -0.83% (0.93%) 108 | 95% CI [-2.6%, 1%] [-2.6%, 1%] 109 | 110 | Posterior tail-area probability p: 0.20061 111 | Posterior prob. of a causal effect: 80% 112 | 113 | For more details, type: summary(impact, "report") 114 | ``` 115 | 116 | And correspondent plot: 117 | 118 | ![alt text](https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/master/notebooks/R/comparison/Rplot.png) 119 | 120 | ### Python 121 | 122 | ```python 123 | import pandas as pd 124 | from causalimpact import CausalImpact 125 | 126 | 127 | data = pd.read_csv('https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/master/tests/fixtures/comparison_data.csv', index_col=['DATE']) 128 | pre_period = ['2019-04-16', '2019-07-14'] 129 | post_period = ['2019-7-15', '2019-08-01'] 130 | ci = CausalImpact(data, pre_period, post_period, model_args={'fit_method': 'hmc'}) 131 | ``` 132 | 133 | Summary is: 134 | 135 | ``` 136 | Posterior Inference {Causal Impact} 137 | Average Cumulative 138 | Actual 78574.42 1414339.5 139 | Prediction (s.d.) 79282.92 (727.48) 1427092.62 (13094.72) 140 | 95% CI [77849.5, 80701.18][1401290.94, 1452621.31] 141 | 142 | Absolute effect (s.d.) 
-708.51 (727.48) -12753.12 (13094.72) 143 | 95% CI [-2126.77, 724.92] [-38281.81, 13048.56] 144 | 145 | Relative effect (s.d.) -0.89% (0.92%) -0.89% (0.92%) 146 | 95% CI [-2.68%, 0.91%] [-2.68%, 0.91%] 147 | 148 | Posterior tail-area probability p: 0.16 149 | Posterior prob. of a causal effect: 84.12% 150 | 151 | For more details run the command: print(impact.summary('report')) 152 | ``` 153 | 154 | And plot: 155 | 156 | ![alt text](https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/master/notebooks/R/comparison/Pythonplot.png) 157 | 158 | Both results are equivalent. 159 | 160 | ## Performance 161 | 162 | This package uses as default the [`Variational Inference`](https://en.wikipedia.org/wiki/Variational_Bayesian_methods) method from `TensorFlow Probability` which is faster and should work for the most part. Convergence can take somewhere between 2~3 minutes on more complex time series. You could also try running the package on top of GPUs to see if results improve. 163 | 164 | If, on the other hand, precision is the top requirement when running causal impact analyzes, it's possible to switch algorithms by manipulating the input arguments like so: 165 | 166 | ```python 167 | ci = CausalImpact(data, pre_period, post_period, model_args={'fit_method': 'hmc'}) 168 | ``` 169 | 170 | This will make usage of the algorithm [`Hamiltonian Monte Carlo`](https://en.wikipedia.org/wiki/Hamiltonian_Monte_Carlo) which is State-of-the-Art for finding the Bayesian posterior of distributions. Still, keep in mind that on complex time series with thousands of data points and complex modeling involving various seasonal components this optimization can take 1 hour or even more to complete (on a GPU). Performance is sacrificed in exchange for better precision. 
171 | 172 | ## Bugs & Issues 173 | 174 | If you find bugs or have any issues while running this library please consider opening an [`Issue`](https://github.com/WillianFuks/tfcausalimpact/issues) with a complete description and reproductible environment so we can better help you solving the problem. 175 | -------------------------------------------------------------------------------- /causalimpact/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright WillianFuks 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | 16 | from causalimpact.__version__ import __version__ 17 | from causalimpact.main import CausalImpact 18 | -------------------------------------------------------------------------------- /causalimpact/__version__.py: -------------------------------------------------------------------------------- 1 | # Copyright WillianFuks 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
def get_lower_upper_percentiles(alpha: float) -> List[float]:
    """
    Computes the pair of percentile bounds that delimit the credible interval
    implied by `alpha`.

    Args
    ----
    alpha: float
        Sets the size of the credible interval. If `alpha=0.05` then extracts the
        95% credible interval for forecasts.

    Returns
    -------
    List[float]
        First value is the lower quantile and second value is upper.
    """
    # Half of the total tail mass goes below the interval, half above.
    lower_quantile = alpha * 100. / 2.
    return [lower_quantile, 100 - lower_quantile]
def compile_posterior_inferences(
    original_index: pd.core.indexes.base.Index,
    mask: np.array,
    pre_data: pd.DataFrame,
    post_data: pd.DataFrame,
    one_step_dist: tfd.Distribution,
    posterior_dist: tfd.Distribution,
    mu_sig: Optional[Tuple[float, float]],
    alpha: float = 0.05,
    niter: int = 1000
) -> pd.DataFrame:
    """
    Uses the posterior distribution of the structural time series probabilistic
    model to run predictions and forecasts for observed data. Results are stored for
    later usage on the summary and plotting functionalities.

    Args
    ----
    original_index: pd.core.indexes.base.Index
        Original index from input data. If it's a `RangeIndex` then cast inferences
        index to be of the same type.
    mask: np.array
        Some points in `post_data` may be `NaN` values given the
        `tfp.sts.regularize_series` transformation. In order to avoid those points
        from breaking the posterior inference, a boolean mask is created to drop out
        those points.
    pre_data: pd.DataFrame
        This is the original input data, that is, it's not standardized.
    post_data: pd.DataFrame
        Same as `pre_data`.
        This is the original input data, that is, it's not standardized.
    one_step_dist: tfd.Distribution
        Uses posterior parameters to run one-step-prediction on past observed data.
    posterior_dist: tfd.Distribution
        Uses posterior parameters to run forecasts on post intervention data.
    mu_sig: Optional[Tuple[float, float]]
        First value is the mean used for standardization and second value is the
        standard deviation. `None` means no standardization was applied.
    alpha: float
        Sets credible interval size.
    niter: int
        Total mcmc samples to sample from the posterior structural model.

    Returns
    -------
    inferences: pd.DataFrame
        Final dataframe with all data related to one-step predictions and forecasts.
    """
    lower_percen, upper_percen = get_lower_upper_percentiles(alpha)
    # Drop the post points that were regularized away as NaN (see `mask` above).
    post_data = post_data[mask]
    # Integrates pre and post index for cumulative index data.
    cum_index = build_cum_index(pre_data.index, post_data.index)
    # We create a pd.Series with a single 0 (zero) value to work as the initial value
    # when computing the cumulative inferences. Without this value the plotting of
    # cumulative data breaks at the initial point.
    zero_series = pd.Series([0])
    simulated_pre_ys = one_step_dist.sample(niter)  # shape (niter, n_train_timestamps, 1)
    simulated_pre_ys = maybe_unstandardize(
        np.squeeze(simulated_pre_ys.numpy()),
        mu_sig
    )  # shape (niter, n_train_timestamps)
    simulated_post_ys = posterior_dist.sample(niter)  # shape (niter, n_forecasts, 1)
    simulated_post_ys = maybe_unstandardize(
        np.squeeze(simulated_post_ys.numpy())[:, mask],
        mu_sig
    )  # shape (niter, n_forecasts)
    # Pre inference: one-step-ahead predictions over the training period.
    pre_preds_means = one_step_dist.mean()
    pre_preds_means = pd.Series(
        np.squeeze(
            maybe_unstandardize(pre_preds_means, mu_sig)
        ),
        index=pre_data.index
    )
    # Credible-interval bounds come from the empirical percentiles of the
    # simulated trajectories, not from a closed-form formula.
    pre_preds_lower, pre_preds_upper = np.percentile(
        simulated_pre_ys,
        [lower_percen, upper_percen],
        axis=0
    )
    pre_preds_lower = pd.Series(pre_preds_lower, index=pre_data.index)
    pre_preds_upper = pd.Series(pre_preds_upper, index=pre_data.index)
    # Post inference: counterfactual forecasts over the post-intervention period.
    post_preds_means = posterior_dist.mean()  # shape (len(post_data), 1)
    post_preds_means = pd.Series(
        np.squeeze(
            maybe_unstandardize(post_preds_means[mask], mu_sig)
        ),
        index=post_data.index
    )
    post_preds_lower, post_preds_upper = np.percentile(
        simulated_post_ys,
        [lower_percen, upper_percen],
        axis=0
    )
    post_preds_lower = pd.Series(post_preds_lower, index=post_data.index)
    post_preds_upper = pd.Series(post_preds_upper, index=post_data.index)
    # Concatenations: stitch pre and post series into full-period series.
    complete_preds_means = pd.concat([pre_preds_means, post_preds_means])
    complete_preds_lower = pd.concat([pre_preds_lower, post_preds_lower])
    complete_preds_upper = pd.concat([pre_preds_upper, post_preds_upper])
    # Cumulative: prepend the zero so cumulative curves start at 0 on `cum_index`.
    post_cum_y = np.cumsum(post_data.iloc[:, 0])
    post_cum_y = pd.concat([zero_series, post_cum_y], axis=0)
    post_cum_y.index = cum_index
    post_cum_preds_means = np.cumsum(post_preds_means)
    post_cum_preds_means = pd.concat([zero_series, post_cum_preds_means])
    post_cum_preds_means.index = cum_index
    post_cum_preds_lower, post_cum_preds_upper = np.percentile(
        np.cumsum(simulated_post_ys, axis=1),
        [lower_percen, upper_percen],
        axis=0
    )
    # Sets index properly
    post_cum_preds_lower = pd.Series(
        np.squeeze(
            np.concatenate([[0], post_cum_preds_lower])
        ),
        index=cum_index
    )
    post_cum_preds_upper = pd.Series(
        np.squeeze(
            np.concatenate([[0], post_cum_preds_upper])
        ),
        index=cum_index
    )
    # Using a net value of data to accommodate cases where there're gaps between pre
    # and post intervention periods.
    net_data = pd.concat([pre_data, post_data])
    # Point effects: observed minus predicted. Note the lower/upper swap — a
    # *high* prediction bound implies a *low* effect bound and vice versa.
    point_effects_means = net_data.iloc[:, 0] - complete_preds_means
    point_effects_upper = net_data.iloc[:, 0] - complete_preds_lower
    point_effects_lower = net_data.iloc[:, 0] - complete_preds_upper
    post_point_effects_means = post_data.iloc[:, 0] - post_preds_means
    # Cumulative point effects analysis
    post_cum_effects_means = np.cumsum(post_point_effects_means)
    post_cum_effects_means = pd.concat([zero_series, post_cum_effects_means])
    post_cum_effects_means.index = cum_index
    post_cum_effects_lower, post_cum_effects_upper = np.percentile(
        np.cumsum(post_data.iloc[:, 0].values - simulated_post_ys, axis=1),
        [lower_percen, upper_percen],
        axis=0
    )
    # Sets index properly.
    post_cum_effects_lower = pd.Series(
        np.squeeze(
            np.concatenate([[0], post_cum_effects_lower])
        ),
        index=cum_index
    )
    post_cum_effects_upper = pd.Series(
        np.squeeze(
            np.concatenate([[0], post_cum_effects_upper])
        ),
        index=cum_index
    )

    inferences = pd.concat(
        [
            complete_preds_means,
            complete_preds_lower,
            complete_preds_upper,
            post_preds_means,
            post_preds_lower,
            post_preds_upper,
            post_cum_y,
            post_cum_preds_means,
            post_cum_preds_lower,
            post_cum_preds_upper,
            point_effects_means,
            point_effects_lower,
            point_effects_upper,
            post_cum_effects_means,
            post_cum_effects_lower,
            post_cum_effects_upper
        ],
        axis=1
    )
    inferences.columns = [
        'complete_preds_means',
        'complete_preds_lower',
        'complete_preds_upper',
        'post_preds_means',
        'post_preds_lower',
        'post_preds_upper',
        'post_cum_y',
        'post_cum_preds_means',
        'post_cum_preds_lower',
        'post_cum_preds_upper',
        'point_effects_means',
        'point_effects_lower',
        'point_effects_upper',
        'post_cum_effects_means',
        'post_cum_effects_lower',
        'post_cum_effects_upper'
    ]
    # Restore a plain RangeIndex when the caller's original data used one, so the
    # output aligns with the input's positional indexing.
    if isinstance(original_index, pd.RangeIndex):
        inferences.set_index(pd.RangeIndex(start=0, stop=len(inferences)), inplace=True)
    return inferences
def build_cum_index(
    pre_data_index: Union[pd.core.indexes.range.RangeIndex,
                          pd.core.indexes.datetimes.DatetimeIndex],
    post_data_index: Union[pd.core.indexes.range.RangeIndex,
                           pd.core.indexes.datetimes.DatetimeIndex]
) -> Union[pd.core.indexes.range.RangeIndex,
           pd.core.indexes.datetimes.DatetimeIndex]:
    """
    As the cumulative data has one more data point (the first point is a zero),
    we add to the post-intervention data the first index of the pre-data right at the
    beginning of the index. This helps in the plotting functionality.

    Args
    ----
    pre_data_index: Union[pd.core.indexes.range.RangeIndex,
                          pd.core.indexes.datetimes.DatetimeIndex]
    post_data_index: Union[pd.core.indexes.range.RangeIndex,
                           pd.core.indexes.datetimes.DatetimeIndex]

    Returns
    -------
    Union[pd.core.indexes.range.RangeIndex, pd.core.indexes.datetimes.DatetimeIndex]
        `post_data_index` extended with the latest index value from `pre_data`.
    """
    # In newer versions of Numpy/Pandas, the union operation between indices can
    # return an Index with `dtype=object`; remember the original dtype so the
    # plotting interface keeps working with the expected index type.
    original_dtype = post_data_index.dtype
    extended_index = post_data_index.union([pre_data_index[-1]])
    return extended_index.astype(original_dtype)
def summarize_posterior_inferences(
    post_preds_means: pd.core.series.Series,
    post_data: pd.DataFrame,
    simulated_ys: Union[np.array, tf.Tensor],
    alpha: float = 0.05
) -> pd.DataFrame:
    """
    Aggregates the compiled posterior inferences into the final Causal Impact
    interpretation metrics, such as the expected absolute impact of the given
    intervention and its credible interval.

    Args
    ----
    post_preds_means: pd.core.series.Series
        Forecasts means of post intervention data.
    post_data: pd.DataFrame
    simulated_ys: Union[np.array, tf.Tensor]
        Array of simulated forecasts for response `y` extracted from running mcmc
        samples from the posterior `P(z | y)`.
    alpha: float

    Returns
    -------
    summary: pd.DataFrame
        Summary data which is used in the `summary` functionality.
    """
    # Inlined credible-interval percentiles (lower and upper tails of `alpha`).
    lower_percen, upper_percen = alpha * 100. / 2., 100 - alpha * 100. / 2.
    observed = post_data.iloc[:, 0]
    # Average-based metrics over the post-intervention period.
    mean_y = observed.mean()
    mean_pred = post_preds_means.mean()
    mean_pred_lower, mean_pred_upper = np.percentile(
        simulated_ys.mean(axis=1), [lower_percen, upper_percen]
    )
    # Sum-based (cumulative) metrics over the post-intervention period.
    sum_y = observed.sum()
    sum_pred = post_preds_means.sum()
    sum_pred_lower, sum_pred_upper = np.percentile(
        simulated_ys.sum(axis=1), [lower_percen, upper_percen]
    )
    # Note the lower/upper swap for effects: subtracting the *upper* prediction
    # bound yields the *lower* effect bound and vice versa.
    rows = {
        'actual': [mean_y, sum_y],
        'predicted': [mean_pred, sum_pred],
        'predicted_lower': [mean_pred_lower, sum_pred_lower],
        'predicted_upper': [mean_pred_upper, sum_pred_upper],
        'abs_effect': [mean_y - mean_pred, sum_y - sum_pred],
        'abs_effect_lower': [mean_y - mean_pred_upper, sum_y - sum_pred_upper],
        'abs_effect_upper': [mean_y - mean_pred_lower, sum_y - sum_pred_lower],
        'rel_effect': [(mean_y - mean_pred) / mean_pred,
                       (sum_y - sum_pred) / sum_pred],
        'rel_effect_lower': [(mean_y - mean_pred_upper) / mean_pred,
                             (sum_y - sum_pred_upper) / sum_pred],
        'rel_effect_upper': [(mean_y - mean_pred_lower) / mean_pred,
                             (sum_y - sum_pred_lower) / sum_pred],
    }
    return pd.DataFrame.from_dict(
        rows, orient='index', columns=['average', 'cumulative']
    )
def compute_p_value(
    simulated_ys: Union[np.array, tf.Tensor],
    post_data_sum: float
) -> float:
    """
    Computes the p-value for the hypothesis testing that there's signal in the
    observed data. The computation follows the same idea as the one implemented in
    the original R package which consists of simulating with the fitted parameters
    several time series for the post-intervention period and counting how many
    either surpass the total summation of `y` (positive relative effect) or how
    many fall under its summation (negative relative effect).

    Args
    ----
    simulated_ys: Union[np.array, tf.Tensor]
        Forecast simulations for value of `y` extracted from `P(z | y)`.
    post_data_sum: float
        sum of post intervention data.

    Returns
    -------
    p_value: float
        Ranging between 0 and 1, represents the likelihood of obtaining the
        observed data by random chance.
    """
    sims_totals = np.sum(simulated_ys, axis=1)
    # The smaller tail reveals how extreme the observed total is: if few simulated
    # totals surpass `post_data_sum` the effect is positive, and if few fall below
    # it the impact caused the response variable to decrease from what was expected
    # had no effect taken place.
    extreme_count = min(
        np.sum(sims_totals > post_data_sum),
        np.sum(sims_totals < post_data_sum)
    )
    return extreme_count / (len(simulated_ys) + 1)
def standardize(data: pd.DataFrame) -> Tuple[pd.DataFrame, Tuple[float, float]]:
    """
    Applies standardization to input data. Result should have mean zero and
    standard deviation of one (computed per column).

    Args
    ----
    data: pd.DataFrame

    Returns
    -------
    Tuple[pd.DataFrame, Tuple[float, float]]
        data: pd.DataFrame
            standardized data with zero mean and std of one.
        Tuple[float, float]
            mean and standard deviation used on each column of input data to make
            standardization. These values should be used to obtain the original
            dataframe.

    Raises
    ------
    ValueError: if data has only one value.
    """
    if data.shape[0] == 1:
        raise ValueError('Input data must have more than one value')
    mu = data.mean(skipna=True)
    # Population standard deviation (ddof=0).
    std = data.std(skipna=True, ddof=0)
    # `fillna(1)` guards against NaN std values (e.g. all-NaN columns), which
    # would otherwise propagate NaNs through the division.
    data = (data - mu) / std.fillna(1)
    # Fix: return a tuple, matching the annotated return type (was a list).
    # Callers that unpack or index the result are unaffected.
    return data, (mu, std)


def unstandardize(data: pd.DataFrame, mus_sigs: Tuple[float, float]) -> pd.DataFrame:
    """
    Applies the inverse transformation to return to original data using `mus_sigs`
    as reference. Final result should have mean `mu` and standard deviation `std`
    both present in `mus_sigs`.

    Args
    ----
    data: pd.DataFrame
    mus_sigs: Tuple[float, float]
        tuple where first value is the mean used for the standardization and
        second value is the respective standard deviation.

    Returns
    -------
    data: pd.DataFrame
        input data with mean and standard deviation given by `mus_sigs`.
    """
    mu, sig = mus_sigs
    data = (data * sig) + mu
    return data
def maybe_unstandardize(
    data: pd.DataFrame,
    mu_sig: Optional[Tuple[float, float]] = None
) -> pd.DataFrame:
    """
    If input data was standardized this method is used to bring back data to its
    original values. The parameter `mu_sig` holds the values used for
    standardizing (average and std, respectively) the response variable `y`. In
    case `mu_sig` is `None`, it means no standardization was applied; in this
    case, we just return data itself.

    Args
    ----
    data: pd.DataFrame
        Input dataframe to apply unstandardization.
    mu_sig: Optional[Tuple[float, float]]
        First value is the mean and second is the standard deviation used for
        normalizing the response variable `y`.

    Returns
    -------
    pd.DataFrame
        returns original input `data` if `mu_sig` is None and the unstandardized
        data otherwise.
    """
    if mu_sig is None:
        return data
    return unstandardize(data, mu_sig)


def get_z_score(p: float) -> float:
    """
    Returns the correspondent z-score (quantile) with probability area `p` derived
    from a standard normal distribution.

    Args
    ----
    p: float
        ranges between 0 and 1 representing the probability area to convert.

    Returns
    -------
    float
        The z-score (quantile) correspondent of p.
    """
    # `quantile` is the inverse CDF of the standard normal distribution.
    norm = tfd.Normal(0, 1)
    return norm.quantile(p).numpy()
def plot(
    inferences: pd.DataFrame,
    pre_data: pd.DataFrame,
    post_data: pd.DataFrame,
    panels=['original', 'pointwise', 'cumulative'],
    figsize=(10, 7),
    show=True
) -> None:
    """Plots inferences results related to causal impact analysis.

    Args
    ----
    inferences: pd.DataFrame
        Compiled posterior inferences (predictions, point and cumulative effects).
    pre_data: pd.DataFrame
        Observed data from the pre-intervention period.
    post_data: pd.DataFrame
        Observed data from the post-intervention period.
    panels: list.
        Indicates which plot should be considered in the graphics.
    figsize: tuple.
        Changes the size of the graphics plotted.
    show: bool.
        If true, runs plt.show(), i.e., displays the figure.
        If false, it gives access to the axis, i.e., the figure can be saved
        and the style of the plot can be modified by getting the axis with
        `ax = plt.gca()` or the figure with `fig = plt.gcf()`.
        Defaults to True.

    Raises
    ------
    ValueError: if any value in `panels` is not a valid panel name.
    """
    # NOTE(review): `panels` uses a mutable (list) default. It is never mutated
    # here so no bug arises, but a tuple default would be safer — confirm before
    # changing the public signature.
    plt = get_plotter()
    plt.figure(figsize=figsize)
    valid_panels = ['original', 'pointwise', 'cumulative']
    # Validate all requested panels up-front so we fail before drawing anything.
    for panel in panels:
        if panel not in valid_panels:
            raise ValueError(
                '"{}" is not a valid panel. Valid panels are: {}.'.format(
                    panel, ', '.join(['"{}"'.format(e) for e in valid_panels])
                )
            )
    pre_data, post_data, inferences = build_data(pre_data, post_data, inferences)
    pre_post_index = pre_data.index.union(post_data.index)

    # Position of the first post-intervention point within the combined index;
    # the vertical marker is drawn one point before it (last pre point).
    post_period_init = post_data.index[0]
    intervention_idx = pre_post_index.get_loc(post_period_init)
    n_panels = len(panels)
    ax = plt.subplot(n_panels, 1, 1)
    idx = 1
    color = (1.0, 0.4981, 0.0549)
    # The operation `iloc[1:]` is used mainly to remove the uncertainty associated
    # to the predictions of the first points. As the predictions follow
    # `P(z[t] | y[1...t-1], z[1...t-1])` the very first point ends up being quite
    # noisy as there's no previous point observed.
    if 'original' in panels:
        # Panel 1: observed series vs counterfactual prediction with its band.
        ax.plot(
            pre_post_index,
            pd.concat([pre_data.iloc[:, 0], post_data.iloc[:, 0]]),
            'k',
            label='y'
        )
        ax.plot(
            pre_post_index[1:],
            inferences['complete_preds_means'].iloc[1:],
            color='orangered',
            ls='dashed',
            label='Predicted'
        )
        ax.axvline(pre_post_index[intervention_idx - 1], c='gray', linestyle='--')
        ax.fill_between(
            pre_post_index[1:],
            inferences['complete_preds_lower'].iloc[1:],
            inferences['complete_preds_upper'].iloc[1:],
            color=color,
            alpha=0.4
        )
        ax.legend()
        ax.grid(True, color='gainsboro')
        # Hide x labels on all but the bottom panel to avoid duplication.
        if idx != n_panels:
            plt.setp(ax.get_xticklabels(), visible=False)
        idx += 1
    if 'pointwise' in panels:
        # Panel 2: per-point difference between observation and prediction.
        ax = plt.subplot(n_panels, 1, idx, sharex=ax)
        ax.plot(
            pre_post_index[1:],
            inferences['point_effects_means'].iloc[1:],
            ls='dashed',
            color='orangered',
            label='Point Effects'
        )
        ax.axvline(pre_post_index[intervention_idx - 1], c='gray', linestyle='--')
        ax.fill_between(
            pre_post_index[1:],
            inferences['point_effects_lower'].iloc[1:],
            inferences['point_effects_upper'].iloc[1:],
            color=color,
            alpha=0.4
        )
        ax.axhline(y=0, color='gray')
        ax.legend()
        ax.grid(True, color='gainsboro')
        if idx != n_panels:
            plt.setp(ax.get_xticklabels(), visible=False)
        idx += 1
    if 'cumulative' in panels:
        # Panel 3: running sum of the point effects in the post period.
        ax = plt.subplot(n_panels, 1, idx, sharex=ax)
        ax.plot(
            pre_post_index[1:],
            inferences['post_cum_effects_means'].iloc[1:],
            ls='dashed',
            color='orangered',
            label='Cumulative Effect'
        )
        ax.axvline(pre_post_index[intervention_idx - 1], c='gray', linestyle='--')
        ax.fill_between(
            pre_post_index[1:],
            inferences['post_cum_effects_lower'].iloc[1:],
            inferences['post_cum_effects_upper'].iloc[1:],
            color=color,
            alpha=0.4
        )
        ax.axhline(y=0, color='gray', linestyle='--')
        ax.legend()
        ax.grid(True, color='gainsboro')
    if show:
        plt.show()


def build_data(
    pre_data: pd.DataFrame,
    post_data: pd.DataFrame,
    inferences: pd.DataFrame
) -> [pd.DataFrame, pd.DataFrame, pd.DataFrame]:
    """
    Input pre_data may contain NaN points due TFP requirement for a valid frequency
    set. As it may break the plotting API, this function removes those points.

    `post_data` has its potential `NaN` values already removed in main function.
    """
    # When inferences use a RangeIndex, realign pre/post data to contiguous
    # positional indexes so all three frames share the same index space.
    if isinstance(inferences.index, pd.RangeIndex):
        pre_data = pre_data.set_index(pd.RangeIndex(start=0, stop=len(pre_data)))
        post_data = post_data.set_index(pd.RangeIndex(start=len(pre_data),
                                        stop=len(pre_data) + len(post_data)))
    # Drop the same NaN rows from both the data and the inferences so indexes
    # stay aligned for plotting.
    pre_data_null_index = pre_data[pre_data.iloc[:, 0].isnull()].index
    pre_data = pre_data.drop(pre_data_null_index).astype(np.float64)
    post_data = post_data.astype(np.float64)
    inferences = inferences.drop(pre_data_null_index).astype(np.float64)
    return pre_data, post_data, inferences


def get_plotter():  # pragma: no cover
    """As some environments do not have matplotlib then we import the library
    through this method which prevents import exceptions.

    Returns
    -------
    plotter: `matplotlib.pyplot`.
    """
    import matplotlib.pyplot as plt
    return plt
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | 16 | """ 17 | Summarizes performance information inferred in post-inferences compilation process. 18 | """ 19 | 20 | 21 | import os 22 | 23 | import pandas as pd 24 | from jinja2 import Template 25 | 26 | from causalimpact.misc import get_z_score 27 | 28 | _here = os.path.dirname(os.path.abspath(__file__)) 29 | summary_tmpl_path = os.path.join(_here, 'summary', 'templates', 'summary') 30 | report_tmpl_path = os.path.join(_here, 'summary', 'templates', 'report') 31 | 32 | with open(summary_tmpl_path) as f: 33 | SUMMARY_TMPL = Template(f.read()) 34 | with open(report_tmpl_path) as f: 35 | REPORT_TMPL = Template(f.read()) 36 | 37 | 38 | def summary( 39 | summary_data: pd.DataFrame, 40 | p_value: float, 41 | alpha: float = 0.05, 42 | output: str = 'summary', 43 | digits: int = 2 44 | ) -> str: 45 | """ 46 | Returns final results from causal impact analysis, such as absolute observed effect, 47 | the relative effect between prediction and observed variable, cumulative performances 48 | in post-intervention period among other metrics. 49 | 50 | Args 51 | ---- 52 | summary_data: pd.DataFrame 53 | Contains information such as means and cumulatives averages. 54 | p_value: float 55 | p-value test for testing presence of signal in data. 56 | alpha: float 57 | Sets credible interval width. 58 | output: str 59 | Can be either "summary" or "report". The first is a simpler output just 60 | informing general metrics such as expected absolute or relative effect. 
def summary(
    summary_data: pd.DataFrame,
    p_value: float,
    alpha: float = 0.05,
    output: str = 'summary',
    digits: int = 2
) -> str:
    """
    Renders the final results of the causal impact analysis, such as absolute
    observed effect, the relative effect between prediction and observed variable
    and cumulative performances in the post-intervention period.

    Args
    ----
    summary_data: pd.DataFrame
        Contains information such as means and cumulatives averages.
    p_value: float
        p-value test for testing presence of signal in data.
    alpha: float
        Sets credible interval width.
    output: str
        Can be either "summary" or "report". The first is a simpler output just
        informing general metrics such as expected absolute or relative effect.
    digits: int
        Defines the number of digits after the decimal point to round. For
        `digits=2`, value 1.566 becomes 1.57.

    Returns
    -------
    summary: str
        Contains results of the causal impact analysis.

    Raises
    ------
    ValueError: If input `output` is not either 'summary' or 'report'.
    """
    if output not in ('summary', 'report'):
        raise ValueError('Please choose either summary or report for output.')
    # Both templates share the same base rendering context.
    context = {
        'summary': summary_data.to_dict(),
        'alpha': alpha,
        'p_value': p_value,
        'digits': digits,
    }
    if output == 'summary':
        # Only the short summary needs the z-score (for the s.d. columns).
        context['z_score'] = get_z_score(1 - alpha / 2.)
        return SUMMARY_TMPL.render(**context)
    return REPORT_TMPL.render(**context)
11 | Subtracting this prediction from the observed response yields 12 | an estimate of the causal effect the intervention had on the 13 | response variable. This effect is {{summary.average.abs_effect | round(digits)}} with a {{CI(alpha)}} interval of 14 | {{[summary.average.abs_effect_lower | round(digits), summary.average.abs_effect_upper | round(digits)] | sort}}. For a discussion of the significance of this effect, 15 | see below. 16 | 17 | 18 | Summing up the individual data points during the post-intervention 19 | period (which can only sometimes be meaningfully interpreted), the 20 | response variable had an overall value of {{summary.cumulative.actual | round(digits)}}. 21 | {% if detected_sig %}By contrast, had{% else %}Had{% endif %} the intervention not taken place, we would have expected 22 | a sum of {{summary.cumulative.predicted| round(digits)}}. The {{CI(alpha)}} interval of this prediction is {{[summary.cumulative.predicted_lower | round(digits), summary.cumulative.predicted_upper | round(digits)]|sort}}. 23 | 24 | 25 | The above results are given in terms of absolute numbers. In relative 26 | terms, the response variable showed {% if positive_sig %}an increase of +{% else %}a decrease of {% endif %}{{(100 * summary.average.rel_effect) | round(digits)}}%. The {{CI(alpha)}} 27 | interval of this percentage is [{{([(100 * summary.average.rel_effect_lower) | round(digits), (100 * summary.average.rel_effect_upper) | round(digits)] | min)}}%, {{([(100 * summary.average.rel_effect_upper) | round(digits), (100 * summary.average.rel_effect_lower) | round(digits)] | max)}}%]. 28 | {% if detected_sig and positive_sig %} 29 | 30 | This means that the positive effect observed during the intervention 31 | period is statistically significant and unlikely to be due to random 32 | fluctuations. 
It should be noted, however, that the question of whether 33 | this increase also bears substantive significance can only be answered 34 | by comparing the absolute effect ({{summary.average.abs_effect | round(digits)}}) to the original goal 35 | of the underlying intervention. 36 | {% elif detected_sig and not positive_sig %} 37 | 38 | This means that the negative effect observed during the intervention 39 | period is statistically significant. 40 | If the experimenter had expected a positive effect, it is recommended 41 | to double-check whether anomalies in the control variables may have 42 | caused an overly optimistic expectation of what should have happened 43 | in the response variable in the absence of the intervention. 44 | {% elif not detected_sig and positive_sig %} 45 | 46 | This means that, although the intervention appears to have caused a 47 | positive effect, this effect is not statistically significant when 48 | considering the entire post-intervention period as a whole. Individual 49 | days or shorter stretches within the intervention period may of course 50 | still have had a significant effect, as indicated whenever the lower 51 | limit of the impact time series (lower plot) was above zero. 52 | {% elif not detected_sig and not positive_sig -%} 53 | 54 | This means that, although it may look as though the intervention has 55 | exerted a negative effect on the response variable when considering 56 | the intervention period as a whole, this effect is not statistically 57 | significant and so cannot be meaningfully interpreted. 58 | {% endif %} 59 | {%- if not detected_sig %} 60 | 61 | The apparent effect could be the result of random fluctuations that 62 | are unrelated to the intervention. This is often the case when the 63 | intervention period is very long and includes much of the time when 64 | the effect has already worn off. It can also be the case when the 65 | intervention period is too short to distinguish the signal from the 66 | noise. 
Finally, failing to find a significant effect can happen when 67 | there are not enough control variables or when these variables do not 68 | correlate well with the response variable during the learning period. 69 | {% endif %} 70 | {%- if p_value < alpha %} 71 | 72 | The probability of obtaining this effect by chance is very small 73 | (Bayesian one-sided tail-area probability p = {{p_value | round(digits)}}). 74 | This means the causal effect can be considered statistically 75 | significant. 76 | {%- else %} 77 | 78 | The probability of obtaining this effect by chance is p = {{(p_value * 100) | round(digits)}}%. 79 | This means the effect may be spurious and would generally not be 80 | considered statistically significant. 81 | {%- endif -%} 82 | -------------------------------------------------------------------------------- /causalimpact/summary/templates/summary: -------------------------------------------------------------------------------- 1 | {% macro CI(alpha) %}{{(((1 - alpha) * 100) | string).rstrip('0').rstrip('.')}}% CI{% endmacro -%} 2 | {% macro SD(lower, upper, z_score, digits=2) %}{{((([upper, lower]|max) - ([upper, lower]|min)) / (2 * z_score)) | round(digits)}}{% endmacro -%} 3 | {% macro add_remaining_spaces(n) %}{{' ' * (19 -n)}}{% endmacro -%} 4 | Posterior Inference {Causal Impact} 5 | Average Cumulative 6 | Actual {{summary.average.actual | round(digits)}}{{add_remaining_spaces(summary.average.actual | round(digits) | string | length)}}{{summary.cumulative.actual | round(digits)}} 7 | Prediction (s.d.) 
{{summary.average.predicted | round(digits)}} ({{SD(summary.average.predicted_lower, summary.average.predicted_upper, z_score, digits)}}){{add_remaining_spaces(summary.average.predicted | round(digits) | string | length + 3 + SD(summary.average.predicted_lower, summary.average.predicted_upper, z_score, digits) | string | length)}}{{summary.cumulative.predicted | round(digits)}} ({{SD(summary.cumulative.predicted_lower, summary.cumulative.predicted_upper, z_score, digits)}}) 8 | {{CI(alpha)}} [{{summary.average.predicted_lower | round(digits)}}, {{summary.average.predicted_upper | round(digits)}}]{{add_remaining_spaces(4 + summary.average.predicted_lower | round(digits) | string | length + summary.average.predicted_upper | round(digits) | string | length)}}[{{summary.cumulative.predicted_lower | round(digits)}}, {{summary.cumulative.predicted_upper | round(digits)}}] 9 | 10 | Absolute effect (s.d.) {{summary.average.abs_effect | round(digits)}} ({{SD(summary.average.abs_effect_lower, summary.average.abs_effect_upper, z_score, digits)}}){{add_remaining_spaces(3 + summary.average.abs_effect | round(digits) | string | length + SD(summary.average.abs_effect_lower, summary.average.abs_effect_upper, z_score, digits) | string | length)}}{{summary.cumulative.abs_effect | round(digits)}} ({{SD(summary.cumulative.abs_effect_lower, summary.cumulative.abs_effect_upper, z_score, digits)}}) 11 | {{CI(alpha)}} {{[summary.average.abs_effect_lower | round(digits), summary.average.abs_effect_upper | round(digits)] | sort}}{{add_remaining_spaces(4 + summary.average.abs_effect_lower | round(digits) | string | length + summary.average.abs_effect_upper | round(digits) | string | length)}}{{[summary.cumulative.abs_effect_lower | round(digits), summary.cumulative.abs_effect_upper | round(digits)] | sort}} 12 | 13 | Relative effect (s.d.) 
{{(summary.average.rel_effect * 100) | round(digits)}}% ({{(100 * SD(summary.average.rel_effect_lower, summary.average.rel_effect_upper, z_score, 4) | float) | round(digits)}}%){{add_remaining_spaces(5 + (summary.average.rel_effect * 100) | round(digits) | string | length + (100 * SD(summary.average.rel_effect_lower, summary.average.rel_effect_upper, z_score, 4) | float) | round(digits) | string | length)}}{{(100 * summary.cumulative.rel_effect) | round(digits)}}% ({{(100 * SD(summary.cumulative.rel_effect_lower, summary.cumulative.rel_effect_upper, z_score, 4)|float) | round(digits)}}%) 14 | {{CI(alpha)}} [{{([(summary.average.rel_effect_lower * 100) | round(digits), (summary.average.rel_effect_upper * 100) | round(digits)] | min)}}%, {{([(100 * summary.average.rel_effect_upper) | round(digits), (100 * summary.average.rel_effect_lower) | round(digits)] | max)}}%]{{add_remaining_spaces(6 + ([(summary.average.rel_effect_lower * 100) | round(digits), (summary.average.rel_effect_upper * 100) | round(digits)] | min) | string | length + ([(100 * summary.average.rel_effect_upper) | round(digits), (100 * summary.average.rel_effect_lower) | round(digits)] | max) | string | length)}}[{{([(100 * summary.cumulative.rel_effect_lower) | round(digits), (100 * summary.cumulative.rel_effect_upper) | round(digits)] | min)}}%, {{([(100 * summary.cumulative.rel_effect_upper) | round(digits), (100 * summary.cumulative.rel_effect_lower) | round(digits)] | max)}}%] 15 | 16 | Posterior tail-area probability p: {{p_value|round(digits)}} 17 | Posterior prob. 
of a causal effect: {{((1 - p_value) * 100) | round(digits)}}% 18 | 19 | For more details run the command: print(impact.summary('report')) 20 | -------------------------------------------------------------------------------- /notebooks/R/arma/Rplot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/3688b83e9f52909c4ab0b8c4f2d885bb4402fbfd/notebooks/R/arma/Rplot.png -------------------------------------------------------------------------------- /notebooks/R/arma/summary.txt.txt: -------------------------------------------------------------------------------- 1 | Posterior inference {CausalImpact} 2 | 3 | Average Cumulative 4 | Actual 121 3637 5 | Prediction (s.d.) 120 (0.3) 3610 (9.1) 6 | 95% CI [120, 121] [3592, 3629] 7 | 8 | Absolute effect (s.d.) 0.89 (0.3) 26.67 (9.1) 9 | 95% CI [0.27, 1.5] [7.99, 44.4] 10 | 11 | Relative effect (s.d.) 0.74% (0.25%) 0.74% (0.25%) 12 | 95% CI [0.22%, 1.2%] [0.22%, 1.2%] 13 | 14 | Posterior tail-area probability p: 0.00301 15 | Posterior prob. 
of a causal effect: 99.6988% 16 | 17 | For more details, type: summary(impact, "report") -------------------------------------------------------------------------------- /notebooks/R/arma_data.csv: -------------------------------------------------------------------------------- 1 | y,X 2 | 118.18869351480215,99.79529234051529 3 | 120.23327608983782,100.66318023159378 4 | 118.62777475265177,98.88369907650885 5 | 119.6097224299191,100.44894052679454 6 | 121.39150827240464,101.56173409838762 7 | 119.94915032720753,99.98784514442413 8 | 121.22000675625384,100.103847246762 9 | 121.20686883438057,100.1882836307444 10 | 120.12999307372677,100.59956729994188 11 | 122.42988909392176,100.7068241664386 12 | 119.91652230322246,100.37104760778827 13 | 118.50080512395002,98.3698360438783 14 | 123.02053579170352,101.74213919394167 15 | 116.82391452006347,98.66098760438783 16 | 122.04714661141381,102.5580279912164 17 | 117.89821805378324,98.58420414849682 18 | 119.34758161129614,99.27257895669247 19 | 120.14600257430331,100.28283640183653 20 | 122.36652539391166,101.414472547872 21 | 116.29710114579274,98.28840497107963 22 | 118.948035649806,101.00069408050668 23 | 118.32480018928234,99.5763603379563 24 | 124.70987064039863,103.6302196152701 25 | 115.55599995776038,95.71157482190132 26 | 123.66442522413078,103.28249535724804 27 | 116.46470835685861,97.16987545415017 28 | 123.52732800531399,102.84972565317736 29 | 116.94427765101415,97.9590189802908 30 | 121.78832342638832,101.83782319551362 31 | 119.61071566435602,99.68976891765188 32 | 118.25465150262693,99.56566398901693 33 | 117.67085873173771,99.55974887103085 34 | 119.26592943914865,98.02599436211548 35 | 120.88802144458363,99.91584428554532 36 | 119.5159194365873,99.21498274457745 37 | 122.07514739781087,101.26666082290832 38 | 118.41960464615387,97.59407076768882 39 | 122.30164255029206,102.28516343374062 40 | 115.9544582740155,96.8798404615799 41 | 124.26802305013551,103.14102630018576 42 | 
112.9523979870474,94.81365752244913 43 | 126.68236417198419,104.4681652744601 44 | 113.83195165593062,94.43665572511179 45 | 124.1900910470809,104.0362739351402 46 | 114.2593673630214,95.06032320740292 47 | 125.28674538353751,104.73205886035153 48 | 115.07066438870487,96.11913113662101 49 | 123.26953309506948,102.7388954422512 50 | 118.11985645134362,97.86627975216844 51 | 124.55962088172443,103.2700904375472 52 | 115.79497785160612,97.12679529503828 53 | 124.11715584781372,102.83255834444778 54 | 116.62232999073395,97.43883588892592 55 | 122.29404090314283,103.30985929075375 56 | 118.44497158767757,98.34832125210725 57 | 119.11695933270022,100.56724931535842 58 | 117.16095549198862,97.94036917789298 59 | 123.29693533952079,101.8758523385545 60 | 120.10406961330044,99.07009604031653 61 | 119.96396796699204,100.17638923599056 62 | 120.42836359241525,100.70382977013855 63 | 119.11107856148179,99.35652130478655 64 | 118.91018060181236,100.62914018159628 65 | 122.19349516834825,100.1039894309136 66 | 120.13428197970708,100.7593745444178 67 | 119.47299973810199,98.36069405772956 68 | 121.63170107325712,101.45188202735156 69 | 116.89427375074946,96.38907229658645 70 | 125.03762357178593,102.59736609161129 71 | 116.47931528740291,96.44406854096869 72 | 125.46593391669431,101.86772860544485 73 | 119.59299992373522,99.39366694519461 74 | 123.25057828712784,101.26934125462277 75 | 119.58838293399887,99.54759472364866 76 | 121.47659892388366,101.40870819121 77 | 118.4042593526453,98.22907523655066 78 | 121.24662797737021,100.97155806204476 79 | 118.00648827474254,98.2044291361467 80 | 122.50291564633136,100.88980028480714 81 | 119.74868563027614,99.42207528950875 82 | 121.11828313743037,100.571448340363 83 | 118.62093088012,98.32797702736057 84 | 122.85927623225994,102.3215276112661 85 | 120.76819009242308,98.34423475551462 86 | 125.21092078533083,102.50092566604046 87 | 121.75887526602952,99.57404211190962 88 | 119.28863449538171,99.38584385157088 89 | 
122.0492887035768,101.40333163232312 90 | 118.93080796330037,98.60542392969717 91 | 123.31160323843288,102.16753261531487 92 | 118.15062052005919,98.23743132665285 93 | 124.77842321393275,103.7557732437024 94 | 117.64462612397605,96.50487587608501 95 | 127.44606410145911,105.14930907394003 96 | 116.19547272181907,95.39523198576826 97 | 127.8384358842968,104.9395443686699 98 | 114.78612117300449,95.67251982188765 99 | 123.36202469537284,103.14620061180939 100 | 119.6006748666655,97.7533891872239 101 | 123.86182677404162,102.17462630429846 102 | -------------------------------------------------------------------------------- /notebooks/R/basque.csv: -------------------------------------------------------------------------------- 1 | "date","Basque","another" 2 | 1955-01-01,3.85318463000527,3.54662963030373 3 | 1956-01-01,3.94565829615088,3.69044556954152 4 | 1957-01-01,4.03356173487263,3.82683499817574 5 | 1958-01-01,4.02342189689665,3.8756783776064 6 | 1959-01-01,4.01378196840523,3.92173673384055 7 | 1960-01-01,4.28591839622273,4.24178820002321 8 | 1961-01-01,4.57433609579741,4.57533547892566 9 | 1962-01-01,4.89895735356304,4.83804641196265 10 | 1963-01-01,5.19701498162913,5.08133409636867 11 | 1964-01-01,5.33890297875272,5.15809787814531 12 | 1965-01-01,5.46515300525185,5.22365052507319 13 | 1966-01-01,5.54591562706414,5.33247650503874 14 | 1967-01-01,5.61489572663949,5.42944892070458 15 | 1968-01-01,5.85218493307158,5.67437885353069 16 | 1969-01-01,6.08140541736959,5.91552394419117 17 | 1970-01-01,6.17009424134957,6.06683787193614 18 | 1971-01-01,6.28363340454625,6.22764920820614 19 | 1972-01-01,6.55555539865284,6.53906012902564 20 | 1973-01-01,6.81076856110308,6.83797505609446 21 | 1974-01-01,7.1051843028108,6.98736082380481 22 | 1975-01-01,7.37789168217563,7.12489302721544 23 | 1976-01-01,7.23293362192275,7.13538981886872 24 | 1977-01-01,7.08983137211913,7.1429590673591 25 | 1978-01-01,6.78670360714461,7.01935154409008 26 | 
1979-01-01,6.6398173868571,7.01099691034147 27 | 1980-01-01,6.56283917136956,7.07883467098128 28 | 1981-01-01,6.50078545499277,7.1822335603611 29 | 1982-01-01,6.54505860699956,7.28720365609826 30 | 1983-01-01,6.59532980113941,7.3978863181948 31 | 1984-01-01,6.76149675009149,7.48429002846285 32 | 1985-01-01,6.93716067172772,7.5699798313763 33 | 1986-01-01,7.33219115130052,8.07769173930216 34 | 1987-01-01,7.74278812359415,8.58397583251883 35 | 1988-01-01,8.12053664075889,9.05741234228836 36 | 1989-01-01,8.50971116232416,9.52584975721223 37 | 1990-01-01,8.7767778890741,9.78506175969812 38 | 1991-01-01,9.02527866619582,10.0506998000571 39 | 1992-01-01,8.87389282470633,9.83790310748268 40 | 1993-01-01,8.71822353908928,9.62510728658999 41 | 1994-01-01,9.01813784928637,10.0064270838912 42 | 1995-01-01,9.44087386165337,10.33990288489 43 | 1996-01-01,9.68651813767495,10.5762637502566 44 | 1997-01-01,10.1706658728087,11.0454159442168 45 | -------------------------------------------------------------------------------- /notebooks/R/basque/Rplot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/3688b83e9f52909c4ab0b8c4f2d885bb4402fbfd/notebooks/R/basque/Rplot.png -------------------------------------------------------------------------------- /notebooks/R/basque/summary.txt.txt: -------------------------------------------------------------------------------- 1 | Posterior inference {CausalImpact} 2 | 3 | Average Cumulative 4 | Actual 7.9 173.1 5 | Prediction (s.d.) 8.6 (0.31) 189.8 (6.89) 6 | 95% CI [8.1, 9.2] [177.4, 203.0] 7 | 8 | Absolute effect (s.d.) -0.76 (0.31) -16.75 (6.89) 9 | 95% CI [-1.4, -0.2] [-30.0, -4.3] 10 | 11 | Relative effect (s.d.) -8.8% (3.6%) -8.8% (3.6%) 12 | 95% CI [-16%, -2.3%] [-16%, -2.3%] 13 | 14 | Posterior tail-area probability p: 0.00413 15 | Posterior prob. 
of a causal effect: 99.58678% 16 | 17 | For more details, type: summary(impact, "report") -------------------------------------------------------------------------------- /notebooks/R/comparison/Pythonplot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/3688b83e9f52909c4ab0b8c4f2d885bb4402fbfd/notebooks/R/comparison/Pythonplot.png -------------------------------------------------------------------------------- /notebooks/R/comparison/Rplot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/3688b83e9f52909c4ab0b8c4f2d885bb4402fbfd/notebooks/R/comparison/Rplot.png -------------------------------------------------------------------------------- /notebooks/R/comparison/summary.txt.txt: -------------------------------------------------------------------------------- 1 | Posterior inference {CausalImpact} 2 | 3 | Average Cumulative 4 | Actual 78574 1414340 5 | Prediction (s.d.) 79232 (736) 1426171 (13253) 6 | 95% CI [77743, 80651] [1399368, 1451711] 7 | 8 | Absolute effect (s.d.) -657 (736) -11831 (13253) 9 | 95% CI [-2076, 832] [-37371, 14971] 10 | 11 | Relative effect (s.d.) -0.83% (0.93%) -0.83% (0.93%) 12 | 95% CI [-2.6%, 1%] [-2.6%, 1%] 13 | 14 | Posterior tail-area probability p: 0.20061 15 | Posterior prob. 
of a causal effect: 80% 16 | 17 | For more details, type: summary(impact, "report") 18 | -------------------------------------------------------------------------------- /notebooks/R/comparison_data.csv: -------------------------------------------------------------------------------- 1 | DATE,CHANGED,NOT_CHANGED_1,NOT_CHANGED_2,NOT_CHANGED_3 2 | 2019-04-16,83836.5,85642.5,86137.5,81241.5 3 | 2019-04-17,83887.5,86326.5,85036.5,80877 4 | 2019-04-18,82662,87456,84409.5,80910 5 | 2019-04-19,83271,89551.5,87568.5,82150.5 6 | 2019-04-20,84210,90256.5,86602.5,83083.5 7 | 2019-04-21,79039.5,86418,82476,78921 8 | 2019-04-22,83040,89076,85146,82989 9 | 2019-04-23,84906,90870,86901,83745 10 | 2019-04-24,85089,86722.5,85267.5,82348.5 11 | 2019-04-25,82953,87619.5,83488.5,80907 12 | 2019-04-26,80076,85671,81501,78462 13 | 2019-04-27,80088,85320,80931,79519.5 14 | 2019-04-28,83431.5,88345.5,84702,81304.5 15 | 2019-04-29,85744.5,86787,87007.5,82500 16 | 2019-04-30,84747,85066.5,85149,81642 17 | 2019-05-01,82051.5,85293,85062,80187 18 | 2019-05-02,83794.5,86668.5,84967.5,79845 19 | 2019-05-03,80802,85710,81979.5,78684 20 | 2019-05-04,78313.5,83244,80875.5,78499.5 21 | 2019-05-05,78931.5,84523.5,80277,78462 22 | 2019-05-06,80523,87694.5,82500,80268 23 | 2019-05-07,114019.5,102448.5,121420.5,114240 24 | 2019-05-08,120520.5,107023.5,122865,115831.5 25 | 2019-05-09,87372,88839,86068.5,82033.5 26 | 2019-05-10,90376.5,87847.5,96744,92529 27 | 2019-05-11,85422,86869.5,84742.5,83779.5 28 | 2019-05-12,78696,83637,80853,79390.5 29 | 2019-05-13,80496,84703.5,83292,82272 30 | 2019-05-14,82659,85020,83163,82983 31 | 2019-05-15,81018,82870.5,81750,82171.5 32 | 2019-05-16,79528.5,83644.5,82396.5,82411.5 33 | 2019-05-17,79146,82326,82164,79459.5 34 | 2019-05-18,79645.5,82638,82150.5,79828.5 35 | 2019-05-19,84414,88573.5,86011.5,82750.5 36 | 2019-05-20,82899,89371.5,86311.5,84153 37 | 2019-05-21,82974,84795,86337,85107 38 | 2019-05-22,78043.5,79705.5,83578.5,79143 39 | 
2019-05-23,75567,76149,79428,78142.5 40 | 2019-05-24,73971,77277,78478.5,75268.5 41 | 2019-05-25,70807.5,74059.5,73674,72775.5 42 | 2019-05-26,68449.5,75600,72816,69562.5 43 | 2019-05-27,75682.5,80227.5,77728.5,77661 44 | 2019-05-28,74301,79639.5,77962.5,77854.5 45 | 2019-05-29,75523.5,78537,79843.5,78288 46 | 2019-05-30,74298,77182.5,76795.5,76453.5 47 | 2019-05-31,69274.5,72475.5,72121.5,70200 48 | 2019-06-01,68787,74340,72805.5,69526.5 49 | 2019-06-02,71407.5,77667,75006,71412 50 | 2019-06-03,74041.5,79485,77041.5,73351.5 51 | 2019-06-04,74829,79294.5,76557,75373.5 52 | 2019-06-05,73030.5,76639.5,77014.5,74985 53 | 2019-06-06,71533.5,76116,75408,74311.5 54 | 2019-06-07,70441.5,74577,73089,71988 55 | 2019-06-08,71476.5,77071.5,75435,72768 56 | 2019-06-09,72342,78253.5,74493,71745 57 | 2019-06-10,75549,81898.5,78918,77343 58 | 2019-06-11,73360.5,78333,76722,74418 59 | 2019-06-12,75052.5,79086,78048,75807 60 | 2019-06-13,74577,77136,76663.5,74341.5 61 | 2019-06-14,70398,75723,73162.5,70561.5 62 | 2019-06-15,71511,78015,75091.5,72811.5 63 | 2019-06-16,72981,80059.5,75930,73260 64 | 2019-06-17,75700.5,80071.5,78834,76275 65 | 2019-06-18,76126.5,81747,79234.5,77827.5 66 | 2019-06-19,75819,79822.5,77856,76245 67 | 2019-06-20,74436,78069,76314,75525 68 | 2019-06-21,72717,74418,75436.5,73684.5 69 | 2019-06-22,72309,76647,75193.5,74148 70 | 2019-06-23,74296.5,76903.5,75774,75559.5 71 | 2019-06-24,76222.5,78928.5,79677,77593.5 72 | 2019-06-25,77190,80506.5,77746.5,78598.5 73 | 2019-06-26,74283,76129.5,74694,76170 74 | 2019-06-27,74715,78864,76060.5,75702 75 | 2019-06-28,72889.5,76474.5,74448,73899 76 | 2019-06-29,73645.5,78024,75457.5,74944.5 77 | 2019-06-30,74923.5,79464,76024.5,76746 78 | 2019-07-01,75862.5,78504,76608,76771.5 79 | 2019-07-02,76506,77835,76671,76267.5 80 | 2019-07-03,75157.5,76818,75820.5,75765 81 | 2019-07-04,70852.5,74109,71260.5,70942.5 82 | 2019-07-05,68314.5,72430.5,70762.5,68914.5 83 | 2019-07-06,74968.5,78663,76440,76345.5 84 | 
2019-07-07,74503.5,77574,75442.5,75067.5 85 | 2019-07-08,77452.5,81088.5,78934.5,80007 86 | 2019-07-09,77817,81534,79449,81091.5 87 | 2019-07-10,77274,81549,79366.5,79345.5 88 | 2019-07-11,77259,80713.5,78501,77644.5 89 | 2019-07-12,76804.5,80631,78795,76782 90 | 2019-07-13,76639.5,79656,78729,76777.5 91 | 2019-07-14,77949,80001,77461.5,76137 92 | 2019-07-15,76608,77917.5,76702.5,78174 93 | 2019-07-16,76554,79224,77437.5,78528 94 | 2019-07-17,74914.5,78832.5,76596,76498.5 95 | 2019-07-18,76384.5,80602.5,79272,78270 96 | 2019-07-19,77904,84619.5,80229,79615.5 97 | 2019-07-20,79125,85050,81780,80104.5 98 | 2019-07-21,78469.5,85371,80848.5,79836 99 | 2019-07-22,80817,83827.5,84658.5,82258.5 100 | 2019-07-23,81544.5,84777,86796,83638.5 101 | 2019-07-24,77050.5,79228.5,80088,79930.5 102 | 2019-07-25,77593.5,78652.5,80061,81315 103 | 2019-07-26,77590.5,77715,80113.5,80227.5 104 | 2019-07-27,77482.5,77965.5,79423.5,77418 105 | 2019-07-28,79911,81244.5,81961.5,82143 106 | 2019-07-29,82662,84888,86212.5,85449 107 | 2019-07-30,84900,85710,89061,85162.5 108 | 2019-07-31,79314,80242.5,83842.5,79912.5 109 | 2019-08-01,75514.5,76407,78832.5,78643.5 -------------------------------------------------------------------------------- /notebooks/R/google/Rplot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/3688b83e9f52909c4ab0b8c4f2d885bb4402fbfd/notebooks/R/google/Rplot.png -------------------------------------------------------------------------------- /notebooks/R/google/summary.txt.txt: -------------------------------------------------------------------------------- 1 | Posterior inference {CausalImpact} 2 | 3 | Average Cumulative 4 | Actual 156 4687 5 | Prediction (s.d.) 129 (4.5) 3883 (134.2) 6 | 95% CI [120, 138] [3614, 4148] 7 | 8 | Absolute effect (s.d.) 27 (4.5) 804 (134.2) 9 | 95% CI [18, 36] [539, 1073] 10 | 11 | Relative effect (s.d.) 
21% (3.5%) 21% (3.5%) 12 | 95% CI [14%, 28%] [14%, 28%] 13 | 14 | Posterior tail-area probability p: 0.001 15 | Posterior prob. of a causal effect: 99.8996% 16 | 17 | For more details, type: summary(impact, "report") -------------------------------------------------------------------------------- /notebooks/R/google_data.csv: -------------------------------------------------------------------------------- 1 | y,x1,x2,date 2 | 110.0,134.0,128.0,2020-01-01 3 | 125.0,134.0,128.0,2020-01-02 4 | 123.0,134.0,128.0,2020-01-03 5 | 128.0,134.0,128.0,2020-01-04 6 | 114.0,134.0,128.0,2020-01-05 7 | 125.0,133.0,128.0,2020-01-06 8 | 119.0,133.0,128.0,2020-01-07 9 | 121.0,133.0,128.0,2020-01-08 10 | 139.0,133.0,128.0,2020-01-09 11 | 107.0,133.0,128.0,2020-01-10 12 | 115.0,132.0,128.0,2020-01-11 13 | 91.0,132.0,128.0,2020-01-12 14 | 107.0,132.0,128.0,2020-01-13 15 | 124.0,132.0,128.0,2020-01-14 16 | 116.0,131.0,128.0,2020-01-15 17 | 110.0,131.0,128.0,2020-01-16 18 | 100.0,131.0,128.0,2020-01-17 19 | 110.0,131.0,128.0,2020-01-18 20 | 113.0,129.0,128.0,2020-01-19 21 | 103.0,129.0,128.0,2020-01-20 22 | 117.0,129.0,128.0,2020-01-21 23 | 125.0,129.0,128.0,2020-01-22 24 | 115.0,129.0,128.0,2020-01-23 25 | 114.0,128.0,128.0,2020-01-24 26 | 138.0,128.0,128.0,2020-01-25 27 | 117.0,128.0,128.0,2020-01-26 28 | 104.0,128.0,128.0,2020-01-27 29 | 123.0,128.0,128.0,2020-01-28 30 | 122.0,128.0,128.0,2020-01-29 31 | 150.0,128.0,128.0,2020-01-30 32 | 127.0,128.0,128.0,2020-01-31 33 | 139.0,128.0,128.0,2020-02-01 34 | 139.0,127.0,127.0,2020-02-02 35 | 109.0,127.0,127.0,2020-02-03 36 | 107.0,127.0,127.0,2020-02-04 37 | 94.0,127.0,127.0,2020-02-05 38 | 112.0,127.0,127.0,2020-02-06 39 | 107.0,127.0,127.0,2020-02-07 40 | 126.0,127.0,127.0,2020-02-08 41 | 114.0,127.0,127.0,2020-02-09 42 | 129.0,127.0,127.0,2020-02-10 43 | 113.0,126.0,127.0,2020-02-11 44 | 114.0,126.0,127.0,2020-02-12 45 | 116.0,126.0,127.0,2020-02-13 46 | 110.0,125.0,126.0,2020-02-14 47 | 131.0,125.0,126.0,2020-02-15 48 | 
109.0,125.0,126.0,2020-02-16 49 | 114.0,125.0,127.0,2020-02-17 50 | 116.0,125.0,126.0,2020-02-18 51 | 113.0,124.0,125.0,2020-02-19 52 | 108.0,124.0,125.0,2020-02-20 53 | 120.0,124.0,125.0,2020-02-21 54 | 106.0,123.0,125.0,2020-02-22 55 | 123.0,123.0,125.0,2020-02-23 56 | 123.0,123.0,124.0,2020-02-24 57 | 135.0,123.0,124.0,2020-02-25 58 | 127.0,123.0,124.0,2020-02-26 59 | 140.0,123.0,123.0,2020-02-27 60 | 139.0,123.0,123.0,2020-02-28 61 | 137.0,123.0,123.0,2020-02-29 62 | 123.0,123.0,123.0,2020-03-01 63 | 160.0,122.0,123.0,2020-03-02 64 | 173.0,122.0,123.0,2020-03-03 65 | 236.0,122.0,123.0,2020-03-04 66 | 233.0,122.0,123.0,2020-03-05 67 | 193.0,122.0,123.0,2020-03-06 68 | 169.0,122.0,123.0,2020-03-07 69 | 167.0,122.0,123.0,2020-03-08 70 | 172.0,121.0,123.0,2020-03-09 71 | 148.0,121.0,123.0,2020-03-10 72 | 125.0,121.0,123.0,2020-03-11 73 | 132.0,121.0,123.0,2020-03-12 74 | 165.0,121.0,123.0,2020-03-13 75 | 154.0,120.0,123.0,2020-03-14 76 | 158.0,120.0,123.0,2020-03-15 77 | 135.0,120.0,123.0,2020-03-16 78 | 145.0,120.0,123.0,2020-03-17 79 | 163.0,119.0,122.0,2020-03-18 80 | 146.0,119.0,122.0,2020-03-19 81 | 120.0,119.0,121.0,2020-03-20 82 | 149.0,118.0,121.0,2020-03-21 83 | 140.0,118.0,121.0,2020-03-22 84 | 150.0,117.0,121.0,2020-03-23 85 | 133.0,117.0,120.0,2020-03-24 86 | 143.0,117.0,120.0,2020-03-25 87 | 145.0,117.0,120.0,2020-03-26 88 | 145.0,117.0,120.0,2020-03-27 89 | 176.0,117.0,120.0,2020-03-28 90 | 134.0,117.0,120.0,2020-03-29 91 | 147.0,117.0,120.0,2020-03-30 92 | 131.0,117.0,120.0,2020-03-31 93 | -------------------------------------------------------------------------------- /notebooks/R/volks/Rplot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/3688b83e9f52909c4ab0b8c4f2d885bb4402fbfd/notebooks/R/volks/Rplot.png -------------------------------------------------------------------------------- /notebooks/R/volks/summary.txt.txt: 
-------------------------------------------------------------------------------- 1 | Posterior inference {CausalImpact} 2 | 3 | Average Cumulative 4 | Actual 127 10026 5 | Prediction (s.d.) 171 (5.6) 13522 (438.5) 6 | 95% CI [159, 182] [12597, 14364] 7 | 8 | Absolute effect (s.d.) -44 (5.6) -3496 (438.5) 9 | 95% CI [-55, -33] [-4338, -2571] 10 | 11 | Relative effect (s.d.) -26% (3.2%) -26% (3.2%) 12 | 95% CI [-32%, -19%] [-32%, -19%] 13 | 14 | Posterior tail-area probability p: 0.00106 15 | Posterior prob. of a causal effect: 99.89418% 16 | 17 | For more details, type: summary(impact, "report") 18 | 19 | -------------------------------------------------------------------------------- /notebooks/R/volks_data.csv: -------------------------------------------------------------------------------- 1 | "Date" "VolksWagen" "BMW" "Allianz" 2 | "2011-01-02" 99.142822 45.039032 60.006882 3 | "2011-01-09" 100.908623 44.75806 63.032661 4 | "2011-01-16" 96.084999 42.297653 64.578583 5 | "2011-01-23" 96.558739 43.360786 66.296272 6 | "2011-01-30" 94.965218 43.170944 69.962891 7 | "2011-02-06" 99.831917 47.947468 70.359276 8 | "2011-02-13" 97.678513 45.988258 71.383286 9 | "2011-02-20" 94.103867 44.575809 67.716675 10 | "2011-02-27" 91.433624 43.877167 69.137062 11 | "2011-03-06" 91.089073 42.753281 65.946129 12 | "2011-03-13" 87.77285 41.9939 62.041691 13 | "2011-03-20" 90.701469 43.634163 65.252441 14 | "2011-03-27" 93.242485 46.489449 66.527496 15 | "2011-04-03" 91.692032 44.454304 68.476425 16 | "2011-04-10" 90.012383 44.135361 68.245209 17 | "2011-04-17" 95.395897 46.322388 67.518471 18 | "2011-04-24" 103.708038 48.349937 70.227142 19 | "2011-05-01" 103.105087 47.704464 67.287254 20 | "2011-05-08" 103.164993 46.975456 67.789459 21 | "2011-05-15" 99.919174 47.279564 66.367401 22 | "2011-05-22" 100.708694 47.062473 65.891068 23 | "2011-05-29" 101.673668 46.744591 64.192894 24 | "2011-06-05" 103.472038 48.334011 63.695862 25 | "2011-06-12" 104.261574 49.760624 64.676117 26 | 
"2011-06-19" 107.858315 52.04784 63.419735 27 | "2011-06-26" 112.156845 52.544052 67.257912 28 | "2011-07-03" 116.89402 53.117794 65.911789 29 | "2011-07-10" 120.13987 56.343163 61.707737 30 | "2011-07-17" 119.350334 55.498055 64.393089 31 | "2011-07-24" 112.025261 54.133469 62.860573 32 | "2011-07-31" 95.13813 46.891903 54.604343 33 | "2011-08-07" 97.550583 46.008026 52.816418 34 | "2011-08-14" 84.225113 40.549709 47.494049 35 | "2011-08-21" 87.462158 41.906536 45.6647 36 | "2011-08-28" 89.830742 42.038342 46.941788 37 | "2011-09-04" 84.014549 40.076759 42.413284 38 | "2011-09-11" 91.497543 44.333313 46.182434 39 | "2011-09-18" 81.558258 40.456665 40.74271 40 | "2011-09-25" 81.751266 38.739319 48.916103 41 | "2011-10-02" 83.163628 39.456497 52.71286 42 | "2011-10-09" 89.962341 43.759571 53.844982 43 | "2011-10-16" 90.137794 44.341064 54.231571 44 | "2011-10-23" 102.112312 47.923092 57.37252 45 | "2011-10-30" 101.805267 44.860538 52.616215 46 | "2011-11-06" 103.033409 45.535072 52.636929 47 | "2011-11-13" 96.585602 41.518871 49.737591 48 | "2011-11-20" 88.602592 39.014553 45.41618 49 | "2011-11-27" 99.655998 43.116051 54.342022 50 | "2011-12-04" 96.936501 41.712704 54.804535 51 | "2011-12-11" 90.751862 38.758694 50.234612 52 | "2011-12-18" 92.725693 40.704777 52.443638 53 | "2011-12-25" 90.927322 40.131027 51.021591 54 | "2012-01-01" 96.980377 43.06953 51.000874 55 | "2012-01-08" 100.313934 45.317982 53.879513 56 | "2012-01-15" 107.331955 49.303173 57.952396 57 | "2012-01-22" 108.472389 50.086262 59.070721 58 | "2012-01-29" 112.200722 54.257526 61.107155 59 | "2012-02-05" 111.586639 54.172245 60.044075 60 | "2012-02-12" 113.516602 55.808182 61.894123 61 | "2012-02-19" 111.060287 54.521145 62.936508 62 | "2012-02-26" 114.481583 54.745983 62.915806 63 | "2012-03-04" 113.165703 54.660702 61.162388 64 | "2012-03-11" 113.867508 56.614529 64.890106 65 | "2012-03-18" 108.384674 52.644848 62.943413 66 | "2012-03-25" 106.059944 52.280437 61.76297 67 | "2012-04-01" 
103.252731 51.77647 59.836967 68 | "2012-04-08" 102.857971 52.334709 56.523426 69 | "2012-04-15" 99.217369 54.148979 58.442539 70 | "2012-04-22" 114.51725 55.924484 58.663429 71 | "2012-04-29" 115.868217 53.489952 57.966206 72 | "2012-05-06" 114.472221 52.412243 55.342983 73 | "2012-05-13" 106.59156 47.535423 54.942265 74 | "2012-05-20" 109.428589 49.711437 55.292458 75 | "2012-05-27" 105.87104 47.1576 51.082954 76 | "2012-06-03" 105.87104 47.237915 52.848465 77 | "2012-06-10" 105.195557 44.941071 54.497242 78 | "2012-06-17" 104.159813 45.350643 55.263275 79 | "2012-06-24" 107.041885 45.720066 57.71455 80 | "2012-07-01" 112.896088 45.310486 56.817204 81 | "2012-07-08" 119.245644 46.394665 57.67807 82 | "2012-07-15" 118.480103 46.804241 58.261711 83 | "2012-07-22" 112.535828 47.502934 58.130394 84 | "2012-07-29" 118.29998 46.675747 60.603569 85 | "2012-08-05" 121.227089 48.105251 63.142399 86 | "2012-08-12" 121.497261 49.358074 63.908409 87 | "2012-08-19" 117.534416 48.225716 63.733322 88 | "2012-08-26" 116.633781 46.306324 63.667656 89 | "2012-09-02" 115.823189 46.603462 67.36647 90 | "2012-09-09" 124.424362 49.791748 70.415985 91 | "2012-09-16" 123.163467 48.016911 69.292488 92 | "2012-09-23" 117.264229 45.704002 67.548866 93 | "2012-09-30" 121.587334 48.980625 67.811493 94 | "2012-10-07" 119.831078 48.506798 67.140312 95 | "2012-10-14" 124.96476 49.799774 69.701027 96 | "2012-10-21" 130.143478 48.964565 68.519165 97 | "2012-10-28" 138.339355 52.201023 69.79586 98 | "2012-11-04" 130.143478 51.582645 68.139801 99 | "2012-11-11" 126.045532 49.783722 67.41024 100 | "2012-11-18" 135.862579 52.827438 71.58326 101 | "2012-11-25" 140.861176 54.76289 72.918327 102 | "2012-12-02" 141.311508 55.445522 74.997551 103 | "2012-12-09" 144.1035 57.605839 75.69062 104 | "2012-12-16" 146.580292 58.38483 76.857887 105 | "2012-12-23" 146.580292 58.569553 76.456635 106 | "2012-12-30" 150.903397 60.874424 78.937103 107 | "2013-01-06" 148.246475 58.802444 77.040276 108 | "2013-01-13" 
149.957718 58.890789 75.763565 109 | "2013-01-20" 155.586746 60.914577 76.748459 110 | "2013-01-27" 156.71257 59.814342 77.003807 111 | "2013-02-03" 152.38945 59.027309 74.815163 112 | "2013-02-10" 150.723267 57.541588 74.632774 113 | "2013-02-17" 141.221451 55.244751 76.018921 114 | "2013-02-24" 139.285034 55.574013 75.909477 115 | "2013-03-03" 144.1035 58.344685 81.855301 116 | "2013-03-10" 136.493042 56.216488 81.891777 117 | "2013-03-17" 134.106323 55.887218 79.520744 118 | "2013-03-24" 132.169952 54.056168 77.295616 119 | "2013-03-31" 131.044128 53.253071 76.821426 120 | "2013-04-07" 126.450806 54.128448 78.280518 121 | "2013-04-14" 122.713142 52.072529 75.617661 122 | "2013-04-21" 128.071991 55.108219 80.979828 123 | "2013-04-28" 139.545349 57.919044 84.773491 124 | "2013-05-05" 138.761124 57.694176 85.247704 125 | "2013-05-12" 148.033401 57.766453 90.63298 126 | "2013-05-19" 151.308685 58.932926 88.738472 127 | "2013-05-26" 152.27742 61.269291 90.746658 128 | "2013-06-02" 149.601837 60.030434 88.586922 129 | "2013-06-09" 143.051285 58.226192 86.389297 130 | "2013-06-16" 133.871262 53.511898 79.94799 131 | "2013-06-23" 138.161423 55.856579 85.063148 132 | "2013-06-30" 139.499222 55.041756 83.774879 133 | "2013-07-07" 148.356323 58.633606 87.867012 134 | "2013-07-14" 151.493195 60.055386 89.155266 135 | "2013-07-21" 152.508087 61.660076 88.890045 136 | "2013-07-28" 160.903885 60.263245 90.822433 137 | "2013-08-04" 161.96489 60.113586 90.064629 138 | "2013-08-11" 164.179153 62.799152 87.601784 139 | "2013-08-18" 163.348801 62.691074 86.048294 140 | "2013-08-25" 155.18367 59.273823 82.145615 141 | "2013-09-01" 154.076523 64.603394 84.873703 142 | "2013-09-08" 162.426193 66.432571 85.442047 143 | "2013-09-15" 155.18367 66.906494 88.397469 144 | "2013-09-22" 156.659851 66.598869 89.231049 145 | "2013-09-29" 151.216415 66.249649 88.96582 146 | "2013-10-06" 155.322052 67.130989 90.519318 147 | "2013-10-13" 157.951477 68.203552 93.436852 148 | "2013-10-20" 157.213394 
69.558807 93.664185 149 | "2013-10-27" 167.085388 68.744003 93.550514 150 | "2013-11-03" 169.760941 67.987373 94.156761 151 | "2013-11-10" 171.421661 67.804459 97.112175 152 | "2013-11-17" 173.589798 69.650269 96.013367 153 | "2013-11-24" 176.818954 70.315437 96.884834 154 | "2013-12-01" 171.790695 68.295013 94.876671 155 | "2013-12-08" 168.146393 66.34111 93.057945 156 | "2013-12-15" 175.804077 69.833183 97.604752 157 | "2013-12-22" 180.832336 71.022156 99.120346 158 | "2013-12-29" 177.879974 69.824883 96.922729 159 | "2014-01-05" 179.817444 69.134773 97.869972 160 | "2014-01-12" 182.077866 71.678993 101.469521 161 | "2014-01-19" 170.683578 68.153671 94.53566 162 | "2014-01-26" 166.393433 67.197502 93.815758 163 | "2014-02-02" 168.377045 68.012321 95.558685 164 | "2014-02-09" 180.647797 71.504402 98.248871 165 | "2014-02-16" 178.987106 70.855873 97.832092 166 | "2014-02-23" 170.775848 70.00779 98.286766 167 | "2014-03-02" 165.793716 68.06221 94.118858 168 | "2014-03-09" 160.765472 65.900444 91.315002 169 | "2014-03-16" 160.903885 73.49987 90.936111 170 | "2014-03-23" 170.176132 76.684311 93.209511 171 | "2014-03-30" 172.85173 78.03125 93.626297 172 | "2014-04-06" 175.481155 75.004791 90.557205 173 | "2014-04-13" 176.449921 76.59285 90.216202 174 | "2014-04-20" 174.512436 74.464348 91.883347 175 | "2014-04-27" 173.958862 74.023689 94.043083 176 | "2014-05-04" 173.866592 73.267059 91.580223 177 | "2014-05-11" 172.113632 71.861923 96.017281 178 | "2014-05-18" 180.447098 77.002655 96.373344 179 | "2014-05-25" 182.191238 78.896011 98.430565 180 | "2014-06-01" 182.804031 79.572838 98.03495 181 | "2014-06-08" 182.04982 77.756569 97.995392 182 | "2014-06-15" 183.558258 79.298676 97.32283 183 | "2014-06-22" 178.514435 79.127335 96.056847 184 | "2014-06-29" 180.352829 81.517601 99.459183 185 | "2014-07-06" 173.706253 78.878883 101.041664 186 | "2014-07-13" 173.093475 80.19825 102.584595 187 | "2014-07-20" 169.180954 80.789383 101.951607 188 | "2014-07-27" 161.120224 
75.07502 97.085457 189 | "2014-08-03" 154.992172 74.012672 96.452461 190 | "2014-08-10" 156.264938 73.884163 99.815239 191 | "2014-08-17" 162.062973 76.300125 101.476852 192 | "2014-08-24" 160.743103 75.906044 102.703285 193 | "2014-08-31" 169.03952 78.861748 105.353935 194 | "2014-09-07" 164.797043 76.617119 105.274826 195 | "2014-09-14" 164.184235 76.240158 109.547531 196 | "2014-09-21" 156.406357 73.370125 101.437302 197 | "2014-09-28" 147.732819 71.639534 99.142693 198 | "2014-10-05" 142.076157 68.135513 97.283264 199 | "2014-10-12" 152.682388 69.02652 95.305153 200 | "2014-10-19" 152.399536 70.500092 97.362389 201 | "2014-10-26" 159.988876 73.09597 100.250427 202 | "2014-11-02" 160.790237 71.391083 103.811012 203 | "2014-11-09" 158.716141 71.159767 103.811012 204 | "2014-11-16" 165.928375 75.529083 107.490303 205 | "2014-11-23" 171.490738 78.776077 109.547531 206 | "2014-11-30" 174.88472 79.118759 109.864029 207 | "2014-12-07" 168.002472 75.374878 106.422119 208 | "2014-12-14" 172.103546 77.054054 109.547531 209 | "2014-12-21" 171.679291 77.816544 110.536575 210 | "2014-12-28" 167.766769 75.400574 108.162857 211 | "2015-01-04" 167.295395 75.529083 107.490303 212 | "2015-01-11" 176.864563 80.181107 112.752068 213 | "2015-01-18" 188.272141 88.071564 116.906075 214 | "2015-01-25" 185.868073 88.585609 115.837898 215 | "2015-02-01" 199.585419 91.541313 118.013817 216 | "2015-02-08" 197.699875 92.355202 117.578651 217 | "2015-02-15" 206.279144 95.182404 115.996162 218 | "2015-02-22" 208.683212 96.810188 118.369881 219 | "2015-03-01" 214.009888 98.780663 119.675438 220 | "2015-03-08" 226.925888 102.764442 123.038208 221 | "2015-03-15" 228.434341 100.022911 127.983482 222 | "2015-03-22" 222.400589 99.466042 126.796608 223 | "2015-03-29" 224.191849 98.480797 129.368149 224 | "2015-04-05" 230.791275 100.536949 134.273849 225 | "2015-04-12" 215.518326 94.582695 127.587868 226 | "2015-04-19" 217.969543 94.625526 123.473404 227 | "2015-04-26" 214.292725 90.898766 
120.743614 228 | "2015-05-03" 212.454315 93.811646 119.477631 229 | "2015-05-10" 207.402023 87.386192 121.678383 230 | "2015-05-17" 221.129288 92.337738 125.859909 231 | "2015-05-24" 211.447968 88.725281 118.200684 232 | "2015-05-31" 205.475403 87.747276 118.490486 233 | "2015-06-07" 204.415756 88.038025 117.993675 234 | "2015-06-14" 198.780365 86.117264 116.006409 235 | "2015-06-21" 209.376846 91.192314 121.305763 236 | "2015-06-28" 203.067123 87.139328 117.248444 237 | "2015-07-05" 195.938553 83.879318 121.264374 238 | "2015-07-12" 195.264236 84.214127 127.64016 239 | "2015-07-19" 182.596649 80.742645 126.480927 240 | "2015-07-26" 177.298401 80.443085 123.458633 241 | "2015-08-02" 184.523285 81.835197 126.894943 242 | "2015-08-09" 173.396957 75.905495 126.232506 243 | "2015-08-16" 158.94722 70.601357 115.840805 244 | "2015-08-23" 164.245453 73.077217 117.496841 245 | "2015-08-30" 154.082474 70.768761 115.882202 246 | "2015-09-06" 160.151367 75.359222 116.627434 247 | "2015-09-13" 155.431122 75.517815 113.439522 248 | "2015-09-20" 111.311218 69.508812 114.557373 249 | "2015-09-27" 97.439461 68.909676 115.468185 250 | "2015-10-04" 121.281548 77.359283 121.14016 251 | "2015-10-11" 116.75396 76.425331 122.837624 252 | "2015-10-18" 117.380112 82.196442 129.254822 253 | "2015-10-25" 121.474205 82.258118 131.945908 254 | "2015-11-01" 117.428276 84.610611 130.248459 255 | "2015-11-08" 114.104836 83.297806 130.496872 256 | "2015-11-15" 119.354912 87.37722 133.891769 257 | "2015-11-22" 129.325241 89.430153 138.528717 258 | "2015-11-29" 132.263351 86.496132 134.429993 259 | "2015-12-06" 132.552353 82.91893 131.117874 260 | "2015-12-13" 135.056961 85.209747 134.09877 261 | "2015-12-20" 137.946915 86.830948 134.968201 262 | "2015-12-27" 137.079941 86.02034 135.423615 263 | "2016-01-03" 120.46273 73.517754 125.694313 264 | "2016-01-10" 115.935127 68.742264 123.582832 265 | "2016-01-17" 120.414558 70.2313 122.671997 266 | "2016-01-24" 116.175964 67.543999 123.086021 267 | 
"2016-01-31" 114.490166 63.799389 114.30896 268 | "2016-02-07" 106.542801 61.905048 106.815331 269 | "2016-02-14" 115.068146 65.050522 110.996864 270 | "2016-02-21" 117.765434 65.103394 114.143356 271 | "2016-02-28" 133.419342 72.80407 116.875832 272 | "2016-03-06" 125.664619 69.359024 119.773918 273 | "2016-03-13" 125.182961 71.878937 120.022324 274 | "2016-03-20" 121.811371 70.11676 118.076469 275 | "2016-03-27" 118.825089 68.724648 116.089218 276 | "2016-04-03" 115.935127 65.46463 115.012772 277 | "2016-04-10" 120.270058 69.050652 122.175201 278 | "2016-04-17" 131.637177 72.936234 128.509598 279 | "2016-04-24" 133.274841 70.927361 122.754822 280 | "2016-05-01" 127.061432 66.689339 115.385391 281 | "2016-05-08" 131.155533 64.99765 126.639931 282 | "2016-05-15" 130.529373 65.961395 126.50219 283 | "2016-05-22" 134.91246 69.144478 134.767303 284 | "2016-05-29" 129.999557 66.485779 131.00209 285 | "2016-06-05" 127.20594 65.041435 123.747154 286 | "2016-06-12" 124.460487 63.449902 119.706436 287 | "2016-06-19" 122.919182 63.164715 116.446304 288 | "2016-06-26" 121.480194 62.364342 116.905479 289 | "2016-07-03" 117.334435 62.962318 112.451492 290 | "2016-07-10" 122.974594 68.684502 117.548317 291 | "2016-07-17" 125.433128 69.457268 115.573875 292 | "2016-07-24" 127.89164 70.883209 117.823822 293 | "2016-07-31" 126.831116 71.683578 116.12487 294 | "2016-08-07" 127.747025 73.256714 123.563484 295 | "2016-08-14" 123.312035 71.131599 118.558502 296 | "2016-08-21" 125.674164 71.048805 119.017677 297 | "2016-08-28" 127.072136 72.281548 124.619583 298 | "2016-09-04" 126.975731 70.772812 125.400177 299 | "2016-09-11" 122.299698 67.543739 119.38501 300 | "2016-09-18" 124.420792 69.540062 125.446098 301 | "2016-09-25" 124.517197 68.822487 121.313538 302 | "2016-10-02" 126.204422 71.214394 124.711426 303 | "2016-10-09" 127.168541 70.69001 126.823624 304 | "2016-10-16" 129.096802 72.336746 128.568466 305 | "2016-10-23" 130.784027 73.753494 130.313324 306 | "2016-10-30" 122.974594 
69.420464 125.308342 307 | "2016-11-06" 124.083336 73.698296 138.486603 308 | "2016-11-13" 123.456657 74.517059 138.440704 309 | "2016-11-20" 127.843445 76.163803 138.394775 310 | "2016-11-27" 122.974594 73.431511 136.19075 311 | "2016-12-04" 130.012741 81.665176 144.409943 312 | "2016-12-11" 134.640533 82.796738 143.858932 313 | "2016-12-18" 136.761642 82.759941 144.134445 314 | "2016-12-25" 131.844574 81.646782 144.180359 315 | "2017-01-01" 137.484741 83.265923 147.945587 316 | "2017-01-08" 146.306503 80.782013 147.670074 317 | "2017-01-15" 144.474655 80.174843 146.246643 318 | "2017-01-22" 147.559875 80.41404 148.772095 319 | "2017-01-29" 143.606949 77.304558 145.374207 320 | "2017-02-05" 141.148407 78.261314 144.042618 321 | "2017-02-12" 139.654022 78.22451 149.50676 322 | "2017-02-19" 140.714554 77.506943 150.379196 323 | "2017-02-26" 142.064346 80.036842 153.777069 324 | "2017-03-05" 138.545258 76.853775 155.338257 325 | "2017-03-12" 135.845703 76.255798 156.486191 326 | "2017-03-19" 134.977997 75.897003 156.210693 327 | -------------------------------------------------------------------------------- /notebooks/tfcausal_plot_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/3688b83e9f52909c4ab0b8c4f2d885bb4402fbfd/notebooks/tfcausal_plot_example.png -------------------------------------------------------------------------------- /notebooks/tfcausal_plot_original_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WillianFuks/tfcausalimpact/3688b83e9f52909c4ab0b8c4f2d885bb4402fbfd/notebooks/tfcausal_plot_original_example.png -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | filterwarnings = 3 | ignore::numpy.ComplexWarning 4 | markers= 5 | 
slow: marks a test as being slow to run. 6 | -------------------------------------------------------------------------------- /scripts/build_wheels.sh: -------------------------------------------------------------------------------- 1 | docker run -v $(pwd):/tfcausalimpact quay.io/pypa/manylinux1_x86_64 sh -c ''' 2 | yum update 3 | cd /tfcausalimpact 4 | for PY in /opt/python/*/bin/; do 5 | if [[ ($PY != *"27"*) && ($PY != *"39"*) ]]; then # skip py2.7/py3.9; was ||, which is always true (no path contains both "27" and "39") 6 | "${PY}/pip" install -U pip 7 | "${PY}/pip" install -U setuptools wheel auditwheel 8 | "${PY}/python" setup.py sdist bdist_wheel 9 | fi 10 | done 11 | for whl in dist/*.whl; do 12 | auditwheel repair "$whl" --plat "manylinux2010_x86_64" -w dist/ 13 | rm $whl 14 | done 15 | ''' 16 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | license_file = LICENSE 3 | 4 | [isort] 5 | known_first_party = causalimpact 6 | default_section = THIRDPARTY 7 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright WillianFuks 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License.
14 | 15 | 16 | import os 17 | import sys 18 | 19 | from setuptools import setup 20 | 21 | 22 | here = os.path.abspath(os.path.dirname(__file__)) 23 | 24 | if sys.argv[-1] == 'test-publish': 25 | """ 26 | Publishes to test pypi repository. 27 | """ 28 | if os.path.exists('dist') or os.path.exists('build'): 29 | raise RuntimeError('Please first delete dist/build folders') 30 | os.system('pip install -U twine') 31 | os.system('sh ./scripts/build_wheels.sh') 32 | os.system('twine upload --repository testpypi dist/*') 33 | # os.system('rm -rf build dist .egg *.egg-info') 34 | sys.exit() 35 | 36 | if sys.argv[-1] == 'publish': 37 | """ 38 | Publishes to original pypi repository. 39 | """ 40 | if os.path.exists('dist') or os.path.exists('build'): 41 | raise RuntimeError('Please first delete dist/build folders') 42 | os.system('pip install -U twine') 43 | os.system('sh ./scripts/build_wheels.sh') 44 | os.system('twine upload dist/*') 45 | # os.system('rm -rf build dist .egg *.egg-info') 46 | sys.exit() 47 | 48 | install_requires = [ 49 | 'jinja2', 50 | 'pandas >= 1.3.5, <= 2.2', 51 | 'tensorflow >= 2.10', 52 | 'tensorflow-probability[tf] >= 0.18, <= 0.25', 53 | 'matplotlib', 54 | ] 55 | 56 | tests_require = [ 57 | 'pytest', 58 | 'pytest-cov', 59 | 'mock', 60 | 'pytest-xdist', 61 | 'tox' 62 | ] 63 | setup_requires = [ 64 | 'flake8', 65 | 'isort' 66 | ] 67 | extras_require = { 68 | 'docs': [ 69 | 'ipython', 70 | 'jupyter' 71 | ] 72 | } 73 | 74 | packages = ['causalimpact'] 75 | 76 | _version = {} 77 | _version_path = os.path.join(here, 'causalimpact', '__version__.py') 78 | 79 | with open(_version_path, 'r') as f: 80 | exec(f.read(), _version) 81 | 82 | with open('README.md', 'r') as f: 83 | readme = f.read() 84 | 85 | setup( 86 | name='tfcausalimpact', 87 | version=_version['__version__'], 88 | author='Willian Fuks', 89 | author_email='willian.fuks@gmail.com', 90 | url='https://github.com/WillianFuks/tfcausalimpact', 91 | description= "Python version of Google's Causal 
Impact model on top of Tensorflow Probability.", 92 | long_description=readme, 93 | long_description_content_type='text/markdown', 94 | packages=packages, 95 | include_package_data=True, 96 | install_requires=install_requires, 97 | tests_require=tests_require, 98 | setup_requires=setup_requires, 99 | extras_require=extras_require, 100 | license='Apache License 2.0', 101 | keywords='causal impact', 102 | classifiers=[ 103 | 'Development Status :: 3 - Alpha', 104 | 'Environment :: Console', 105 | 'Intended Audience :: Developers', 106 | 'Intended Audience :: Education', 107 | 'Intended Audience :: Science/Research', 108 | 'License :: OSI Approved :: Apache Software License', 109 | 'Natural Language :: English', 110 | 'Operating System :: POSIX :: Linux', 111 | 'Operating System :: MacOS', 112 | 'Operating System :: Microsoft :: Windows', 113 | 'Programming Language :: Python :: 3.8', 114 | 'Programming Language :: Python :: 3.9', 115 | 'Programming Language :: Python :: 3.10', 116 | 'Programming Language :: Python :: 3.11', 117 | 'Programming Language :: Python :: 3.12', 118 | 'Programming Language :: Python :: Implementation :: CPython', 119 | 'Topic :: Scientific/Engineering', 120 | ], 121 | project_urls={ 122 | 'Source': 'https://github.com/WillianFuks/tfcausalimpact' 123 | }, 124 | python_requires='>=3, <3.13', 125 | test_suite='tests' 126 | ) 127 | -------------------------------------------------------------------------------- /stubs/tensorflow_probability/__init__.pyi: -------------------------------------------------------------------------------- 1 | from .sts import LocalLevel # noqa: F401 2 | -------------------------------------------------------------------------------- /stubs/tensorflow_probability/sts/__init__.pyi: -------------------------------------------------------------------------------- 1 | class StructuralTimeSeries: 2 | pass 3 | 4 | 5 | class LocalLevel(StructuralTimeSeries): 6 | pass 7 | 
-------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | mock 3 | pytest-xdist 4 | flake8 5 | isort 6 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # Copyright WillianFuks 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | 16 | """ 17 | General fixtures for tests. 
18 | """ 19 | 20 | import os 21 | 22 | import numpy as np 23 | import pandas as pd 24 | import pytest 25 | 26 | 27 | @pytest.fixture 28 | def fix_path(): 29 | p = os.path.dirname(os.path.abspath(__file__)) 30 | p = os.path.join(p, 'fixtures') 31 | return p 32 | 33 | 34 | @pytest.fixture 35 | def rand_data(): 36 | return pd.DataFrame(np.random.randn(200, 3), columns=["y", "x1", "x2"]) 37 | 38 | 39 | @pytest.fixture 40 | def date_rand_data(rand_data): 41 | date_rand_data = rand_data.set_index(pd.date_range( 42 | start='20200101', 43 | periods=len(rand_data)) 44 | ) 45 | return date_rand_data 46 | 47 | 48 | @pytest.fixture 49 | def pre_int_gap_period(): 50 | return [10, 89] 51 | 52 | 53 | @pytest.fixture 54 | def post_int_gap_period(): 55 | return [110, 189] 56 | 57 | 58 | @pytest.fixture 59 | def pre_int_period(): 60 | return [0, 99] 61 | 62 | 63 | @pytest.fixture 64 | def post_int_period(): 65 | return [100, 199] 66 | 67 | 68 | @pytest.fixture 69 | def pre_str_period(): 70 | return ['20200101', '20200410'] 71 | 72 | 73 | @pytest.fixture 74 | def post_str_period(): 75 | return ['20200411', '20200718'] 76 | 77 | 78 | @pytest.fixture 79 | def pre_str_gap_period(): 80 | return ['20200201', '20200401'] 81 | 82 | 83 | @pytest.fixture 84 | def post_str_gap_period(): 85 | return ['20200501', '20200701'] 86 | -------------------------------------------------------------------------------- /tests/fixtures/arma_data.csv: -------------------------------------------------------------------------------- 1 | X,y 2 | 99.79529234051529,118.18869351480215 3 | 100.66318023159378,120.23327608983782 4 | 98.88369907650885,118.62777475265177 5 | 100.44894052679454,119.6097224299191 6 | 101.56173409838762,121.39150827240464 7 | 99.98784514442413,119.94915032720753 8 | 100.103847246762,121.22000675625384 9 | 100.1882836307444,121.20686883438057 10 | 100.59956729994188,120.12999307372677 11 | 100.7068241664386,122.42988909392176 12 | 100.37104760778827,119.91652230322246 13 | 
98.3698360438783,118.50080512395002 14 | 101.74213919394167,123.02053579170352 15 | 98.66098760438783,116.82391452006347 16 | 102.5580279912164,122.04714661141381 17 | 98.58420414849682,117.89821805378324 18 | 99.27257895669247,119.34758161129614 19 | 100.28283640183653,120.14600257430331 20 | 101.414472547872,122.36652539391166 21 | 98.28840497107963,116.29710114579274 22 | 101.00069408050668,118.948035649806 23 | 99.5763603379563,118.32480018928234 24 | 103.6302196152701,124.70987064039863 25 | 95.71157482190132,115.55599995776038 26 | 103.28249535724804,123.66442522413078 27 | 97.16987545415017,116.46470835685861 28 | 102.84972565317736,123.52732800531399 29 | 97.9590189802908,116.94427765101415 30 | 101.83782319551362,121.78832342638832 31 | 99.68976891765188,119.61071566435602 32 | 99.56566398901693,118.25465150262693 33 | 99.55974887103085,117.67085873173771 34 | 98.02599436211548,119.26592943914865 35 | 99.91584428554532,120.88802144458363 36 | 99.21498274457745,119.5159194365873 37 | 101.26666082290832,122.07514739781087 38 | 97.59407076768882,118.41960464615387 39 | 102.28516343374062,122.30164255029206 40 | 96.8798404615799,115.9544582740155 41 | 103.14102630018576,124.26802305013551 42 | 94.81365752244913,112.9523979870474 43 | 104.4681652744601,126.68236417198419 44 | 94.43665572511179,113.83195165593062 45 | 104.0362739351402,124.1900910470809 46 | 95.06032320740292,114.2593673630214 47 | 104.73205886035153,125.28674538353751 48 | 96.11913113662101,115.07066438870487 49 | 102.7388954422512,123.26953309506948 50 | 97.86627975216844,118.11985645134362 51 | 103.2700904375472,124.55962088172443 52 | 97.12679529503828,115.79497785160612 53 | 102.83255834444778,124.11715584781372 54 | 97.43883588892592,116.62232999073395 55 | 103.30985929075375,122.29404090314283 56 | 98.34832125210725,118.44497158767757 57 | 100.56724931535842,119.11695933270022 58 | 97.94036917789298,117.16095549198862 59 | 101.8758523385545,123.29693533952079 60 | 
99.07009604031653,120.10406961330044 61 | 100.17638923599056,119.96396796699204 62 | 100.70382977013855,120.42836359241525 63 | 99.35652130478655,119.11107856148179 64 | 100.62914018159628,118.91018060181236 65 | 100.1039894309136,122.19349516834825 66 | 100.7593745444178,120.13428197970708 67 | 98.36069405772956,119.47299973810199 68 | 101.45188202735156,121.63170107325712 69 | 96.38907229658645,116.89427375074946 70 | 102.59736609161129,125.03762357178593 71 | 96.44406854096869,116.47931528740291 72 | 101.86772860544485,124.46593391669431 73 | 99.39366694519461,118.59299992373522 74 | 101.26934125462277,122.25057828712784 75 | 99.54759472364866,118.58838293399887 76 | 101.40870819121,120.47659892388366 77 | 98.22907523655066,117.4042593526453 78 | 100.97155806204476,120.24662797737021 79 | 98.2044291361467,117.00648827474254 80 | 100.88980028480714,121.50291564633136 81 | 99.42207528950875,118.74868563027614 82 | 100.571448340363,120.11828313743037 83 | 98.32797702736057,117.62093088012 84 | 102.3215276112661,121.85927623225994 85 | 98.34423475551462,119.76819009242308 86 | 102.50092566604046,124.21092078533083 87 | 99.57404211190962,120.75887526602952 88 | 99.38584385157088,118.28863449538171 89 | 101.40333163232312,121.0492887035768 90 | 98.60542392969717,117.93080796330037 91 | 102.16753261531487,122.31160323843288 92 | 98.23743132665285,117.15062052005919 93 | 103.7557732437024,123.77842321393275 94 | 96.50487587608501,116.64462612397605 95 | 105.14930907394003,126.44606410145911 96 | 95.39523198576826,115.19547272181907 97 | 104.9395443686699,126.8384358842968 98 | 95.67251982188765,113.78612117300449 99 | 103.14620061180939,122.36202469537284 100 | 97.7533891872239,118.6006748666655 101 | 102.17462630429846,122.86182677404162 102 | -------------------------------------------------------------------------------- /tests/fixtures/arma_sparse_reg.csv: -------------------------------------------------------------------------------- 1 | y,X,X2 2 | 
118.82779390735897,99.79529234051529,0.41536335472100294 3 | 122.55092466371808,100.66318023159378,0.2644399745754834 4 | 119.87024887789289,98.88369907650885,0.09739316536577858 5 | 121.80875336389143,100.44894052679454,0.4858442218780017 6 | 120.8997027915618,101.56173409838762,0.46466286286095304 7 | 119.35070491809803,99.98784514442413,0.029759316991142204 8 | 119.72891594377818,100.103847246762,0.6942774618573755 9 | 119.9365044569483,100.1882836307444,0.716947112365226 10 | 119.98518368800602,100.59956729994188,0.7298114232911694 11 | 120.1196843212162,100.7068241664386,0.4143510171895457 12 | 121.28403220201997,100.37104760778827,0.015098844794159016 13 | 118.31069646538502,98.3698360438783,0.9089751574841473 14 | 122.81176137162716,101.74213919394167,0.78937871811036 15 | 119.30416776715832,98.66098760438783,0.16519916916317656 16 | 122.048730976199,102.5580279912164,0.312785961326271 17 | 116.88762893939776,98.58420414849682,0.610945305819596 18 | 120.42370259002777,99.27257895669247,0.3644902867188591 19 | 120.59167889108731,100.28283640183653,0.1560385892014995 20 | 122.82484816100373,101.414472547872,0.17730381342455181 21 | 117.37772251842746,98.28840497107963,0.8678896710470461 22 | 121.510195064733,101.00069408050668,0.2900946684444935 23 | 118.91424693278974,99.5763603379563,0.5851796213146614 24 | 123.18762946499795,103.6302196152701,0.45399487590946896 25 | 114.02886981445118,95.71157482190132,0.41117813217253885 26 | 121.29458493827372,103.28249535724804,0.8826344451768418 27 | 116.45086474163317,97.16987545415017,0.692708014794749 28 | 122.66774978075887,102.84972565317736,0.2792733551957505 29 | 117.41821352441917,97.9590189802908,0.06444023118528353 30 | 123.66268753925488,101.83782319551362,0.19862361375884896 31 | 120.23723454581736,99.68976891765188,0.9316827447029558 32 | 118.98501752984414,99.56566398901693,0.8544135677806904 33 | 120.71167852752005,99.55974887103085,0.9547347345609252 34 | 
117.49547109430144,98.02599436211548,0.05225334824317296 35 | 121.3290549486127,99.91584428554532,0.579471680590934 36 | 118.21112684216483,99.21498274457745,0.48049626664912937 37 | 122.1232751175363,101.26666082290832,0.02170897897556423 38 | 118.37645717770268,97.59407076768882,0.37362046373439495 39 | 122.48670556432667,102.28516343374062,0.4140918011765592 40 | 115.81012017402341,96.8798404615799,0.6039072339140044 41 | 124.23759824145849,103.14102630018576,0.6717487273552375 42 | 112.81478510259193,94.81365752244913,0.8388657004383164 43 | 123.53729379209987,104.4681652744601,0.779526208318874 44 | 113.94941502663168,94.43665572511179,0.4007010440412312 45 | 125.8664011064698,104.0362739351402,0.794529231389683 46 | 115.17981244965232,95.06032320740292,0.893124310397603 47 | 125.7694077218868,104.73205886035153,0.2624896908588915 48 | 114.99284870719505,96.11913113662101,0.9891970073781059 49 | 123.504631547181,102.7388954422512,0.8533070990157379 50 | 116.54472257210303,97.86627975216844,0.731477156037757 51 | 122.18261457033441,103.2700904375472,0.35556562470635356 52 | 115.49989861447634,97.12679529503828,0.8832894907073365 53 | 124.83567279913285,102.83255834444778,0.8679590908270853 54 | 116.35039568035445,97.43883588892592,0.9557664458336379 55 | 121.55153671981695,103.30985929075375,0.0001072565197369757 56 | 116.95565587422412,98.34832125210725,0.016541041918974653 57 | 120.91807143998835,100.56724931535842,0.31407005725874404 58 | 117.52940038253553,97.94036917789298,0.9953161745927157 59 | 122.31627598710527,101.8758523385545,0.14884422315599055 60 | 117.51659114336434,99.07009604031653,0.16737711870850436 61 | 120.1813870313144,100.17638923599056,0.7585720369179193 62 | 121.78508504556196,100.70382977013855,0.0695296661543231 63 | 118.58538881523907,99.35652130478655,0.7054734385962705 64 | 121.79514746381054,100.62914018159628,0.46916528177909556 65 | 119.04186506142776,100.1039894309136,0.010188216125312843 66 | 
121.34046304163596,100.7593745444178,0.774823862866262 67 | 117.7966092007572,98.36069405772956,0.794201008360474 68 | 122.38407624908236,101.45188202735156,0.14956945174586556 69 | 115.3352261992049,96.38907229658645,0.02370363179008894 70 | 124.51091153611854,102.59736609161129,0.7620637708943899 71 | 114.65614030512168,96.44406854096869,0.22367017989334437 72 | 127.04880834457201,101.86772860544485,0.2621743977651142 73 | 123.40121268353182,99.39366694519461,0.4568695027754949 74 | 126.94406150245283,101.26934125462277,0.2499269185070182 75 | 123.24570260171626,99.54759472364866,0.5682835615907468 76 | 126.43158291733643,101.40870819121,0.8469429969102438 77 | 122.29324343380432,98.22907523655066,0.37809953702957055 78 | 124.90544904680416,100.97155806204476,0.43246511643203667 79 | 123.30988975687491,98.2044291361467,0.8326191762741114 80 | 124.99751943423686,100.88980028480714,0.3711319356941939 81 | 125.1107130452493,99.42207528950875,0.040553271926162204 82 | 125.52900250071599,100.571448340363,0.5546714859719869 83 | 125.00396244403287,98.32797702736057,0.4512462428109202 84 | 126.89872870313488,102.3215276112661,0.7253009767267311 85 | 122.03514547426589,98.34423475551462,0.37845052231459564 86 | 127.73389344972205,102.50092566604046,0.8406624957089064 87 | 124.97218835616923,99.57404211190962,0.4693146935836243 88 | 123.86267988849738,99.38584385157088,0.5626434290121737 89 | 127.13387837394157,101.40333163232312,0.661199339245059 90 | 123.7261026690508,98.60542392969717,0.46224186752467133 91 | 127.4494643346309,102.16753261531487,0.6236369454887165 92 | 120.32698353598066,98.23743132665285,0.22188063118536006 93 | 129.66773473344097,103.7557732437024,0.732863116755199 94 | 120.88237611904991,96.50487587608501,0.38168208869599585 95 | 130.88196672266628,105.14930907394003,0.19483454177056447 96 | 118.18000435921215,95.39523198576826,0.27116277479396844 97 | 130.0422732296236,104.9395443686699,0.24922505196281253 98 | 
119.61952726016338,95.67251982188765,0.15213906225546658 99 | 128.28188073432665,103.14620061180939,0.7713737042013263 100 | 122.18865406068944,97.7533891872239,0.25541173236397685 101 | 127.25880695767154,102.17462630429846,0.12754340504853445 102 | -------------------------------------------------------------------------------- /tests/fixtures/basque.csv: -------------------------------------------------------------------------------- 1 | "date","Basque","another" 2 | 1955-01-01,3.85318463000527,3.54662963030373 3 | 1956-01-01,3.94565829615088,3.69044556954152 4 | 1957-01-01,4.03356173487263,3.82683499817574 5 | 1958-01-01,4.02342189689665,3.8756783776064 6 | 1959-01-01,4.01378196840523,3.92173673384055 7 | 1960-01-01,4.28591839622273,4.24178820002321 8 | 1961-01-01,4.57433609579741,4.57533547892566 9 | 1962-01-01,4.89895735356304,4.83804641196265 10 | 1963-01-01,5.19701498162913,5.08133409636867 11 | 1964-01-01,5.33890297875272,5.15809787814531 12 | 1965-01-01,5.46515300525185,5.22365052507319 13 | 1966-01-01,5.54591562706414,5.33247650503874 14 | 1967-01-01,5.61489572663949,5.42944892070458 15 | 1968-01-01,5.85218493307158,5.67437885353069 16 | 1969-01-01,6.08140541736959,5.91552394419117 17 | 1970-01-01,6.17009424134957,6.06683787193614 18 | 1971-01-01,6.28363340454625,6.22764920820614 19 | 1972-01-01,6.55555539865284,6.53906012902564 20 | 1973-01-01,6.81076856110308,6.83797505609446 21 | 1974-01-01,7.1051843028108,6.98736082380481 22 | 1975-01-01,7.37789168217563,7.12489302721544 23 | 1976-01-01,7.23293362192275,7.13538981886872 24 | 1977-01-01,7.08983137211913,7.1429590673591 25 | 1978-01-01,6.78670360714461,7.01935154409008 26 | 1979-01-01,6.6398173868571,7.01099691034147 27 | 1980-01-01,6.56283917136956,7.07883467098128 28 | 1981-01-01,6.50078545499277,7.1822335603611 29 | 1982-01-01,6.54505860699956,7.28720365609826 30 | 1983-01-01,6.59532980113941,7.3978863181948 31 | 1984-01-01,6.76149675009149,7.48429002846285 32 | 
1985-01-01,6.93716067172772,7.5699798313763 33 | 1986-01-01,7.33219115130052,8.07769173930216 34 | 1987-01-01,7.74278812359415,8.58397583251883 35 | 1988-01-01,8.12053664075889,9.05741234228836 36 | 1989-01-01,8.50971116232416,9.52584975721223 37 | 1990-01-01,8.7767778890741,9.78506175969812 38 | 1991-01-01,9.02527866619582,10.0506998000571 39 | 1992-01-01,8.87389282470633,9.83790310748268 40 | 1993-01-01,8.71822353908928,9.62510728658999 41 | 1994-01-01,9.01813784928637,10.0064270838912 42 | 1995-01-01,9.44087386165337,10.33990288489 43 | 1996-01-01,9.68651813767495,10.5762637502566 44 | 1997-01-01,10.1706658728087,11.0454159442168 45 | -------------------------------------------------------------------------------- /tests/fixtures/btc.csv: -------------------------------------------------------------------------------- 1 | Date,Bitcoin,CARL-B.CO,CRSP,DE,GOOG,JD,KTOS,SQ,TRMB,ZAL.DE 2 | 2020-12-01,18802.998046875,907.9114990234375,129.5800018310547,253.12423706054688,1798.0999755859375,85.36000061035156,21.479999542236328,203.0,60.72999954223633,82.30000305175781 3 | 2020-12-02,19201.091796875,916.7147827148438,131.5399932861328,256.9638366699219,1827.949951171875,84.37999725341797,21.670000076293945,202.0,60.66999816894531,81.22000122070312 4 | 2020-12-03,19445.3984375,914.1715698242188,140.4600067138672,250.69349670410156,1826.77001953125,86.29000091552734,22.299999237060547,205.52999877929688,61.0099983215332,80.05999755859375 5 | 2020-12-04,18699.765625,921.0185546875,147.5399932861328,251.39791870117188,1827.989990234375,85.19000244140625,23.100000381469727,208.14999389648438,63.02000045776367,78.77999877929688 6 | 2020-12-07,19191.630859375,911.0415649414062,149.72000122070312,250.02877807617188,1819.47998046875,83.48999786376953,23.469999313354492,212.5800018310547,63.08000183105469,79.04000091552734 7 | 
2020-12-08,18321.14453125,912.8021850585938,146.47000122070312,250.6141357421875,1818.550048828125,82.01000213623047,24.239999771118164,213.00999450683594,63.099998474121094,78.73999786376953 8 | 2020-12-09,18553.916015625,929.2349853515625,146.6300048828125,252.82659912109375,1784.1300048828125,81.81999969482422,23.719999313354492,207.0399932861328,63.2400016784668,79.45999908447266 9 | 2020-12-10,18264.9921875,922.3880004882812,154.25999450683594,251.4574432373047,1775.3299560546875,81.70999908447266,23.579999923706055,217.25,62.380001068115234,78.87999725341797 10 | 2020-12-11,18058.904296875,919.4535522460938,148.41000366210938,253.75921630859375,1781.77001953125,82.61000061035156,23.850000381469727,216.58999633789062,62.84000015258789,79.23999786376953 11 | 2020-12-14,19246.64453125,932.16943359375,150.05999755859375,250.7332000732422,1760.06005859375,79.95999908447266,23.510000228881836,215.86000061035156,64.63999938964844,84.5199966430664 12 | 2020-12-15,19417.076171875,926.6918334960938,149.97000122070312,259.9302978515625,1767.77001953125,79.5,24.43000030517578,219.99000549316406,65.30000305175781,82.5199966430664 13 | 2020-12-16,21310.59765625,928.8436889648438,146.61000061035156,259.067138671875,1763.0,81.7699966430664,23.530000686645508,227.0800018310547,65.62000274658203,85.05999755859375 14 | 2020-12-17,22805.162109375,946.0590209960938,149.1300048828125,262.4503479003906,1747.9000244140625,82.70999908447266,25.09000015258789,230.74000549316406,65.97000122070312,89.68000030517578 15 | 2020-12-18,23137.9609375,942.928955078125,148.61000061035156,268.57183837890625,1731.010009765625,81.52999877929688,25.760000228881836,235.4499969482422,66.1500015258789,91.0999984741211 16 | 2020-12-21,22803.08203125,926.6918334960938,166.83999633789062,265.64501953125,1739.3699951171875,80.63999938964844,26.219999313354492,233.5,66.62999725341797,89.4000015258789 17 | 
2020-12-22,23783.029296875,929.8218383789062,170.0800018310547,267.1431579589844,1723.5,82.79000091552734,26.079999923706055,241.5800018310547,66.18000030517578,90.5999984741211 18 | 2020-12-23,23241.345703125,931.9737548828125,173.22999572753906,265.8335266113281,1732.3800048828125,85.45999908447266,26.25,230.67999267578125,66.58000183105469,89.19999694824219 19 | 2020-12-28,27084.80859375,948.6021728515625,168.92999267578125,265.39697265625,1776.0899658203125,81.61000061035156,27.850000381469727,223.33999633789062,66.43000030517578,88.4000015258789 20 | 2020-12-29,27362.4375,962.1005249023438,157.8300018310547,263.2539367675781,1758.719970703125,86.13999938964844,27.469999313354492,214.0,65.7699966430664,91.4000015258789 21 | 2020-12-30,28840.953125,953.8841552734375,161.57000732421875,264.81610107421875,1739.52001953125,89.5199966430664,27.770000457763672,221.0399932861328,66.33000183105469,91.05999755859375 22 | 2021-01-04,31971.9140625,944.4939575195312,158.00999450683594,266.29864501953125,1728.239990234375,86.33000183105469,26.3700008392334,221.16000366210938,66.29000091552734,94.08000183105469 23 | 2021-01-05,33992.4296875,950.9497680664062,160.6199951171875,272.9053649902344,1740.9200439453125,95.5,26.90999984741211,229.75,66.8499984741211,96.5999984741211 24 | 2021-01-06,36824.36328125,926.4962158203125,163.85000610351562,288.9047546386719,1735.2900390625,88.18000030517578,26.690000534057617,226.88999938964844,68.41000366210938,96.0999984741211 25 | 2021-01-07,39371.04296875,920.236083984375,194.42999267578125,298.2278137207031,1787.25,87.91999816894531,26.950000762939453,239.47999572753906,71.83999633789062,96.54000091552734 26 | 2021-01-08,40797.609375,918.6710815429688,193.82000732421875,292.4369812011719,1807.2099609375,91.54000091552734,26.56999969482422,241.4499969482422,72.31999969482422,97.58000183105469 27 | 
2021-01-11,35566.65625,894.6087036132812,188.8800048828125,293.4419250488281,1766.719970703125,90.37999725341797,26.360000610351562,225.5,73.08000183105469,95.0 28 | 2021-01-12,33922.9609375,892.0655517578125,190.33999633789062,300.57598876953125,1746.550048828125,88.54000091552734,28.079999923706055,227.52000427246094,71.91000366210938,96.37999725341797 29 | 2021-01-13,37316.359375,901.4557495117188,193.83999633789062,299.7103271484375,1754.4000244140625,90.44000244140625,27.760000228881836,230.9199981689453,70.0199966430664,97.19999694824219 30 | 2021-01-14,39187.328125,900.8688354492188,210.0399932861328,301.3321838378906,1740.1800537109375,89.23999786376953,30.399999618530273,232.7899932861328,69.87000274658203,97.18000030517578 31 | 2021-01-15,36825.3671875,902.6295166015625,199.8800048828125,293.9195251464844,1736.18994140625,87.7699966430664,29.549999237060547,227.75,69.45999908447266,93.23999786376953 32 | 2021-01-19,36069.8046875,904.7814331054688,198.2899932861328,293.51153564453125,1790.8599853515625,91.1500015258789,30.0,227.25999450683594,69.19000244140625,93.69999694824219 33 | 2021-01-20,35547.75,925.7136840820312,189.77000427246094,299.4317626953125,1886.9000244140625,95.30999755859375,29.360000610351562,226.64999389648438,71.41999816894531,95.19999694824219 34 | 2021-01-21,30825.69921875,912.8021850585938,185.10000610351562,303.9688720703125,1891.25,95.0999984741211,29.43000030517578,219.6199951171875,70.30999755859375,98.45999908447266 35 | 2021-01-22,33005.76171875,910.6502685546875,188.49000549316406,303.64056396484375,1901.050048828125,94.91000366210938,29.950000762939453,222.8800048828125,70.76000213623047,98.4800033569336 36 | 2021-01-25,32366.392578125,905.7595825195312,181.24000549316406,296.70550537109375,1899.4000244140625,98.37999725341797,29.969999313354492,216.63999938964844,70.86000061035156,100.25 37 | 
2021-01-26,32569.849609375,904.39013671875,169.52999877929688,294.5264587402344,1917.239990234375,96.97000122070312,29.15999984741211,209.1699981689453,69.66999816894531,100.0 38 | 2021-01-27,30432.546875,885.60986328125,163.9499969482422,279.3230285644531,1830.7900390625,90.08999633789062,27.90999984741211,202.4600067138672,66.6500015258789,97.04000091552734 39 | 2021-01-28,31649.60546875,896.7606201171875,167.4199981689453,291.4917297363281,1863.1099853515625,91.41000366210938,27.84000015258789,219.91000366210938,67.62999725341797,96.08000183105469 40 | 2021-01-29,34316.38671875,880.5234985351562,165.6999969482422,287.3525695800781,1835.739990234375,88.69000244140625,26.540000915527344,215.9600067138672,65.91000366210938,94.72000122070312 41 | 2021-02-01,33537.17578125,890.304931640625,165.67999267578125,290.7454833984375,1901.3499755859375,91.2699966430664,27.010000228881836,221.94000244140625,67.72000122070312,95.94000244140625 42 | 2021-02-02,35510.2890625,906.5420532226562,168.14999389648438,301.02374267578125,1927.510009765625,95.41999816894531,27.920000076293945,227.6300048828125,70.80999755859375,95.37999725341797 43 | 2021-02-03,37472.08984375,904.7814331054688,164.5,299.6307678222656,2070.070068359375,95.5,28.6299991607666,227.94000244140625,70.5999984741211,97.81999969482422 44 | 2021-02-04,36926.06640625,911.62841796875,167.7899932861328,307.7200012207031,2062.3701171875,94.62999725341797,29.329999923706055,237.72000122070312,71.43000030517578,97.23999786376953 45 | 2021-02-05,38144.30859375,943.7114868164062,168.80999755859375,307.69012451171875,2098.0,96.63999938964844,29.290000915527344,240.3800048828125,72.2699966430664,98.5 46 | 2021-02-08,46196.46484375,954.4710083007812,169.22999572753906,315.4510803222656,2092.909912109375,94.56999969482422,30.3799991607666,259.9700012207031,73.98999786376953,98.0 47 | 
2021-02-09,46481.10546875,949.9716186523438,167.5500030517578,313.7496032714844,2083.510009765625,97.06999969482422,32.77000045776367,258.1199951171875,74.72000122070312,98.4000015258789 48 | -------------------------------------------------------------------------------- /tests/fixtures/comparison_data.csv: -------------------------------------------------------------------------------- 1 | DATE,CHANGED,NOT_CHANGED_1,NOT_CHANGED_2,NOT_CHANGED_3 2 | 2019-04-16,83836.5,85642.5,86137.5,81241.5 3 | 2019-04-17,83887.5,86326.5,85036.5,80877 4 | 2019-04-18,82662,87456,84409.5,80910 5 | 2019-04-19,83271,89551.5,87568.5,82150.5 6 | 2019-04-20,84210,90256.5,86602.5,83083.5 7 | 2019-04-21,79039.5,86418,82476,78921 8 | 2019-04-22,83040,89076,85146,82989 9 | 2019-04-23,84906,90870,86901,83745 10 | 2019-04-24,85089,86722.5,85267.5,82348.5 11 | 2019-04-25,82953,87619.5,83488.5,80907 12 | 2019-04-26,80076,85671,81501,78462 13 | 2019-04-27,80088,85320,80931,79519.5 14 | 2019-04-28,83431.5,88345.5,84702,81304.5 15 | 2019-04-29,85744.5,86787,87007.5,82500 16 | 2019-04-30,84747,85066.5,85149,81642 17 | 2019-05-01,82051.5,85293,85062,80187 18 | 2019-05-02,83794.5,86668.5,84967.5,79845 19 | 2019-05-03,80802,85710,81979.5,78684 20 | 2019-05-04,78313.5,83244,80875.5,78499.5 21 | 2019-05-05,78931.5,84523.5,80277,78462 22 | 2019-05-06,80523,87694.5,82500,80268 23 | 2019-05-07,114019.5,102448.5,121420.5,114240 24 | 2019-05-08,120520.5,107023.5,122865,115831.5 25 | 2019-05-09,87372,88839,86068.5,82033.5 26 | 2019-05-10,90376.5,87847.5,96744,92529 27 | 2019-05-11,85422,86869.5,84742.5,83779.5 28 | 2019-05-12,78696,83637,80853,79390.5 29 | 2019-05-13,80496,84703.5,83292,82272 30 | 2019-05-14,82659,85020,83163,82983 31 | 2019-05-15,81018,82870.5,81750,82171.5 32 | 2019-05-16,79528.5,83644.5,82396.5,82411.5 33 | 2019-05-17,79146,82326,82164,79459.5 34 | 2019-05-18,79645.5,82638,82150.5,79828.5 35 | 2019-05-19,84414,88573.5,86011.5,82750.5 36 | 2019-05-20,82899,89371.5,86311.5,84153 37 | 
2019-05-21,82974,84795,86337,85107 38 | 2019-05-22,78043.5,79705.5,83578.5,79143 39 | 2019-05-23,75567,76149,79428,78142.5 40 | 2019-05-24,73971,77277,78478.5,75268.5 41 | 2019-05-25,70807.5,74059.5,73674,72775.5 42 | 2019-05-26,68449.5,75600,72816,69562.5 43 | 2019-05-27,75682.5,80227.5,77728.5,77661 44 | 2019-05-28,74301,79639.5,77962.5,77854.5 45 | 2019-05-29,75523.5,78537,79843.5,78288 46 | 2019-05-30,74298,77182.5,76795.5,76453.5 47 | 2019-05-31,69274.5,72475.5,72121.5,70200 48 | 2019-06-01,68787,74340,72805.5,69526.5 49 | 2019-06-02,71407.5,77667,75006,71412 50 | 2019-06-03,74041.5,79485,77041.5,73351.5 51 | 2019-06-04,74829,79294.5,76557,75373.5 52 | 2019-06-05,73030.5,76639.5,77014.5,74985 53 | 2019-06-06,71533.5,76116,75408,74311.5 54 | 2019-06-07,70441.5,74577,73089,71988 55 | 2019-06-08,71476.5,77071.5,75435,72768 56 | 2019-06-09,72342,78253.5,74493,71745 57 | 2019-06-10,75549,81898.5,78918,77343 58 | 2019-06-11,73360.5,78333,76722,74418 59 | 2019-06-12,75052.5,79086,78048,75807 60 | 2019-06-13,74577,77136,76663.5,74341.5 61 | 2019-06-14,70398,75723,73162.5,70561.5 62 | 2019-06-15,71511,78015,75091.5,72811.5 63 | 2019-06-16,72981,80059.5,75930,73260 64 | 2019-06-17,75700.5,80071.5,78834,76275 65 | 2019-06-18,76126.5,81747,79234.5,77827.5 66 | 2019-06-19,75819,79822.5,77856,76245 67 | 2019-06-20,74436,78069,76314,75525 68 | 2019-06-21,72717,74418,75436.5,73684.5 69 | 2019-06-22,72309,76647,75193.5,74148 70 | 2019-06-23,74296.5,76903.5,75774,75559.5 71 | 2019-06-24,76222.5,78928.5,79677,77593.5 72 | 2019-06-25,77190,80506.5,77746.5,78598.5 73 | 2019-06-26,74283,76129.5,74694,76170 74 | 2019-06-27,74715,78864,76060.5,75702 75 | 2019-06-28,72889.5,76474.5,74448,73899 76 | 2019-06-29,73645.5,78024,75457.5,74944.5 77 | 2019-06-30,74923.5,79464,76024.5,76746 78 | 2019-07-01,75862.5,78504,76608,76771.5 79 | 2019-07-02,76506,77835,76671,76267.5 80 | 2019-07-03,75157.5,76818,75820.5,75765 81 | 2019-07-04,70852.5,74109,71260.5,70942.5 82 | 
2019-07-05,68314.5,72430.5,70762.5,68914.5 83 | 2019-07-06,74968.5,78663,76440,76345.5 84 | 2019-07-07,74503.5,77574,75442.5,75067.5 85 | 2019-07-08,77452.5,81088.5,78934.5,80007 86 | 2019-07-09,77817,81534,79449,81091.5 87 | 2019-07-10,77274,81549,79366.5,79345.5 88 | 2019-07-11,77259,80713.5,78501,77644.5 89 | 2019-07-12,76804.5,80631,78795,76782 90 | 2019-07-13,76639.5,79656,78729,76777.5 91 | 2019-07-14,77949,80001,77461.5,76137 92 | 2019-07-15,76608,77917.5,76702.5,78174 93 | 2019-07-16,76554,79224,77437.5,78528 94 | 2019-07-17,74914.5,78832.5,76596,76498.5 95 | 2019-07-18,76384.5,80602.5,79272,78270 96 | 2019-07-19,77904,84619.5,80229,79615.5 97 | 2019-07-20,79125,85050,81780,80104.5 98 | 2019-07-21,78469.5,85371,80848.5,79836 99 | 2019-07-22,80817,83827.5,84658.5,82258.5 100 | 2019-07-23,81544.5,84777,86796,83638.5 101 | 2019-07-24,77050.5,79228.5,80088,79930.5 102 | 2019-07-25,77593.5,78652.5,80061,81315 103 | 2019-07-26,77590.5,77715,80113.5,80227.5 104 | 2019-07-27,77482.5,77965.5,79423.5,77418 105 | 2019-07-28,79911,81244.5,81961.5,82143 106 | 2019-07-29,82662,84888,86212.5,85449 107 | 2019-07-30,84900,85710,89061,85162.5 108 | 2019-07-31,79314,80242.5,83842.5,79912.5 109 | 2019-08-01,75514.5,76407,78832.5,78643.5 -------------------------------------------------------------------------------- /tests/fixtures/google_data.csv: -------------------------------------------------------------------------------- 1 | y,x1,x2,date 2 | 110.0,134.0,128.0,2020-01-01 3 | 125.0,134.0,128.0,2020-01-02 4 | 123.0,134.0,128.0,2020-01-03 5 | 128.0,134.0,128.0,2020-01-04 6 | 114.0,134.0,128.0,2020-01-05 7 | 125.0,133.0,128.0,2020-01-06 8 | 119.0,133.0,128.0,2020-01-07 9 | 121.0,133.0,128.0,2020-01-08 10 | 139.0,133.0,128.0,2020-01-09 11 | 107.0,133.0,128.0,2020-01-10 12 | 115.0,132.0,128.0,2020-01-11 13 | 91.0,132.0,128.0,2020-01-12 14 | 107.0,132.0,128.0,2020-01-13 15 | 124.0,132.0,128.0,2020-01-14 16 | 116.0,131.0,128.0,2020-01-15 17 | 110.0,131.0,128.0,2020-01-16 18 | 
100.0,131.0,128.0,2020-01-17 19 | 110.0,131.0,128.0,2020-01-18 20 | 113.0,129.0,128.0,2020-01-19 21 | 103.0,129.0,128.0,2020-01-20 22 | 117.0,129.0,128.0,2020-01-21 23 | 125.0,129.0,128.0,2020-01-22 24 | 115.0,129.0,128.0,2020-01-23 25 | 114.0,128.0,128.0,2020-01-24 26 | 138.0,128.0,128.0,2020-01-25 27 | 117.0,128.0,128.0,2020-01-26 28 | 104.0,128.0,128.0,2020-01-27 29 | 123.0,128.0,128.0,2020-01-28 30 | 122.0,128.0,128.0,2020-01-29 31 | 150.0,128.0,128.0,2020-01-30 32 | 127.0,128.0,128.0,2020-01-31 33 | 139.0,128.0,128.0,2020-02-01 34 | 139.0,127.0,127.0,2020-02-02 35 | 109.0,127.0,127.0,2020-02-03 36 | 107.0,127.0,127.0,2020-02-04 37 | 94.0,127.0,127.0,2020-02-05 38 | 112.0,127.0,127.0,2020-02-06 39 | 107.0,127.0,127.0,2020-02-07 40 | 126.0,127.0,127.0,2020-02-08 41 | 114.0,127.0,127.0,2020-02-09 42 | 129.0,127.0,127.0,2020-02-10 43 | 113.0,126.0,127.0,2020-02-11 44 | 114.0,126.0,127.0,2020-02-12 45 | 116.0,126.0,127.0,2020-02-13 46 | 110.0,125.0,126.0,2020-02-14 47 | 131.0,125.0,126.0,2020-02-15 48 | 109.0,125.0,126.0,2020-02-16 49 | 114.0,125.0,127.0,2020-02-17 50 | 116.0,125.0,126.0,2020-02-18 51 | 113.0,124.0,125.0,2020-02-19 52 | 108.0,124.0,125.0,2020-02-20 53 | 120.0,124.0,125.0,2020-02-21 54 | 106.0,123.0,125.0,2020-02-22 55 | 123.0,123.0,125.0,2020-02-23 56 | 123.0,123.0,124.0,2020-02-24 57 | 135.0,123.0,124.0,2020-02-25 58 | 127.0,123.0,124.0,2020-02-26 59 | 140.0,123.0,123.0,2020-02-27 60 | 139.0,123.0,123.0,2020-02-28 61 | 137.0,123.0,123.0,2020-02-29 62 | 123.0,123.0,123.0,2020-03-01 63 | 160.0,122.0,123.0,2020-03-02 64 | 173.0,122.0,123.0,2020-03-03 65 | 236.0,122.0,123.0,2020-03-04 66 | 233.0,122.0,123.0,2020-03-05 67 | 193.0,122.0,123.0,2020-03-06 68 | 169.0,122.0,123.0,2020-03-07 69 | 167.0,122.0,123.0,2020-03-08 70 | 172.0,121.0,123.0,2020-03-09 71 | 148.0,121.0,123.0,2020-03-10 72 | 125.0,121.0,123.0,2020-03-11 73 | 132.0,121.0,123.0,2020-03-12 74 | 165.0,121.0,123.0,2020-03-13 75 | 154.0,120.0,123.0,2020-03-14 76 | 158.0,120.0,123.0,2020-03-15 
77 | 135.0,120.0,123.0,2020-03-16 78 | 145.0,120.0,123.0,2020-03-17 79 | 163.0,119.0,122.0,2020-03-18 80 | 146.0,119.0,122.0,2020-03-19 81 | 120.0,119.0,121.0,2020-03-20 82 | 149.0,118.0,121.0,2020-03-21 83 | 140.0,118.0,121.0,2020-03-22 84 | 150.0,117.0,121.0,2020-03-23 85 | 133.0,117.0,120.0,2020-03-24 86 | 143.0,117.0,120.0,2020-03-25 87 | 145.0,117.0,120.0,2020-03-26 88 | 145.0,117.0,120.0,2020-03-27 89 | 176.0,117.0,120.0,2020-03-28 90 | 134.0,117.0,120.0,2020-03-29 91 | 147.0,117.0,120.0,2020-03-30 92 | 131.0,117.0,120.0,2020-03-31 93 | -------------------------------------------------------------------------------- /tests/fixtures/season_data.csv: -------------------------------------------------------------------------------- 1 | date,revenue 2 | 20180804,37.41788180644533 3 | 20180805,103.13541901229372 4 | 20180806,47.493488770788694 5 | 20180807,100.92308599961706 6 | 20180808,114.22017005623331 7 | 20180809,65.4023626914725 8 | 20180810,123.32388907859631 9 | 20180811,166.34851923595792 10 | 20180812,63.874886182662294 11 | 20180813,57.77096292523924 12 | 20180814,65.59935159064743 13 | 20180815,638.8743488727109 14 | 20180816,79.72252015637939 15 | 20180817,55.15896784272935 16 | 20180818,28.603129835812982 17 | 20180819,43.552822710196956 18 | 20180820,57.90434953108399 19 | 20180821,51.72057710708568 20 | 20180822,48.03136943531721 21 | 20180823,77.4227710279723 22 | 20180824,47.16257335095286 23 | 20180825,34.37128324786863 24 | 20180826,38.63772317711256 25 | 20180827,123.54626039077412 26 | 20180828,155.12034380052248 27 | 20180829,35.44648863641337 28 | 20180830,32.0939464824336 29 | 20180831,181.47081879401037 30 | 20180901,41.142131852403026 31 | 20180902,151.78434727967888 32 | 20180903,546.0237853547194 33 | 20180904,311.5649470945726 34 | 20180905,2416.235745577139 35 | 20180906,33.64904115359729 36 | 20180907,44.106061949003774 37 | 20180908,24.7040114361683 38 | 20180909,48.620920597507414 39 | 20180910,199.08370643418732 40 | 
20180911,135.21965134049336 41 | 20180912,39.415059837022795 42 | 20180913,118.3226265249833 43 | 20180914,34.16097982877402 44 | 20180915,72.56302665408269 45 | 20180916,524.9481973221193 46 | 20180917,198.65500494545162 47 | 20180918,153.63929615722103 48 | 20180919,39.95272091362296 49 | 20180920,55.07987818558617 50 | 20180921,134.14869721933707 51 | 20180922,1847.0300273123391 52 | 20180923,23.648384747331917 53 | 20180924,43.1728750774353 54 | 20180925,279.4272322630841 55 | 20180926,160.29214353713294 56 | 20180927,42.890970185462535 57 | 20180928,28.07325969694612 58 | 20180929,1045.888967307241 59 | 20180930,49.17677292361876 60 | 20181001,321.4879455196856 61 | 20181002,33.6546143232996 62 | 20181003,42.312195862698466 63 | 20181004,186.8148416835036 64 | 20181005,9196.870130770758 65 | 20181006,70.47179596911984 66 | 20181007,149.18349010503977 67 | 20181008,58.63616828685715 68 | 20181009,141.03604857651976 69 | 20181010,55.63306851863446 70 | 20181011,95.03879834325183 71 | 20181012,45.87466040215373 72 | 20181013,23.145652724571327 73 | 20181014,291.86165407457366 74 | 20181015,519.6434744323568 75 | 20181016,49.42162373625022 76 | 20181017,54.40342664478441 77 | 20181018,52.81919735475719 78 | 20181019,379.643587862287 79 | 20181020,141.67632189570685 80 | 20181021,322.2984849976911 81 | 20181022,60.80530315734212 82 | 20181023,32.60531440631044 83 | 20181024,58.293840134780105 84 | 20181025,53.66043189118586 85 | 20181026,46.45196372019156 86 | 20181027,34.44296779087776 87 | 20181028,29.544380893633356 88 | 20181029,303.03270800216603 89 | 20181030,48.757678651114105 90 | 20181031,42.229521988671735 91 | 20181101,44.09241735266612 92 | 20181102,38.72997419048667 93 | 20181103,61.275141153676934 94 | 20181104,261.85558002679113 95 | 20181105,35.42001222100643 96 | 20181106,49.95873406243922 97 | 20181107,47.928138736480626 98 | 20181108,103.49488522133697 99 | 20181109,61.77781737838664 100 | 20181110,196.9410172206741 101 | 
20181111,31.577849273488425 102 | -------------------------------------------------------------------------------- /tests/fixtures/test_output_summary_1: -------------------------------------------------------------------------------- 1 | Posterior Inference {Causal Impact} 2 | Average Cumulative 3 | Actual 5.12 10.46 4 | Prediction (s.d.) 4.23 (0.95) 9.57 (0.34) 5 | 90% CI [3.12, 6.23] [8.46, 9.57] 6 | 7 | Absolute effect (s.d.) 3.12 (1.18) 10.46 (1.49) 8 | 90% CI [2.23, 6.12] [4.57, 9.46] 9 | 10 | Relative effect (s.d.) 23.45% (3.38%) 56.78% (3.38%) 11 | 90% CI [12.34%, 23.45%] [45.67%, 56.78%] 12 | 13 | Posterior tail-area probability p: 0.46 14 | Posterior prob. of a causal effect: 54.07% 15 | 16 | For more details run the command: print(impact.summary('report')) 17 | -------------------------------------------------------------------------------- /tests/fixtures/test_output_summary_single_digit: -------------------------------------------------------------------------------- 1 | Posterior Inference {Causal Impact} 2 | Average Cumulative 3 | Actual 5.1 10.5 4 | Prediction (s.d.) 4.2 (0.9) 9.6 (0.3) 5 | 90% CI [3.1, 6.2] [8.5, 9.6] 6 | 7 | Absolute effect (s.d.) 3.1 (1.2) 10.5 (1.5) 8 | 90% CI [2.2, 6.1] [4.6, 9.5] 9 | 10 | Relative effect (s.d.) 23.4% (3.4%) 56.8% (3.4%) 11 | 90% CI [12.3%, 23.4%] [45.7%, 56.8%] 12 | 13 | Posterior tail-area probability p: 0.5 14 | Posterior prob. of a causal effect: 54.1% 15 | 16 | For more details run the command: print(impact.summary('report')) 17 | -------------------------------------------------------------------------------- /tests/fixtures/test_report_summary_1: -------------------------------------------------------------------------------- 1 | Analysis report {CausalImpact} 2 | 3 | 4 | During the post-intervention period, the response variable had 5 | an average value of approx. 5.12. In the absence of an 6 | intervention, we would have expected an average response of 4.23. 
7 | The 90% interval of this counterfactual prediction is [3.12, 6.23]. 8 | Subtracting this prediction from the observed response yields 9 | an estimate of the causal effect the intervention had on the 10 | response variable. This effect is 3.12 with a 90% interval of 11 | [2.23, 6.12]. For a discussion of the significance of this effect, 12 | see below. 13 | 14 | 15 | Summing up the individual data points during the post-intervention 16 | period (which can only sometimes be meaningfully interpreted), the 17 | response variable had an overall value of 10.46. 18 | Had the intervention not taken place, we would have expected 19 | a sum of 9.57. The 90% interval of this prediction is [8.46, 9.57]. 20 | 21 | 22 | The above results are given in terms of absolute numbers. In relative 23 | terms, the response variable showed an increase of +41.0%. The 90% 24 | interval of this percentage is [-30.0%, 30.0%]. 25 | 26 | 27 | This means that, although the intervention appears to have caused a 28 | positive effect, this effect is not statistically significant when 29 | considering the entire post-intervention period as a whole. Individual 30 | days or shorter stretches within the intervention period may of course 31 | still have had a significant effect, as indicated whenever the lower 32 | limit of the impact time series (lower plot) was above zero. 33 | 34 | 35 | The apparent effect could be the result of random fluctuations that 36 | are unrelated to the intervention. This is often the case when the 37 | intervention period is very long and includes much of the time when 38 | the effect has already worn off. It can also be the case when the 39 | intervention period is too short to distinguish the signal from the 40 | noise. Finally, failing to find a significant effect can happen when 41 | there are not enough control variables or when these variables do not 42 | correlate well with the response variable during the learning period. 
43 | 44 | 45 | The probability of obtaining this effect by chance is p = 50.0%. 46 | This means the effect may be spurious and would generally not be 47 | considered statistically significant. 48 | -------------------------------------------------------------------------------- /tests/fixtures/test_report_summary_2: -------------------------------------------------------------------------------- 1 | Analysis report {CausalImpact} 2 | 3 | 4 | During the post-intervention period, the response variable had 5 | an average value of approx. 5.12. By contrast, in the absence of an 6 | intervention, we would have expected an average response of 4.23. 7 | The 90% interval of this counterfactual prediction is [3.12, 6.23]. 8 | Subtracting this prediction from the observed response yields 9 | an estimate of the causal effect the intervention had on the 10 | response variable. This effect is 3.12 with a 90% interval of 11 | [2.23, 6.12]. For a discussion of the significance of this effect, 12 | see below. 13 | 14 | 15 | Summing up the individual data points during the post-intervention 16 | period (which can only sometimes be meaningfully interpreted), the 17 | response variable had an overall value of 10.46. 18 | By contrast, had the intervention not taken place, we would have expected 19 | a sum of 9.57. The 90% interval of this prediction is [8.46, 9.57]. 20 | 21 | 22 | The above results are given in terms of absolute numbers. In relative 23 | terms, the response variable showed an increase of +41.0%. The 90% 24 | interval of this percentage is [23.4%, 43.4%]. 25 | 26 | 27 | This means that the positive effect observed during the intervention 28 | period is statistically significant and unlikely to be due to random 29 | fluctuations. It should be noted, however, that the question of whether 30 | this increase also bears substantive significance can only be answered 31 | by comparing the absolute effect (3.12) to the original goal 32 | of the underlying intervention. 
33 | 34 | 35 | The probability of obtaining this effect by chance is very small 36 | (Bayesian one-sided tail-area probability p = 0.05). 37 | This means the causal effect can be considered statistically 38 | significant. 39 | -------------------------------------------------------------------------------- /tests/fixtures/test_report_summary_3: -------------------------------------------------------------------------------- 1 | Analysis report {CausalImpact} 2 | 3 | 4 | During the post-intervention period, the response variable had 5 | an average value of approx. 5.12. In the absence of an 6 | intervention, we would have expected an average response of 4.23. 7 | The 90% interval of this counterfactual prediction is [3.12, 6.23]. 8 | Subtracting this prediction from the observed response yields 9 | an estimate of the causal effect the intervention had on the 10 | response variable. This effect is 3.12 with a 90% interval of 11 | [2.23, 6.12]. For a discussion of the significance of this effect, 12 | see below. 13 | 14 | 15 | Summing up the individual data points during the post-intervention 16 | period (which can only sometimes be meaningfully interpreted), the 17 | response variable had an overall value of 10.46. 18 | Had the intervention not taken place, we would have expected 19 | a sum of 9.57. The 90% interval of this prediction is [8.46, 9.57]. 20 | 21 | 22 | The above results are given in terms of absolute numbers. In relative 23 | terms, the response variable showed a decrease of -34.3%. The 90% 24 | interval of this percentage is [-43.4%, 23.4%]. 25 | This means that, although it may look as though the intervention has 26 | exerted a negative effect on the response variable when considering 27 | the intervention period as a whole, this effect is not statistically 28 | significant and so cannot be meaningfully interpreted. 29 | 30 | 31 | The apparent effect could be the result of random fluctuations that 32 | are unrelated to the intervention. 
This is often the case when the 33 | intervention period is very long and includes much of the time when 34 | the effect has already worn off. It can also be the case when the 35 | intervention period is too short to distinguish the signal from the 36 | noise. Finally, failing to find a significant effect can happen when 37 | there are not enough control variables or when these variables do not 38 | correlate well with the response variable during the learning period. 39 | 40 | 41 | The probability of obtaining this effect by chance is p = 50.0%. 42 | This means the effect may be spurious and would generally not be 43 | considered statistically significant. 44 | -------------------------------------------------------------------------------- /tests/fixtures/test_report_summary_4: -------------------------------------------------------------------------------- 1 | Analysis report {CausalImpact} 2 | 3 | 4 | During the post-intervention period, the response variable had 5 | an average value of approx. 5.12. By contrast, in the absence of an 6 | intervention, we would have expected an average response of 4.23. 7 | The 90% interval of this counterfactual prediction is [3.12, 6.23]. 8 | Subtracting this prediction from the observed response yields 9 | an estimate of the causal effect the intervention had on the 10 | response variable. This effect is 3.12 with a 90% interval of 11 | [2.23, 6.12]. For a discussion of the significance of this effect, 12 | see below. 13 | 14 | 15 | Summing up the individual data points during the post-intervention 16 | period (which can only sometimes be meaningfully interpreted), the 17 | response variable had an overall value of 10.46. 18 | By contrast, had the intervention not taken place, we would have expected 19 | a sum of 9.57. The 90% interval of this prediction is [8.46, 9.57]. 20 | 21 | 22 | The above results are given in terms of absolute numbers. In relative 23 | terms, the response variable showed a decrease of -34.3%. 
The 90% 24 | interval of this percentage is [-43.4%, -23.4%]. 25 | 26 | 27 | This means that the negative effect observed during the intervention 28 | period is statistically significant. 29 | If the experimenter had expected a positive effect, it is recommended 30 | to double-check whether anomalies in the control variables may have 31 | caused an overly optimistic expectation of what should have happened 32 | in the response variable in the absence of the intervention. 33 | 34 | 35 | The probability of obtaining this effect by chance is very small 36 | (Bayesian one-sided tail-area probability p = 0.05). 37 | This means the causal effect can be considered statistically 38 | significant. 39 | -------------------------------------------------------------------------------- /tests/fixtures/test_report_summary_single_digit: -------------------------------------------------------------------------------- 1 | Analysis report {CausalImpact} 2 | 3 | 4 | During the post-intervention period, the response variable had 5 | an average value of approx. 5.1. In the absence of an 6 | intervention, we would have expected an average response of 4.2. 7 | The 90% interval of this counterfactual prediction is [3.1, 6.2]. 8 | Subtracting this prediction from the observed response yields 9 | an estimate of the causal effect the intervention had on the 10 | response variable. This effect is 3.1 with a 90% interval of 11 | [2.2, 6.1]. For a discussion of the significance of this effect, 12 | see below. 13 | 14 | 15 | Summing up the individual data points during the post-intervention 16 | period (which can only sometimes be meaningfully interpreted), the 17 | response variable had an overall value of 10.5. 18 | Had the intervention not taken place, we would have expected 19 | a sum of 9.6. The 90% interval of this prediction is [8.5, 9.6]. 20 | 21 | 22 | The above results are given in terms of absolute numbers. In relative 23 | terms, the response variable showed an increase of +41.0%. 
The 90% 24 | interval of this percentage is [-30.0%, 30.0%]. 25 | 26 | 27 | This means that, although the intervention appears to have caused a 28 | positive effect, this effect is not statistically significant when 29 | considering the entire post-intervention period as a whole. Individual 30 | days or shorter stretches within the intervention period may of course 31 | still have had a significant effect, as indicated whenever the lower 32 | limit of the impact time series (lower plot) was above zero. 33 | 34 | 35 | The apparent effect could be the result of random fluctuations that 36 | are unrelated to the intervention. This is often the case when the 37 | intervention period is very long and includes much of the time when 38 | the effect has already worn off. It can also be the case when the 39 | intervention period is too short to distinguish the signal from the 40 | noise. Finally, failing to find a significant effect can happen when 41 | there are not enough control variables or when these variables do not 42 | correlate well with the response variable during the learning period. 43 | 44 | 45 | The probability of obtaining this effect by chance is p = 50.0%. 46 | This means the effect may be spurious and would generally not be 47 | considered statistically significant. 
48 | -------------------------------------------------------------------------------- /tests/fixtures/volks_data.csv: -------------------------------------------------------------------------------- 1 | "Date" "VolksWagen" "BMW" "Allianz" 2 | "2011-01-02" 99.142822 45.039032 60.006882 3 | "2011-01-09" 100.908623 44.75806 63.032661 4 | "2011-01-16" 96.084999 42.297653 64.578583 5 | "2011-01-23" 96.558739 43.360786 66.296272 6 | "2011-01-30" 94.965218 43.170944 69.962891 7 | "2011-02-06" 99.831917 47.947468 70.359276 8 | "2011-02-13" 97.678513 45.988258 71.383286 9 | "2011-02-20" 94.103867 44.575809 67.716675 10 | "2011-02-27" 91.433624 43.877167 69.137062 11 | "2011-03-06" 91.089073 42.753281 65.946129 12 | "2011-03-13" 87.77285 41.9939 62.041691 13 | "2011-03-20" 90.701469 43.634163 65.252441 14 | "2011-03-27" 93.242485 46.489449 66.527496 15 | "2011-04-03" 91.692032 44.454304 68.476425 16 | "2011-04-10" 90.012383 44.135361 68.245209 17 | "2011-04-17" 95.395897 46.322388 67.518471 18 | "2011-04-24" 103.708038 48.349937 70.227142 19 | "2011-05-01" 103.105087 47.704464 67.287254 20 | "2011-05-08" 103.164993 46.975456 67.789459 21 | "2011-05-15" 99.919174 47.279564 66.367401 22 | "2011-05-22" 100.708694 47.062473 65.891068 23 | "2011-05-29" 101.673668 46.744591 64.192894 24 | "2011-06-05" 103.472038 48.334011 63.695862 25 | "2011-06-12" 104.261574 49.760624 64.676117 26 | "2011-06-19" 107.858315 52.04784 63.419735 27 | "2011-06-26" 112.156845 52.544052 67.257912 28 | "2011-07-03" 116.89402 53.117794 65.911789 29 | "2011-07-10" 120.13987 56.343163 61.707737 30 | "2011-07-17" 119.350334 55.498055 64.393089 31 | "2011-07-24" 112.025261 54.133469 62.860573 32 | "2011-07-31" 95.13813 46.891903 54.604343 33 | "2011-08-07" 97.550583 46.008026 52.816418 34 | "2011-08-14" 84.225113 40.549709 47.494049 35 | "2011-08-21" 87.462158 41.906536 45.6647 36 | "2011-08-28" 89.830742 42.038342 46.941788 37 | "2011-09-04" 84.014549 40.076759 42.413284 38 | "2011-09-11" 91.497543 
44.333313 46.182434 39 | "2011-09-18" 81.558258 40.456665 40.74271 40 | "2011-09-25" 81.751266 38.739319 48.916103 41 | "2011-10-02" 83.163628 39.456497 52.71286 42 | "2011-10-09" 89.962341 43.759571 53.844982 43 | "2011-10-16" 90.137794 44.341064 54.231571 44 | "2011-10-23" 102.112312 47.923092 57.37252 45 | "2011-10-30" 101.805267 44.860538 52.616215 46 | "2011-11-06" 103.033409 45.535072 52.636929 47 | "2011-11-13" 96.585602 41.518871 49.737591 48 | "2011-11-20" 88.602592 39.014553 45.41618 49 | "2011-11-27" 99.655998 43.116051 54.342022 50 | "2011-12-04" 96.936501 41.712704 54.804535 51 | "2011-12-11" 90.751862 38.758694 50.234612 52 | "2011-12-18" 92.725693 40.704777 52.443638 53 | "2011-12-25" 90.927322 40.131027 51.021591 54 | "2012-01-01" 96.980377 43.06953 51.000874 55 | "2012-01-08" 100.313934 45.317982 53.879513 56 | "2012-01-15" 107.331955 49.303173 57.952396 57 | "2012-01-22" 108.472389 50.086262 59.070721 58 | "2012-01-29" 112.200722 54.257526 61.107155 59 | "2012-02-05" 111.586639 54.172245 60.044075 60 | "2012-02-12" 113.516602 55.808182 61.894123 61 | "2012-02-19" 111.060287 54.521145 62.936508 62 | "2012-02-26" 114.481583 54.745983 62.915806 63 | "2012-03-04" 113.165703 54.660702 61.162388 64 | "2012-03-11" 113.867508 56.614529 64.890106 65 | "2012-03-18" 108.384674 52.644848 62.943413 66 | "2012-03-25" 106.059944 52.280437 61.76297 67 | "2012-04-01" 103.252731 51.77647 59.836967 68 | "2012-04-08" 102.857971 52.334709 56.523426 69 | "2012-04-15" 99.217369 54.148979 58.442539 70 | "2012-04-22" 114.51725 55.924484 58.663429 71 | "2012-04-29" 115.868217 53.489952 57.966206 72 | "2012-05-06" 114.472221 52.412243 55.342983 73 | "2012-05-13" 106.59156 47.535423 54.942265 74 | "2012-05-20" 109.428589 49.711437 55.292458 75 | "2012-05-27" 105.87104 47.1576 51.082954 76 | "2012-06-03" 105.87104 47.237915 52.848465 77 | "2012-06-10" 105.195557 44.941071 54.497242 78 | "2012-06-17" 104.159813 45.350643 55.263275 79 | "2012-06-24" 107.041885 45.720066 
57.71455 80 | "2012-07-01" 112.896088 45.310486 56.817204 81 | "2012-07-08" 119.245644 46.394665 57.67807 82 | "2012-07-15" 118.480103 46.804241 58.261711 83 | "2012-07-22" 112.535828 47.502934 58.130394 84 | "2012-07-29" 118.29998 46.675747 60.603569 85 | "2012-08-05" 121.227089 48.105251 63.142399 86 | "2012-08-12" 121.497261 49.358074 63.908409 87 | "2012-08-19" 117.534416 48.225716 63.733322 88 | "2012-08-26" 116.633781 46.306324 63.667656 89 | "2012-09-02" 115.823189 46.603462 67.36647 90 | "2012-09-09" 124.424362 49.791748 70.415985 91 | "2012-09-16" 123.163467 48.016911 69.292488 92 | "2012-09-23" 117.264229 45.704002 67.548866 93 | "2012-09-30" 121.587334 48.980625 67.811493 94 | "2012-10-07" 119.831078 48.506798 67.140312 95 | "2012-10-14" 124.96476 49.799774 69.701027 96 | "2012-10-21" 130.143478 48.964565 68.519165 97 | "2012-10-28" 138.339355 52.201023 69.79586 98 | "2012-11-04" 130.143478 51.582645 68.139801 99 | "2012-11-11" 126.045532 49.783722 67.41024 100 | "2012-11-18" 135.862579 52.827438 71.58326 101 | "2012-11-25" 140.861176 54.76289 72.918327 102 | "2012-12-02" 141.311508 55.445522 74.997551 103 | "2012-12-09" 144.1035 57.605839 75.69062 104 | "2012-12-16" 146.580292 58.38483 76.857887 105 | "2012-12-23" 146.580292 58.569553 76.456635 106 | "2012-12-30" 150.903397 60.874424 78.937103 107 | "2013-01-06" 148.246475 58.802444 77.040276 108 | "2013-01-13" 149.957718 58.890789 75.763565 109 | "2013-01-20" 155.586746 60.914577 76.748459 110 | "2013-01-27" 156.71257 59.814342 77.003807 111 | "2013-02-03" 152.38945 59.027309 74.815163 112 | "2013-02-10" 150.723267 57.541588 74.632774 113 | "2013-02-17" 141.221451 55.244751 76.018921 114 | "2013-02-24" 139.285034 55.574013 75.909477 115 | "2013-03-03" 144.1035 58.344685 81.855301 116 | "2013-03-10" 136.493042 56.216488 81.891777 117 | "2013-03-17" 134.106323 55.887218 79.520744 118 | "2013-03-24" 132.169952 54.056168 77.295616 119 | "2013-03-31" 131.044128 53.253071 76.821426 120 | "2013-04-07" 
126.450806 54.128448 78.280518 121 | "2013-04-14" 122.713142 52.072529 75.617661 122 | "2013-04-21" 128.071991 55.108219 80.979828 123 | "2013-04-28" 139.545349 57.919044 84.773491 124 | "2013-05-05" 138.761124 57.694176 85.247704 125 | "2013-05-12" 148.033401 57.766453 90.63298 126 | "2013-05-19" 151.308685 58.932926 88.738472 127 | "2013-05-26" 152.27742 61.269291 90.746658 128 | "2013-06-02" 149.601837 60.030434 88.586922 129 | "2013-06-09" 143.051285 58.226192 86.389297 130 | "2013-06-16" 133.871262 53.511898 79.94799 131 | "2013-06-23" 138.161423 55.856579 85.063148 132 | "2013-06-30" 139.499222 55.041756 83.774879 133 | "2013-07-07" 148.356323 58.633606 87.867012 134 | "2013-07-14" 151.493195 60.055386 89.155266 135 | "2013-07-21" 152.508087 61.660076 88.890045 136 | "2013-07-28" 160.903885 60.263245 90.822433 137 | "2013-08-04" 161.96489 60.113586 90.064629 138 | "2013-08-11" 164.179153 62.799152 87.601784 139 | "2013-08-18" 163.348801 62.691074 86.048294 140 | "2013-08-25" 155.18367 59.273823 82.145615 141 | "2013-09-01" 154.076523 64.603394 84.873703 142 | "2013-09-08" 162.426193 66.432571 85.442047 143 | "2013-09-15" 155.18367 66.906494 88.397469 144 | "2013-09-22" 156.659851 66.598869 89.231049 145 | "2013-09-29" 151.216415 66.249649 88.96582 146 | "2013-10-06" 155.322052 67.130989 90.519318 147 | "2013-10-13" 157.951477 68.203552 93.436852 148 | "2013-10-20" 157.213394 69.558807 93.664185 149 | "2013-10-27" 167.085388 68.744003 93.550514 150 | "2013-11-03" 169.760941 67.987373 94.156761 151 | "2013-11-10" 171.421661 67.804459 97.112175 152 | "2013-11-17" 173.589798 69.650269 96.013367 153 | "2013-11-24" 176.818954 70.315437 96.884834 154 | "2013-12-01" 171.790695 68.295013 94.876671 155 | "2013-12-08" 168.146393 66.34111 93.057945 156 | "2013-12-15" 175.804077 69.833183 97.604752 157 | "2013-12-22" 180.832336 71.022156 99.120346 158 | "2013-12-29" 177.879974 69.824883 96.922729 159 | "2014-01-05" 179.817444 69.134773 97.869972 160 | "2014-01-12" 
182.077866 71.678993 101.469521 161 | "2014-01-19" 170.683578 68.153671 94.53566 162 | "2014-01-26" 166.393433 67.197502 93.815758 163 | "2014-02-02" 168.377045 68.012321 95.558685 164 | "2014-02-09" 180.647797 71.504402 98.248871 165 | "2014-02-16" 178.987106 70.855873 97.832092 166 | "2014-02-23" 170.775848 70.00779 98.286766 167 | "2014-03-02" 165.793716 68.06221 94.118858 168 | "2014-03-09" 160.765472 65.900444 91.315002 169 | "2014-03-16" 160.903885 73.49987 90.936111 170 | "2014-03-23" 170.176132 76.684311 93.209511 171 | "2014-03-30" 172.85173 78.03125 93.626297 172 | "2014-04-06" 175.481155 75.004791 90.557205 173 | "2014-04-13" 176.449921 76.59285 90.216202 174 | "2014-04-20" 174.512436 74.464348 91.883347 175 | "2014-04-27" 173.958862 74.023689 94.043083 176 | "2014-05-04" 173.866592 73.267059 91.580223 177 | "2014-05-11" 172.113632 71.861923 96.017281 178 | "2014-05-18" 180.447098 77.002655 96.373344 179 | "2014-05-25" 182.191238 78.896011 98.430565 180 | "2014-06-01" 182.804031 79.572838 98.03495 181 | "2014-06-08" 182.04982 77.756569 97.995392 182 | "2014-06-15" 183.558258 79.298676 97.32283 183 | "2014-06-22" 178.514435 79.127335 96.056847 184 | "2014-06-29" 180.352829 81.517601 99.459183 185 | "2014-07-06" 173.706253 78.878883 101.041664 186 | "2014-07-13" 173.093475 80.19825 102.584595 187 | "2014-07-20" 169.180954 80.789383 101.951607 188 | "2014-07-27" 161.120224 75.07502 97.085457 189 | "2014-08-03" 154.992172 74.012672 96.452461 190 | "2014-08-10" 156.264938 73.884163 99.815239 191 | "2014-08-17" 162.062973 76.300125 101.476852 192 | "2014-08-24" 160.743103 75.906044 102.703285 193 | "2014-08-31" 169.03952 78.861748 105.353935 194 | "2014-09-07" 164.797043 76.617119 105.274826 195 | "2014-09-14" 164.184235 76.240158 109.547531 196 | "2014-09-21" 156.406357 73.370125 101.437302 197 | "2014-09-28" 147.732819 71.639534 99.142693 198 | "2014-10-05" 142.076157 68.135513 97.283264 199 | "2014-10-12" 152.682388 69.02652 95.305153 200 | "2014-10-19" 
152.399536 70.500092 97.362389 201 | "2014-10-26" 159.988876 73.09597 100.250427 202 | "2014-11-02" 160.790237 71.391083 103.811012 203 | "2014-11-09" 158.716141 71.159767 103.811012 204 | "2014-11-16" 165.928375 75.529083 107.490303 205 | "2014-11-23" 171.490738 78.776077 109.547531 206 | "2014-11-30" 174.88472 79.118759 109.864029 207 | "2014-12-07" 168.002472 75.374878 106.422119 208 | "2014-12-14" 172.103546 77.054054 109.547531 209 | "2014-12-21" 171.679291 77.816544 110.536575 210 | "2014-12-28" 167.766769 75.400574 108.162857 211 | "2015-01-04" 167.295395 75.529083 107.490303 212 | "2015-01-11" 176.864563 80.181107 112.752068 213 | "2015-01-18" 188.272141 88.071564 116.906075 214 | "2015-01-25" 185.868073 88.585609 115.837898 215 | "2015-02-01" 199.585419 91.541313 118.013817 216 | "2015-02-08" 197.699875 92.355202 117.578651 217 | "2015-02-15" 206.279144 95.182404 115.996162 218 | "2015-02-22" 208.683212 96.810188 118.369881 219 | "2015-03-01" 214.009888 98.780663 119.675438 220 | "2015-03-08" 226.925888 102.764442 123.038208 221 | "2015-03-15" 228.434341 100.022911 127.983482 222 | "2015-03-22" 222.400589 99.466042 126.796608 223 | "2015-03-29" 224.191849 98.480797 129.368149 224 | "2015-04-05" 230.791275 100.536949 134.273849 225 | "2015-04-12" 215.518326 94.582695 127.587868 226 | "2015-04-19" 217.969543 94.625526 123.473404 227 | "2015-04-26" 214.292725 90.898766 120.743614 228 | "2015-05-03" 212.454315 93.811646 119.477631 229 | "2015-05-10" 207.402023 87.386192 121.678383 230 | "2015-05-17" 221.129288 92.337738 125.859909 231 | "2015-05-24" 211.447968 88.725281 118.200684 232 | "2015-05-31" 205.475403 87.747276 118.490486 233 | "2015-06-07" 204.415756 88.038025 117.993675 234 | "2015-06-14" 198.780365 86.117264 116.006409 235 | "2015-06-21" 209.376846 91.192314 121.305763 236 | "2015-06-28" 203.067123 87.139328 117.248444 237 | "2015-07-05" 195.938553 83.879318 121.264374 238 | "2015-07-12" 195.264236 84.214127 127.64016 239 | "2015-07-19" 182.596649 
80.742645 126.480927 240 | "2015-07-26" 177.298401 80.443085 123.458633 241 | "2015-08-02" 184.523285 81.835197 126.894943 242 | "2015-08-09" 173.396957 75.905495 126.232506 243 | "2015-08-16" 158.94722 70.601357 115.840805 244 | "2015-08-23" 164.245453 73.077217 117.496841 245 | "2015-08-30" 154.082474 70.768761 115.882202 246 | "2015-09-06" 160.151367 75.359222 116.627434 247 | "2015-09-13" 155.431122 75.517815 113.439522 248 | "2015-09-20" 111.311218 69.508812 114.557373 249 | "2015-09-27" 97.439461 68.909676 115.468185 250 | "2015-10-04" 121.281548 77.359283 121.14016 251 | "2015-10-11" 116.75396 76.425331 122.837624 252 | "2015-10-18" 117.380112 82.196442 129.254822 253 | "2015-10-25" 121.474205 82.258118 131.945908 254 | "2015-11-01" 117.428276 84.610611 130.248459 255 | "2015-11-08" 114.104836 83.297806 130.496872 256 | "2015-11-15" 119.354912 87.37722 133.891769 257 | "2015-11-22" 129.325241 89.430153 138.528717 258 | "2015-11-29" 132.263351 86.496132 134.429993 259 | "2015-12-06" 132.552353 82.91893 131.117874 260 | "2015-12-13" 135.056961 85.209747 134.09877 261 | "2015-12-20" 137.946915 86.830948 134.968201 262 | "2015-12-27" 137.079941 86.02034 135.423615 263 | "2016-01-03" 120.46273 73.517754 125.694313 264 | "2016-01-10" 115.935127 68.742264 123.582832 265 | "2016-01-17" 120.414558 70.2313 122.671997 266 | "2016-01-24" 116.175964 67.543999 123.086021 267 | "2016-01-31" 114.490166 63.799389 114.30896 268 | "2016-02-07" 106.542801 61.905048 106.815331 269 | "2016-02-14" 115.068146 65.050522 110.996864 270 | "2016-02-21" 117.765434 65.103394 114.143356 271 | "2016-02-28" 133.419342 72.80407 116.875832 272 | "2016-03-06" 125.664619 69.359024 119.773918 273 | "2016-03-13" 125.182961 71.878937 120.022324 274 | "2016-03-20" 121.811371 70.11676 118.076469 275 | "2016-03-27" 118.825089 68.724648 116.089218 276 | "2016-04-03" 115.935127 65.46463 115.012772 277 | "2016-04-10" 120.270058 69.050652 122.175201 278 | "2016-04-17" 131.637177 72.936234 128.509598 279 
| "2016-04-24" 133.274841 70.927361 122.754822 280 | "2016-05-01" 127.061432 66.689339 115.385391 281 | "2016-05-08" 131.155533 64.99765 126.639931 282 | "2016-05-15" 130.529373 65.961395 126.50219 283 | "2016-05-22" 134.91246 69.144478 134.767303 284 | "2016-05-29" 129.999557 66.485779 131.00209 285 | "2016-06-05" 127.20594 65.041435 123.747154 286 | "2016-06-12" 124.460487 63.449902 119.706436 287 | "2016-06-19" 122.919182 63.164715 116.446304 288 | "2016-06-26" 121.480194 62.364342 116.905479 289 | "2016-07-03" 117.334435 62.962318 112.451492 290 | "2016-07-10" 122.974594 68.684502 117.548317 291 | "2016-07-17" 125.433128 69.457268 115.573875 292 | "2016-07-24" 127.89164 70.883209 117.823822 293 | "2016-07-31" 126.831116 71.683578 116.12487 294 | "2016-08-07" 127.747025 73.256714 123.563484 295 | "2016-08-14" 123.312035 71.131599 118.558502 296 | "2016-08-21" 125.674164 71.048805 119.017677 297 | "2016-08-28" 127.072136 72.281548 124.619583 298 | "2016-09-04" 126.975731 70.772812 125.400177 299 | "2016-09-11" 122.299698 67.543739 119.38501 300 | "2016-09-18" 124.420792 69.540062 125.446098 301 | "2016-09-25" 124.517197 68.822487 121.313538 302 | "2016-10-02" 126.204422 71.214394 124.711426 303 | "2016-10-09" 127.168541 70.69001 126.823624 304 | "2016-10-16" 129.096802 72.336746 128.568466 305 | "2016-10-23" 130.784027 73.753494 130.313324 306 | "2016-10-30" 122.974594 69.420464 125.308342 307 | "2016-11-06" 124.083336 73.698296 138.486603 308 | "2016-11-13" 123.456657 74.517059 138.440704 309 | "2016-11-20" 127.843445 76.163803 138.394775 310 | "2016-11-27" 122.974594 73.431511 136.19075 311 | "2016-12-04" 130.012741 81.665176 144.409943 312 | "2016-12-11" 134.640533 82.796738 143.858932 313 | "2016-12-18" 136.761642 82.759941 144.134445 314 | "2016-12-25" 131.844574 81.646782 144.180359 315 | "2017-01-01" 137.484741 83.265923 147.945587 316 | "2017-01-08" 146.306503 80.782013 147.670074 317 | "2017-01-15" 144.474655 80.174843 146.246643 318 | "2017-01-22" 
147.559875 80.41404 148.772095 319 | "2017-01-29" 143.606949 77.304558 145.374207 320 | "2017-02-05" 141.148407 78.261314 144.042618 321 | "2017-02-12" 139.654022 78.22451 149.50676 322 | "2017-02-19" 140.714554 77.506943 150.379196 323 | "2017-02-26" 142.064346 80.036842 153.777069 324 | "2017-03-05" 138.545258 76.853775 155.338257 325 | "2017-03-12" 135.845703 76.255798 156.486191 326 | "2017-03-19" 134.977997 75.897003 156.210693 327 | -------------------------------------------------------------------------------- /tests/test_inferences.py: -------------------------------------------------------------------------------- 1 | # Copyright WillianFuks 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | 16 | import numpy as np 17 | import pandas as pd 18 | import tensorflow as tf 19 | 20 | import causalimpact.inferences as inferrer 21 | from causalimpact.misc import maybe_unstandardize 22 | 23 | 24 | def test_get_lower_upper_percentiles(): 25 | lower_percen, upper_percen = inferrer.get_lower_upper_percentiles(0.05) 26 | assert [lower_percen, upper_percen] == [2.5, 97.5] 27 | 28 | 29 | def test_maybe_unstandardize(): 30 | data = pd.DataFrame(np.arange(0, 10)) 31 | results = inferrer.maybe_unstandardize(data) 32 | pd.testing.assert_frame_equal(results, data) 33 | 34 | mu_sig = (1, 2) 35 | results = inferrer.maybe_unstandardize(data, mu_sig) 36 | pd.testing.assert_frame_equal(results, data * 2 + 1) 37 | 38 | 39 | def test_build_cum_index(): 40 | data = pd.DataFrame(np.arange(0, 10)) 41 | pre_data = data.iloc[:4] 42 | post_data = data.iloc[6:] 43 | new_index = inferrer.build_cum_index(pre_data.index, post_data.index) 44 | np.testing.assert_equal(new_index, np.array([3, 6, 7, 8, 9])) 45 | 46 | 47 | def test_compile_posterior_inferences(): 48 | data = pd.DataFrame(np.arange(10)) 49 | pre_data = data.iloc[:3] 50 | post_data = data.iloc[7:] 51 | one_step_mean = 3 52 | one_step_stddev = 1.5 53 | posterior_mean = 7.5 54 | posterior_stddev = 1.5 55 | alpha = 0.05 56 | mu = 1 57 | sig = 2 58 | mu_sig = (mu, sig) 59 | niter = 10 60 | mask = np.array([True] * len(post_data)) 61 | 62 | class OneStepDist: 63 | def sample(self, niter): 64 | tmp = tf.convert_to_tensor( 65 | np.tile(np.arange(start=3.1, stop=6.1, step=1), (niter, 1)) + 66 | np.arange(niter).reshape(-1, 1), 67 | dtype=np.float32 68 | ) 69 | tmp = tmp[..., tf.newaxis] 70 | return tmp 71 | 72 | def mean(self): 73 | return np.ones((len(pre_data), 1)) * one_step_mean 74 | 75 | def stddev(self): 76 | return np.ones((len(pre_data), 1)) * one_step_stddev 77 | 78 | class PosteriorDist: 79 | def sample(self, niter): 80 | tmp = tf.convert_to_tensor( 81 | np.tile(np.arange(start=7.1, stop=10.1, step=1), (niter, 1)) + 
82 | np.arange(niter).reshape(-1, 1), 83 | dtype=np.float32 84 | ) 85 | tmp = tmp[..., tf.newaxis] 86 | return tmp 87 | 88 | def mean(self): 89 | return np.ones((len(post_data), 1)) * posterior_mean 90 | 91 | def stddev(self): 92 | return np.ones((len(post_data), 1)) * posterior_stddev 93 | 94 | one_step_dist = OneStepDist() 95 | posterior_dist = PosteriorDist() 96 | inferences = inferrer.compile_posterior_inferences( 97 | data.index, 98 | mask, 99 | pre_data, 100 | post_data, 101 | one_step_dist, 102 | posterior_dist, 103 | mu_sig, 104 | alpha=alpha, 105 | niter=niter 106 | ) 107 | 108 | # by forcing the expected_index to be of type RangeIndex we avoid mismatching 109 | # types on Windows OS. 110 | expected_index = pd.RangeIndex(start=0, stop=6, step=1) 111 | 112 | # test complete_preds_means 113 | expec_complete_preds_means = pd.DataFrame( 114 | data=np.array([7, 7, 7, 16, 16, 16]), 115 | index=expected_index, 116 | dtype=np.float64, 117 | columns=['complete_preds_means'] 118 | ) 119 | pd.testing.assert_series_equal( 120 | expec_complete_preds_means['complete_preds_means'], 121 | inferences['complete_preds_means'] 122 | ) 123 | simulated_pre_ys = maybe_unstandardize( 124 | np.squeeze(one_step_dist.sample(niter)), 125 | mu_sig 126 | ) 127 | simulated_post_ys = maybe_unstandardize( 128 | np.squeeze(posterior_dist.sample(niter)), 129 | mu_sig 130 | ) 131 | lower_percen, upper_percen = inferrer.get_lower_upper_percentiles(alpha) 132 | # test complete_preds_lower 133 | pre_preds_lower, pre_preds_upper = np.percentile( 134 | simulated_pre_ys, 135 | [lower_percen, upper_percen], 136 | axis=0 137 | ) 138 | post_preds_lower, post_preds_upper = np.percentile( 139 | simulated_post_ys, 140 | [lower_percen, upper_percen], 141 | axis=0 142 | ) 143 | expec_complete_preds_lower = np.concatenate([pre_preds_lower, post_preds_lower]) 144 | expec_complete_preds_lower = pd.DataFrame( 145 | data=expec_complete_preds_lower, 146 | index=expected_index, 147 | dtype=np.float64, 148 | 
columns=['complete_preds_lower'] 149 | ) 150 | pd.testing.assert_series_equal( 151 | expec_complete_preds_lower['complete_preds_lower'], 152 | inferences['complete_preds_lower'] 153 | ) 154 | # test complete_preds_upper 155 | expec_complete_preds_upper = np.concatenate([pre_preds_upper, post_preds_upper]) 156 | expec_complete_preds_upper = pd.DataFrame( 157 | data=expec_complete_preds_upper, 158 | index=expected_index, 159 | dtype=np.float64, 160 | columns=['complete_preds_upper'] 161 | ) 162 | pd.testing.assert_series_equal( 163 | expec_complete_preds_upper['complete_preds_upper'], 164 | inferences['complete_preds_upper'] 165 | ) 166 | # test pre and post_preds_means 167 | expec_post_preds_means = pd.DataFrame( 168 | data=np.array([np.nan] * 3 + [posterior_mean * sig + mu] * len(pre_data)), 169 | index=expected_index, 170 | dtype=np.float64, 171 | columns=['post_preds_means'] 172 | ) 173 | pd.testing.assert_series_equal( 174 | expec_post_preds_means['post_preds_means'], 175 | inferences['post_preds_means'] 176 | ) 177 | # test post_preds_lower 178 | expec_post_preds_lower = pd.DataFrame( 179 | data=np.concatenate([[np.nan] * len(pre_data), post_preds_lower]), 180 | index=expected_index, 181 | dtype=np.float64, 182 | columns=['post_preds_lower'] 183 | ) 184 | pd.testing.assert_series_equal( 185 | expec_post_preds_lower['post_preds_lower'], 186 | inferences['post_preds_lower'] 187 | ) 188 | # test post_preds_upper 189 | expec_post_preds_upper = pd.DataFrame( 190 | data=np.concatenate([[np.nan] * len(pre_data), post_preds_upper]), 191 | index=expected_index, 192 | dtype=np.float64, 193 | columns=['post_preds_upper'] 194 | ) 195 | pd.testing.assert_series_equal( 196 | expec_post_preds_upper['post_preds_upper'], 197 | inferences['post_preds_upper'] 198 | ) 199 | # test post_cum_Y 200 | post_cum_y = np.concatenate([[np.nan] * (len(pre_data) - 1) + [0], 201 | np.cumsum(post_data.iloc[:, 0])]) 202 | expec_post_cum_y = pd.DataFrame( 203 | data=post_cum_y, 204 | 
index=expected_index, 205 | dtype=np.float64, 206 | columns=['post_cum_y'] 207 | ) 208 | pd.testing.assert_series_equal( 209 | expec_post_cum_y['post_cum_y'], 210 | inferences['post_cum_y'] 211 | ) 212 | # test post_cum_preds_means 213 | expec_post_cum_preds_means = np.cumsum(expec_post_preds_means) 214 | expec_post_cum_preds_means.rename( 215 | columns={'post_preds_means': 'post_cum_preds_means'}, 216 | inplace=True 217 | ) 218 | expec_post_cum_preds_means['post_cum_preds_means'][len(pre_data) - 1] = 0 219 | pd.testing.assert_series_equal( 220 | expec_post_cum_preds_means['post_cum_preds_means'], 221 | inferences['post_cum_preds_means'] 222 | ) 223 | # test post_cum_preds_lower 224 | post_cum_preds_lower, post_cum_preds_upper = np.percentile( 225 | np.cumsum(simulated_post_ys, axis=1), 226 | [100 * alpha / 2, 100 - 100 * alpha / 2], 227 | axis=0 228 | ) 229 | post_cum_preds_lower = np.concatenate( 230 | [np.array([np.nan] * (len(pre_data) - 1) + [0]), 231 | post_cum_preds_lower] 232 | ) 233 | expec_post_cum_preds_lower = pd.DataFrame( 234 | data=post_cum_preds_lower, 235 | index=expected_index, 236 | dtype=np.float64, 237 | columns=['post_cum_preds_lower'] 238 | ) 239 | pd.testing.assert_series_equal( 240 | expec_post_cum_preds_lower['post_cum_preds_lower'], 241 | inferences['post_cum_preds_lower'] 242 | ) 243 | # test post_cum_preds_upper 244 | post_cum_preds_upper = np.concatenate( 245 | [np.array([np.nan] * (len(pre_data) - 1) + [0]), 246 | post_cum_preds_upper] 247 | ) 248 | expec_post_cum_preds_upper = pd.DataFrame( 249 | data=post_cum_preds_upper, 250 | index=expected_index, 251 | dtype=np.float64, 252 | columns=['post_cum_preds_upper'] 253 | ) 254 | pd.testing.assert_series_equal( 255 | expec_post_cum_preds_upper['post_cum_preds_upper'], 256 | inferences['post_cum_preds_upper'] 257 | ) 258 | # test point_effects_means 259 | net_data = pd.concat([pre_data, post_data]) 260 | net_data.set_index(pd.RangeIndex(start=0, stop=6), inplace=True) 261 | 
expec_point_effects_means = net_data.iloc[:, 0] - inferences['complete_preds_means'] 262 | expec_point_effects_means = pd.DataFrame( 263 | data=expec_point_effects_means, 264 | index=expected_index, 265 | dtype=np.float64, 266 | columns=['point_effects_means'] 267 | ) 268 | pd.testing.assert_series_equal( 269 | expec_point_effects_means['point_effects_means'], 270 | inferences['point_effects_means'] 271 | ) 272 | # test point_effects_lower 273 | expec_point_effects_lower = net_data.iloc[:, 0] - inferences['complete_preds_upper'] 274 | expec_point_effects_lower = pd.DataFrame( 275 | data=expec_point_effects_lower, 276 | index=expected_index, 277 | dtype=np.float64, 278 | columns=['point_effects_lower'] 279 | ) 280 | pd.testing.assert_series_equal( 281 | expec_point_effects_lower['point_effects_lower'], 282 | inferences['point_effects_lower'] 283 | ) 284 | # test point_effects_upper 285 | expec_point_effects_upper = net_data.iloc[:, 0] - inferences['complete_preds_lower'] 286 | expec_point_effects_upper = pd.DataFrame( 287 | data=expec_point_effects_upper, 288 | index=expected_index, 289 | dtype=np.float64, 290 | columns=['point_effects_upper'] 291 | ) 292 | pd.testing.assert_series_equal( 293 | expec_point_effects_upper['point_effects_upper'], 294 | inferences['point_effects_upper'] 295 | ) 296 | # test post_cum_effects_means 297 | post_data = post_data.set_index(pd.RangeIndex(start=3, stop=6)) 298 | post_effects_means = post_data.iloc[:, 0] - inferences['post_preds_means'] 299 | post_effects_means.iloc[len(pre_data) - 1] = 0 300 | expec_post_cum_effects_means = np.cumsum(post_effects_means) 301 | expec_post_cum_effects_means = pd.DataFrame( 302 | data=expec_post_cum_effects_means, 303 | index=expected_index, 304 | dtype=np.float64, 305 | columns=['post_cum_effects_means'] 306 | ) 307 | pd.testing.assert_series_equal( 308 | expec_post_cum_effects_means['post_cum_effects_means'], 309 | inferences['post_cum_effects_means'] 310 | ) 311 | # test post_cum_effects_lower 
312 | post_cum_effects_lower, post_cum_effects_upper = np.percentile( 313 | np.cumsum(post_data.iloc[:, 0].values - simulated_post_ys, axis=1), 314 | [100 * alpha / 2, 100 - 100 * alpha / 2], 315 | axis=0 316 | ) 317 | post_cum_effects_lower = np.concatenate( 318 | [np.array([np.nan] * (len(pre_data) - 1) + [0]), 319 | post_cum_effects_lower] 320 | ) 321 | expec_post_cum_effects_lower = pd.DataFrame( 322 | data=post_cum_effects_lower, 323 | index=expected_index, 324 | dtype=np.float64, 325 | columns=['post_cum_effects_lower'] 326 | ) 327 | pd.testing.assert_series_equal( 328 | expec_post_cum_effects_lower['post_cum_effects_lower'], 329 | inferences['post_cum_effects_lower'] 330 | ) 331 | # test post_cum_effects_upper 332 | post_cum_effects_upper = np.concatenate( 333 | [np.array([np.nan] * (len(pre_data) - 1) + [0]), 334 | post_cum_effects_upper] 335 | ) 336 | expec_post_cum_effects_upper = pd.DataFrame( 337 | data=post_cum_effects_upper, 338 | index=expected_index, 339 | dtype=np.float64, 340 | columns=['post_cum_effects_upper'] 341 | ) 342 | pd.testing.assert_series_equal( 343 | expec_post_cum_effects_upper['post_cum_effects_upper'], 344 | inferences['post_cum_effects_upper'] 345 | ) 346 | 347 | 348 | def test_summarize_posterior_inference(): 349 | post_preds_means = pd.Series([1, 2, 3]) 350 | post_data = pd.DataFrame([1.1, 2.2, 3.3]) 351 | simulated_ys = ( 352 | np.tile(np.arange(start=7, stop=10, step=1), (10, 1)) + 353 | np.arange(10).reshape(-1, 1) 354 | ) 355 | alpha = 0.05 356 | lower_percen, upper_percen = [100 * alpha / 2, 100 - 100 * alpha / 2] 357 | pred_lower, pred_upper = np.percentile(simulated_ys.mean(axis=1), 358 | [lower_percen, upper_percen]) 359 | pred_cum_lower, pred_cum_upper = np.percentile(simulated_ys.sum(axis=1), 360 | [lower_percen, upper_percen]) 361 | 362 | summary = inferrer.summarize_posterior_inferences(post_preds_means, post_data, 363 | simulated_ys, alpha) 364 | 365 | np.testing.assert_almost_equal( 366 | 
summary['average']['actual'], 367 | post_data.mean().values 368 | ) 369 | np.testing.assert_almost_equal( 370 | summary['average']['predicted'], 371 | post_preds_means.mean() 372 | ) 373 | np.testing.assert_almost_equal( 374 | summary['average']['predicted_lower'], 375 | pred_lower 376 | ) 377 | np.testing.assert_almost_equal( 378 | summary['average']['predicted_upper'], 379 | pred_upper 380 | ) 381 | np.testing.assert_almost_equal( 382 | summary['average']['abs_effect'], 383 | post_data.mean().values - post_preds_means.mean() 384 | ) 385 | np.testing.assert_almost_equal( 386 | summary['average']['abs_effect_lower'], 387 | post_data.mean().values - pred_upper.mean() 388 | ) 389 | np.testing.assert_almost_equal( 390 | summary['average']['abs_effect_upper'], 391 | post_data.mean().values - pred_lower.mean() 392 | ) 393 | np.testing.assert_almost_equal( 394 | summary['average']['rel_effect'], 395 | (post_data.mean().values - post_preds_means.mean()) / post_preds_means.mean() 396 | ) 397 | np.testing.assert_almost_equal( 398 | summary['average']['rel_effect_lower'], 399 | (post_data.mean().values - pred_upper.mean()) / post_preds_means.mean() 400 | ) 401 | np.testing.assert_almost_equal( 402 | summary['average']['rel_effect_upper'], 403 | (post_data.mean().values - pred_lower.mean()) / post_preds_means.mean() 404 | ) 405 | 406 | np.testing.assert_almost_equal( 407 | summary['cumulative']['actual'], 408 | post_data.values.sum() 409 | ) 410 | np.testing.assert_almost_equal( 411 | summary['cumulative']['predicted'], 412 | post_preds_means.sum() 413 | ) 414 | np.testing.assert_almost_equal( 415 | summary['cumulative']['predicted_lower'], 416 | pred_cum_lower 417 | ) 418 | np.testing.assert_almost_equal( 419 | summary['cumulative']['predicted_upper'], 420 | pred_cum_upper 421 | ) 422 | np.testing.assert_almost_equal( 423 | summary['cumulative']['abs_effect'], 424 | post_data.sum().values - post_preds_means.sum() 425 | ) 426 | np.testing.assert_almost_equal( 427 | 
summary['cumulative']['abs_effect_lower'], 428 | post_data.sum().values - pred_cum_upper 429 | ) 430 | np.testing.assert_almost_equal( 431 | summary['cumulative']['abs_effect_upper'], 432 | post_data.sum().values - pred_cum_lower 433 | ) 434 | np.testing.assert_almost_equal( 435 | summary['cumulative']['rel_effect'], 436 | (post_data.sum().values - post_preds_means.sum()) / post_preds_means.sum() 437 | ) 438 | np.testing.assert_almost_equal( 439 | summary['cumulative']['rel_effect_lower'], 440 | (post_data.sum().values - pred_cum_upper) / post_preds_means.sum() 441 | ) 442 | np.testing.assert_almost_equal( 443 | summary['cumulative']['rel_effect_upper'], 444 | (post_data.sum().values - pred_cum_lower) / post_preds_means.sum() 445 | ) 446 | 447 | 448 | def test_compute_p_value(): 449 | simulated_ys = ( 450 | np.tile(np.arange(start=7, stop=10, step=1), (10, 1)) + 451 | np.arange(10).reshape(-1, 1) 452 | ) 453 | post_data_sum = 46 454 | p_value = inferrer.compute_p_value(simulated_ys, post_data_sum) 455 | assert p_value == 2 / 11 456 | -------------------------------------------------------------------------------- /tests/test_misc.py: -------------------------------------------------------------------------------- 1 | # Copyright WillianFuks 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | 16 | import numpy as np 17 | import pandas as pd 18 | import pytest 19 | 20 | from causalimpact.misc import get_z_score, standardize, unstandardize 21 | 22 | 23 | def test_basic_standardize(): 24 | data = { 25 | 'c1': [1, 4, 8, 9, 10], 26 | 'c2': [4, 8, 12, 16, 20] 27 | } 28 | data = pd.DataFrame(data) 29 | result, (mu, sig) = standardize(data) 30 | 31 | np.testing.assert_array_almost_equal( 32 | np.zeros(data.shape[1]), 33 | result.mean().values 34 | ) 35 | 36 | np.testing.assert_array_almost_equal( 37 | np.ones(data.shape[1]), 38 | result.std(ddof=0).values 39 | ) 40 | 41 | 42 | def test_standardize_with_integer_column_names(): 43 | # https://github.com/WillianFuks/tfcausalimpact/issues/17 44 | data = { 45 | 'c1': [1, 4, 8, 9, 10], 46 | 0: [4, 8, 12, 16, 20] 47 | } 48 | data = pd.DataFrame(data) 49 | result, (mu, sig) = standardize(data) 50 | 51 | np.testing.assert_array_almost_equal( 52 | np.zeros(data.shape[1]), 53 | result.mean().values 54 | ) 55 | 56 | np.testing.assert_array_almost_equal( 57 | np.ones(data.shape[1]), 58 | result.std(ddof=0).values 59 | ) 60 | 61 | 62 | def test_standardize_w_various_distinct_inputs(): 63 | test_data = [[1, 2, 1], [1, np.nan, 3], [10, 20, 30]] 64 | test_data = [pd.DataFrame(data, dtype="float") for data in test_data] 65 | for data in test_data: 66 | result, (mu, sig) = standardize(data) 67 | pd.testing.assert_frame_equal(unstandardize(result, (mu, sig)), data) 68 | 69 | 70 | def test_standardize_raises_single_input(): 71 | with pytest.raises(ValueError): 72 | standardize(pd.DataFrame([1])) 73 | 74 | 75 | def test_get_z_score(): 76 | assert get_z_score(0.5) == 0. 
77 | assert round(float(get_z_score(0.9177)), 2) == 1.39 78 | -------------------------------------------------------------------------------- /tests/test_model.py: -------------------------------------------------------------------------------- 1 | # Copyright WillianFuks 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | 16 | import mock 17 | import numpy as np 18 | import pandas as pd 19 | import pytest 20 | import tensorflow as tf 21 | import tensorflow_probability as tfp 22 | 23 | import causalimpact.model as cimodel 24 | 25 | tfd = tfp.distributions 26 | 27 | 28 | def test_process_model_args(): 29 | model_args = cimodel.process_model_args(dict(standardize=False)) 30 | assert model_args['standardize'] is False 31 | 32 | model_args = cimodel.process_model_args(dict(standardize=True)) 33 | assert model_args['standardize'] is True 34 | 35 | model_args = cimodel.process_model_args({}) 36 | assert model_args['standardize'] is True 37 | 38 | with pytest.raises(ValueError) as excinfo: 39 | cimodel.process_model_args(dict(standardize='yes')) 40 | assert str(excinfo.value) == 'standardize argument must be of type bool.' 
41 | 42 | model_args = cimodel.process_model_args(dict(niter=10)) 43 | assert model_args['niter'] == 10 44 | 45 | model_args = cimodel.process_model_args({}) 46 | assert model_args['niter'] == 1000 47 | 48 | with pytest.raises(ValueError) as excinfo: 49 | cimodel.process_model_args(dict(niter='yes')) 50 | assert str(excinfo.value) == 'niter argument must be of type int.' 51 | 52 | model_args = cimodel.process_model_args({}) 53 | assert model_args['prior_level_sd'] == 0.01 54 | 55 | with pytest.raises(ValueError) as excinfo: 56 | cimodel.process_model_args(dict(prior_level_sd='test')) 57 | assert str(excinfo.value) == 'prior_level_sd argument must be of type float.' 58 | 59 | model_args = cimodel.process_model_args(dict(fit_method='hmc')) 60 | assert model_args['fit_method'] == 'hmc' 61 | 62 | model_args = cimodel.process_model_args(dict(fit_method='vi')) 63 | assert model_args['fit_method'] == 'vi' 64 | 65 | model_args = cimodel.process_model_args(dict()) 66 | assert model_args['fit_method'] == 'vi' 67 | 68 | with pytest.raises(ValueError) as excinfo: 69 | model_args = cimodel.process_model_args(dict(fit_method='test')) 70 | assert str(excinfo.value) == 'fit_method can be either "hmc" or "vi".' 71 | 72 | model_args = cimodel.process_model_args(dict(nseasons=7)) 73 | assert model_args['nseasons'] == 7 74 | 75 | model_args = cimodel.process_model_args({}) 76 | assert model_args['nseasons'] == 1 77 | 78 | with pytest.raises(ValueError) as excinfo: 79 | model_args = cimodel.process_model_args(dict(nseasons='test')) 80 | assert str(excinfo.value) == 'nseasons argument must be of type int.' 
81 | 82 | model_args = cimodel.process_model_args({}) 83 | assert model_args['season_duration'] == 1 84 | 85 | model_args = cimodel.process_model_args(dict(nseasons=7, season_duration=24)) 86 | assert model_args['season_duration'] == 24 87 | 88 | with pytest.raises(ValueError) as excinfo: 89 | model_args = cimodel.process_model_args(dict(season_duration='test')) 90 | assert str(excinfo.value) == 'season_duration argument must be of type int.' 91 | 92 | with pytest.raises(ValueError) as excinfo: 93 | model_args = cimodel.process_model_args(dict(season_duration=24)) 94 | assert str(excinfo.value) == ('nseasons must be bigger than 1 when season_duration ' 95 | 'is also bigger than 1.') 96 | 97 | 98 | def test_check_input_model(): 99 | model = tfp.sts.Sum([tfp.sts.LocalLevel()]) 100 | cimodel.check_input_model(model, None, None) 101 | 102 | model = tfp.sts.LocalLevel() 103 | cimodel.check_input_model(model, None, None) 104 | 105 | data = pd.DataFrame(np.random.rand(200, 2)).astype(np.float32) 106 | pre_data = data.iloc[:100, :] 107 | post_data = data.iloc[100:, :] 108 | model = tfp.sts.LinearRegression(design_matrix=data.iloc[:, 1].values.reshape(-1, 1)) 109 | cimodel.check_input_model(model, pre_data, post_data) 110 | 111 | model = tfp.sts.LinearRegression( 112 | design_matrix=pre_data.iloc[:, 1].values.reshape(-1, 1) 113 | ) 114 | with pytest.raises(ValueError) as excinfo: 115 | cimodel.check_input_model(model, pre_data, post_data) 116 | assert str(excinfo.value) == ( 117 | 'Customized Linear Regression Models must have total ' 118 | 'points equal to pre_data and post_data points and same number of covariates. ' 119 | 'Input design_matrix shape was (100, 1) and expected (200, 1) instead.' 
120 | ) 121 | 122 | model = tfp.sts.Sum([tfp.sts.LocalLevel(), tfp.sts.LinearRegression( 123 | design_matrix=pre_data.iloc[:, 1].values.reshape(-1, 1))]) 124 | with pytest.raises(ValueError) as excinfo: 125 | cimodel.check_input_model(model, pre_data, post_data) 126 | assert str(excinfo.value) == ( 127 | 'Customized Linear Regression Models must have total ' 128 | 'points equal to pre_data and post_data points and same number of covariates. ' 129 | 'Input design_matrix shape was (100, 1) and expected (200, 1) instead.' 130 | ) 131 | with pytest.raises(ValueError) as excinfo: 132 | cimodel.check_input_model('test', None, None) 133 | assert str(excinfo.value) == 'Input model must be of type StructuralTimeSeries.' 134 | 135 | # tests dtype != float32 136 | data = pd.DataFrame(np.random.rand(200, 2)) 137 | pre_data = data.iloc[:100, :] 138 | post_data = data.iloc[100:, :] 139 | model = tfp.sts.LinearRegression(design_matrix=data.iloc[:, 1].values.reshape(-1, 1)) 140 | with pytest.raises(AssertionError): 141 | cimodel.check_input_model(model, pre_data, post_data) 142 | 143 | model = tfp.sts.LocalLevel(observed_time_series=pre_data.iloc[:, 0]) 144 | with pytest.raises(AssertionError): 145 | cimodel.check_input_model(model, pre_data, post_data) 146 | 147 | model = tfp.sts.Sum( 148 | [tfp.sts.LinearRegression(design_matrix=data.iloc[:, 1].values.reshape(-1, 1)), 149 | tfp.sts.LocalLevel(observed_time_series=pre_data.iloc[:, 0])], 150 | observed_time_series=pre_data.iloc[:, 0] 151 | ) 152 | with pytest.raises(AssertionError): 153 | cimodel.check_input_model(model, pre_data, post_data) 154 | 155 | 156 | def test_build_default_model(rand_data, pre_int_period, post_int_period): 157 | prior_level_sd = 0.01 158 | pre_data = pd.DataFrame(rand_data.iloc[pre_int_period[0]: pre_int_period[1], 0]) 159 | post_data = pd.DataFrame(rand_data.iloc[post_int_period[0]: post_int_period[1], 0]) 160 | observed_time_series = pd.DataFrame(pre_data.iloc[:, 0]).astype(np.float32) 161 | model = 
cimodel.build_default_model(observed_time_series, pre_data, post_data, 162 | prior_level_sd, 1, 1) 163 | assert isinstance(model, tfp.sts.Sum) 164 | obs_prior = model.parameters[0].prior 165 | assert isinstance(obs_prior, tfd.TransformedDistribution) 166 | assert isinstance(obs_prior.bijector, tfp.bijectors.Power) 167 | assert isinstance(obs_prior.distribution, tfd.InverseGamma) 168 | level_prior = model.parameters[1].prior 169 | assert isinstance(level_prior, tfd.TransformedDistribution) 170 | assert isinstance(level_prior.bijector, tfp.bijectors.Power) 171 | assert isinstance(level_prior.distribution, tfd.InverseGamma) 172 | assert level_prior.dtype == tf.float32 173 | 174 | pre_data = pd.DataFrame(rand_data.iloc[pre_int_period[0]: pre_int_period[1], :]) 175 | post_data = pd.DataFrame(rand_data.iloc[post_int_period[0]: post_int_period[1], :]) 176 | observed_time_series = pd.DataFrame(pre_data.iloc[:, 0]).astype(np.float32) 177 | model = cimodel.build_default_model(observed_time_series, pre_data, post_data, 178 | prior_level_sd, 1, 1) 179 | assert isinstance(model, tfp.sts.Sum) 180 | obs_prior = model.parameters[0].prior 181 | assert isinstance(obs_prior, tfd.TransformedDistribution) 182 | assert isinstance(obs_prior.bijector, tfp.bijectors.Power) 183 | assert isinstance(obs_prior.distribution, tfd.InverseGamma) 184 | level_prior = model.parameters[1].prior 185 | assert isinstance(level_prior, tfd.TransformedDistribution) 186 | assert isinstance(level_prior.bijector, tfp.bijectors.Power) 187 | assert isinstance(level_prior.distribution, tfd.InverseGamma) 188 | assert level_prior.dtype == tf.float32 189 | linear = model.components[1] 190 | design_matrix = linear.design_matrix.to_dense() 191 | np.testing.assert_equal(pd.concat([pre_data, post_data]).iloc[:, 1:].values.astype( 192 | np.float32), 193 | design_matrix) 194 | assert design_matrix.dtype == tf.float32 195 | # test seasonal 196 | pre_data = pd.DataFrame(rand_data.iloc[pre_int_period[0]: pre_int_period[1], 
:]) 197 | post_data = pd.DataFrame(rand_data.iloc[post_int_period[0]: post_int_period[1], :]) 198 | observed_time_series = pd.DataFrame(pre_data.iloc[:, 0]).astype(np.float32) 199 | model = cimodel.build_default_model(observed_time_series, pre_data, post_data, 200 | prior_level_sd, 7, 2) 201 | assert isinstance(model, tfp.sts.Sum) 202 | obs_prior = model.parameters[0].prior 203 | assert isinstance(obs_prior, tfd.TransformedDistribution) 204 | assert isinstance(obs_prior.bijector, tfp.bijectors.Power) 205 | assert isinstance(obs_prior.distribution, tfd.InverseGamma) 206 | assert obs_prior.dtype == tf.float32 207 | level_prior = model.parameters[1].prior 208 | assert isinstance(level_prior, tfd.TransformedDistribution) 209 | assert isinstance(level_prior.bijector, tfp.bijectors.Power) 210 | assert isinstance(level_prior.distribution, tfd.InverseGamma) 211 | assert level_prior.dtype == tf.float32 212 | linear = model.components[1] 213 | design_matrix = linear.design_matrix.to_dense() 214 | np.testing.assert_equal(pd.concat([pre_data, post_data]).iloc[:, 1:].values.astype( 215 | np.float32), 216 | design_matrix) 217 | seasonal_component = model.components[-1] 218 | assert isinstance(seasonal_component, tfp.sts.Seasonal) 219 | assert seasonal_component.num_seasons == 7 220 | assert seasonal_component.num_steps_per_season == 2 221 | 222 | 223 | def test_fit_model(monkeypatch): 224 | observed_time_series = 'observed_time_series' 225 | fit_mock = mock.Mock(return_value=('samples', 'kr')) 226 | vi_fit_mock = mock.Mock() 227 | 228 | class VarPost: 229 | def sample(self, *args): 230 | return f'{args[0]} var_post_samples' 231 | var_post = VarPost() 232 | surrogate_posterior_mock = mock.Mock(return_value=var_post) 233 | monkeypatch.setattr('causalimpact.model.tfp.sts.fit_with_hmc', fit_mock) 234 | monkeypatch.setattr('causalimpact.model.tfp.sts.build_factored_surrogate_posterior', 235 | surrogate_posterior_mock) 236 | monkeypatch.setattr('causalimpact.model.Adam', 237 | 
mock.Mock(return_value='optimizer')) 238 | 239 | monkeypatch.setattr('causalimpact.model.tfp.vi.fit_surrogate_posterior', vi_fit_mock) 240 | 241 | class Model: 242 | def joint_log_prob(self, observed_time_series): 243 | return f'target_log_prob of {observed_time_series}' 244 | model = Model() 245 | samples, kr = cimodel.fit_model( 246 | model=model, 247 | observed_time_series=observed_time_series, 248 | method='hmc' 249 | ) 250 | fit_mock.assert_called_once_with( 251 | model=model, observed_time_series=observed_time_series 252 | ) 253 | assert samples == 'samples' 254 | assert kr == 'kr' 255 | surrogate_posterior_mock.assert_not_called() 256 | 257 | samples, kr = cimodel.fit_model( 258 | model=model, 259 | observed_time_series=observed_time_series, 260 | method='vi' 261 | ) 262 | surrogate_posterior_mock.assert_called_with(model=model) 263 | vi_fit_mock.assert_called_once_with( 264 | target_log_prob_fn='target_log_prob of observed_time_series', 265 | surrogate_posterior=var_post, 266 | optimizer='optimizer', 267 | num_steps=200 268 | ) 269 | assert kr is None 270 | assert samples == '100 var_post_samples' 271 | 272 | with pytest.raises(ValueError) as excinfo: 273 | cimodel.fit_model(model=model, observed_time_series=observed_time_series, 274 | method='test') 275 | assert str(excinfo.value) == ( 276 | 'Input method "test" not valid. Choose between "hmc" or "vi".' 277 | ) 278 | -------------------------------------------------------------------------------- /tests/test_summary.py: -------------------------------------------------------------------------------- 1 | # Copyright WillianFuks 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | 16 | """ 17 | Tests for summary.py module. 18 | """ 19 | 20 | 21 | import os 22 | 23 | import pandas as pd 24 | import pytest 25 | 26 | import causalimpact.summary as summarizer 27 | 28 | 29 | @pytest.fixture 30 | def summary_data(): 31 | data = [ 32 | [5.123, 10.456], 33 | [4.234, 9.567], 34 | [3.123, 8.456], 35 | [6.234, 9.567], 36 | [3.123, 10.456], 37 | [2.234, 4.567], 38 | [6.123, 9.456], 39 | [0.2345, 0.5678], 40 | [0.1234, 0.4567], 41 | [0.2345, 0.5678] 42 | ] 43 | data = pd.DataFrame( 44 | data, 45 | columns=['average', 'cumulative'], 46 | index=[ 47 | 'actual', 48 | 'predicted', 49 | 'predicted_lower', 50 | 'predicted_upper', 51 | 'abs_effect', 52 | 'abs_effect_lower', 53 | 'abs_effect_upper', 54 | 'rel_effect', 55 | 'rel_effect_lower', 56 | 'rel_effect_upper' 57 | ] 58 | ) 59 | return data 60 | 61 | 62 | def test_summary_raises(summary_data): 63 | with pytest.raises(ValueError): 64 | summarizer.summary(summary_data, 0.1, output='test') 65 | 66 | 67 | def test_output_summary_single_digit(summary_data, fix_path): 68 | summarizer.summary_data = summary_data 69 | alpha = 0.1 70 | p_value = 0.459329 71 | 72 | result = summarizer.summary(summary_data, p_value, alpha, digits=1) 73 | expected = open(os.path.join( 74 | fix_path, 'test_output_summary_single_digit')).read().strip() 75 | assert result == expected 76 | 77 | 78 | def test_report_summary_single_digit(summary_data, fix_path): 79 | # detected positive signal but with no significance. 
80 | alpha = 0.1 81 | p_value = 0.5 82 | summary_data['average']['rel_effect'] = 0.41 83 | summary_data['average']['rel_effect_lower'] = -0.30 84 | summary_data['average']['rel_effect_upper'] = 0.30 85 | 86 | result = summarizer.summary(summary_data, p_value, alpha=alpha, output='report', 87 | digits=1) 88 | expected = open(os.path.join( 89 | fix_path, 'test_report_summary_single_digit')).read().strip() 90 | assert result == expected 91 | 92 | 93 | def test_output_summary_1(summary_data, fix_path): 94 | alpha = 0.1 95 | p_value = 0.459329 96 | 97 | result = summarizer.summary(summary_data, p_value, alpha=alpha) 98 | expected = open(os.path.join(fix_path, 'test_output_summary_1')).read().strip() 99 | assert result == expected 100 | 101 | 102 | def test_report_summary_1(summary_data, fix_path): 103 | # detected positive signal but with no significance. 104 | alpha = 0.1 105 | p_value = 0.5 106 | summary_data['average']['rel_effect'] = 0.41 107 | summary_data['average']['rel_effect_lower'] = -0.30 108 | summary_data['average']['rel_effect_upper'] = 0.30 109 | 110 | result = summarizer.summary(summary_data, p_value, alpha=alpha, output='report') 111 | expected = open(os.path.join(fix_path, 'test_report_summary_1')).read().strip() 112 | assert result == expected 113 | 114 | 115 | def test_report_summary_2(summary_data, fix_path): 116 | # detected positive signal with significance. 117 | alpha = 0.1 118 | p_value = 0.05 119 | summary_data['average']['rel_effect'] = 0.41 120 | summary_data['average']['rel_effect_lower'] = 0.434 121 | summary_data['average']['rel_effect_upper'] = 0.234 122 | 123 | result = summarizer.summary(summary_data, p_value, alpha=alpha, output='report') 124 | expected = open(os.path.join(fix_path, 'test_report_summary_2')).read().strip() 125 | assert result == expected 126 | 127 | 128 | def test_report_summary_3(summary_data, fix_path): 129 | # detected negative signal but with no significance. 
130 | summary_data['average']['rel_effect'] = -0.343 131 | summary_data['average']['rel_effect_lower'] = -0.434 132 | summary_data['average']['rel_effect_upper'] = 0.234 133 | alpha = 0.1 134 | p_value = 0.5 135 | 136 | result = summarizer.summary(summary_data, p_value, alpha=alpha, output='report') 137 | expected = open(os.path.join(fix_path, 'test_report_summary_3')).read().strip() 138 | assert result == expected 139 | 140 | 141 | def test_report_summary_4(summary_data, fix_path): 142 | # detected negative signal with significance. 143 | summary_data['average']['rel_effect'] = -0.343 144 | summary_data['average']['rel_effect_lower'] = -0.434 145 | summary_data['average']['rel_effect_upper'] = -0.234 146 | alpha = 0.1 147 | p_value = 0.05 148 | 149 | result = summarizer.summary(summary_data, p_value, alpha=alpha, output='report') 150 | expected = open(os.path.join(fix_path, 'test_report_summary_4')).read().strip() 151 | assert result == expected 152 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | py{37, 38, 39, 310, 311, 312}-{linux,macos,windows} 4 | gh-actions-coveralls 5 | 6 | [gh-actions] 7 | python = 8 | 3.7: py37 9 | 3.8: py38 10 | 3.9: py39 11 | 3.10: py310 12 | 3.11: py311 13 | 3.12: py312, lint, isort-check, coverage 14 | 15 | [gh-actions:env] 16 | PLATFORM = 17 | ubuntu-latest: linux 18 | macos-latest: macos 19 | windows-latest: windows 20 | 21 | [testenv] 22 | platform = 23 | linux: linux 24 | macos: darwin 25 | windows: win32 26 | deps = 27 | pip >= 21.0 28 | -rtest-requirements.txt 29 | commands = 30 | # To run specific test, simply run `tox -e py39 -- tests/test_data.py -k test_input_data` 31 | python -m pytest {posargs: tests/} -m "not slow" -n 4 -x 32 | 33 | [testenv:isort] 34 | deps = 35 | isort 36 | commands = 37 | isort causalimpact tests 38 | 39 | [testenv:isort-check] 40 | deps = 41 | isort 
42 | commands = 43 | isort -c -p causalimpact causalimpact tests 44 | 45 | [testenv:lint] 46 | basepython = 47 | python3.12 48 | deps = 49 | flake8 50 | commands = 51 | flake8 causalimpact tests 52 | 53 | [testenv:coverage] 54 | deps = 55 | {[testenv]deps} 56 | pytest-cov 57 | commands = 58 | python -m pytest tests --cov-report html --cov causalimpact -n 4 -x 59 | 60 | [testenv:GHA-coverage] 61 | deps = 62 | {[testenv]deps} 63 | pytest-cov 64 | coverage 65 | commands = 66 | python -m pytest tests --cov-report xml --cov causalimpact -n 4 -x 67 | coverage lcov 68 | --------------------------------------------------------------------------------