├── .gitattributes ├── .github └── workflows │ ├── pre-commit.yml │ └── publish-to-pypi.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE.md ├── Makefile ├── README.md ├── causing ├── __init__.py ├── examples │ ├── __main__.py │ ├── input │ │ ├── education.csv │ │ ├── example.json │ │ ├── example2.json │ │ └── example3.json │ └── models.py ├── graph.py ├── model.py └── utils.py ├── data ├── education.csv ├── xdat.csv └── ymdat.csv ├── docs ├── contributing.md └── education.md ├── images_education └── IME_32.svg ├── images_readme ├── IME_1.svg └── RealRate_AI_Software_Winner.png ├── mypy.ini ├── output ├── education │ └── graphs.json ├── example │ └── graphs.json ├── example2 │ └── graphs.json ├── example3 │ └── graphs.json └── heaviside │ └── graphs.json ├── setup.py └── tests ├── __init__.py ├── examples ├── __init__.py └── models.py ├── test_estimate.py └── utils.py /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yml: -------------------------------------------------------------------------------- 1 | name: pre-commit 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - main 8 | - develop 9 | 10 | jobs: 11 | pre-commit: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v2 15 | - uses: actions/setup-python@v2 16 | - uses: pre-commit/action@v2.0.0 17 | 18 | verify-output: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: actions/checkout@v2 22 | - uses: actions/setup-python@v2 23 | with: 24 | python-version: '3.9' 25 | - uses: actions/cache@v3 26 | with: 27 | path: ${{ env.pythonLocation }} 28 | key: ${{ env.pythonLocation }}-${{ hashFiles('setup.py') }} 29 | - name: Install dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install .[estimate] 33 | sudo apt-get install graphviz 34 | - name: Verify examples output 35 | run: make verify-output 36 | -------------------------------------------------------------------------------- /.github/workflows/publish-to-pypi.yml: -------------------------------------------------------------------------------- 1 | name: release 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | upload-to-pypi: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v2 12 | - uses: actions/setup-python@v2 13 | with: 14 | python-version: '3.9' 15 | - name: Install pypa/build 16 | run: >- 17 | python -m 18 | pip install build --user 19 | - name: Build a binary wheel and a source tarball 20 | run: >- 21 | python -m 22 | build 23 | --sdist 24 | --wheel 25 | --outdir dist/ 26 | . 
27 | - name: Publish distribution to PyPI 28 | uses: pypa/gh-action-pypi-publish@release/v1 29 | with: 30 | password: ${{ secrets.PYPI_API_TOKEN }} 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .png 2 | .pylint.d 3 | .spyproject 4 | .venv 5 | *.egg-info/ 6 | .eggs/ 7 | 8 | ~$*.doc* 9 | *.pdf 10 | *.mp4 11 | 12 | /output/*/graphs 13 | /output/*/logging.txt 14 | __pycache__ 15 | 16 | build/ 17 | dist/ 18 | .idea/ 19 | 20 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: https://github.com/psf/black 5 | rev: 22.3.0 6 | hooks: 7 | - id: black 8 | - repo: https://github.com/pre-commit/mirrors-mypy 9 | rev: v0.910 10 | hooks: 11 | - id: mypy 12 | - repo: https://github.com/pycqa/flake8 13 | rev: 5.0.4 14 | hooks: 15 | - id: flake8 16 | args: 17 | - "--max-line-length=120" 18 | - "--extend-ignore=E203" # conflicts with black 19 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | # MIT License 2 | 3 | **Copyright (c) 2020 Dr. Holger Bartel, RealRate GmbH** 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | lint: 2 | pre-commit run 3 | 4 | lint-all: 5 | pre-commit run --all-files 6 | 7 | examples: 8 | python -m causing.examples example > /tmp/example.log 9 | python -m causing.examples example2 > /tmp/example2.log 10 | python -m causing.examples example3 > /tmp/example3.log 11 | python -m causing.examples education > /tmp/education.log 12 | python -m causing.examples heaviside > /tmp/heaviside.log 13 | 14 | verify-output: examples 15 | git diff --exit-code output/ 16 | 17 | test: 18 | python3 -m unittest 19 | 20 | .PHONY: lint lint-all examples verify-output test 21 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Causing: CAUSal INterpretation using Graphs 2 | 3 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) 4 | [![Python 3.7](https://img.shields.io/badge/python-3.7-blue.svg)](https://www.python.org/) 5 | 6 | _Causing is a multivariate graphical analysis tool helping you to interpret the causal 7 | effects of a given equation system._ 8 | 9 | Get a nice colored graph and immediately understand the causal effects between the variables. 10 | 11 | **Input:** You simply have to put in a dataset and provide an equation system in form of a 12 | python function. The endogenous variables on the left-hand side are assumed to be caused by 13 | the variables on the right-hand side of the equation. Thus, you provide the causal structure 14 | in form of a directed acyclic graph (DAG). 15 | 16 | **Output:** As an output, you will get a colored graph of quantified effects acting between 17 | the model variables. You can immediately interpret mediation chains for every 18 | individual observation - even for highly complex nonlinear systems. 19 | 20 | Here is a table relating Causing to other approaches: 21 | 22 | Causing is | Causing is NOT 23 | --- | --- 24 | ✅ causal model given | ❌ causal search 25 | ✅ DAG directed acyclic graph | ❌ cyclic, undirected, or bidirected graph 26 | ✅ latent variables | ❌ just observed / manifest variables 27 | ✅ individual effects | ❌ just average effects 28 | ✅ direct, total, and mediation effects | ❌ just total effects 29 | ✅ structural model | ❌ reduced model 30 | ✅ small and big data | ❌ big data requirement 31 | ✅ graphical results | ❌ just numerical results 32 | ✅ XAI explainable AI | ❌ black box neural network 33 | 34 | The Causing approach is quite flexible. It can be applied to highly latent models with many of the modeled endogenous variables being unobserved. Exogenous variables are assumed to be observed and deterministic. The most severe restriction certainly is that you need to specify the causal model / causal ordering. 35 | 36 | ## Causal Effects 37 | 38 | Causing combines total effects and mediation effects in one single graph that is easy to explain. 39 | 40 | The total effects of a variable on the final variable are shown in the corresponding nodes of the graph. The total effects are split up over their outgoing edges, yielding the mediation effects shown on the edges. Just education has more than one outgoing edge to be interpreted in this way. 41 | 42 | The effects differ from individual to individual. To emphasize this, we talk about individual effects. 
The corresponding graph, combining total and mediation effects, is called the Individual Mediation Effects (IME) graph. 43 | 44 | ## Software 45 | 46 | Causing is free software written in _Python 3_. Graphs are generated using _Graphviz_. See dependencies in [setup.py](setup.py). Causing is available under the MIT license. See [LICENSE](LICENSE.md "LICENSE"). 47 | 48 | The software is developed by RealRate, an AI rating agency aiming to re-invent the rating market by using AI, interpretability, and avoiding any conflict of interest. See www.realrate.ai. 49 | 50 | When starting `python -m causing.examples example` after cloning / downloading the Causing repository, you will find the results in the _output_ folder. The results are saved as SVG files. The IME files show the individual mediation effects graph for the respective individual. 51 | 52 | See `causing/examples` for the code generating these examples. 53 | 54 | ## Start your Model 55 | 56 | To start your model, you have to provide the following information, as done in the example code below: 57 | 58 | - Define all your model variables as SymPy symbols. 59 | - Note that in SymPy some operators are special, e.g. Max() instead of max(). 60 | - Provide the model equations in topological order, that is, in order of computation. 61 | - Then the model is specified with: 62 | - _xvars_: exogenous variables 63 | - _yvars_: endogenous variables in topological order 64 | - _equations_: previously defined equations 65 | - _final_var_: the final variable of interest used for mediation effects 66 | 67 | ## 1. A Simple Example 68 | 69 | Assume a model defined by the equation system: 70 | 71 | Y1 = X1 72 | 73 | Y2 = X2 + 2 * Y1^2 74 | 75 | Y3 = Y1 + Y2. 76 | 77 | This gives the following graphs. Some notes to understand them: 78 | 79 | - The data used consists of 200 observations. They are available for the x variables X1 and X2 with mean(X1) = 3 and mean(X2) = 2. Variables Y1 and Y2 are assumed to be latent / unobserved. Y3 is assumed to be manifest / observed. Therefore, 200 observations are available for Y3. 80 | 81 | - To allow for benchmark comparisons, each individual effect is measured with respect to the mean of all observations. 82 | 83 | - Nodes and edges are colored, showing the positive (_green_) and negative (_red_) effects they have on the final variable Y3. 84 | 85 | - Individual effects are based on the given model. For each individual, however, its _own_ exogenous data is put into the given graph function to yield the corresponding endogenous values. The effects are computed at this individual point. Individual effects are shown below just for individual no. 1 out of the 200 observations. 86 | 87 | - Total effects are shown below in the nodes, and they are split up over the outgoing edges, yielding the mediation effects shown on the edges. Note, however, that just outgoing edges sum up to the node value; incoming edges do not. All effects are effects just on the final variable of interest, assumed here to be Y3. 88 | 89 | ![Individual Mediation Effects (IME)](https://github.com/realrate/Causing/raw/develop/images_readme/IME_1.svg) 90 | 91 | As you can see in the right-most graph for the individual mediation effects (IME), there is one green path starting at X1, passing through Y1 and Y2, and finally ending in Y3. This means that X1 is the main cause for Y3 taking on a value above average, with its effect on Y3 being +29.81. However, this positive effect is slightly reduced by X2. In total, accounting for all exogenous and endogenous effects, Y3 is +27.07 above average. You can understand at one glance why Y3 is above average for individual no. 1.
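The snippet below is a condensed sketch of how this example is specified and evaluated with the Causing API. It is abridged from `causing/examples/models.py` and `causing/examples/__main__.py` and assumes it is run from the repository root:

```python
import json

import numpy as np
from sympy import symbols

from causing.model import Model

# define all model variables as SymPy symbols
X1, X2, Y1, Y2, Y3 = symbols(["X1", "X2", "Y1", "Y2", "Y3"])

m = Model(
    xvars=[X1, X2],  # exogenous variables in desired order
    yvars=[Y1, Y2, Y3],  # endogenous variables in topological order
    equations=(  # one equation per endogenous variable: Y1, Y2, Y3
        X1,
        X2 + 2 * Y1**2,
        Y1 + Y2,
    ),
    final_var=Y3,  # final variable of interest, for mediation analysis
)

# load the 200 observations of X1 and X2 and compute the individual effects
with open("causing/examples/input/example.json") as f:
    xdat = np.array(json.load(f)["xdat"])
effects = m.calc_effects(xdat)
```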
92 | 93 | You can find the full source code for this example [here](https://github.com/realrate/Causing/blob/develop/causing/examples/models.py#L16-L45). 94 | 95 | ## 2. Application to Education and Wages 96 | 97 | To dig a bit deeper, here we have a real-world example from the social sciences. We analyze how the wage earned by young American workers is determined by their educational attainment, family characteristics, and test scores. 98 | 99 | This 5-minute introductory video gives a short overview of Causing and includes this real data example: see the [Causing Introduction Video](https://youtu.be/GJLsjSZOk2w "Causing_Introduction_Video"). 100 | 101 | See here for a detailed analysis of the Education and Wages example: [An Application of Causing: Education and Wages](docs/education.md). 102 | 103 | ## 3. Application to Insurance Ratings 104 | 105 | The Causing approach and its formulas, together with an application, are given in: 106 | 107 | > Bartel, Holger (2020), "Causal Analysis - With an Application to Insurance Ratings" 108 | DOI: 10.13140/RG.2.2.31524.83848 109 | https://www.researchgate.net/publication/339091133 110 | 111 | Note that in this early paper the mediation effects on the final variable of interest are called final effects. Also, while the current Causing version just uses numerically computed effects, that paper uses closed formulas. 112 | 113 | The paper proposes simple linear algebra formulas for the causal analysis of equation systems. The effect of one variable on another is the total derivative, extended to endogenous system variables. These total effects are identical to the effects used in graph theory and its do-calculus. Further, mediation effects are defined, decomposing the total effect of one variable on a final variable of interest over all its directly caused variables. This allows for an easy but in-depth causal and mediation analysis. 114 |
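For intuition, here is a minimal SymPy sketch of the total-derivative idea, applied to the simple example from section 1 (an illustration only; the current Causing version computes the effects numerically):

```python
import sympy

X1, X2, Y1 = sympy.symbols("X1 X2 Y1")

# reduced form of the simple example: Y3 = Y1 + Y2 with Y2 = X2 + 2*Y1^2 and Y1 = X1
y3 = (Y1 + (X2 + 2 * Y1**2)).subs(Y1, X1)

print(sympy.diff(y3, X1))  # 4*X1 + 1, the total effect of X1 on Y3
print(sympy.diff(y3, X2))  # 1, the total effect of X2 on Y3
```

Because the total effect of X1 depends on the point of evaluation, the effects differ from individual to individual, which is exactly what the IME graphs visualize.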
115 | The equation system provided by the user is represented as a structural neural network (SNN). The network's nodes correspond to the model variables, and its edge weights are given by the effects. Unlike classical deep neural networks, we follow a sparse and 'small data' approach. This new methodology is applied to the financial strength ratings of insurance companies. 116 | 117 | > **Keywords:** total derivative, graphical effect, graph theory, do-Calculus, structural neural network, linear Simultaneous Equations Model (SEM), Structural Causal Model (SCM), insurance rating 118 | 119 | ## Award 120 | 121 | RealRate's AI software _Causing_ is a winner of the PyTorch AI Hackathon. 122 | 123 | 124 | 125 | We are excited to be a winner of the PyTorch AI Hackathon 2020 in the Responsible AI category. This is quite an honor given that more than 2,500 teams submitted their projects. 126 | 127 | [devpost.com/software/realrate-explainable-ai-for-company-ratings](https://devpost.com/software/realrate-explainable-ai-for-company-ratings "devpost.com/software/realrate-explainable-ai-for-company-ratings"). 128 | 129 | ## GitHub Star History 130 | 131 | [star-history.com](https://www.star-history.com/#realrate/Causing&Date) 132 | ![star-history-2025327](https://github.com/user-attachments/assets/67271706-0534-4b97-b9da-7fe502f1d94a) 133 | 134 | ## Contact 135 | 136 | Dr. Holger Bartel 137 | RealRate 138 | Cecilienstr. 14, D-12307 Berlin 139 | [holger.bartel@realrate.ai](mailto:holger.bartel@realrate.ai?subject=[Causing]) 140 | Phone: +49 160 957 90 844 141 | [realrate.ai](https://realrate.ai) 142 | [drbartel.com](https://drbartel.com) 143 | -------------------------------------------------------------------------------- /causing/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """causing - causal interpretation using graphs.""" 3 | 4 | # flake8: noqa 5 | 6 | # public Causing API 7 | from causing.model import Model 8 | 9 | 10 | def create_indiv(m: Model, xdat, show_nr_indiv: int) -> dict: 11 | """Calculate effects and limit the result set to `show_nr_indiv` individuals 12 | 13 | This is mostly for backwards compatibility and to shrink the amount of 14 | data for test cases. Otherwise, use `Model.calc_effects` directly. 15 | """ 16 | eff = m.calc_effects(xdat) 17 | for key in ["exj_indivs", "eyj_indivs", "eyx_indivs", "eyy_indivs"]: 18 | if key in ["exj_indivs", "eyj_indivs"]: 19 | eff[key] = eff[key][:, :show_nr_indiv] 20 | else: 21 | eff[key] = eff[key][:show_nr_indiv] 22 | return eff 23 | -------------------------------------------------------------------------------- /causing/examples/__main__.py: -------------------------------------------------------------------------------- 1 | from sys import argv 2 | from pathlib import Path 3 | import warnings 4 | import logging 5 | 6 | import pandas 7 | 8 | import causing.graph 9 | from causing.examples import models 10 | from causing.utils import round_sig_recursive, dump_json 11 | 12 | logging.basicConfig(level=logging.INFO) # type: ignore 13 | 14 | # Our examples should run without any warnings, so let's treat them as errors. 15 | warnings.filterwarnings("error") 16 | 17 | # Keep wide output even if redirecting to file 18 | pandas.set_option("display.max_columns", 500) 19 | pandas.set_option("display.max_rows", 500) 20 | pandas.set_option("display.width", 500) 21 | 22 | if len(argv) != 2: 23 | print('Please call with model name as argument (e.g.
"example" or "education").') 24 | exit(1) 25 | 26 | model_name = argv[1] 27 | 28 | try: 29 | model_function = getattr(models, model_name) 30 | except AttributeError: 31 | print(f'Unkown model function "{model_name}".') 32 | exit(1) 33 | 34 | 35 | # Do all calculations 36 | m, xdat = model_function() 37 | graphs = m.calc_effects(xdat) 38 | 39 | # Print json output 40 | output_dir = Path("output") / model_name 41 | dump_json(round_sig_recursive(graphs, 6), output_dir / "graphs.json") 42 | 43 | # Draw graphs 44 | show_nr_indiv = 33 if model_name == "education" else 3 45 | annotated_graphs = causing.graph.annotated_graphs( 46 | m, graphs, ids=[str(i) for i in range(show_nr_indiv)] 47 | ) 48 | causing.graph.create_graphs(annotated_graphs, output_dir / "graphs") 49 | -------------------------------------------------------------------------------- /causing/examples/input/example.json: -------------------------------------------------------------------------------- 1 | { 2 | "xdat": [ 3 | [ 4 | 2.8872869691061602, 5 | 4.785026931424977, 6 | 2.6736792144640185, 7 | 3.3758664973419217, 8 | 2.6500242287369096, 9 | 4.200911925211615, 10 | 4.854028741791787, 11 | 2.869288568402771, 12 | 2.0263377372073608, 13 | 2.2820785332228883, 14 | 2.2227999905821543, 15 | 2.942381145922457, 16 | 1.8167989405733076, 17 | 3.912256171757456, 18 | 2.304230997962419, 19 | 2.212758471472195, 20 | 3.0933081068260373, 21 | 3.7802448586640867, 22 | 2.519391413372405, 23 | 2.4972656287527957, 24 | 2.075002378029681, 25 | 1.599763814234508, 26 | 3.2173406802207793, 27 | 3.0858755266228397, 28 | 2.802968785886549, 29 | 3.3451739807671483, 30 | 2.4047148350193743, 31 | 4.331252678012974, 32 | 2.5142442071775224, 33 | 0.40464257762082045, 34 | 4.0389596412374775, 35 | 3.7586409441262836, 36 | 0.6557068408092563, 37 | 1.49721232937859, 38 | 2.446821476108088, 39 | 2.130682143294284, 40 | 2.8928450354571593, 41 | 2.65946206614633, 42 | 1.7053831777164132, 43 | 4.126269575524226, 44 | 3.7655853172760128, 45 | 3.5738584993611338, 46 | 3.645188978621543, 47 | 4.899738557080213, 48 | 3.499970383255115, 49 | 1.997115783987647, 50 | 3.204277735537852, 51 | 1.9250025366322525, 52 | 3.6469302206281182, 53 | 2.8274358144735015, 54 | 2.8879561501492454, 55 | 3.45473818292903, 56 | 3.062460465192962, 57 | 2.5464461674841923, 58 | 2.186884007640653, 59 | 3.445713200789419, 60 | 3.1078318128236835, 61 | 2.8431479090574925, 62 | 1.5277366136014627, 63 | 2.106610472737535, 64 | 3.001462948847881, 65 | 2.4944514415718055, 66 | 4.758176140420122, 67 | 3.7946416510491887, 68 | 2.908823602380252, 69 | 2.731887381170233, 70 | 2.40603677745438, 71 | 2.1664383556553113, 72 | 3.6441244592704134, 73 | 2.2182410399027823, 74 | 3.3051991650580543, 75 | 2.501928484246964, 76 | 2.9445662409692055, 77 | 1.1876853032251635, 78 | 2.6208023317844744, 79 | 2.96889984080992, 80 | 3.270149615184717, 81 | 2.739032565895155, 82 | 1.673923343601349, 83 | 3.2579314850568326, 84 | 1.8502340173256802, 85 | 3.8311648452832605, 86 | 3.7311102235595213, 87 | 6.2592559170283915, 88 | 0.9484913453096051, 89 | 2.767983030562516, 90 | 2.116166313265303, 91 | 4.066938240964659, 92 | 3.7648748965769148, 93 | 2.483665572184565, 94 | 2.8861545302601717, 95 | 2.5734428378111573, 96 | 3.2759071317142463, 97 | 4.2115136836422185, 98 | 1.549904523402743, 99 | 3.4763530654671584, 100 | 2.571267249401294, 101 | 4.039451329698665, 102 | 3.371804663869714, 103 | 3.621954815294834, 104 | 3.061394449445934, 105 | 2.730352442998404, 106 | 2.9927653627393807, 107 | 2.03313616388579, 108 | 
2.89852800879, 109 | 2.3040956673625486, 110 | 2.8944942969662772, 111 | 2.4004027518716806, 112 | 3.11359668445339, 113 | 4.284907708139697, 114 | 1.8763025547279524, 115 | 2.4623858130099046, 116 | 2.3479797052171394, 117 | 2.524438919555902, 118 | 2.4586609739140277, 119 | 4.713092512432414, 120 | 1.4891068463473403, 121 | 4.377964234434829, 122 | 3.1569983934533132, 123 | 2.6190877126068717, 124 | 2.87375023091356, 125 | 3.2486564582861166, 126 | 2.6528455754460745, 127 | 3.005476921270593, 128 | 4.164232296973058, 129 | 3.780966820743133, 130 | 4.816306941606609, 131 | 2.66655002687836, 132 | 5.047364009401332, 133 | 3.6969608986513998, 134 | 3.5670551666090775, 135 | 3.661881588239103, 136 | 2.0929879464576757, 137 | 2.9099855912552752, 138 | 3.0774500846462565, 139 | 4.098866238256457, 140 | 2.6237114311605927, 141 | 1.046540865873361, 142 | 2.228691751324142, 143 | 2.7618070480377344, 144 | 3.846074678747882, 145 | 2.94351360255783, 146 | 3.954767364075697, 147 | 3.3118891480087354, 148 | 2.8936539288318435, 149 | 3.76306781846118, 150 | 0.6552130470177158, 151 | 3.656514304436807, 152 | 2.0206212078730834, 153 | 2.2685397111761723, 154 | 3.534031795914862, 155 | 3.1715241767872833, 156 | 2.110420588830027, 157 | 3.393343513161322, 158 | 4.104738237889165, 159 | 3.680783663265437, 160 | 3.563299169053024, 161 | 3.005614352581387, 162 | 3.087266524609803, 163 | 3.4313093353724726, 164 | 3.232336023851622, 165 | 3.9734899485918684, 166 | 2.9504362681593572, 167 | 2.0661992197778942, 168 | 3.160150193317373, 169 | 3.259285881992602, 170 | 2.8323858865562337, 171 | 1.0271769233156829, 172 | 2.5935738333609066, 173 | 2.8015362631274687, 174 | 5.194837332354277, 175 | 3.7663408873157636, 176 | 4.031791927556463, 177 | 2.2567868006120047, 178 | 4.056446908530157, 179 | 2.4007762052187513, 180 | 3.280445470271719, 181 | 3.8570325888238717, 182 | 3.860621845169172, 183 | 3.4850094100306594, 184 | 2.7490990870560683, 185 | 1.6186175139101526, 186 | 2.1647664944760656, 187 | 2.8268549424378078, 188 | 4.576259428567907, 189 | 4.126690293240515, 190 | 1.5275721604715555, 191 | 4.324972261723336, 192 | 2.3080556055625405, 193 | 4.315641617626974, 194 | 3.6738139571871877, 195 | 2.0712800147844592, 196 | 1.53607688458728, 197 | 1.5554929600778507, 198 | 2.2732617847626293, 199 | 1.3015309837029603, 200 | 3.7079411806877287, 201 | 2.9598664447073637, 202 | 2.887364555410513, 203 | 4.962038075781968 204 | ], 205 | [ 206 | 0.9226444224672825, 207 | 0.9294591185928729, 208 | 0.7002944365636006, 209 | 1.214768112917615, 210 | 2.2206712953453698, 211 | 0.7028872875674521, 212 | 2.8547875011343207, 213 | 1.578881654341097, 214 | 1.3472455129620644, 215 | 3.0531318572670036, 216 | 1.267924982888177, 217 | 2.0664482732752085, 218 | 0.24590829408531278, 219 | 2.5867843279163028, 220 | -0.3816831012145938, 221 | 0.8348152790525589, 222 | 1.500159348577411, 223 | 1.9592915569971832, 224 | 0.8895327013178551, 225 | 2.6733843052360955, 226 | 3.9803840140549758, 227 | 1.4856861319804242, 228 | 1.698202483406017, 229 | 2.121672268788325, 230 | 3.280314026453696, 231 | 2.615412507064239, 232 | 2.5989689409333256, 233 | 1.550967258816849, 234 | 2.958004972041247, 235 | 1.6444840642836143, 236 | 0.43575868063162804, 237 | 1.6617197113173978, 238 | 1.425404059032009, 239 | 2.1783457927798424, 240 | 2.925505829592848, 241 | 1.7525624835650067, 242 | 4.034986208906158, 243 | 1.4146531550770585, 244 | 1.9508499885709887, 245 | 2.6186770883947976, 246 | 0.8273283355360868, 247 | 1.8990822461613104, 248 | 
1.1446179404584775, 249 | 2.592427050821793, 250 | 1.4860627063397391, 251 | 1.0338986053567427, 252 | 2.5661162941747326, 253 | -0.3880444860614265, 254 | 1.8135627535942385, 255 | -0.2766991301515813, 256 | 2.913603784550382, 257 | 4.111215695780565, 258 | 2.015277101913836, 259 | 2.2752258566035004, 260 | 1.73787117011666, 261 | 0.8397768311201528, 262 | 2.856584568453247, 263 | 3.1334298165072467, 264 | 2.5540905852775326, 265 | 1.283442940804922, 266 | 1.1170825726335645, 267 | 3.1058559859229478, 268 | 3.858719807608227, 269 | 1.2702859041225625, 270 | -0.06795559289248754, 271 | 0.9842824013279381, 272 | 3.0816303863513683, 273 | 1.6107872206306568, 274 | 3.490901641888225, 275 | -0.9286431243068205, 276 | 2.4843813532581063, 277 | 2.0171562158368475, 278 | 3.5457411117333475, 279 | 0.20377390180683297, 280 | 1.1386755424689001, 281 | 1.5846684231020272, 282 | 1.444482379728466, 283 | 0.8961596153787899, 284 | 1.5347058166312924, 285 | 0.553491806650217, 286 | 1.5630243232366383, 287 | 0.02627548034679683, 288 | 1.408695602852081, 289 | 4.273769226171176, 290 | 0.7479071339699841, 291 | 2.9262968341913425, 292 | 1.764046869509698, 293 | 2.420768785032239, 294 | 1.7128268268926747, 295 | 0.30869335112730867, 296 | 3.756076547833911, 297 | 1.1470735235273697, 298 | 2.3319186688055447, 299 | 1.4359715717524066, 300 | 2.0164866466837092, 301 | 0.5617860815255695, 302 | 3.250288977965843, 303 | 2.595799008717896, 304 | 2.516543384472706, 305 | 2.949865096669039, 306 | 2.5903071834654683, 307 | 1.5942879820625426, 308 | 3.008864857520584, 309 | 2.838178853735018, 310 | 0.21730154658388745, 311 | 2.4606218892209606, 312 | 3.6304882339853037, 313 | 2.596546765093882, 314 | 2.3926595004477607, 315 | 2.9322161930017634, 316 | 1.6256060780193693, 317 | 1.240724906279075, 318 | 0.1104421226189336, 319 | 1.6537717127608933, 320 | -0.04484330469960929, 321 | 1.9722602030189287, 322 | 0.556166905081231, 323 | 3.7332038961653864, 324 | 1.2038437944038325, 325 | 1.4559495581637414, 326 | 2.3116821836983896, 327 | 3.100930678251502, 328 | 2.8113654329199367, 329 | 0.9728213305442048, 330 | 3.619644314947672, 331 | 4.151626406226346, 332 | 2.05787204578069, 333 | 2.5406975498508046, 334 | 1.888124090887733, 335 | 0.6840451515246686, 336 | 2.2795766197948617, 337 | 3.2070093779609374, 338 | 0.7311878049131035, 339 | 1.8896353621645683, 340 | 2.8037421644009073, 341 | 1.2382440344429244, 342 | 0.768037487237573, 343 | 1.6994847758222944, 344 | 3.0557837386463196, 345 | 2.3176509665010827, 346 | 2.024770437344937, 347 | 1.4057848820267524, 348 | 0.9352620520209849, 349 | -0.4222668339577629, 350 | 1.9698673808217695, 351 | 2.085718299641306, 352 | 1.5946525289334947, 353 | 2.1734651592540204, 354 | -0.27111078601554484, 355 | 2.0574929553834664, 356 | 0.40447335128996054, 357 | 3.587737208395782, 358 | 2.180748555856563, 359 | 1.3251320363200185, 360 | 0.5583035425199661, 361 | 3.52484561018949, 362 | 1.7406996192990234, 363 | 2.372795718679365, 364 | 1.5280388793186288, 365 | 1.477046197636669, 366 | 1.8631505650207825, 367 | 4.121237273176616, 368 | 1.381763411809243, 369 | 1.2466009178571835, 370 | 1.4175554162441855, 371 | 2.08426606878413, 372 | 0.9628625992344577, 373 | 1.5302470818501053, 374 | 2.617921714292243, 375 | 2.0475346697286754, 376 | 1.6721931992940329, 377 | 2.015083016276819, 378 | 1.602946451757259, 379 | 3.4175363481192313, 380 | 1.9684011169976288, 381 | 3.3416730890515067, 382 | 3.634718718329693, 383 | 2.7449829287781977, 384 | 2.5521970303043204, 385 | 1.8535103583299786, 386 | 
2.4837568367872978, 387 | 2.4280421484417176, 388 | 1.0145787809590394, 389 | 2.994803238446809, 390 | 3.031989718547572, 391 | 1.6142990395475707, 392 | 1.8185839882665342, 393 | 3.174659695401056, 394 | 1.0089450416246355, 395 | 1.8005793504007783, 396 | 2.4592980462169747, 397 | 3.045729092536873, 398 | 0.8114356839967107, 399 | 3.4497743320518284, 400 | 0.5420014213123594, 401 | 0.9604658732033851, 402 | 0.8231517390179206, 403 | 1.909812809958391, 404 | 1.017897451595904, 405 | 2.3402902008910145 406 | ] 407 | ], 408 | "ymdat": [ 409 | [ 410 | 21.82038219052063, 411 | 49.975955779190926, 412 | 19.276575657402223, 413 | 28.753192265354457, 414 | 17.49978522674554, 415 | 40.055320910573755, 416 | 54.835438551163115, 417 | 22.024145480767782, 418 | 11.80644641025265, 419 | 15.949233167087147, 420 | 13.719267859447015, 421 | 22.911758662443752, 422 | 9.410700038311115, 423 | 36.896503116275696, 424 | 11.736689554541677, 425 | 13.47919477880806, 426 | 23.75494574612701, 427 | 33.23890611180403, 428 | 16.246831596435143, 429 | 17.086646436124408, 430 | 15.056210778612774, 431 | 9.297788023616206, 432 | 25.787377778241616, 433 | 23.508148207378206, 434 | 21.563759840715715, 435 | 26.984027572400404, 436 | 16.407085238622372, 437 | 43.62179111834604, 438 | 17.117927857487164, 439 | 1.8515773486216243, 440 | 35.42251733773925, 441 | 33.623612395668914, 442 | 4.094619065846722, 443 | 6.758486571904081, 444 | 18.633354926308904, 445 | 14.401485633063219, 446 | 24.10773006734984, 447 | 18.994017602143604, 448 | 10.392384112826388, 449 | 40.97851113271272, 450 | 32.332092417908974, 451 | 30.72592343393059, 452 | 31.47995515506708, 453 | 56.64648237174675, 454 | 28.745298192637545, 455 | 8.243869409820716, 456 | 26.413387460897273, 457 | 8.420233401459777, 458 | 32.08163450846512, 459 | 18.11651641423544, 460 | 21.662142362765728, 461 | 32.63809194485312, 462 | 23.702851261189938, 463 | 20.69154115621013, 464 | 13.956060581564085, 465 | 27.927148866390745, 466 | 25.2312095955475, 467 | 21.301821893725457, 468 | 8.884433499291587, 469 | 12.411714659563291, 470 | 21.993254266895526, 471 | 17.524788327317935, 472 | 53.81139764555536, 473 | 33.89166508381495, 474 | 20.497087464982474, 475 | 19.817082648417355, 476 | 20.261154390372017, 477 | 12.169042689363351, 478 | 33.43250503183893, 479 | 11.073168390847469, 480 | 28.753498187294987, 481 | 15.665854216630681, 482 | 24.21197854641842, 483 | 3.3060335851844553, 484 | 16.331857756567132, 485 | 23.155655409808933, 486 | 27.10176390523562, 487 | 18.47037964040584, 488 | 8.054504809299011, 489 | 26.528496826255267, 490 | 8.48978226765763, 491 | 32.625390117284816, 492 | 34.035292601514314, 493 | 86.83363748713404, 494 | 2.3533169202440134, 495 | 20.158648058560015, 496 | 13.862614982774083, 497 | 39.471139954720165, 498 | 34.25724910135015, 499 | 16.188291283903645, 500 | 23.28445261953703, 501 | 16.82814792598575, 502 | 26.094199040715463, 503 | 40.08924268684723, 504 | 8.149078550553586, 505 | 29.66355428852332, 506 | 17.121883038337796, 507 | 41.77990918355217, 508 | 30.987929780344942, 509 | 33.35309564906497, 510 | 23.418431302074872, 511 | 19.121840774980218, 512 | 23.819490680786704, 513 | 12.411688955121045, 514 | 21.862602898660953, 515 | 15.332895052185505, 516 | 21.14466072767647, 517 | 16.456670432937113, 518 | 25.921447098972127, 519 | 44.60324194956124, 520 | 10.141371991041838, 521 | 17.475404098961636, 522 | 12.62111437219627, 523 | 17.29191285888236, 524 | 13.895534589260585, 525 | 50.92958167866882, 526 | 5.823634648126632, 527 | 
46.91068561975163, 528 | 24.35246267471942, 529 | 18.171434920047506, 530 | 20.82684785067222, 531 | 26.697206786107007, 532 | 19.54904879782474, 533 | 20.623587646186262, 534 | 41.97192789689322, 535 | 36.366492268580636, 536 | 54.36732000382837, 537 | 19.09014538030402, 538 | 57.598156945467316, 539 | 30.685913161196492, 540 | 30.43935067872708, 541 | 31.811931858597145, 542 | 11.584617993871987, 543 | 20.311799947502923, 544 | 25.659657360955418, 545 | 38.96851477468402, 546 | 18.775096276841474, 547 | 6.266805993730996, 548 | 16.009519374065853, 549 | 20.61383111547197, 550 | 36.00099523097948, 551 | 21.575382044893438, 552 | 36.2179133378303, 553 | 25.995906767665932, 554 | 20.624224781130465, 555 | 33.52714835248783, 556 | 3.5841400431821535, 557 | 31.811722915023836, 558 | 9.624383939370265, 559 | 13.86114716858814, 560 | 30.414671173876325, 561 | 26.855448489597386, 562 | 12.740193625855179, 563 | 27.360449879267676, 564 | 37.41053916293403, 565 | 33.95991914686377, 566 | 28.845822064405066, 567 | 23.60821140707702, 568 | 23.49879553095469, 569 | 28.016184575226507, 570 | 26.999406267077404, 571 | 41.284577083469664, 572 | 23.301276402197438, 573 | 13.088364686808742, 574 | 24.838707878850624, 575 | 24.740754584643597, 576 | 18.924089965671524, 577 | 6.488334146641673, 578 | 19.735716272239106, 579 | 20.467604725356058, 580 | 60.75532102277243, 581 | 33.48478411344634, 582 | 38.70673421525028, 583 | 15.241769714111337, 584 | 41.562829786456966, 585 | 18.753797731011737, 586 | 24.7925985700743, 587 | 36.813588983837654, 588 | 37.77477508116713, 589 | 29.728577318068464, 590 | 21.086095641324647, 591 | 7.407700033348016, 592 | 12.911524861885606, 593 | 21.864201384556036, 594 | 50.425507878571494, 595 | 39.872370345090445, 596 | 8.279187331573457, 597 | 44.478251080404206, 598 | 13.353304516285041, 599 | 44.09196102771181, 600 | 33.46428065808478, 601 | 14.244936126457995, 602 | 8.461522630226588, 603 | 10.997030304640129, 604 | 13.172069662908402, 605 | 7.04557940708292, 606 | 32.198719463253745, 607 | 25.118111931960176, 608 | 19.837285497260304, 609 | 56.842205882446876 610 | ] 611 | ] 612 | } -------------------------------------------------------------------------------- /causing/examples/input/example2.json: -------------------------------------------------------------------------------- 1 | { 2 | "xdat": [ 3 | [ 4 | 2.8872869691061602, 5 | 1.923436019214106, 6 | 4.785026931424977, 7 | 1.5430166916415569, 8 | 2.6736792144640185, 9 | 1.7401035154707398, 10 | 3.3758664973419217, 11 | 2.1218526337110184, 12 | 2.6500242287369096, 13 | 3.2966602028589476, 14 | 4.200911925211615, 15 | 1.4310047612935157, 16 | 4.854028741791787, 17 | 3.493961807966228, 18 | 2.869288568402771, 19 | 2.596879247501283, 20 | 2.0263377372073608, 21 | 2.532533218572503, 22 | 2.2820785332228883, 23 | 4.2213933066552505, 24 | 2.2227999905821543, 25 | 2.4114743520286543, 26 | 2.942381145922457, 27 | 3.0795798842600206, 28 | 1.8167989405733076, 29 | 1.4512575542517088, 30 | 3.912256171757456, 31 | 3.412670735564499, 32 | 2.304230997962419, 33 | 0.7112281165759713, 34 | 2.212758471472195, 35 | 1.9714833280983022, 36 | 3.0933081068260373, 37 | 2.470805833831886, 38 | 3.7802448586640867, 39 | 2.79918530449522, 40 | 2.519391413372405, 41 | 1.9647378761789605, 42 | 2.4972656287527957, 43 | 3.7898902024041283, 44 | 2.075002378029681, 45 | 5.210035319426028, 46 | 1.599763814234508, 47 | 2.7609026210900796, 48 | 3.2173406802207793, 49 | 2.6476147188864747, 50 | 3.0858755266228397, 51 | 3.1066519708560043, 52 | 
2.802968785886549, 53 | 4.346933859556485, 54 | 3.3451739807671483, 55 | 3.557644416067242, 56 | 2.4047148350193743, 57 | 3.7328321909133715, 58 | 4.331252678012974, 59 | 2.2699670626370727, 60 | 2.5142442071775224, 61 | 4.076914216230647, 62 | 0.40464257762082045, 63 | 3.1669281829423115, 64 | 4.0389596412374775, 65 | 1.191426140039237, 66 | 3.7586409441262836, 67 | 2.4998871918615406, 68 | 0.6557068408092563, 69 | 2.8920823107737323, 70 | 1.49721232937859, 71 | 3.4887786612651634, 72 | 2.446821476108088, 73 | 4.057507525214147, 74 | 2.130682143294284, 75 | 2.9249089067318836, 76 | 2.8928450354571593, 77 | 5.098822017803357, 78 | 2.65946206614633, 79 | 2.4720948927464113, 80 | 1.7053831777164132, 81 | 3.2140990318960836, 82 | 4.126269575524226, 83 | 3.401535844811228, 84 | 3.7655853172760128, 85 | 1.6468725457769033, 86 | 3.5738584993611338, 87 | 2.7798628731285846, 88 | 3.645188978621543, 89 | 1.9952806924726025, 90 | 4.899738557080213, 91 | 3.216860817678221, 92 | 3.499970383255115, 93 | 2.373408919020704, 94 | 1.997115783987647, 95 | 2.218689776415458, 96 | 3.204277735537852, 97 | 3.536092005094851, 98 | 1.9250025366322525, 99 | 0.7821452408708565, 100 | 3.6469302206281182, 101 | 2.677664203879725, 102 | 2.8274358144735015, 103 | 0.7115781974058732, 104 | 2.8879561501492454, 105 | 3.955313813089303, 106 | 3.45473818292903, 107 | 5.061927453612517, 108 | 3.062460465192962, 109 | 3.002842437780612, 110 | 2.5464461674841923, 111 | 3.3734825020035344, 112 | 2.186884007640653, 113 | 2.89844249021185, 114 | 3.445713200789419, 115 | 1.724871360740863, 116 | 3.1078318128236835, 117 | 3.852236887650466, 118 | 2.8431479090574925, 119 | 4.188819261366114, 120 | 1.5277366136014627, 121 | 3.8660408408090556, 122 | 2.106610472737535, 123 | 2.4510293875211007, 124 | 3.001462948847881, 125 | 2.09857755095916, 126 | 2.4944514415718055, 127 | 4.2318542067430736, 128 | 4.758176140420122, 129 | 4.538161757938014, 130 | 3.7946416510491887, 131 | 2.0930331217905254, 132 | 2.908823602380252, 133 | 0.9080129653456019, 134 | 2.731887381170233, 135 | 2.0180658260146975, 136 | 2.40603677745438, 137 | 4.225176625455613, 138 | 2.1664383556553113, 139 | 2.7729114286394254, 140 | 3.6441244592704134, 141 | 4.390163761705033, 142 | 2.2182410399027823, 143 | 0.170542007314733, 144 | 3.3051991650580543, 145 | 3.4320711298075115, 146 | 2.501928484246964, 147 | 3.119178411883016, 148 | 2.9445662409692055, 149 | 4.5889307845912795, 150 | 1.1876853032251635, 151 | 1.536671603685592, 152 | 2.6208023317844744, 153 | 2.1983178065939715, 154 | 2.96889984080992, 155 | 2.5824522778006074, 156 | 3.270149615184717, 157 | 2.3778831437196666, 158 | 2.739032565895155, 159 | 1.9266673795035667, 160 | 1.673923343601349, 161 | 2.795795376691757, 162 | 3.2579314850568326, 163 | 1.4710137133918215, 164 | 1.8502340173256802, 165 | 2.7887085658978172, 166 | 3.8311648452832605, 167 | 0.8159150339221504, 168 | 3.7311102235595213, 169 | 2.2472652273470786, 170 | 6.2592559170283915, 171 | 4.655363170578773, 172 | 0.9484913453096051, 173 | 2.140850520427705, 174 | 2.767983030562516, 175 | 3.9927580131174896, 176 | 2.116166313265303, 177 | 2.9395931404512505, 178 | 4.066938240964659, 179 | 3.211657486472749, 180 | 3.7648748965769148, 181 | 2.5507756730565037, 182 | 2.483665572184565, 183 | 1.379213703600541, 184 | 2.8861545302601717, 185 | 4.815526730641361, 186 | 2.5734428378111573, 187 | 2.216556176346062, 188 | 3.2759071317142463, 189 | 3.2824437653576974, 190 | 4.2115136836422185, 191 | 2.1770417008769942, 192 | 1.549904523402743, 193 | 
3.31282611297537, 194 | 3.4763530654671584, 195 | 1.4348939038366062, 196 | 2.571267249401294, 197 | 4.363585550849811, 198 | 4.039451329698665, 199 | 3.3959077027379285, 200 | 3.371804663869714, 201 | 3.4513005749478496, 202 | 3.621954815294834, 203 | 3.842496009671126 204 | ] 205 | ], 206 | "ymdat": [ 207 | [ 208 | 2.9486814185520944, 209 | 2.513383675944758, 210 | 4.515379374423381, 211 | 1.1839801743693585, 212 | 2.666444577203399, 213 | 2.7712486630972446, 214 | 2.409002661227712, 215 | 3.1746755981642014, 216 | 2.5485522375269096, 217 | 1.4979140962804196, 218 | 3.505007592574164, 219 | 2.0431758854180635, 220 | 4.748523038758064, 221 | 5.179608154779938, 222 | 2.2696913202744518, 223 | 3.3281195158372068, 224 | 2.139934421660751, 225 | 2.9101018169377553, 226 | 3.5669862413625855, 227 | 4.910551786868566, 228 | 1.0991025453101066, 229 | 2.258733936022483, 230 | 2.4047669589323615, 231 | 2.4143880231355412, 232 | 1.164778645790447, 233 | -0.344171293335233, 234 | 3.436695091313358, 235 | 3.156376468676733, 236 | 1.7628919718764466, 237 | -1.2652809759687724, 238 | 3.9258509839046094, 239 | 1.593487971556471, 240 | 1.5824149531733775, 241 | 1.3056096255686729, 242 | 5.1582090930989155, 243 | 4.2868533520681025, 244 | 2.676389806825718, 245 | 1.1201171886204384, 246 | 2.1163533413596674, 247 | 3.3123744404621855, 248 | 1.948752608943241, 249 | 5.5539152422838045, 250 | 1.8484202725206245, 251 | 3.833778502374519, 252 | 2.870186255666854, 253 | 3.5465736963896184, 254 | 3.0913524478934327, 255 | 2.0571741594674573, 256 | 3.9672010828596065, 257 | 5.762328493927705, 258 | 4.126140801510282, 259 | 5.594224736475578, 260 | 4.2210217766259825, 261 | 3.4211454983681318, 262 | 3.997802704891334, 263 | 2.8898793793396136, 264 | 4.561608216578854, 265 | 3.544814916235046, 266 | 1.1016034762722202, 267 | 1.6815708422008007, 268 | 4.606014807846555, 269 | 1.3610181815681028, 270 | 4.4205225323653865, 271 | 3.5966799662334887, 272 | -0.25130521273306794, 273 | 1.7822493470083007, 274 | 1.4071979206338654, 275 | 3.394512338698792, 276 | 2.5242715607543444, 277 | 4.862014004363578, 278 | 3.229548381550742, 279 | 1.9231398484002424, 280 | 2.516556466617752, 281 | 3.9182651259011836, 282 | 0.7060029320196912, 283 | 2.5641310024232387, 284 | 0.9340749290405553, 285 | 4.449096434872146, 286 | 3.8880766235619606, 287 | 3.774357937626448, 288 | 4.611659996023895, 289 | 1.4994494969253929, 290 | 3.517372101918964, 291 | 2.1849248455227963, 292 | 4.59995634269724, 293 | 0.7136960027492203, 294 | 5.211627705088949, 295 | 0.6809809769073962, 296 | 3.3936243120869585, 297 | 2.3643627442299, 298 | 2.7601836024488273, 299 | 2.150415083443156, 300 | 0.8594907825555675, 301 | 3.601013607356468, 302 | 2.5815168410690594, 303 | 0.825176956446396, 304 | 2.6675514285012016, 305 | 0.5596363230686974, 306 | 2.0959755256496737, 307 | 0.9195654055056771, 308 | 3.4219879460641076, 309 | 2.2178774624079827, 310 | 3.6262623597163133, 311 | 6.647392730222813, 312 | 2.172881054022989, 313 | 3.3689027970322423, 314 | 2.9397896806455144, 315 | 2.6044073624440025, 316 | 3.291622245529817, 317 | 1.201513456362616, 318 | 4.126496864054856, 319 | 3.1421960113419063, 320 | 3.6711309818767077, 321 | 3.4726065834131536, 322 | 2.8487622616388797, 323 | 4.568156273552153, 324 | 1.615003138211266, 325 | 3.3665343343567713, 326 | 2.537919808110008, 327 | 1.8292512485916252, 328 | 3.2337989726995024, 329 | 1.9114807889808343, 330 | 3.467941390163674, 331 | 6.198120129201332, 332 | 4.70861240857948, 333 | 3.9172938367567705, 334 | 
2.860840870827083, 335 | 1.5147096895656527, 336 | 3.068973795697625, 337 | 0.28086743000534575, 338 | 2.991173263162835, 339 | 2.0511430132953286, 340 | 2.2384226640106135, 341 | 3.200866785994462, 342 | 0.19361527897099418, 343 | 2.696172688423677, 344 | 3.23769829263132, 345 | 5.103788864320897, 346 | 2.019777303030251, 347 | 0.25956811709961336, 348 | 5.5000364974123315, 349 | 2.6494854204409637, 350 | 3.2682693715627273, 351 | 2.9781437723285307, 352 | 3.9763581685256684, 353 | 3.973076058589834, 354 | 0.4444721038371683, 355 | 3.135146339710915, 356 | 3.6772492403146315, 357 | 1.9504210094916226, 358 | 2.369676046028671, 359 | 4.074107685129584, 360 | 3.550595085456436, 361 | 3.9890652571847207, 362 | 3.5960651547190268, 363 | 2.5120713526201373, 364 | 2.534545188770521, 365 | 3.183705412231074, 366 | 3.742940895087492, 367 | 1.2225012176837444, 368 | 1.5993331043817487, 369 | 3.3336557543369594, 370 | 2.449782359193413, 371 | 1.5347572415031268, 372 | 2.895876718035587, 373 | 1.4120152325805013, 374 | 6.086110859466199, 375 | 5.70602306106435, 376 | 2.5247507738775115, 377 | 2.8723680078408313, 378 | 3.8946733238030316, 379 | 3.369118925731219, 380 | 0.6437384737368583, 381 | 3.0549942729782953, 382 | 5.391910502687995, 383 | 4.140080687403162, 384 | 3.0729305021394553, 385 | 1.6805269998378827, 386 | 3.7993071898115387, 387 | 0.9071266347656909, 388 | 3.5599684874473594, 389 | 5.146754138038862, 390 | 1.6447228525956166, 391 | 3.4734231352792593, 392 | 1.8119840163015264, 393 | 2.3681924446898512, 394 | 2.767006643720069, 395 | 3.95157019722898, 396 | 0.8231663081653723, 397 | 1.9731073618481496, 398 | 1.7778840491701187, 399 | 0.7206223646322524, 400 | 3.2792084300890227, 401 | 3.0179619357641836, 402 | 3.999317774406029, 403 | 3.3120530150165903, 404 | 3.259169219280227, 405 | 2.4719379726820585, 406 | 5.583992891076802, 407 | 3.789303896478031 408 | ] 409 | ] 410 | } -------------------------------------------------------------------------------- /causing/examples/input/example3.json: -------------------------------------------------------------------------------- 1 | { 2 | "xdat": [ 3 | [ 4 | 2.8872869691061602, 5 | 1.923436019214106, 6 | 4.785026931424977, 7 | 1.5430166916415569, 8 | 2.6736792144640185, 9 | 1.7401035154707398, 10 | 3.3758664973419217, 11 | 2.1218526337110184, 12 | 2.6500242287369096, 13 | 3.2966602028589476, 14 | 4.200911925211615, 15 | 1.4310047612935157, 16 | 4.854028741791787, 17 | 3.493961807966228, 18 | 2.869288568402771, 19 | 2.596879247501283, 20 | 2.0263377372073608, 21 | 2.532533218572503, 22 | 2.2820785332228883, 23 | 4.2213933066552505, 24 | 2.2227999905821543, 25 | 2.4114743520286543, 26 | 2.942381145922457, 27 | 3.0795798842600206, 28 | 1.8167989405733076, 29 | 1.4512575542517088, 30 | 3.912256171757456, 31 | 3.412670735564499, 32 | 2.304230997962419, 33 | 0.7112281165759713, 34 | 2.212758471472195, 35 | 1.9714833280983022, 36 | 3.0933081068260373, 37 | 2.470805833831886, 38 | 3.7802448586640867, 39 | 2.79918530449522, 40 | 2.519391413372405, 41 | 1.9647378761789605, 42 | 2.4972656287527957, 43 | 3.7898902024041283, 44 | 2.075002378029681, 45 | 5.210035319426028, 46 | 1.599763814234508, 47 | 2.7609026210900796, 48 | 3.2173406802207793, 49 | 2.6476147188864747, 50 | 3.0858755266228397, 51 | 3.1066519708560043, 52 | 2.802968785886549, 53 | 4.346933859556485, 54 | 3.3451739807671483, 55 | 3.557644416067242, 56 | 2.4047148350193743, 57 | 3.7328321909133715, 58 | 4.331252678012974, 59 | 2.2699670626370727, 60 | 2.5142442071775224, 61 | 
4.076914216230647, 62 | 0.40464257762082045, 63 | 3.1669281829423115, 64 | 4.0389596412374775, 65 | 1.191426140039237, 66 | 3.7586409441262836, 67 | 2.4998871918615406, 68 | 0.6557068408092563, 69 | 2.8920823107737323, 70 | 1.49721232937859, 71 | 3.4887786612651634, 72 | 2.446821476108088, 73 | 4.057507525214147, 74 | 2.130682143294284, 75 | 2.9249089067318836, 76 | 2.8928450354571593, 77 | 5.098822017803357, 78 | 2.65946206614633, 79 | 2.4720948927464113, 80 | 1.7053831777164132, 81 | 3.2140990318960836, 82 | 4.126269575524226, 83 | 3.401535844811228, 84 | 3.7655853172760128, 85 | 1.6468725457769033, 86 | 3.5738584993611338, 87 | 2.7798628731285846, 88 | 3.645188978621543, 89 | 1.9952806924726025, 90 | 4.899738557080213, 91 | 3.216860817678221, 92 | 3.499970383255115, 93 | 2.373408919020704, 94 | 1.997115783987647, 95 | 2.218689776415458, 96 | 3.204277735537852, 97 | 3.536092005094851, 98 | 1.9250025366322525, 99 | 0.7821452408708565, 100 | 3.6469302206281182, 101 | 2.677664203879725, 102 | 2.8274358144735015, 103 | 0.7115781974058732, 104 | 2.8879561501492454, 105 | 3.955313813089303, 106 | 3.45473818292903, 107 | 5.061927453612517, 108 | 3.062460465192962, 109 | 3.002842437780612, 110 | 2.5464461674841923, 111 | 3.3734825020035344, 112 | 2.186884007640653, 113 | 2.89844249021185, 114 | 3.445713200789419, 115 | 1.724871360740863, 116 | 3.1078318128236835, 117 | 3.852236887650466, 118 | 2.8431479090574925, 119 | 4.188819261366114, 120 | 1.5277366136014627, 121 | 3.8660408408090556, 122 | 2.106610472737535, 123 | 2.4510293875211007, 124 | 3.001462948847881, 125 | 2.09857755095916, 126 | 2.4944514415718055, 127 | 4.2318542067430736, 128 | 4.758176140420122, 129 | 4.538161757938014, 130 | 3.7946416510491887, 131 | 2.0930331217905254, 132 | 2.908823602380252, 133 | 0.9080129653456019, 134 | 2.731887381170233, 135 | 2.0180658260146975, 136 | 2.40603677745438, 137 | 4.225176625455613, 138 | 2.1664383556553113, 139 | 2.7729114286394254, 140 | 3.6441244592704134, 141 | 4.390163761705033, 142 | 2.2182410399027823, 143 | 0.170542007314733, 144 | 3.3051991650580543, 145 | 3.4320711298075115, 146 | 2.501928484246964, 147 | 3.119178411883016, 148 | 2.9445662409692055, 149 | 4.5889307845912795, 150 | 1.1876853032251635, 151 | 1.536671603685592, 152 | 2.6208023317844744, 153 | 2.1983178065939715, 154 | 2.96889984080992, 155 | 2.5824522778006074, 156 | 3.270149615184717, 157 | 2.3778831437196666, 158 | 2.739032565895155, 159 | 1.9266673795035667, 160 | 1.673923343601349, 161 | 2.795795376691757, 162 | 3.2579314850568326, 163 | 1.4710137133918215, 164 | 1.8502340173256802, 165 | 2.7887085658978172, 166 | 3.8311648452832605, 167 | 0.8159150339221504, 168 | 3.7311102235595213, 169 | 2.2472652273470786, 170 | 6.2592559170283915, 171 | 4.655363170578773, 172 | 0.9484913453096051, 173 | 2.140850520427705, 174 | 2.767983030562516, 175 | 3.9927580131174896, 176 | 2.116166313265303, 177 | 2.9395931404512505, 178 | 4.066938240964659, 179 | 3.211657486472749, 180 | 3.7648748965769148, 181 | 2.5507756730565037, 182 | 2.483665572184565, 183 | 1.379213703600541, 184 | 2.8861545302601717, 185 | 4.815526730641361, 186 | 2.5734428378111573, 187 | 2.216556176346062, 188 | 3.2759071317142463, 189 | 3.2824437653576974, 190 | 4.2115136836422185, 191 | 2.1770417008769942, 192 | 1.549904523402743, 193 | 3.31282611297537, 194 | 3.4763530654671584, 195 | 1.4348939038366062, 196 | 2.571267249401294, 197 | 4.363585550849811, 198 | 4.039451329698665, 199 | 3.3959077027379285, 200 | 3.371804663869714, 201 | 3.4513005749478496, 202 | 
3.621954815294834, 203 | 3.842496009671126 204 | ] 205 | ], 206 | "ymdat": [ 207 | [ 208 | 2.9486814185520944, 209 | 1.5643995019419075, 210 | 3.8181630953107675, 211 | -0.2557294149369711, 212 | 2.5681735114302957, 213 | 2.471343783806664, 214 | 4.660774205481619, 215 | 1.9691122177048468, 216 | 1.998003933954049, 217 | 3.0403659359711814, 218 | 5.91400443764403, 219 | 0.2658085530303025, 220 | 5.0110271352451, 221 | 3.016446046024285, 222 | 3.1179450266888877, 223 | 3.4958382250044266, 224 | 3.190570034180418, 225 | 4.56911353898084, 226 | 1.9486285601012483, 227 | 3.6892940066596496, 228 | 2.789855157191232, 229 | 3.5082671264006025, 230 | 2.852366737177732, 231 | 3.884086363409452, 232 | 1.4405103717339003, 233 | 1.543293663928536, 234 | 3.6740632197951903, 235 | 3.2652476867129883, 236 | 3.258998362038116, 237 | -1.8246517241948537, 238 | 2.9758262899333756, 239 | 2.0364049303599185, 240 | 2.1139293146991207, 241 | 2.67879304193169, 242 | 3.95176903545137, 243 | 3.1652456637468505, 244 | 3.624129651261569, 245 | 3.382062526780004, 246 | 2.502879981334183, 247 | 3.290383695951844, 248 | 2.307338401881303, 249 | 7.176301241884286, 250 | 0.6659630340124022, 251 | 2.1337570857498234, 252 | 3.049726566777013, 253 | 2.5708759786707263, 254 | 2.8874117897503084, 255 | 2.3240662614894565, 256 | 3.834760713443012, 257 | 5.9454085955818075, 258 | 2.7459501859858997, 259 | 5.168826529532296, 260 | 3.2653366801885464, 261 | 3.4843196952052944, 262 | 2.9498701919231265, 263 | 1.4347170678704955, 264 | 4.090503635745429, 265 | 3.4532751288443766, 266 | 1.7296148393441564, 267 | 2.2966795097236905, 268 | 4.712773598424665, 269 | 2.4482930989724343, 270 | 2.3141339042041342, 271 | 1.16016844073432, 272 | 1.3636480214969848, 273 | 2.808227623052394, 274 | 3.4592504051605584, 275 | 3.875372341462055, 276 | 1.5259703832894007, 277 | 3.398699269380522, 278 | 2.0037237036098294, 279 | 2.958895098962748, 280 | 0.2989551648771904, 281 | 3.974723919134424, 282 | 2.141173183687305, 283 | 3.386270423521602, 284 | 3.9707909484090536, 285 | 2.95652842068491, 286 | 4.809014766965124, 287 | 2.2164380122188376, 288 | 2.0960322203245956, 289 | 0.15189046920629834, 290 | 3.602744896245628, 291 | 2.4669694711035266, 292 | 4.436824107445079, 293 | 1.0373353392974671, 294 | 4.964726898564631, 295 | 2.958507480100587, 296 | 4.14916584086813, 297 | 2.231107234820795, 298 | 2.112981634711213, 299 | 1.9672517642114862, 300 | 2.3868976095272028, 301 | 2.8524740059391576, 302 | 1.6054616157367865, 303 | 1.5710560339362676, 304 | 5.464357369689663, 305 | 3.7045543957293505, 306 | 4.580863886577676, 307 | 1.688491928247433, 308 | 2.6921776239306086, 309 | 4.45265078605661, 310 | 2.734052354081108, 311 | 5.825940628293769, 312 | 3.2228696905611582, 313 | 3.9647456885240744, 314 | 2.6583147181319386, 315 | 4.33562973259637, 316 | 2.4893764946828734, 317 | 1.6548380066668886, 318 | 3.253880531024228, 319 | 1.8870521086720797, 320 | 1.4390734927304838, 321 | 2.276430786333944, 322 | 1.5352826733318783, 323 | 3.7214793181087544, 324 | -0.9489266622055197, 325 | 2.994443302753691, 326 | 2.4433190813621284, 327 | 2.1243186387466806, 328 | 1.6049819676088293, 329 | 0.7482370764490036, 330 | 1.498402830436671, 331 | 2.274478526825745, 332 | 4.5201621027813, 333 | 5.143417667711427, 334 | 5.022927912412393, 335 | 3.255748076926915, 336 | 4.520056092023544, 337 | 1.9686191131229334, 338 | 1.148648614806266, 339 | 0.9367141758147841, 340 | 2.7397915908302015, 341 | 5.6335175000600834, 342 | 2.348803298051457, 343 | 2.593554411761902, 344 | 
4.799828630080503, 345 | 6.194137954871157, 346 | 1.8512624616386968, 347 | 0.6923691409186945, 348 | 3.2519438740112196, 349 | 3.4124788477671717, 350 | 3.6906540002080597, 351 | 2.0914679217351573, 352 | 3.0975984786502293, 353 | 4.322282285287827, 354 | 1.3451582125588888, 355 | 1.4093001625390897, 356 | 2.9909070522737697, 357 | 2.9968654872254716, 358 | 5.20256156125959, 359 | 3.743146224558327, 360 | 2.7645586143646526, 361 | 2.926190501540802, 362 | 1.3305817833723705, 363 | 3.2923076829740876, 364 | 3.0626572764871915, 365 | 1.8997802674482687, 366 | 3.4341745470925797, 367 | 1.5038451373832906, 368 | 2.462349190165779, 369 | 2.961970512452176, 370 | 1.7247281353019903, 371 | -0.0701160647457647, 372 | 5.499748328956942, 373 | 2.113429365207714, 374 | 5.483645964241033, 375 | 3.715321684895784, 376 | 1.3494326779258243, 377 | 2.4341472624507676, 378 | 2.2905454365903757, 379 | 2.614804084986206, 380 | 3.1594552310582973, 381 | 3.713177747785976, 382 | 3.3289752206059573, 383 | 2.6048475499946107, 384 | 3.5759395809097807, 385 | 1.7171895853276655, 386 | 3.496595650720497, 387 | -0.2949349997438686, 388 | 2.424258331405327, 389 | 5.377414589144412, 390 | 3.159766428006025, 391 | 2.855140937779442, 392 | 2.3692780529833937, 393 | 3.877122514443179, 394 | 2.4087136036114387, 395 | 2.744036899937371, 396 | 0.9099370410745223, 397 | 3.098426167091282, 398 | 3.5861738999296042, 399 | 2.1048236649167147, 400 | 4.12942365700744, 401 | 4.070527935050184, 402 | 3.4282722026102546, 403 | 1.5493803518664258, 404 | 2.298313360501771, 405 | 5.401125256562746, 406 | 5.242906952434235, 407 | 4.299450361955542 408 | ] 409 | ] 410 | } -------------------------------------------------------------------------------- /causing/examples/models.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Model Examples.""" 3 | 4 | import json 5 | from pathlib import Path 6 | 7 | import numpy as np 8 | import sympy 9 | from sympy import symbols 10 | 11 | from causing.model import Model 12 | 13 | data_path = Path(__file__.split("causing")[0]) / "causing" / "examples" / "input" 14 | 15 | 16 | def example(): 17 | """model example 1""" 18 | 19 | X1, X2, Y1, Y2, Y3 = symbols(["X1", "X2", "Y1", "Y2", "Y3"]) 20 | equations = ( # equations in topological order (Y1, Y2, ...) 
21 | X1, 22 | X2 + 2 * Y1**2, 23 | Y1 + Y2, 24 | ) 25 | m = Model( 26 | xvars=[X1, X2], # exogenous variables in desired order 27 | yvars=[Y1, Y2, Y3], # endogenous variables in topological order 28 | equations=equations, 29 | final_var=Y3, # final variable of interest, for mediation analysis 30 | ) 31 | 32 | with open(data_path / "example.json") as f: 33 | input_data = json.load(f) 34 | xdat = np.array(input_data["xdat"]) 35 | 36 | return m, xdat 37 | 38 | 39 | def example2(): 40 | """model example 2, no regularization required, no latent variables""" 41 | 42 | X1, Y1 = symbols(["X1", "Y1"]) 43 | equations = (X1,) 44 | m = Model( 45 | equations=equations, 46 | xvars=[X1], 47 | yvars=[Y1], 48 | final_var=Y1, 49 | ) 50 | 51 | with open(data_path / "example2.json") as f: 52 | input_data = json.load(f) 53 | xdat = np.array(input_data["xdat"]) 54 | 55 | return m, xdat 56 | 57 | 58 | def example3(): 59 | X1, Y1, Y2, Y3 = symbols(["X1", "Y1", "Y2", "Y3"]) 60 | equations = ( 61 | 2 * X1, 62 | -X1, 63 | Y1 + Y2, 64 | ) 65 | m = Model( 66 | equations=equations, 67 | xvars=[X1], 68 | yvars=[Y1, Y2, Y3], 69 | final_var=Y3, 70 | ) 71 | 72 | with open(data_path / "example3.json") as f: 73 | input_data = json.load(f) 74 | xdat = np.array(input_data["xdat"]) 75 | 76 | return m, xdat 77 | 78 | 79 | def education(): 80 | """Education 81 | 82 | The dataset contains following variables in this order, the variables 0. 83 | to 4. being time varying and variables 5. to 9. being time invariant: 84 | 85 | 0. PERSONID = Person id (ranging from 1 to 2,178) # not used by us 86 | 1. EDUC = Education (years of schooling) 87 | 2. LOGWAGE = Log of hourly wage, at most recent job, in real 1993 dollars # we use wage instead of log wage 88 | 3. POTEXPER = Potential experience (= AGE - EDUC - 5) 89 | 4. TIMETRND = Time trend (starting at 1 in 1979 and incrementing by year) # not used by us 90 | 5. ABILITY = Ability (cognitive ability measured by test score) 91 | 6. MOTHERED = Mother's education (highest grade completed, in years) 92 | 7. FATHERED = Father's education (highest grade completed, in years) 93 | 8. BRKNHOME = Dummy variable for residence in a broken home at age 14 94 | 9. SIBLINGS = Number of siblings 95 | 96 | Model identified without regularization if wage instead of logwage and all observations. # yyyy 97 | 98 | ToDo: Automatic Hessian gives wrong results for this example: # yyyy 99 | Algebraic and numeric Hessian allclose: True. 100 | Automatic and numeric Hessian allclose: False. 101 | Automatic and algebraic Hessian allclose: False. 
102 | No problem if ABILITY has zero effect 103 | """ 104 | 105 | ( 106 | FATHERED, 107 | MOTHERED, 108 | SIBLINGS, 109 | BRKNHOME, 110 | ABILITY, 111 | AGE, 112 | EDUC, 113 | POTEXPER, 114 | WAGE, 115 | ) = symbols( 116 | [ 117 | "FATHERED", 118 | "MOTHERED", 119 | "SIBLINGS", 120 | "BRKNHOME", 121 | "ABILITY", 122 | "AGE", 123 | "EDUC", 124 | "POTEXPER", 125 | "WAGE", 126 | ] 127 | ) 128 | 129 | equations = ( 130 | # EDUC 131 | 13 132 | + 0.1 * (FATHERED - 12) 133 | + 0.1 * (MOTHERED - 12) 134 | - 0.1 * SIBLINGS 135 | - 0.5 * BRKNHOME, 136 | # POTEXPER 137 | sympy.Max(AGE - EDUC - 5, 0), 138 | # WAGE 139 | 7 + 1 * (EDUC - 12) + 0.5 * POTEXPER + 1 * ABILITY, 140 | ) 141 | m = Model( 142 | equations=equations, 143 | xvars=[FATHERED, MOTHERED, SIBLINGS, BRKNHOME, ABILITY, AGE], 144 | yvars=[EDUC, POTEXPER, WAGE], 145 | final_var=WAGE, 146 | ) 147 | 148 | # load and transform data 149 | from numpy import array, concatenate, loadtxt 150 | 151 | xymdat = loadtxt(data_path / "education.csv", delimiter=",").reshape(-1, 10) 152 | xymdat = xymdat.T # observations in columns 153 | # xymdat = xymdat[:, 0:200] # just some of the 17,919 observations 154 | xdat = xymdat[[7, 6, 9, 8, 5]] # without PERSONID, TIMETRND 155 | age = array(xymdat[3, :] + xymdat[1, :] + 5).reshape( 156 | 1, -1 157 | ) # age = POTEXPER + EDUC + 5 158 | xdat = concatenate((xdat, age)) 159 | 160 | return m, xdat 161 | 162 | 163 | def heaviside(): 164 | """Minimal example exercising correct Heaviside(0) handling""" 165 | 166 | X1, Y1 = symbols(["X1", "Y1"]) 167 | m = Model( 168 | xvars=[X1], 169 | yvars=[Y1], 170 | equations=(sympy.Max(X1, 0),), 171 | final_var=Y1, 172 | ) 173 | 174 | xdat = np.array([[-1, -2, 3, 4, 5, 6]]) 175 | return m, xdat 176 | -------------------------------------------------------------------------------- /causing/graph.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Create direct, total and mediation Graphviz graph from dot_str""" 3 | import re 4 | import subprocess 5 | from typing import Iterable 6 | from itertools import chain 7 | from pathlib import Path 8 | from functools import cache 9 | 10 | import numpy as np 11 | import networkx 12 | 13 | from causing.model import Model 14 | from causing import utils 15 | 16 | 17 | DOT_COMMAND = "dot" 18 | 19 | 20 | @cache 21 | def dot_version() -> list[int]: 22 | full_output = subprocess.check_output( 23 | [DOT_COMMAND, "-V"], encoding="utf-8", stderr=subprocess.STDOUT 24 | ) 25 | match = re.search(r"(\d+[.])+\d+", full_output) 26 | assert match 27 | version = match.group(0) 28 | return [int(x) for x in version.split(".")] 29 | 30 | 31 | def fix_svg_scale(svg_code): 32 | """Work around graphviz SVG generation bug 33 | 34 | Graphviz divides the numbers in the viewBox attribute by the scale instead 35 | of multiplying them. We work around this by multiplying with the scale twice.
36 | See https://github.com/realrate/RealRate-Private/issues/631 37 | and https://gitlab.com/graphviz/graphviz/-/issues/1406 38 | """ 39 | # find scaling factor 40 | scale_match = re.search(r"scale\(([0-9.]+)", svg_code) 41 | assert scale_match 42 | factor = float(scale_match.group(1)) ** 2 43 | 44 | # edit SVG tag 45 | orig_svg_tag = next(re.finditer(r"", svg_code, flags=re.DOTALL)).group(0) 46 | attrs = { 47 | match.group(1): match.group(2) 48 | for match in re.finditer(r'(\w+)="(.*?)"', orig_svg_tag) 49 | } 50 | new_svg_tag = " Iterable[networkx.DiGraph]: 84 | """Return DiGraphs with all information required to draw IME graphs""" 85 | if ids is None: 86 | ids = [str(i + 1) for i in range(len(graph_json["eyx_indivs"]))] 87 | for graph_id, exj, eyj, eyx, eyy in zip( 88 | ids, 89 | np.array(graph_json["exj_indivs"]).T, 90 | np.array(graph_json["eyj_indivs"]).T, 91 | graph_json["eyx_indivs"], 92 | graph_json["eyy_indivs"], 93 | ): 94 | g = m.graph.copy() 95 | g.graph["id"] = graph_id 96 | 97 | # nodes 98 | for var, effect in chain(zip(m.xvars, exj), zip(m.yvars, eyj)): 99 | if np.isnan(effect): 100 | g.remove_node(var) 101 | continue 102 | data = g.nodes[var] 103 | data["effect"] = effect 104 | data["label"] = node_labels.get(var, var) 105 | 106 | # edges 107 | for to_node, x_effects, y_effects in zip(m.yvars, eyx, eyy): 108 | for from_node, eff in chain( 109 | zip(m.xvars, x_effects), zip(m.yvars, y_effects) 110 | ): 111 | if not np.isnan(eff): 112 | g[from_node][to_node]["effect"] = eff 113 | 114 | yield g 115 | 116 | 117 | NODE_PALETTE = "#ff7973 #FFC7AD #EEEEEE #BDE7BD #75cf73".split(" ") 118 | EDGE_PALETTE = "#ff7973 #FFC7AD #BBBBBB #aad3aa #75cf73".split(" ") 119 | PEN_WIDTH_PALETTE = [12, 8, 4, 8, 12] 120 | 121 | 122 | def color(val, max_val, palette): 123 | """Choose element of palette based on `val` 124 | 125 | val == -max_val will return palette[0] 126 | val == +max_val will return palette[-1] 127 | """ 128 | zero_one_scale = (val + max_val) / (2 * max_val) 129 | ind = round(zero_one_scale * len(palette) - 0.5) 130 | clipped_ind = np.clip(round(ind), 0, len(palette) - 1) 131 | return palette[clipped_ind] 132 | 133 | 134 | GRAPH_OPTIONS_STR = """ 135 | node [style="filled,rounded"] 136 | node [shape=box] 137 | node [color="#444444"] 138 | ratio="compress" 139 | size="8,10" 140 | """ 141 | 142 | 143 | def graph_to_dot( 144 | g: networkx.DiGraph, 145 | invisible_edges={}, 146 | node_palette=NODE_PALETTE, 147 | edge_palette=EDGE_PALETTE, 148 | pen_width_palette=PEN_WIDTH_PALETTE, 149 | graph_options_str=GRAPH_OPTIONS_STR, 150 | in_percent=False, 151 | min_sig_figures=3, 152 | cutoff=0.0001, 153 | ): 154 | dot_str = "digraph {" + graph_options_str 155 | max_val = max( 156 | [abs(data["effect"]) for _, data in g.nodes(data=True)] 157 | + [abs(data["effect"]) for _, _, data in g.edges(data=True)] 158 | ) 159 | 160 | for node, data in g.nodes(data=True): 161 | eff_str = utils.fmt_min_sig( 162 | data["effect"] if abs(data["effect"]) > cutoff else 0, 163 | min_sig_figures, 164 | percent=in_percent, 165 | ) 166 | label = data.get("label", node).replace("\n", r"\n") + r"\n" + eff_str 167 | col_str = color(data["effect"], max_val, palette=node_palette) 168 | dot_str += f' "{node}"[label = "{label}" fillcolor="{col_str}"]\n' 169 | 170 | for from_node, to_node, data in g.edges(data=True): 171 | eff_str = utils.fmt_min_sig( 172 | data["effect"] if abs(data["effect"]) > cutoff else 0, 173 | min_sig_figures, 174 | percent=in_percent, 175 | ) 176 | col_str = color(data["effect"], max_val, 
palette=edge_palette) 177 | penwidth = color(data["effect"], max_val, palette=pen_width_palette) 178 | dot_str += ( 179 | f' "{from_node}" -> "{to_node}" [label="{eff_str}" color="{col_str}" penwidth="{penwidth}", ' 180 | f"arrowsize=0.5]\n" 181 | ) 182 | 183 | for from_node, to_node in invisible_edges: 184 | dot_str += f' "{from_node}" -> "{to_node}" [style = "invisible", arrowhead="none"]\n' 185 | 186 | dot_str += "}" 187 | return dot_str 188 | 189 | 190 | def create_graphs(graphs: Iterable[networkx.DiGraph], output_dir: Path, **kwargs): 191 | for g in graphs: 192 | filename = f"IME_{g.graph['id']}.svg" 193 | print("Create", filename) 194 | dot_str = graph_to_dot(g, **kwargs) 195 | save_graph(output_dir / filename, dot_str) 196 | 197 | 198 | def remove_node_keep_edges(graph, node): 199 | """Keep transitive connections when removing node. 200 | 201 | Removing B from A->B->C will result in A->C. 202 | 203 | WARNING: This function only approximates the edge effects. To get accurate 204 | results, you must shrink the model and recalculate the effects, instead. 205 | """ 206 | 207 | total_out_effect = sum( 208 | out_data["effect"] for _, _, out_data in graph.out_edges(node, data=True) 209 | ) 210 | num_out_edges = len(graph.out_edges(node)) 211 | for a, _, in_data in graph.in_edges(node, data=True): 212 | for _, b, out_data in graph.out_edges(node, data=True): 213 | if total_out_effect == 0: 214 | # If all outgoing edges have no effect, distribute the incoming effects 215 | # evenly across all outgoing edges. 216 | new_edge_effect = in_data["effect"] / num_out_edges 217 | else: 218 | new_edge_effect = ( 219 | in_data["effect"] * out_data["effect"] / total_out_effect 220 | ) 221 | if graph.has_edge(a, b): 222 | graph[a][b]["effect"] += new_edge_effect 223 | else: 224 | graph.add_edge(a, b, effect=new_edge_effect) 225 | 226 | graph.remove_node(node) 227 | 228 | 229 | def recalc_graphs(graphs, model, xdat) -> Iterable[networkx.DiGraph]: 230 | """Recalculate node and edge effects in graph. 231 | 232 | Do this after modifying the graphs (typically with `remove_node_keep_edges`) 233 | to calculate exact effects. 234 | `graphs` must be in the format generated by `annotated_graphs` and in the 235 | same order as individuals within `xdat`. 
236 | """ 237 | yhat = model.compute(xdat) 238 | yhat_mean = np.mean(yhat, axis=1) 239 | xdat_mean = np.mean(xdat, axis=1) 240 | 241 | for i, approx_graph in enumerate(graphs): 242 | individual_xdat = xdat[:, i : i + 1] 243 | removed_nodes = set(model.graph.nodes) - set(approx_graph.nodes) 244 | 245 | # Calc effects on shrunken model 246 | individual_model = model.shrink(removed_nodes) 247 | effects = individual_model.calc_effects( 248 | individual_xdat, 249 | xdat_mean=xdat_mean, 250 | yhat_mean=yhat_mean[[yvar not in removed_nodes for yvar in model.yvars]], 251 | ) 252 | 253 | # Get graph for shrunken model 254 | [g] = annotated_graphs( 255 | individual_model, 256 | effects, 257 | node_labels={ 258 | n: data["label"] 259 | for n, data in approx_graph.nodes(data=True) 260 | if "label" in data 261 | }, 262 | ) 263 | for xvar in set(model.xvars) & removed_nodes & g.nodes(): 264 | g.remove_node(xvar) 265 | 266 | # Preserve graph attributes 267 | g.graph = approx_graph.graph 268 | yield g 269 | -------------------------------------------------------------------------------- /causing/model.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from dataclasses import dataclass, field 4 | from typing import Iterable, Callable 5 | from functools import cached_property 6 | import networkx 7 | 8 | import sympy 9 | import numpy as np 10 | 11 | 12 | class NumericModelError(Exception): 13 | pass 14 | 15 | 16 | @dataclass 17 | class Model: 18 | 19 | xvars: list[str] 20 | yvars: list[str] 21 | equations: Iterable[sympy.Expr] 22 | final_var: str 23 | parameters: dict[str, float] = field(default_factory=dict) 24 | 25 | ndim: int = field(init=False) 26 | mdim: int = field(init=False) 27 | graph: networkx.DiGraph = field(init=False) 28 | 29 | def __post_init__(self): 30 | # If sympy.Symbols are passed in, convert to string 31 | self.xvars = [str(var) for var in self.xvars] 32 | self.yvars = [str(var) for var in self.yvars] 33 | self.final_var = str(self.final_var) 34 | 35 | self.mdim = len(self.xvars) 36 | self.ndim = len(self.yvars) 37 | 38 | self.graph = networkx.DiGraph() 39 | for yvar, eq in zip(self.yvars, self.equations): 40 | if isinstance(eq, (float, int)): 41 | continue 42 | for sym in eq.free_symbols: 43 | if str(sym) in self.parameters: 44 | continue 45 | self.graph.add_edge(str(sym), yvar) 46 | for var in self.vars: 47 | self.graph.add_node(var) 48 | self.trans_graph = networkx.transitive_closure(self.graph, reflexive=True) 49 | 50 | @np.errstate(all="raise") 51 | def compute( 52 | self, 53 | xdat: np.array, 54 | # fix a yval 55 | fixed_yval: np.array = None, 56 | fixed_yind: int = None, 57 | # fix an arbitrary node going into a yval 58 | fixed_from_ind: int = None, 59 | fixed_to_yind: int = None, 60 | fixed_vals: list = None, 61 | # override default parameter values 62 | parameters: dict[str, float] = {}, 63 | ) -> np.array: 64 | """Compute y values for given x values 65 | 66 | xdat: m rows, tau columns 67 | returns: n rows, tau columns 68 | """ 69 | assert xdat.ndim == 2, f"xdat must be m*tau (is {xdat.ndim}-dimensional)" 70 | assert xdat.shape[0] == self.mdim, f"xdat must be m*tau (is {xdat.shape})" 71 | tau = xdat.shape[1] 72 | parameters = self.parameters | parameters 73 | 74 | yhat = np.array([[float("nan")] * tau] * len(self.yvars)) 75 | for i, eq in enumerate(self._model_lam): 76 | if fixed_yind == i: 77 | yhat[i, :] = fixed_yval 78 | else: 79 | eq_inputs = np.array( 80 | [[*xval, *yval] for xval, yval in 
zip(xdat.T, yhat.T)] 81 | ) 82 | if fixed_to_yind == i: 83 | eq_inputs[:, fixed_from_ind] = fixed_vals 84 | 85 | try: 86 | # print(f"Comuting variable: {self.yvars[i]}") 87 | # yhat[i] = np.array( 88 | # [eq(*eq_in, *parameters.values()) for eq_in in eq_inputs], 89 | # dtype=np.float64, 90 | # ) 91 | np.seterr(under="ignore") 92 | computed_yvars = [] 93 | for eq_in in eq_inputs: 94 | computed_yvars.append(eq(*eq_in, *parameters.values())) 95 | 96 | yhat[i] = np.array( 97 | computed_yvars, 98 | dtype=np.float64, 99 | ) 100 | except Exception as e: 101 | # for eq_in in eq_inputs: 102 | # print("--", self.yvars[i]) 103 | # for var, val in zip( 104 | # self.vars + list(parameters.keys()), 105 | # list(eq_in) + list(parameters.values()), 106 | # ): 107 | # print(var, "=", val) 108 | # eq(*eq_in, *parameters.values()) 109 | raise NumericModelError( 110 | f"Failed to compute model value for yvar {self.yvars[i]}: {e}" 111 | ) from e 112 | assert yhat.shape == (self.ndim, tau) 113 | return yhat 114 | 115 | def calc_effects(self, xdat: np.array, xdat_mean=None, yhat_mean=None): 116 | """Calculate node and edge effects for the given input 117 | 118 | Pass mean values only if you compute effects for a subset of the 119 | individuals you want to use as a benchmark. 120 | """ 121 | yhat = self.compute(xdat) 122 | if yhat_mean is None: 123 | yhat_mean = np.mean(yhat, axis=1) 124 | if xdat_mean is None: 125 | xdat_mean = np.mean(xdat, axis=1) 126 | tau = xdat.shape[1] 127 | exj = np.full([len(self.xvars), tau], float("NaN")) 128 | eyx = np.full([tau, len(self.yvars), len(self.xvars)], float("NaN")) 129 | for xind, xvar in enumerate(self.xvars): 130 | if not self.trans_graph.has_edge(xvar, self.final_var): 131 | # Without path to final_var, there is no effect on final_var 132 | continue 133 | 134 | fixed_xdat = xdat.copy() 135 | fixed_xdat[xind, :] = xdat_mean[xind] 136 | fixed_yhat = self.compute(fixed_xdat) 137 | exj[xind, :] = yhat[self.final_ind] - fixed_yhat[self.final_ind] 138 | 139 | for yind, yvar in enumerate(self.yvars): 140 | if not self.graph.has_edge(xvar, yvar): 141 | # Without edge, there is no mediated effect for that edge 142 | continue 143 | if not self.trans_graph.has_edge(yvar, self.final_var): 144 | # Without path to final_var, there is no effect on final_var 145 | continue 146 | 147 | fixed_vals = fixed_xdat.T[:, xind] 148 | eyx[:, yind, xind] = ( 149 | yhat[self.final_ind] 150 | - self.compute( 151 | xdat, 152 | fixed_from_ind=xind, 153 | fixed_to_yind=yind, 154 | fixed_vals=fixed_vals, 155 | )[self.final_ind] 156 | ) 157 | 158 | eyj = np.full([len(self.yvars), tau], float("NaN")) 159 | eyy = np.full([tau, len(self.yvars), len(self.yvars)], float("NaN")) 160 | for yind, yvar in enumerate(self.yvars): 161 | if not self.trans_graph.has_edge(yvar, self.final_var): 162 | # Without path to final_var, there is no effect on final_var 163 | continue 164 | 165 | fixed_yval = yhat_mean[yind] 166 | fixed_yhat = self.compute(xdat, fixed_yind=yind, fixed_yval=fixed_yval) 167 | eyj[yind, :] = yhat[self.final_ind] - fixed_yhat[self.final_ind] 168 | 169 | for yind2, yvar2 in enumerate(self.yvars): 170 | if not self.graph.has_edge(yvar, yvar2): 171 | # Without edge, there is no mediated effect for that edge 172 | continue 173 | if not self.trans_graph.has_edge(yvar2, self.final_var): 174 | # Without path to final_var, there is no effect on final_var 175 | continue 176 | 177 | fixed_vals = fixed_yhat.T[:, yind] 178 | eyy[:, yind2, yind] = ( 179 | yhat[self.final_ind] 180 | - self.compute( 181 | xdat, 
182 | fixed_from_ind=len(self.xvars) + yind, 183 | fixed_to_yind=yind2, 184 | fixed_vals=fixed_vals, 185 | )[self.final_ind] 186 | ) 187 | 188 | return { 189 | # model results 190 | "yhat": yhat, 191 | # nodes 192 | "exj_indivs": exj, 193 | "eyj_indivs": eyj, 194 | # edges 195 | "eyx_indivs": eyx, 196 | "eyy_indivs": eyy, 197 | } 198 | 199 | @cached_property 200 | def _model_lam(self) -> Iterable[Callable]: 201 | return [ 202 | sympy.lambdify(self.vars + list(self.parameters), eq) 203 | for eq in self.equations 204 | ] 205 | 206 | @cached_property 207 | def final_ind(self): 208 | "Index of final variable" 209 | return self.yvars.index(self.final_var) 210 | 211 | @property 212 | def vars(self) -> list[str]: 213 | return self.xvars + self.yvars 214 | 215 | def shrink(m: Model, remove_nodes) -> Model: # noqa 216 | """Create a model without `remove_nodes`""" 217 | yvars = [] 218 | equations = [] 219 | substitutions: list[tuple] = [] 220 | for yvar, eq in zip(m.yvars, m.equations): 221 | if yvar in remove_nodes: 222 | substitutions.insert(0, (yvar, eq)) 223 | else: 224 | yvars.append(yvar) 225 | equations.append(eq.subs(substitutions)) 226 | 227 | new_model = Model(m.xvars, yvars, equations, m.final_var, m.parameters) 228 | return new_model 229 | -------------------------------------------------------------------------------- /causing/utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | import math 3 | from math import floor, log10 4 | import locale 5 | 6 | import numpy as np 7 | 8 | 9 | @np.vectorize 10 | def round_sig(x, sig=2) -> float: 11 | """Round x to the given number of significant figures""" 12 | if x == 0 or not np.isfinite(x): 13 | return x 14 | return round(x, sig - int(floor(log10(abs(x)))) + 1) 15 | 16 | 17 | def round_sig_recursive(x, sig=2): 18 | """Round all floats in x to the given number of significant figures 19 | 20 | x can be a nested data structure. 21 | """ 22 | if isinstance(x, dict): 23 | return {key: round_sig_recursive(value, sig) for key, value in x.items()} 24 | if isinstance(x, (list, tuple)): 25 | return x.__class__(round_sig_recursive(value, sig) for value in x) 26 | if isinstance(x, (float, np.ndarray)): 27 | return round_sig(x, sig) 28 | # avoid importing pytorch for isinstance check 29 | if type(x).__name__ == "Tensor": 30 | return x.apply_(lambda x: round_sig(x, sig)) 31 | # avoid importing pandas for isinstance check 32 | if type(x).__name__ == "DataFrame": 33 | return x.apply(lambda x: round_sig(x, sig)) 34 | 35 | return x 36 | 37 | 38 | class MatrixEncoder(json.JSONEncoder): 39 | def default(self, obj): 40 | # allow serialization of numpy scalars 41 | if isinstance(obj, np.integer): 42 | return int(obj) 43 | if isinstance(obj, np.floating): 44 | return float(obj) 45 | # avoid importing pytorch for isinstance check 46 | if isinstance(obj, np.ndarray) or type(obj).__name__ == "Tensor": 47 | return obj.tolist() 48 | # avoid importing pandas for isinstance check 49 | if type(obj).__name__ == "DataFrame": 50 | return obj.to_dict() 51 | return json.JSONEncoder.default(self, obj) 52 | 53 | 54 | def dump_json(data, filename, allow_nan=True): 55 | with open(filename, "w") as f: 56 | json.dump( 57 | data, f, sort_keys=True, indent=4, cls=MatrixEncoder, allow_nan=allow_nan 58 | ) 59 | f.write("\n") 60 | 61 | 62 | def fmt_min_sig(x, min_sig_figures=3, percent=False, percent_spacer=""): 63 | """Format number with at least the given amount of significant figures. 
64 | See https://www.karl.berlin/formatting-numbers.html 65 | """ 66 | if not math.isfinite(x): 67 | return str(x) 68 | if x == 0: 69 | num = "0" 70 | else: 71 | if percent: 72 | x *= 100 73 | show_dec = max(-math.floor(math.log10(abs(x)) + 1) + min_sig_figures, 0) 74 | num = locale.format_string("%." + str(show_dec) + "f", x, grouping=True) 75 | if percent: 76 | num += percent_spacer + "%" 77 | return num 78 | -------------------------------------------------------------------------------- /data/xdat.csv: -------------------------------------------------------------------------------- 1 | 2.887286969106160228e+00,4.785026931424977370e+00,2.673679214464018461e+00,3.375866497341921679e+00,2.650024228736909571e+00,4.200911925211615383e+00,4.854028741791786672e+00,2.869288568402771133e+00,2.026337737207360767e+00,2.282078533222888339e+00,2.222799990582154273e+00,2.942381145922456920e+00,1.816798940573307597e+00,3.912256171757455991e+00,2.304230997962418925e+00,2.212758471472195065e+00,3.093308106826037296e+00,3.780244858664086660e+00,2.519391413372404909e+00,2.497265628752795674e+00,2.075002378029680905e+00,1.599763814234508086e+00,3.217340680220779348e+00,3.085875526622839704e+00,2.802968785886549075e+00,3.345173980767148336e+00,2.404714835019374330e+00,4.331252678012973867e+00,2.514244207177522394e+00,4.046425776208204539e-01,4.038959641237477527e+00,3.758640944126283578e+00,6.557068408092563416e-01,1.497212329378589946e+00,2.446821476108087978e+00,2.130682143294284181e+00,2.892845035457159319e+00,2.659462066146330184e+00,1.705383177716413234e+00,4.126269575524226241e+00,3.765585317276012756e+00,3.573858499361133756e+00,3.645188978621543008e+00,4.899738557080213219e+00,3.499970383255115003e+00,1.997115783987646997e+00,3.204277735537851779e+00,1.925002536632252514e+00,3.646930220628118224e+00,2.827435814473501452e+00,2.887956150149245449e+00,3.454738182929030010e+00,3.062460465192962111e+00,2.546446167484192280e+00,2.186884007640653049e+00,3.445713200789418895e+00,3.107831812823683482e+00,2.843147909057492484e+00,1.527736613601462734e+00,2.106610472737535211e+00,3.001462948847880785e+00,2.494451441571805539e+00,4.758176140420122380e+00,3.794641651049188713e+00,2.908823602380251838e+00,2.731887381170233109e+00,2.406036777454379827e+00,2.166438355655311288e+00,3.644124459270413396e+00,2.218241039902782319e+00,3.305199165058054334e+00,2.501928484246963791e+00,2.944566240969205495e+00,1.187685303225163524e+00,2.620802331784474415e+00,2.968899840809919866e+00,3.270149615184716918e+00,2.739032565895155091e+00,1.673923343601348979e+00,3.257931485056832610e+00,1.850234017325680158e+00,3.831164845283260512e+00,3.731110223559521266e+00,6.259255917028391458e+00,9.484913453096051050e-01,2.767983030562516156e+00,2.116166313265302801e+00,4.066938240964659279e+00,3.764874896576914765e+00,2.483665572184564851e+00,2.886154530260171658e+00,2.573442837811157347e+00,3.275907131714246301e+00,4.211513683642218453e+00,1.549904523402743095e+00,3.476353065467158387e+00,2.571267249401294031e+00,4.039451329698665383e+00,3.371804663869713892e+00,3.621954815294833985e+00,3.061394449445934196e+00,2.730352442998404072e+00,2.992765362739380652e+00,2.033136163885790104e+00,2.898528008790000055e+00,2.304095667362548561e+00,2.894494296966277247e+00,2.400402751871680618e+00,3.113596684453390218e+00,4.284907708139696680e+00,1.876302554727952376e+00,2.462385813009904556e+00,2.347979705217139390e+00,2.524438919555902050e+00,2.458660973914027714e+00,4.713092512432414338e+00,1.489106846347340252e+00,4.377964234434829294e+00,3.156998
393453313234e+00,2.619087712606871676e+00,2.873750230913560078e+00,3.248656458286116600e+00,2.652845575446074466e+00,3.005476921270592960e+00,4.164232296973057856e+00,3.780966820743132839e+00,4.816306941606608660e+00,2.666550026878359958e+00,5.047364009401332119e+00,3.696960898651399763e+00,3.567055166609077510e+00,3.661881588239102925e+00,2.092987946457675719e+00,2.909985591255275228e+00,3.077450084646256467e+00,4.098866238256457351e+00,2.623711431160592689e+00,1.046540865873361037e+00,2.228691751324141990e+00,2.761807048037734358e+00,3.846074678747882114e+00,2.943513602557830122e+00,3.954767364075697067e+00,3.311889148008735351e+00,2.893653928831843469e+00,3.763067818461180103e+00,6.552130470177157662e-01,3.656514304436806917e+00,2.020621207873083414e+00,2.268539711176172258e+00,3.534031795914862162e+00,3.171524176787283267e+00,2.110420588830026922e+00,3.393343513161322100e+00,4.104738237889164587e+00,3.680783663265437156e+00,3.563299169053023796e+00,3.005614352581387205e+00,3.087266524609803220e+00,3.431309335372472624e+00,3.232336023851622109e+00,3.973489948591868437e+00,2.950436268159357223e+00,2.066199219777894225e+00,3.160150193317373013e+00,3.259285881992601830e+00,2.832385886556233690e+00,1.027176923315682888e+00,2.593573833360906633e+00,2.801536263127468729e+00,5.194837332354277137e+00,3.766340887315763553e+00,4.031791927556462873e+00,2.256786800612004651e+00,4.056446908530157103e+00,2.400776205218751347e+00,3.280445470271719177e+00,3.857032588823871677e+00,3.860621845169172062e+00,3.485009410030659449e+00,2.749099087056068313e+00,1.618617513910152628e+00,2.164766494476065617e+00,2.826854942437807772e+00,4.576259428567906795e+00,4.126690293240515395e+00,1.527572160471555529e+00,4.324972261723336153e+00,2.308055605562540524e+00,4.315641617626973847e+00,3.673813957187187729e+00,2.071280014784459222e+00,1.536076884587280089e+00,1.555492960077850650e+00,2.273261784762629301e+00,1.301530983702960276e+00,3.707941180687728711e+00,2.959866444707363708e+00,2.887364555410512956e+00,4.962038075781967983e+00 2 | 
9.226444224672827321e-01,9.294591185928728905e-01,7.002944365636005752e-01,1.214768112917615106e+00,2.220671295345369778e+00,7.028872875674521126e-01,2.854787501134320671e+00,1.578881654341096841e+00,1.347245512962064407e+00,3.053131857267003646e+00,1.267924982888176899e+00,2.066448273275208525e+00,2.459082940853127841e-01,2.586784327916302750e+00,-3.816831012145938118e-01,8.348152790525591449e-01,1.500159348577410956e+00,1.959291556997183159e+00,8.895327013178553344e-01,2.673384305236095493e+00,3.980384014054975772e+00,1.485686131980424429e+00,1.698202483406016983e+00,2.121672268788325155e+00,3.280314026453695853e+00,2.615412507064239112e+00,2.598968940933325644e+00,1.550967258816849048e+00,2.958004972041246905e+00,1.644484064283614266e+00,4.357586806316280370e-01,1.661719711317397818e+00,1.425404059032008952e+00,2.178345792779842416e+00,2.925505829592847995e+00,1.752562483565006657e+00,4.034986208906158112e+00,1.414653155077058466e+00,1.950849988570988680e+00,2.618677088394797625e+00,8.273283355360867564e-01,1.899082246161310383e+00,1.144617940458477534e+00,2.592427050821792811e+00,1.486062706339739137e+00,1.033898605356742717e+00,2.566116294174732637e+00,-3.880444860614269409e-01,1.813562753594238464e+00,-2.766991301515813184e-01,2.913603784550382159e+00,4.111215695780565227e+00,2.015277101913835800e+00,2.275225856603500407e+00,1.737871170116660036e+00,8.397768311201527780e-01,2.856584568453246931e+00,3.133429816507246723e+00,2.554090585277532632e+00,1.283442940804921939e+00,1.117082572633564475e+00,3.105855985922947760e+00,3.858719807608227192e+00,1.270285904122562481e+00,-6.795559289248753743e-02,9.842824013279380946e-01,3.081630386351368323e+00,1.610787220630656824e+00,3.490901641888224383e+00,-9.286431243068204999e-01,2.484381353258106273e+00,2.017156215836847455e+00,3.545741111733347495e+00,2.037739018068327468e-01,1.138675542468900126e+00,1.584668423102027157e+00,1.444482379728466048e+00,8.961596153787898533e-01,1.534705816631292352e+00,5.534918066502170220e-01,1.563024323236638313e+00,2.627548034679705324e-02,1.408695602852080953e+00,4.273769226171175717e+00,7.479071339699840948e-01,2.926296834191342455e+00,1.764046869509698023e+00,2.420768785032239023e+00,1.712826826892674736e+00,3.086933511273086683e-01,3.756076547833910872e+00,1.147073523527369687e+00,2.331918668805544659e+00,1.435971571752406639e+00,2.016486646683709250e+00,5.617860815255697471e-01,3.250288977965842818e+00,2.595799008717896150e+00,2.516543384472706091e+00,2.949865096669038955e+00,2.590307183465468288e+00,1.594287982062542630e+00,3.008864857520583858e+00,2.838178853735017881e+00,2.173015465838874505e-01,2.460621889220960590e+00,3.630488233985303737e+00,2.596546765093882048e+00,2.392659500447760657e+00,2.932216193001763394e+00,1.625606078019369516e+00,1.240724906279075057e+00,1.104421226189336025e-01,1.653771712760893298e+00,-4.484330469960928767e-02,1.972260203018928681e+00,5.561669050812310111e-01,3.733203896165386393e+00,1.203843794403832490e+00,1.455949558163741386e+00,2.311682183698389625e+00,3.100930678251502037e+00,2.811365432919936680e+00,9.728213305442048320e-01,3.619644314947672026e+00,4.151626406226346155e+00,2.057872045780690051e+00,2.540697549850804648e+00,1.888124090887733031e+00,6.840451515246686043e-01,2.279576619794861703e+00,3.207009377960936991e+00,7.311878049131035340e-01,1.889635362164568289e+00,2.803742164400907733e+00,1.238244034442924413e+00,7.680374872375730355e-01,1.699484775822294402e+00,3.055783738646319581e+00,2.317650966501082710e+00,2.024770437344936891e+00,1.405784882026752447e+00
,9.352620520209848909e-01,-4.222668339577628771e-01,1.969867380821769487e+00,2.085718299641305951e+00,1.594652528933494740e+00,2.173465159254020396e+00,-2.711107860155448357e-01,2.057492955383466438e+00,4.044733512899605365e-01,3.587737208395782140e+00,2.180748555856562909e+00,1.325132036320018525e+00,5.583035425199660562e-01,3.524845610189490053e+00,1.740699619299023393e+00,2.372795718679364896e+00,1.528038879318628807e+00,1.477046197636668889e+00,1.863150565020782468e+00,4.121237273176616256e+00,1.381763411809243003e+00,1.246600917857183521e+00,1.417555416244185462e+00,2.084266068784129899e+00,9.628625992344579387e-01,1.530247081850105273e+00,2.617921714292243074e+00,2.047534669728675372e+00,1.672193199294033095e+00,2.015083016276819183e+00,1.602946451757258917e+00,3.417536348119231349e+00,1.968401116997628764e+00,3.341673089051506729e+00,3.634718718329692955e+00,2.744982928778197717e+00,2.552197030304320435e+00,1.853510358329978569e+00,2.483756836787297750e+00,2.428042148441717618e+00,1.014578780959039417e+00,2.994803238446809068e+00,3.031989718547571933e+00,1.614299039547570702e+00,1.818583988266534224e+00,3.174659695401055970e+00,1.008945041624635497e+00,1.800579350400778322e+00,2.459298046216974676e+00,3.045729092536872962e+00,8.114356839967107327e-01,3.449774332051828374e+00,5.420014213123596214e-01,9.604658732033850566e-01,8.231517390179206028e-01,1.909812809958391089e+00,1.017897451595904057e+00,2.340290200891014472e+00 3 | -------------------------------------------------------------------------------- /data/ymdat.csv: -------------------------------------------------------------------------------- 1 | 2.182038219052062900e+01,4.997595577919093301e+01,1.927657565740222267e+01,2.875319226535445694e+01,1.749978522674554071e+01,4.005532091057374799e+01,5.483543855116311505e+01,2.202414548076778189e+01,1.180644641025265251e+01,1.594923316708714722e+01,1.371926785944701344e+01,2.291175866244375214e+01,9.410700038311114568e+00,3.689650311627569579e+01,1.173668955454167673e+01,1.347919477880805950e+01,2.375494574612701015e+01,3.323890611180402743e+01,1.624683159643514330e+01,1.708664643612440770e+01,1.505621077861277435e+01,9.297788023616206488e+00,2.578737777824161626e+01,2.350814820737820554e+01,2.156375984071571494e+01,2.698402757240040017e+01,1.640708523862237200e+01,4.362179111834603873e+01,1.711792785748716383e+01,1.851577348621624264e+00,3.542251733773925082e+01,3.362361239566890703e+01,4.094619065846721639e+00,6.758486571904080975e+00,1.863335492630890400e+01,1.440148563306321705e+01,2.410773006734984136e+01,1.899401760214360380e+01,1.039238411282638808e+01,4.097851113271271828e+01,3.233209241790897437e+01,3.072592343393058911e+01,3.147995515506708131e+01,5.664648237174674961e+01,2.874529819263754504e+01,8.243869409820716143e+00,2.641338746089727252e+01,8.420233401459777056e+00,3.208163450846512177e+01,1.811651641423544135e+01,2.166214236276573146e+01,3.263809194485312304e+01,2.370285126118993801e+01,2.069154115621012835e+01,1.395606058156408480e+01,2.792714886639074479e+01,2.523120959554750087e+01,2.130182189372545665e+01,8.884433499291587211e+00,1.241171465956329101e+01,2.199325426689552643e+01,1.752478832731793545e+01,5.381139764555535976e+01,3.389166508381494936e+01,2.049708746498247436e+01,1.981708264841735456e+01,2.026115439037201682e+01,1.216904268936335143e+01,3.343250503183892164e+01,1.107316839084746896e+01,2.875349818729499063e+01,1.566585421663068090e+01,2.421197854641841829e+01,3.306033585184455337e+00,1.633185775656713190e+01,2.315565540980893644e+01,2.7101763905235
62162e+01,1.847037964040584157e+01,8.054504809299011470e+00,2.652849682625526739e+01,8.489782267657629333e+00,3.262539011728481597e+01,3.403529260151431401e+01,8.683363748713405528e+01,2.353316920244013399e+00,2.015864805856001141e+01,1.386261498277408322e+01,3.947113995472016512e+01,3.425724910135014056e+01,1.618829128390364502e+01,2.328445261953703138e+01,1.682814792598574982e+01,2.609419904071546270e+01,4.008924268684722136e+01,8.149078550553586098e+00,2.966355428852331499e+01,1.712188303833779912e+01,4.177990918355217076e+01,3.098792978034493473e+01,3.335309564906496860e+01,2.341843130207487178e+01,1.912184077498021395e+01,2.381949068078670351e+01,1.241168895512104697e+01,2.186260289866094908e+01,1.533289505218550453e+01,2.114466072767647020e+01,1.645667043293711274e+01,2.592144709897212707e+01,4.460324194956123733e+01,1.014137199104183829e+01,1.747540409896163638e+01,1.262111437219627064e+01,1.729191285888235896e+01,1.389553458926058127e+01,5.092958167866881070e+01,5.823634648126631674e+00,4.691068561975162510e+01,2.435246267471941906e+01,1.817143492004750982e+01,2.082684785067222322e+01,2.669720678610700659e+01,1.954904879782473870e+01,2.062358764618625884e+01,4.197192789689321302e+01,3.636649226858063599e+01,5.436732000382837526e+01,1.909014538030401908e+01,5.759815694546732345e+01,3.068591316119649193e+01,3.043935067872708089e+01,3.181193185859714490e+01,1.158461799387198710e+01,2.031179994750292295e+01,2.565965736095541772e+01,3.896851477468401015e+01,1.877509627684147375e+01,6.266805993730997670e+00,1.600951937406585301e+01,2.061383111547197444e+01,3.600099523097947696e+01,2.157538204489343414e+01,3.621791333783030353e+01,2.599590676766593234e+01,2.062422478113046154e+01,3.352714835248782776e+01,3.584140043182153512e+00,3.181172291502383587e+01,9.624383939370266816e+00,1.386114716858813978e+01,3.041467117387632868e+01,2.685544848959738573e+01,1.274019362585517889e+01,2.736044987926767647e+01,3.741053916293402892e+01,3.395991914686376845e+01,2.884582206440506624e+01,2.360821140707702170e+01,2.349879553095469120e+01,2.801618457522650729e+01,2.699940626707740421e+01,4.128457708346966371e+01,2.330127640219743768e+01,1.308836468680874177e+01,2.483870787885062370e+01,2.474075458464359656e+01,1.892408996567152712e+01,6.488334146641673073e+00,1.973571627223910596e+01,2.046760472535605757e+01,6.075532102277242785e+01,3.348478411344633798e+01,3.870673421525027891e+01,1.524176971411133685e+01,4.156282978645695891e+01,1.875379773101173697e+01,2.479259857007430057e+01,3.681358898383765421e+01,3.777477508116712812e+01,2.972857731806846360e+01,2.108609564132464342e+01,7.407700033348016078e+00,1.291152486188560644e+01,2.186420138455603634e+01,5.042550787857149430e+01,3.987237034509044520e+01,8.279187331573455211e+00,4.447825108040420616e+01,1.335330451628503923e+01,4.409196102771181103e+01,3.346428065808478181e+01,1.424493612645799523e+01,8.461522630226587793e+00,1.099703030464012876e+01,1.317206966290840242e+01,7.045579407082920298e+00,3.219871946325374523e+01,2.511811193196017555e+01,1.983728549726030366e+01,5.684220588244687633e+01 2 | -------------------------------------------------------------------------------- /docs/contributing.md: -------------------------------------------------------------------------------- 1 | ## Installing for Development 2 | 3 | It is recommended to use a virtual environment: 4 | 5 | ```sh 6 | # create virtualenv in .venv dir 7 | python -m venv .venv 8 | # active venv (run each time after opening a new shell) 9 | source .venv/bin/activate 10 | ``` 11 | 12 | Then 
install causing including its dependencies by executing `pip install -e .`. All 13 | these commands must be executed inside the repository's root directory. 14 | 15 | ## Linting and Hooks 16 | 17 | This repo uses [pre-commit](https://pre-commit.com) to manage linting and 18 | pre-commit hooks. The list of all configured linters is found in 19 | [.pre-commit-config.yaml](../.pre-commit-config.yaml). 20 | 21 | ### Install pre-commit hook 22 | 23 | To prevent you from committing files that violate the linting rules, you should 24 | install the git pre-commit hook after cloning the repository. This is done by 25 | executing 26 | 27 | ``` 28 | pre-commit install 29 | ``` 30 | 31 | ### Run Linter/Fixes Across All Files 32 | 33 | ``` 34 | pre-commit run --all-files 35 | ``` 36 | 37 | ## Running the Examples 38 | 39 | The examples can be run using the `causing.examples` module by giving the example name as a parameter. 40 | 41 | ``` 42 | python -m causing.examples example 43 | python -m causing.examples education 44 | ``` 45 | -------------------------------------------------------------------------------- /docs/education.md: -------------------------------------------------------------------------------- 1 | # An Application of Causing: Education and Wages 2 | 3 | This case study is based on research and data from Gary Koop and Justin L. Tobias, "Learning about Heterogeneity in Returns to Schooling", Journal of Applied Econometrics, Vol. 19, No. 7, 2004, pp. 827-849. It is from the empirical education literature. 4 | 5 | See [Koop, Tobias (2004) "Learning about Heterogeneity in Returns to Schooling"](https://www.economics.uci.edu/files/docs/workingpapers/2001-02/Tobias-07.pdf). 6 | 7 | This panel data set consists of 17,919 observations from N=2,178 individuals. It contains the wage earnings history for young workers in the U.S. from 1979 until 1993. The data are taken from the National Longitudinal Survey of Youth (NLSY). 8 | 9 | See [Koop, Tobias (2004) Labor Market Experience Data](http://people.stern.nyu.edu/wgreene/Econometrics/PanelDataSets.htm). 10 | 11 | # The Data 12 | 13 | The NLSY is a rich panel study of 12,686 individuals in total, ranging in 14 | age from 14 to 22 as of the first interview date in 1979. It contains detailed 15 | information on the earnings and wages, educational attainment, family 16 | characteristics, and test scores of the sampled individuals. 17 | 18 | Koop and Tobias (2004) use a version of the NLSY which allows one to obtain an earnings history until 1993. To abstract from selection issues in employment, and to remain consistent with the majority of the literature, they focus on the outcomes of white males in the NLSY. They restrict attention to individuals who are active in the labor force for a good portion of each year, who are at least 16 years of age in the given year, and who reported working at least 30 weeks and at least 800 hours per year. They also deleted observations when the reported hourly wage was less than $1 or greater than $100 per hour, when education decreased across time for an individual, or when the reported change in years of schooling over time was not consistent with the change in time between consecutive interviews. As such, they are careful to delete individuals whose education is clearly mismeasured. 19 | 20 | The dataset contains the following variables in this order, the variables 0. to 4. being time-varying and variables 5. to 9. being time-invariant: 21 | 22 | 0. PERSONID = Person id (ranging from 1 to 2,178) # not used by us 23 | 1.
EDUC = Education (years of schooling) 24 | 2. LOGWAGE = Log of hourly wage at the most recent job, in real 1993 dollars # we use the wage level, not the log 25 | 3. POTEXPER = Potential experience (= AGE - EDUC - 5) 26 | 4. TIMETRND = Time trend (starting at 1 in 1979 and incrementing by year) # not used by us 27 | 5. ABILITY = Ability (cognitive ability measured by test score) 28 | 6. MOTHERED = Mother's education (highest grade completed, in years) 29 | 7. FATHERED = Father's education (highest grade completed, in years) 30 | 8. BRKNHOME = Dummy variable for residence in a broken home at age 14 31 | 9. SIBLINGS = Number of siblings 32 | 33 | The standardized test score is constructed from the 10 component tests of the Armed Services Vocational Aptitude Battery (ASVAB) administered to the NLSY participants in 1980. Since individuals varied in age, each of the 10 tests is first residualized on age, and the test score is defined as the first principal component of the standardized residuals. 34 | 35 | Koop and Tobias (2004) analyze the reduced form since heterogeneity in the reduced form takes the same form as in the structural form. In contrast, we are interested in structural causal relations. Since heterogeneity across individuals is not our focus, we ignore the person id. Further, we do not use the time dimension and therefore ignore the time trend. We reconstruct the AGE variable from the definition of POTEXPER = AGE - EDUC - 5 and use it as an exogenous variable. 36 | 37 | To estimate the effects, the Causing method always uses demeaned data. Only for the estimation of the bias terms are the original level data used. 38 | 39 | # The Model 40 | 41 | The model comprises just three equations (EDUC, POTEXPER, WAGE). There are six exogenous variables (FATHERED, MOTHERED, SIBLINGS, BRKNHOME, ABILITY, AGE). All variables are observed; there are no latent variables. Our final variable of interest is the hourly wage. The parameter signs are based on domain knowledge and their values are set to be roughly consistent with the data. The model effects are used as starting values for estimation. 42 | 43 | 1. Education is a constant plus a positive effect for parents having been schooled for more than 12 years. Negative effects are expected in the case of siblings or a broken home. 44 | 45 | ```python 46 | EDUC = 13 47 | + 0.1 * (FATHERED - 12) 48 | + 0.1 * (MOTHERED - 12) 49 | - 0.1 * SIBLINGS 50 | - 0.5 * BRKNHOME 51 | ``` 52 | 53 | 2. Potential experience is simply the years after schooling, as defined by Koop and Tobias (2004). 54 | 55 | `POTEXPER = Max(AGE - EDUC - 5, 0)` 56 | 57 | 3. Hourly wages are a base constant plus positive effects from education, potential experience, and ability. 58 | 59 | `WAGE = 7 + 1 * (EDUC - 12) + 0.5 * POTEXPER + 1 * ABILITY` 60 | 61 | We expect education to increase by 0.1 years if the father's education increases by one year. The same should hold for the mother's education. Each sibling is expected to reduce the duration of education by 0.1 years on average. If the young worker was raised in a broken home, we expect education to be half a year shorter on average. 62 | 63 | Note that the equation constants only model the level forecasts; they do not affect the effects, which are derivatives and therefore independent of the constants. Also note that in SymPy some operators are special, e.g. Max() instead of max(). The observed potential experience is never negative, but negative values could occur 64 | during the estimation/optimization algorithm.
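To make the SymPy remark concrete, here is a minimal sketch (mirroring the `education()` model in `causing/examples/models.py`) of how the potential experience equation is written; `sympy.Max` stays symbolic, whereas Python's built-in `max()` cannot compare unevaluated symbolic expressions:

```python
import sympy
from sympy import symbols

AGE, EDUC = symbols(["AGE", "EDUC"])

# sympy.Max keeps the expression symbolic until values are substituted
potexper = sympy.Max(AGE - EDUC - 5, 0)
print(potexper.subs({AGE: 25, EDUC: 13}))  # 7

# The built-in max() would raise a TypeError here, because the truth value
# of a symbolic comparison such as AGE - EDUC - 5 > 0 is undetermined.
```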
65 | 66 | See the [full model source code](https://github.com/realrate/Causing/blob/develop/causing/examples/models.py#L79-L160). 67 | 68 | # Results 69 | 70 | This is the causal graph for the Individual Mediation Effects (IME). The total effect of each variable on WAGE is shown in the corresponding node of the graph. These total effects are split up over their outgoing edges, yielding the mediation effects shown on the edges. However, only education has more than one outgoing edge to be interpreted in this way. 71 | 72 | ![Individual Mediation Effects (IME)](../images_education/IME_32.svg) 73 | 74 | We now look at a single individual. Just for exposition, we analyze the mediation effects of individual/observation no. 32. This worker is aged 25, and his strongest disadvantage is being so young and thus having low potential experience, which reduces his wage by 52 cents. However, he showed high ability in his test scores (+39 cents). And due to his father's long schooling (16 years instead of the average of 12 years), his education is also above average (13.2 years compared with the median of 12.6 years). In total, this worker achieves an hourly wage 20 cents above average. 75 | 76 | The total effect of education decomposes into two antagonistic direct effects. The positive effect is passed directly to wage, while the negative effect is passed on via potential experience. This effect is negative because longer education means shorter potential experience. In total, the net effect is positive, increasing wages by 34 cents per hour. The graph allows us to distinguish between these antagonistic causes. 77 | 78 | The observed exogenous and predicted endogenous variables for individual no. 32 are summarized in the following table, sorted by their total effect on WAGE: 79 | 80 | Variable | Individual no. 32 | Median | Total Effect on WAGE 81 | --- | --- | --- | --- 82 | ABILITY | 0.44 | 0.21 | +0.39 83 | EDUC | 13.20 | 12.60 | +0.34 84 | FATHERED | 16.00 | 12.00 | +0.21 85 | WAGE | 12.04 | 11.88 | +0.20 86 | SIBLINGS | 2.00 | 3.00 | +0.06 87 | BRKNHOME | 0.00 | 0.00 | 0.04 88 | MOTHERED | 12.00 | 12.00 | 0.03 89 | AGE | 25.00 | 26.00 | -0.52 90 | POTEXPER | 6.80 | 8.50 | -0.86 91 | 92 | This concludes the example usage of the Causing method with real-world data. We analyzed how wages earned by young American workers are determined by their educational attainment, family characteristics, and test scores.
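The effects in the table and graph above can be recomputed with the code shipped in this repository. The following sketch uses the functions shown earlier (`education()` from `causing/examples/models.py`, `Model.calc_effects`, and `annotated_graphs`/`graph_to_dot` from `causing/graph.py`); it assumes the package is installed as described in docs/contributing.md and that graph ids follow the 1-based convention of `annotated_graphs`, so observation no. 32 corresponds to column index 31:

```python
# Minimal sketch: recompute individual mediation effects for observation no. 32.
from itertools import islice

from causing.examples.models import education
from causing.graph import annotated_graphs, graph_to_dot

m, xdat = education()           # model and exogenous data for all observations
effects = m.calc_effects(xdat)  # node and edge effects per individual (may take a while)

# Total effect of each exogenous variable on WAGE for observation no. 32
for var, eff in zip(m.xvars, effects["exj_indivs"][:, 31]):
    print(var, round(eff, 2))

# DOT source of the corresponding IME graph
g32 = next(islice(annotated_graphs(m, effects), 31, None))
print(graph_to_dot(g32))
```

Note that `graph_to_dot` only builds the DOT description; rendering it to an SVG such as IME_32.svg additionally requires the graphviz `dot` binary.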
93 | -------------------------------------------------------------------------------- /images_education/IME_32.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | 9 | %3 10 | 11 | 12 | 13 | SIBLINGS 14 | 15 | SIBLINGS 16 | 0.06 17 | 18 | 19 | 20 | EDUC 21 | 22 | EDUC 23 | 0.34 24 | 25 | 26 | 27 | SIBLINGS->EDUC 28 | 29 | 30 | 0.06 31 | 32 | 33 | 34 | POTEXPER 35 | 36 | POTEXPER 37 | -0.86 38 | 39 | 40 | 41 | EDUC->POTEXPER 42 | 43 | 44 | -0.34 45 | 46 | 47 | 48 | WAGE 49 | 50 | WAGE 51 | 0.20 52 | 53 | 54 | 55 | EDUC->WAGE 56 | 57 | 58 | 0.67 59 | 60 | 61 | 62 | MOTHERED 63 | 64 | MOTHERED 65 | 0.03 66 | 67 | 68 | 69 | MOTHERED->EDUC 70 | 71 | 72 | 0.03 73 | 74 | 75 | 76 | FATHERED 77 | 78 | FATHERED 79 | 0.21 80 | 81 | 82 | 83 | FATHERED->EDUC 84 | 85 | 86 | 0.21 87 | 88 | 89 | 90 | BRKNHOME 91 | 92 | BRKNHOME 93 | 0.04 94 | 95 | 96 | 97 | BRKNHOME->EDUC 98 | 99 | 100 | 0.04 101 | 102 | 103 | 104 | POTEXPER->WAGE 105 | 106 | 107 | -0.86 108 | 109 | 110 | 111 | AGE 112 | 113 | AGE 114 | -0.52 115 | 116 | 117 | 118 | AGE->POTEXPER 119 | 120 | 121 | -0.52 122 | 123 | 124 | 125 | ABILITY 126 | 127 | ABILITY 128 | 0.39 129 | 130 | 131 | 132 | ABILITY->WAGE 133 | 134 | 135 | 0.39 136 | 137 | 138 | 139 | -------------------------------------------------------------------------------- /images_readme/IME_1.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | 9 | %3 10 | 11 | 12 | 13 | X1 14 | 15 | X1 16 | 29.81 17 | 18 | 19 | 20 | Y1 21 | 22 | Y1 23 | 29.81 24 | 25 | 26 | 27 | X1->Y1 28 | 29 | 30 | 29.81 31 | 32 | 33 | 34 | Y2 35 | 36 | Y2 37 | 25.27 38 | 39 | 40 | 41 | Y1->Y2 42 | 43 | 44 | 28.00 45 | 46 | 47 | 48 | Y3 49 | 50 | Y3 51 | 27.07 52 | 53 | 54 | 55 | Y1->Y3 56 | 57 | 58 | 1.80 59 | 60 | 61 | 62 | X2 63 | 64 | X2 65 | -0.97 66 | 67 | 68 | 69 | X2->Y2 70 | 71 | 72 | -0.97 73 | 74 | 75 | 76 | Y2->Y3 77 | 78 | 79 | 25.27 80 | 81 | 82 | 83 | -------------------------------------------------------------------------------- /images_readme/RealRate_AI_Software_Winner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/realrate/Causing/dd40a17061384763eaca7cfa6298dc25650aa4c4/images_readme/RealRate_AI_Software_Winner.png -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | ignore_missing_imports = True 3 | check_untyped_defs = True 4 | exclude = .venv 5 | -------------------------------------------------------------------------------- /output/example2/graphs.json: -------------------------------------------------------------------------------- 1 | { 2 | "exj_indivs": [ 3 | [ 4 | 0.0091372691, 5 | -0.95471368, 6 | 1.9068772, 7 | -1.335133, 8 | -0.20447049, 9 | -1.1380462, 10 | 0.4977168, 11 | -0.75629707, 12 | -0.22812547, 13 | 0.4185105, 14 | 1.3227622, 15 | -1.4471449, 16 | 1.975879, 17 | 0.61581211, 18 | -0.0088611316, 19 | -0.28127045, 20 | -0.85181196, 21 | -0.34561648, 22 | -0.59607117, 23 | 1.3432436, 24 | -0.65534971, 25 | -0.46667535, 26 | 0.064231446, 27 | 0.20143018, 28 | -1.0613508, 29 | -1.4268921, 30 | 1.0341065, 31 | 0.53452104, 32 | -0.5739187, 33 | -2.1669216, 34 | -0.66539123, 35 | -0.90666637, 36 | 0.21515841, 37 | -0.40734387, 38 | 0.90209516, 39 | -0.078964396, 40 | -0.35875829, 41 | -0.91341182, 42 | -0.38088407, 43 | 0.9117405, 44 | -0.80314732, 45 | 2.3318856, 
46 | -1.2783859, 47 | -0.11724708, 48 | 0.33919098, 49 | -0.23053498, 50 | 0.20772583, 51 | 0.22850227, 52 | -0.075180914, 53 | 1.4687842, 54 | 0.46702428, 55 | 0.67949472, 56 | -0.47343487, 57 | 0.85468249, 58 | 1.453103, 59 | -0.60818264, 60 | -0.36390549, 61 | 1.1987645, 62 | -2.4735071, 63 | 0.28877848, 64 | 1.1608099, 65 | -1.6867236, 66 | 0.88049124, 67 | -0.37826251, 68 | -2.2224429, 69 | 0.013932611, 70 | -1.3809374, 71 | 0.61062896, 72 | -0.43132822, 73 | 1.1793578, 74 | -0.74746756, 75 | 0.046759207, 76 | 0.014695335, 77 | 2.2206723, 78 | -0.21868763, 79 | -0.40605481, 80 | -1.1727665, 81 | 0.33594933, 82 | 1.2481199, 83 | 0.52338614, 84 | 0.88743562, 85 | -1.2312772, 86 | 0.6957088, 87 | -0.098286827, 88 | 0.76703928, 89 | -0.88286901, 90 | 2.0215889, 91 | 0.33871112, 92 | 0.62182068, 93 | -0.50474078, 94 | -0.88103392, 95 | -0.65945992, 96 | 0.32612804, 97 | 0.65794231, 98 | -0.95314716, 99 | -2.0960045, 100 | 0.76878052, 101 | -0.2004855, 102 | -0.050713886, 103 | -2.1665715, 104 | 0.0098064501, 105 | 1.0771641, 106 | 0.57658848, 107 | 2.1837778, 108 | 0.18431077, 109 | 0.12469274, 110 | -0.33170353, 111 | 0.4953328, 112 | -0.69126569, 113 | 0.02029279, 114 | 0.5675635, 115 | -1.1532783, 116 | 0.22968211, 117 | 0.97408719, 118 | -0.035001791, 119 | 1.3106696, 120 | -1.3504131, 121 | 0.98789114, 122 | -0.77153923, 123 | -0.42712031, 124 | 0.12331325, 125 | -0.77957215, 126 | -0.38369826, 127 | 1.3537045, 128 | 1.8800264, 129 | 1.6600121, 130 | 0.91649195, 131 | -0.78511658, 132 | 0.030673902, 133 | -1.9701367, 134 | -0.14626232, 135 | -0.86008387, 136 | -0.47211292, 137 | 1.3470269, 138 | -0.71171134, 139 | -0.10523827, 140 | 0.76597476, 141 | 1.5120141, 142 | -0.65990866, 143 | -2.7076077, 144 | 0.42704947, 145 | 0.55392143, 146 | -0.37622122, 147 | 0.24102871, 148 | 0.066416541, 149 | 1.7107811, 150 | -1.6904644, 151 | -1.3414781, 152 | -0.25734737, 153 | -0.67983189, 154 | 0.090750141, 155 | -0.29569742, 156 | 0.39199992, 157 | -0.50026656, 158 | -0.13911713, 159 | -0.95148232, 160 | -1.2042264, 161 | -0.082354323, 162 | 0.37978179, 163 | -1.407136, 164 | -1.0279157, 165 | -0.089441134, 166 | 0.95301515, 167 | -2.0622347, 168 | 0.85296052, 169 | -0.63088447, 170 | 3.3811062, 171 | 1.7772135, 172 | -1.9296584, 173 | -0.73729918, 174 | -0.11016667, 175 | 1.1146083, 176 | -0.76198339, 177 | 0.06144344, 178 | 1.1887885, 179 | 0.33350779, 180 | 0.8867252, 181 | -0.32737403, 182 | -0.39448413, 183 | -1.498936, 184 | 0.0080048302, 185 | 1.937377, 186 | -0.30470686, 187 | -0.66159352, 188 | 0.39775743, 189 | 0.40429407, 190 | 1.333364, 191 | -0.701108, 192 | -1.3282452, 193 | 0.43467641, 194 | 0.59820337, 195 | -1.4432558, 196 | -0.30688245, 197 | 1.4854359, 198 | 1.1613016, 199 | 0.517758, 200 | 0.49365496, 201 | 0.57315087, 202 | 0.74380512, 203 | 0.96434631 204 | ] 205 | ], 206 | "eyj_indivs": [ 207 | [ 208 | 0.0091372691, 209 | -0.95471368, 210 | 1.9068772, 211 | -1.335133, 212 | -0.20447049, 213 | -1.1380462, 214 | 0.4977168, 215 | -0.75629707, 216 | -0.22812547, 217 | 0.4185105, 218 | 1.3227622, 219 | -1.4471449, 220 | 1.975879, 221 | 0.61581211, 222 | -0.0088611316, 223 | -0.28127045, 224 | -0.85181196, 225 | -0.34561648, 226 | -0.59607117, 227 | 1.3432436, 228 | -0.65534971, 229 | -0.46667535, 230 | 0.064231446, 231 | 0.20143018, 232 | -1.0613508, 233 | -1.4268921, 234 | 1.0341065, 235 | 0.53452104, 236 | -0.5739187, 237 | -2.1669216, 238 | -0.66539123, 239 | -0.90666637, 240 | 0.21515841, 241 | -0.40734387, 242 | 0.90209516, 243 | -0.078964396, 244 | -0.35875829, 245 | 
-0.91341182, 246 | -0.38088407, 247 | 0.9117405, 248 | -0.80314732, 249 | 2.3318856, 250 | -1.2783859, 251 | -0.11724708, 252 | 0.33919098, 253 | -0.23053498, 254 | 0.20772583, 255 | 0.22850227, 256 | -0.075180914, 257 | 1.4687842, 258 | 0.46702428, 259 | 0.67949472, 260 | -0.47343487, 261 | 0.85468249, 262 | 1.453103, 263 | -0.60818264, 264 | -0.36390549, 265 | 1.1987645, 266 | -2.4735071, 267 | 0.28877848, 268 | 1.1608099, 269 | -1.6867236, 270 | 0.88049124, 271 | -0.37826251, 272 | -2.2224429, 273 | 0.013932611, 274 | -1.3809374, 275 | 0.61062896, 276 | -0.43132822, 277 | 1.1793578, 278 | -0.74746756, 279 | 0.046759207, 280 | 0.014695335, 281 | 2.2206723, 282 | -0.21868763, 283 | -0.40605481, 284 | -1.1727665, 285 | 0.33594933, 286 | 1.2481199, 287 | 0.52338614, 288 | 0.88743562, 289 | -1.2312772, 290 | 0.6957088, 291 | -0.098286827, 292 | 0.76703928, 293 | -0.88286901, 294 | 2.0215889, 295 | 0.33871112, 296 | 0.62182068, 297 | -0.50474078, 298 | -0.88103392, 299 | -0.65945992, 300 | 0.32612804, 301 | 0.65794231, 302 | -0.95314716, 303 | -2.0960045, 304 | 0.76878052, 305 | -0.2004855, 306 | -0.050713886, 307 | -2.1665715, 308 | 0.0098064501, 309 | 1.0771641, 310 | 0.57658848, 311 | 2.1837778, 312 | 0.18431077, 313 | 0.12469274, 314 | -0.33170353, 315 | 0.4953328, 316 | -0.69126569, 317 | 0.02029279, 318 | 0.5675635, 319 | -1.1532783, 320 | 0.22968211, 321 | 0.97408719, 322 | -0.035001791, 323 | 1.3106696, 324 | -1.3504131, 325 | 0.98789114, 326 | -0.77153923, 327 | -0.42712031, 328 | 0.12331325, 329 | -0.77957215, 330 | -0.38369826, 331 | 1.3537045, 332 | 1.8800264, 333 | 1.6600121, 334 | 0.91649195, 335 | -0.78511658, 336 | 0.030673902, 337 | -1.9701367, 338 | -0.14626232, 339 | -0.86008387, 340 | -0.47211292, 341 | 1.3470269, 342 | -0.71171134, 343 | -0.10523827, 344 | 0.76597476, 345 | 1.5120141, 346 | -0.65990866, 347 | -2.7076077, 348 | 0.42704947, 349 | 0.55392143, 350 | -0.37622122, 351 | 0.24102871, 352 | 0.066416541, 353 | 1.7107811, 354 | -1.6904644, 355 | -1.3414781, 356 | -0.25734737, 357 | -0.67983189, 358 | 0.090750141, 359 | -0.29569742, 360 | 0.39199992, 361 | -0.50026656, 362 | -0.13911713, 363 | -0.95148232, 364 | -1.2042264, 365 | -0.082354323, 366 | 0.37978179, 367 | -1.407136, 368 | -1.0279157, 369 | -0.089441134, 370 | 0.95301515, 371 | -2.0622347, 372 | 0.85296052, 373 | -0.63088447, 374 | 3.3811062, 375 | 1.7772135, 376 | -1.9296584, 377 | -0.73729918, 378 | -0.11016667, 379 | 1.1146083, 380 | -0.76198339, 381 | 0.06144344, 382 | 1.1887885, 383 | 0.33350779, 384 | 0.8867252, 385 | -0.32737403, 386 | -0.39448413, 387 | -1.498936, 388 | 0.0080048302, 389 | 1.937377, 390 | -0.30470686, 391 | -0.66159352, 392 | 0.39775743, 393 | 0.40429407, 394 | 1.333364, 395 | -0.701108, 396 | -1.3282452, 397 | 0.43467641, 398 | 0.59820337, 399 | -1.4432558, 400 | -0.30688245, 401 | 1.4854359, 402 | 1.1613016, 403 | 0.517758, 404 | 0.49365496, 405 | 0.57315087, 406 | 0.74380512, 407 | 0.96434631 408 | ] 409 | ], 410 | "eyx_indivs": [ 411 | [ 412 | [ 413 | 0.0091372691 414 | ] 415 | ], 416 | [ 417 | [ 418 | -0.95471368 419 | ] 420 | ], 421 | [ 422 | [ 423 | 1.9068772 424 | ] 425 | ], 426 | [ 427 | [ 428 | -1.335133 429 | ] 430 | ], 431 | [ 432 | [ 433 | -0.20447049 434 | ] 435 | ], 436 | [ 437 | [ 438 | -1.1380462 439 | ] 440 | ], 441 | [ 442 | [ 443 | 0.4977168 444 | ] 445 | ], 446 | [ 447 | [ 448 | -0.75629707 449 | ] 450 | ], 451 | [ 452 | [ 453 | -0.22812547 454 | ] 455 | ], 456 | [ 457 | [ 458 | 0.4185105 459 | ] 460 | ], 461 | [ 462 | [ 463 | 1.3227622 464 | ] 465 | ], 466 | [ 
467 | [ 468 | -1.4471449 469 | ] 470 | ], 471 | [ 472 | [ 473 | 1.975879 474 | ] 475 | ], 476 | [ 477 | [ 478 | 0.61581211 479 | ] 480 | ], 481 | [ 482 | [ 483 | -0.0088611316 484 | ] 485 | ], 486 | [ 487 | [ 488 | -0.28127045 489 | ] 490 | ], 491 | [ 492 | [ 493 | -0.85181196 494 | ] 495 | ], 496 | [ 497 | [ 498 | -0.34561648 499 | ] 500 | ], 501 | [ 502 | [ 503 | -0.59607117 504 | ] 505 | ], 506 | [ 507 | [ 508 | 1.3432436 509 | ] 510 | ], 511 | [ 512 | [ 513 | -0.65534971 514 | ] 515 | ], 516 | [ 517 | [ 518 | -0.46667535 519 | ] 520 | ], 521 | [ 522 | [ 523 | 0.064231446 524 | ] 525 | ], 526 | [ 527 | [ 528 | 0.20143018 529 | ] 530 | ], 531 | [ 532 | [ 533 | -1.0613508 534 | ] 535 | ], 536 | [ 537 | [ 538 | -1.4268921 539 | ] 540 | ], 541 | [ 542 | [ 543 | 1.0341065 544 | ] 545 | ], 546 | [ 547 | [ 548 | 0.53452104 549 | ] 550 | ], 551 | [ 552 | [ 553 | -0.5739187 554 | ] 555 | ], 556 | [ 557 | [ 558 | -2.1669216 559 | ] 560 | ], 561 | [ 562 | [ 563 | -0.66539123 564 | ] 565 | ], 566 | [ 567 | [ 568 | -0.90666637 569 | ] 570 | ], 571 | [ 572 | [ 573 | 0.21515841 574 | ] 575 | ], 576 | [ 577 | [ 578 | -0.40734387 579 | ] 580 | ], 581 | [ 582 | [ 583 | 0.90209516 584 | ] 585 | ], 586 | [ 587 | [ 588 | -0.078964396 589 | ] 590 | ], 591 | [ 592 | [ 593 | -0.35875829 594 | ] 595 | ], 596 | [ 597 | [ 598 | -0.91341182 599 | ] 600 | ], 601 | [ 602 | [ 603 | -0.38088407 604 | ] 605 | ], 606 | [ 607 | [ 608 | 0.9117405 609 | ] 610 | ], 611 | [ 612 | [ 613 | -0.80314732 614 | ] 615 | ], 616 | [ 617 | [ 618 | 2.3318856 619 | ] 620 | ], 621 | [ 622 | [ 623 | -1.2783859 624 | ] 625 | ], 626 | [ 627 | [ 628 | -0.11724708 629 | ] 630 | ], 631 | [ 632 | [ 633 | 0.33919098 634 | ] 635 | ], 636 | [ 637 | [ 638 | -0.23053498 639 | ] 640 | ], 641 | [ 642 | [ 643 | 0.20772583 644 | ] 645 | ], 646 | [ 647 | [ 648 | 0.22850227 649 | ] 650 | ], 651 | [ 652 | [ 653 | -0.075180914 654 | ] 655 | ], 656 | [ 657 | [ 658 | 1.4687842 659 | ] 660 | ], 661 | [ 662 | [ 663 | 0.46702428 664 | ] 665 | ], 666 | [ 667 | [ 668 | 0.67949472 669 | ] 670 | ], 671 | [ 672 | [ 673 | -0.47343487 674 | ] 675 | ], 676 | [ 677 | [ 678 | 0.85468249 679 | ] 680 | ], 681 | [ 682 | [ 683 | 1.453103 684 | ] 685 | ], 686 | [ 687 | [ 688 | -0.60818264 689 | ] 690 | ], 691 | [ 692 | [ 693 | -0.36390549 694 | ] 695 | ], 696 | [ 697 | [ 698 | 1.1987645 699 | ] 700 | ], 701 | [ 702 | [ 703 | -2.4735071 704 | ] 705 | ], 706 | [ 707 | [ 708 | 0.28877848 709 | ] 710 | ], 711 | [ 712 | [ 713 | 1.1608099 714 | ] 715 | ], 716 | [ 717 | [ 718 | -1.6867236 719 | ] 720 | ], 721 | [ 722 | [ 723 | 0.88049124 724 | ] 725 | ], 726 | [ 727 | [ 728 | -0.37826251 729 | ] 730 | ], 731 | [ 732 | [ 733 | -2.2224429 734 | ] 735 | ], 736 | [ 737 | [ 738 | 0.013932611 739 | ] 740 | ], 741 | [ 742 | [ 743 | -1.3809374 744 | ] 745 | ], 746 | [ 747 | [ 748 | 0.61062896 749 | ] 750 | ], 751 | [ 752 | [ 753 | -0.43132822 754 | ] 755 | ], 756 | [ 757 | [ 758 | 1.1793578 759 | ] 760 | ], 761 | [ 762 | [ 763 | -0.74746756 764 | ] 765 | ], 766 | [ 767 | [ 768 | 0.046759207 769 | ] 770 | ], 771 | [ 772 | [ 773 | 0.014695335 774 | ] 775 | ], 776 | [ 777 | [ 778 | 2.2206723 779 | ] 780 | ], 781 | [ 782 | [ 783 | -0.21868763 784 | ] 785 | ], 786 | [ 787 | [ 788 | -0.40605481 789 | ] 790 | ], 791 | [ 792 | [ 793 | -1.1727665 794 | ] 795 | ], 796 | [ 797 | [ 798 | 0.33594933 799 | ] 800 | ], 801 | [ 802 | [ 803 | 1.2481199 804 | ] 805 | ], 806 | [ 807 | [ 808 | 0.52338614 809 | ] 810 | ], 811 | [ 812 | [ 813 | 0.88743562 814 | ] 815 | ], 816 | [ 817 | [ 818 | -1.2312772 819 | ] 
820 | ], 821 | [ 822 | [ 823 | 0.6957088 824 | ] 825 | ], 826 | [ 827 | [ 828 | -0.098286827 829 | ] 830 | ], 831 | [ 832 | [ 833 | 0.76703928 834 | ] 835 | ], 836 | [ 837 | [ 838 | -0.88286901 839 | ] 840 | ], 841 | [ 842 | [ 843 | 2.0215889 844 | ] 845 | ], 846 | [ 847 | [ 848 | 0.33871112 849 | ] 850 | ], 851 | [ 852 | [ 853 | 0.62182068 854 | ] 855 | ], 856 | [ 857 | [ 858 | -0.50474078 859 | ] 860 | ], 861 | [ 862 | [ 863 | -0.88103392 864 | ] 865 | ], 866 | [ 867 | [ 868 | -0.65945992 869 | ] 870 | ], 871 | [ 872 | [ 873 | 0.32612804 874 | ] 875 | ], 876 | [ 877 | [ 878 | 0.65794231 879 | ] 880 | ], 881 | [ 882 | [ 883 | -0.95314716 884 | ] 885 | ], 886 | [ 887 | [ 888 | -2.0960045 889 | ] 890 | ], 891 | [ 892 | [ 893 | 0.76878052 894 | ] 895 | ], 896 | [ 897 | [ 898 | -0.2004855 899 | ] 900 | ], 901 | [ 902 | [ 903 | -0.050713886 904 | ] 905 | ], 906 | [ 907 | [ 908 | -2.1665715 909 | ] 910 | ], 911 | [ 912 | [ 913 | 0.0098064501 914 | ] 915 | ], 916 | [ 917 | [ 918 | 1.0771641 919 | ] 920 | ], 921 | [ 922 | [ 923 | 0.57658848 924 | ] 925 | ], 926 | [ 927 | [ 928 | 2.1837778 929 | ] 930 | ], 931 | [ 932 | [ 933 | 0.18431077 934 | ] 935 | ], 936 | [ 937 | [ 938 | 0.12469274 939 | ] 940 | ], 941 | [ 942 | [ 943 | -0.33170353 944 | ] 945 | ], 946 | [ 947 | [ 948 | 0.4953328 949 | ] 950 | ], 951 | [ 952 | [ 953 | -0.69126569 954 | ] 955 | ], 956 | [ 957 | [ 958 | 0.02029279 959 | ] 960 | ], 961 | [ 962 | [ 963 | 0.5675635 964 | ] 965 | ], 966 | [ 967 | [ 968 | -1.1532783 969 | ] 970 | ], 971 | [ 972 | [ 973 | 0.22968211 974 | ] 975 | ], 976 | [ 977 | [ 978 | 0.97408719 979 | ] 980 | ], 981 | [ 982 | [ 983 | -0.035001791 984 | ] 985 | ], 986 | [ 987 | [ 988 | 1.3106696 989 | ] 990 | ], 991 | [ 992 | [ 993 | -1.3504131 994 | ] 995 | ], 996 | [ 997 | [ 998 | 0.98789114 999 | ] 1000 | ], 1001 | [ 1002 | [ 1003 | -0.77153923 1004 | ] 1005 | ], 1006 | [ 1007 | [ 1008 | -0.42712031 1009 | ] 1010 | ], 1011 | [ 1012 | [ 1013 | 0.12331325 1014 | ] 1015 | ], 1016 | [ 1017 | [ 1018 | -0.77957215 1019 | ] 1020 | ], 1021 | [ 1022 | [ 1023 | -0.38369826 1024 | ] 1025 | ], 1026 | [ 1027 | [ 1028 | 1.3537045 1029 | ] 1030 | ], 1031 | [ 1032 | [ 1033 | 1.8800264 1034 | ] 1035 | ], 1036 | [ 1037 | [ 1038 | 1.6600121 1039 | ] 1040 | ], 1041 | [ 1042 | [ 1043 | 0.91649195 1044 | ] 1045 | ], 1046 | [ 1047 | [ 1048 | -0.78511658 1049 | ] 1050 | ], 1051 | [ 1052 | [ 1053 | 0.030673902 1054 | ] 1055 | ], 1056 | [ 1057 | [ 1058 | -1.9701367 1059 | ] 1060 | ], 1061 | [ 1062 | [ 1063 | -0.14626232 1064 | ] 1065 | ], 1066 | [ 1067 | [ 1068 | -0.86008387 1069 | ] 1070 | ], 1071 | [ 1072 | [ 1073 | -0.47211292 1074 | ] 1075 | ], 1076 | [ 1077 | [ 1078 | 1.3470269 1079 | ] 1080 | ], 1081 | [ 1082 | [ 1083 | -0.71171134 1084 | ] 1085 | ], 1086 | [ 1087 | [ 1088 | -0.10523827 1089 | ] 1090 | ], 1091 | [ 1092 | [ 1093 | 0.76597476 1094 | ] 1095 | ], 1096 | [ 1097 | [ 1098 | 1.5120141 1099 | ] 1100 | ], 1101 | [ 1102 | [ 1103 | -0.65990866 1104 | ] 1105 | ], 1106 | [ 1107 | [ 1108 | -2.7076077 1109 | ] 1110 | ], 1111 | [ 1112 | [ 1113 | 0.42704947 1114 | ] 1115 | ], 1116 | [ 1117 | [ 1118 | 0.55392143 1119 | ] 1120 | ], 1121 | [ 1122 | [ 1123 | -0.37622122 1124 | ] 1125 | ], 1126 | [ 1127 | [ 1128 | 0.24102871 1129 | ] 1130 | ], 1131 | [ 1132 | [ 1133 | 0.066416541 1134 | ] 1135 | ], 1136 | [ 1137 | [ 1138 | 1.7107811 1139 | ] 1140 | ], 1141 | [ 1142 | [ 1143 | -1.6904644 1144 | ] 1145 | ], 1146 | [ 1147 | [ 1148 | -1.3414781 1149 | ] 1150 | ], 1151 | [ 1152 | [ 1153 | -0.25734737 1154 | ] 1155 | ], 1156 | [ 1157 | [ 1158 
| -0.67983189 1159 | ] 1160 | ], 1161 | [ 1162 | [ 1163 | 0.090750141 1164 | ] 1165 | ], 1166 | [ 1167 | [ 1168 | -0.29569742 1169 | ] 1170 | ], 1171 | [ 1172 | [ 1173 | 0.39199992 1174 | ] 1175 | ], 1176 | [ 1177 | [ 1178 | -0.50026656 1179 | ] 1180 | ], 1181 | [ 1182 | [ 1183 | -0.13911713 1184 | ] 1185 | ], 1186 | [ 1187 | [ 1188 | -0.95148232 1189 | ] 1190 | ], 1191 | [ 1192 | [ 1193 | -1.2042264 1194 | ] 1195 | ], 1196 | [ 1197 | [ 1198 | -0.082354323 1199 | ] 1200 | ], 1201 | [ 1202 | [ 1203 | 0.37978179 1204 | ] 1205 | ], 1206 | [ 1207 | [ 1208 | -1.407136 1209 | ] 1210 | ], 1211 | [ 1212 | [ 1213 | -1.0279157 1214 | ] 1215 | ], 1216 | [ 1217 | [ 1218 | -0.089441134 1219 | ] 1220 | ], 1221 | [ 1222 | [ 1223 | 0.95301515 1224 | ] 1225 | ], 1226 | [ 1227 | [ 1228 | -2.0622347 1229 | ] 1230 | ], 1231 | [ 1232 | [ 1233 | 0.85296052 1234 | ] 1235 | ], 1236 | [ 1237 | [ 1238 | -0.63088447 1239 | ] 1240 | ], 1241 | [ 1242 | [ 1243 | 3.3811062 1244 | ] 1245 | ], 1246 | [ 1247 | [ 1248 | 1.7772135 1249 | ] 1250 | ], 1251 | [ 1252 | [ 1253 | -1.9296584 1254 | ] 1255 | ], 1256 | [ 1257 | [ 1258 | -0.73729918 1259 | ] 1260 | ], 1261 | [ 1262 | [ 1263 | -0.11016667 1264 | ] 1265 | ], 1266 | [ 1267 | [ 1268 | 1.1146083 1269 | ] 1270 | ], 1271 | [ 1272 | [ 1273 | -0.76198339 1274 | ] 1275 | ], 1276 | [ 1277 | [ 1278 | 0.06144344 1279 | ] 1280 | ], 1281 | [ 1282 | [ 1283 | 1.1887885 1284 | ] 1285 | ], 1286 | [ 1287 | [ 1288 | 0.33350779 1289 | ] 1290 | ], 1291 | [ 1292 | [ 1293 | 0.8867252 1294 | ] 1295 | ], 1296 | [ 1297 | [ 1298 | -0.32737403 1299 | ] 1300 | ], 1301 | [ 1302 | [ 1303 | -0.39448413 1304 | ] 1305 | ], 1306 | [ 1307 | [ 1308 | -1.498936 1309 | ] 1310 | ], 1311 | [ 1312 | [ 1313 | 0.0080048302 1314 | ] 1315 | ], 1316 | [ 1317 | [ 1318 | 1.937377 1319 | ] 1320 | ], 1321 | [ 1322 | [ 1323 | -0.30470686 1324 | ] 1325 | ], 1326 | [ 1327 | [ 1328 | -0.66159352 1329 | ] 1330 | ], 1331 | [ 1332 | [ 1333 | 0.39775743 1334 | ] 1335 | ], 1336 | [ 1337 | [ 1338 | 0.40429407 1339 | ] 1340 | ], 1341 | [ 1342 | [ 1343 | 1.333364 1344 | ] 1345 | ], 1346 | [ 1347 | [ 1348 | -0.701108 1349 | ] 1350 | ], 1351 | [ 1352 | [ 1353 | -1.3282452 1354 | ] 1355 | ], 1356 | [ 1357 | [ 1358 | 0.43467641 1359 | ] 1360 | ], 1361 | [ 1362 | [ 1363 | 0.59820337 1364 | ] 1365 | ], 1366 | [ 1367 | [ 1368 | -1.4432558 1369 | ] 1370 | ], 1371 | [ 1372 | [ 1373 | -0.30688245 1374 | ] 1375 | ], 1376 | [ 1377 | [ 1378 | 1.4854359 1379 | ] 1380 | ], 1381 | [ 1382 | [ 1383 | 1.1613016 1384 | ] 1385 | ], 1386 | [ 1387 | [ 1388 | 0.517758 1389 | ] 1390 | ], 1391 | [ 1392 | [ 1393 | 0.49365496 1394 | ] 1395 | ], 1396 | [ 1397 | [ 1398 | 0.57315087 1399 | ] 1400 | ], 1401 | [ 1402 | [ 1403 | 0.74380512 1404 | ] 1405 | ], 1406 | [ 1407 | [ 1408 | 0.96434631 1409 | ] 1410 | ] 1411 | ], 1412 | "eyy_indivs": [ 1413 | [ 1414 | [ 1415 | NaN 1416 | ] 1417 | ], 1418 | [ 1419 | [ 1420 | NaN 1421 | ] 1422 | ], 1423 | [ 1424 | [ 1425 | NaN 1426 | ] 1427 | ], 1428 | [ 1429 | [ 1430 | NaN 1431 | ] 1432 | ], 1433 | [ 1434 | [ 1435 | NaN 1436 | ] 1437 | ], 1438 | [ 1439 | [ 1440 | NaN 1441 | ] 1442 | ], 1443 | [ 1444 | [ 1445 | NaN 1446 | ] 1447 | ], 1448 | [ 1449 | [ 1450 | NaN 1451 | ] 1452 | ], 1453 | [ 1454 | [ 1455 | NaN 1456 | ] 1457 | ], 1458 | [ 1459 | [ 1460 | NaN 1461 | ] 1462 | ], 1463 | [ 1464 | [ 1465 | NaN 1466 | ] 1467 | ], 1468 | [ 1469 | [ 1470 | NaN 1471 | ] 1472 | ], 1473 | [ 1474 | [ 1475 | NaN 1476 | ] 1477 | ], 1478 | [ 1479 | [ 1480 | NaN 1481 | ] 1482 | ], 1483 | [ 1484 | [ 1485 | NaN 1486 | ] 1487 | ], 1488 | [ 1489 | 
[ 1490 | NaN 1491 | ] 1492 | ], 1493 | [ 1494 | [ 1495 | NaN 1496 | ] 1497 | ], 1498 | [ 1499 | [ 1500 | NaN 1501 | ] 1502 | ], 1503 | [ 1504 | [ 1505 | NaN 1506 | ] 1507 | ], 1508 | [ 1509 | [ 1510 | NaN 1511 | ] 1512 | ], 1513 | [ 1514 | [ 1515 | NaN 1516 | ] 1517 | ], 1518 | [ 1519 | [ 1520 | NaN 1521 | ] 1522 | ], 1523 | [ 1524 | [ 1525 | NaN 1526 | ] 1527 | ], 1528 | [ 1529 | [ 1530 | NaN 1531 | ] 1532 | ], 1533 | [ 1534 | [ 1535 | NaN 1536 | ] 1537 | ], 1538 | [ 1539 | [ 1540 | NaN 1541 | ] 1542 | ], 1543 | [ 1544 | [ 1545 | NaN 1546 | ] 1547 | ], 1548 | [ 1549 | [ 1550 | NaN 1551 | ] 1552 | ], 1553 | [ 1554 | [ 1555 | NaN 1556 | ] 1557 | ], 1558 | [ 1559 | [ 1560 | NaN 1561 | ] 1562 | ], 1563 | [ 1564 | [ 1565 | NaN 1566 | ] 1567 | ], 1568 | [ 1569 | [ 1570 | NaN 1571 | ] 1572 | ], 1573 | [ 1574 | [ 1575 | NaN 1576 | ] 1577 | ], 1578 | [ 1579 | [ 1580 | NaN 1581 | ] 1582 | ], 1583 | [ 1584 | [ 1585 | NaN 1586 | ] 1587 | ], 1588 | [ 1589 | [ 1590 | NaN 1591 | ] 1592 | ], 1593 | [ 1594 | [ 1595 | NaN 1596 | ] 1597 | ], 1598 | [ 1599 | [ 1600 | NaN 1601 | ] 1602 | ], 1603 | [ 1604 | [ 1605 | NaN 1606 | ] 1607 | ], 1608 | [ 1609 | [ 1610 | NaN 1611 | ] 1612 | ], 1613 | [ 1614 | [ 1615 | NaN 1616 | ] 1617 | ], 1618 | [ 1619 | [ 1620 | NaN 1621 | ] 1622 | ], 1623 | [ 1624 | [ 1625 | NaN 1626 | ] 1627 | ], 1628 | [ 1629 | [ 1630 | NaN 1631 | ] 1632 | ], 1633 | [ 1634 | [ 1635 | NaN 1636 | ] 1637 | ], 1638 | [ 1639 | [ 1640 | NaN 1641 | ] 1642 | ], 1643 | [ 1644 | [ 1645 | NaN 1646 | ] 1647 | ], 1648 | [ 1649 | [ 1650 | NaN 1651 | ] 1652 | ], 1653 | [ 1654 | [ 1655 | NaN 1656 | ] 1657 | ], 1658 | [ 1659 | [ 1660 | NaN 1661 | ] 1662 | ], 1663 | [ 1664 | [ 1665 | NaN 1666 | ] 1667 | ], 1668 | [ 1669 | [ 1670 | NaN 1671 | ] 1672 | ], 1673 | [ 1674 | [ 1675 | NaN 1676 | ] 1677 | ], 1678 | [ 1679 | [ 1680 | NaN 1681 | ] 1682 | ], 1683 | [ 1684 | [ 1685 | NaN 1686 | ] 1687 | ], 1688 | [ 1689 | [ 1690 | NaN 1691 | ] 1692 | ], 1693 | [ 1694 | [ 1695 | NaN 1696 | ] 1697 | ], 1698 | [ 1699 | [ 1700 | NaN 1701 | ] 1702 | ], 1703 | [ 1704 | [ 1705 | NaN 1706 | ] 1707 | ], 1708 | [ 1709 | [ 1710 | NaN 1711 | ] 1712 | ], 1713 | [ 1714 | [ 1715 | NaN 1716 | ] 1717 | ], 1718 | [ 1719 | [ 1720 | NaN 1721 | ] 1722 | ], 1723 | [ 1724 | [ 1725 | NaN 1726 | ] 1727 | ], 1728 | [ 1729 | [ 1730 | NaN 1731 | ] 1732 | ], 1733 | [ 1734 | [ 1735 | NaN 1736 | ] 1737 | ], 1738 | [ 1739 | [ 1740 | NaN 1741 | ] 1742 | ], 1743 | [ 1744 | [ 1745 | NaN 1746 | ] 1747 | ], 1748 | [ 1749 | [ 1750 | NaN 1751 | ] 1752 | ], 1753 | [ 1754 | [ 1755 | NaN 1756 | ] 1757 | ], 1758 | [ 1759 | [ 1760 | NaN 1761 | ] 1762 | ], 1763 | [ 1764 | [ 1765 | NaN 1766 | ] 1767 | ], 1768 | [ 1769 | [ 1770 | NaN 1771 | ] 1772 | ], 1773 | [ 1774 | [ 1775 | NaN 1776 | ] 1777 | ], 1778 | [ 1779 | [ 1780 | NaN 1781 | ] 1782 | ], 1783 | [ 1784 | [ 1785 | NaN 1786 | ] 1787 | ], 1788 | [ 1789 | [ 1790 | NaN 1791 | ] 1792 | ], 1793 | [ 1794 | [ 1795 | NaN 1796 | ] 1797 | ], 1798 | [ 1799 | [ 1800 | NaN 1801 | ] 1802 | ], 1803 | [ 1804 | [ 1805 | NaN 1806 | ] 1807 | ], 1808 | [ 1809 | [ 1810 | NaN 1811 | ] 1812 | ], 1813 | [ 1814 | [ 1815 | NaN 1816 | ] 1817 | ], 1818 | [ 1819 | [ 1820 | NaN 1821 | ] 1822 | ], 1823 | [ 1824 | [ 1825 | NaN 1826 | ] 1827 | ], 1828 | [ 1829 | [ 1830 | NaN 1831 | ] 1832 | ], 1833 | [ 1834 | [ 1835 | NaN 1836 | ] 1837 | ], 1838 | [ 1839 | [ 1840 | NaN 1841 | ] 1842 | ], 1843 | [ 1844 | [ 1845 | NaN 1846 | ] 1847 | ], 1848 | [ 1849 | [ 1850 | NaN 1851 | ] 1852 | ], 1853 | [ 1854 | [ 1855 | NaN 1856 | ] 1857 | ], 1858 | [ 1859 | [ 
1860 | NaN 1861 | ] 1862 | ], 1863 | [ 1864 | [ 1865 | NaN 1866 | ] 1867 | ], 1868 | [ 1869 | [ 1870 | NaN 1871 | ] 1872 | ], 1873 | [ 1874 | [ 1875 | NaN 1876 | ] 1877 | ], 1878 | [ 1879 | [ 1880 | NaN 1881 | ] 1882 | ], 1883 | [ 1884 | [ 1885 | NaN 1886 | ] 1887 | ], 1888 | [ 1889 | [ 1890 | NaN 1891 | ] 1892 | ], 1893 | [ 1894 | [ 1895 | NaN 1896 | ] 1897 | ], 1898 | [ 1899 | [ 1900 | NaN 1901 | ] 1902 | ], 1903 | [ 1904 | [ 1905 | NaN 1906 | ] 1907 | ], 1908 | [ 1909 | [ 1910 | NaN 1911 | ] 1912 | ], 1913 | [ 1914 | [ 1915 | NaN 1916 | ] 1917 | ], 1918 | [ 1919 | [ 1920 | NaN 1921 | ] 1922 | ], 1923 | [ 1924 | [ 1925 | NaN 1926 | ] 1927 | ], 1928 | [ 1929 | [ 1930 | NaN 1931 | ] 1932 | ], 1933 | [ 1934 | [ 1935 | NaN 1936 | ] 1937 | ], 1938 | [ 1939 | [ 1940 | NaN 1941 | ] 1942 | ], 1943 | [ 1944 | [ 1945 | NaN 1946 | ] 1947 | ], 1948 | [ 1949 | [ 1950 | NaN 1951 | ] 1952 | ], 1953 | [ 1954 | [ 1955 | NaN 1956 | ] 1957 | ], 1958 | [ 1959 | [ 1960 | NaN 1961 | ] 1962 | ], 1963 | [ 1964 | [ 1965 | NaN 1966 | ] 1967 | ], 1968 | [ 1969 | [ 1970 | NaN 1971 | ] 1972 | ], 1973 | [ 1974 | [ 1975 | NaN 1976 | ] 1977 | ], 1978 | [ 1979 | [ 1980 | NaN 1981 | ] 1982 | ], 1983 | [ 1984 | [ 1985 | NaN 1986 | ] 1987 | ], 1988 | [ 1989 | [ 1990 | NaN 1991 | ] 1992 | ], 1993 | [ 1994 | [ 1995 | NaN 1996 | ] 1997 | ], 1998 | [ 1999 | [ 2000 | NaN 2001 | ] 2002 | ], 2003 | [ 2004 | [ 2005 | NaN 2006 | ] 2007 | ], 2008 | [ 2009 | [ 2010 | NaN 2011 | ] 2012 | ], 2013 | [ 2014 | [ 2015 | NaN 2016 | ] 2017 | ], 2018 | [ 2019 | [ 2020 | NaN 2021 | ] 2022 | ], 2023 | [ 2024 | [ 2025 | NaN 2026 | ] 2027 | ], 2028 | [ 2029 | [ 2030 | NaN 2031 | ] 2032 | ], 2033 | [ 2034 | [ 2035 | NaN 2036 | ] 2037 | ], 2038 | [ 2039 | [ 2040 | NaN 2041 | ] 2042 | ], 2043 | [ 2044 | [ 2045 | NaN 2046 | ] 2047 | ], 2048 | [ 2049 | [ 2050 | NaN 2051 | ] 2052 | ], 2053 | [ 2054 | [ 2055 | NaN 2056 | ] 2057 | ], 2058 | [ 2059 | [ 2060 | NaN 2061 | ] 2062 | ], 2063 | [ 2064 | [ 2065 | NaN 2066 | ] 2067 | ], 2068 | [ 2069 | [ 2070 | NaN 2071 | ] 2072 | ], 2073 | [ 2074 | [ 2075 | NaN 2076 | ] 2077 | ], 2078 | [ 2079 | [ 2080 | NaN 2081 | ] 2082 | ], 2083 | [ 2084 | [ 2085 | NaN 2086 | ] 2087 | ], 2088 | [ 2089 | [ 2090 | NaN 2091 | ] 2092 | ], 2093 | [ 2094 | [ 2095 | NaN 2096 | ] 2097 | ], 2098 | [ 2099 | [ 2100 | NaN 2101 | ] 2102 | ], 2103 | [ 2104 | [ 2105 | NaN 2106 | ] 2107 | ], 2108 | [ 2109 | [ 2110 | NaN 2111 | ] 2112 | ], 2113 | [ 2114 | [ 2115 | NaN 2116 | ] 2117 | ], 2118 | [ 2119 | [ 2120 | NaN 2121 | ] 2122 | ], 2123 | [ 2124 | [ 2125 | NaN 2126 | ] 2127 | ], 2128 | [ 2129 | [ 2130 | NaN 2131 | ] 2132 | ], 2133 | [ 2134 | [ 2135 | NaN 2136 | ] 2137 | ], 2138 | [ 2139 | [ 2140 | NaN 2141 | ] 2142 | ], 2143 | [ 2144 | [ 2145 | NaN 2146 | ] 2147 | ], 2148 | [ 2149 | [ 2150 | NaN 2151 | ] 2152 | ], 2153 | [ 2154 | [ 2155 | NaN 2156 | ] 2157 | ], 2158 | [ 2159 | [ 2160 | NaN 2161 | ] 2162 | ], 2163 | [ 2164 | [ 2165 | NaN 2166 | ] 2167 | ], 2168 | [ 2169 | [ 2170 | NaN 2171 | ] 2172 | ], 2173 | [ 2174 | [ 2175 | NaN 2176 | ] 2177 | ], 2178 | [ 2179 | [ 2180 | NaN 2181 | ] 2182 | ], 2183 | [ 2184 | [ 2185 | NaN 2186 | ] 2187 | ], 2188 | [ 2189 | [ 2190 | NaN 2191 | ] 2192 | ], 2193 | [ 2194 | [ 2195 | NaN 2196 | ] 2197 | ], 2198 | [ 2199 | [ 2200 | NaN 2201 | ] 2202 | ], 2203 | [ 2204 | [ 2205 | NaN 2206 | ] 2207 | ], 2208 | [ 2209 | [ 2210 | NaN 2211 | ] 2212 | ], 2213 | [ 2214 | [ 2215 | NaN 2216 | ] 2217 | ], 2218 | [ 2219 | [ 2220 | NaN 2221 | ] 2222 | ], 2223 | [ 2224 | [ 2225 | NaN 2226 | ] 2227 | ], 2228 | [ 2229 | [ 
2230 | NaN 2231 | ] 2232 | ], 2233 | [ 2234 | [ 2235 | NaN 2236 | ] 2237 | ], 2238 | [ 2239 | [ 2240 | NaN 2241 | ] 2242 | ], 2243 | [ 2244 | [ 2245 | NaN 2246 | ] 2247 | ], 2248 | [ 2249 | [ 2250 | NaN 2251 | ] 2252 | ], 2253 | [ 2254 | [ 2255 | NaN 2256 | ] 2257 | ], 2258 | [ 2259 | [ 2260 | NaN 2261 | ] 2262 | ], 2263 | [ 2264 | [ 2265 | NaN 2266 | ] 2267 | ], 2268 | [ 2269 | [ 2270 | NaN 2271 | ] 2272 | ], 2273 | [ 2274 | [ 2275 | NaN 2276 | ] 2277 | ], 2278 | [ 2279 | [ 2280 | NaN 2281 | ] 2282 | ], 2283 | [ 2284 | [ 2285 | NaN 2286 | ] 2287 | ], 2288 | [ 2289 | [ 2290 | NaN 2291 | ] 2292 | ], 2293 | [ 2294 | [ 2295 | NaN 2296 | ] 2297 | ], 2298 | [ 2299 | [ 2300 | NaN 2301 | ] 2302 | ], 2303 | [ 2304 | [ 2305 | NaN 2306 | ] 2307 | ], 2308 | [ 2309 | [ 2310 | NaN 2311 | ] 2312 | ], 2313 | [ 2314 | [ 2315 | NaN 2316 | ] 2317 | ], 2318 | [ 2319 | [ 2320 | NaN 2321 | ] 2322 | ], 2323 | [ 2324 | [ 2325 | NaN 2326 | ] 2327 | ], 2328 | [ 2329 | [ 2330 | NaN 2331 | ] 2332 | ], 2333 | [ 2334 | [ 2335 | NaN 2336 | ] 2337 | ], 2338 | [ 2339 | [ 2340 | NaN 2341 | ] 2342 | ], 2343 | [ 2344 | [ 2345 | NaN 2346 | ] 2347 | ], 2348 | [ 2349 | [ 2350 | NaN 2351 | ] 2352 | ], 2353 | [ 2354 | [ 2355 | NaN 2356 | ] 2357 | ], 2358 | [ 2359 | [ 2360 | NaN 2361 | ] 2362 | ], 2363 | [ 2364 | [ 2365 | NaN 2366 | ] 2367 | ], 2368 | [ 2369 | [ 2370 | NaN 2371 | ] 2372 | ], 2373 | [ 2374 | [ 2375 | NaN 2376 | ] 2377 | ], 2378 | [ 2379 | [ 2380 | NaN 2381 | ] 2382 | ], 2383 | [ 2384 | [ 2385 | NaN 2386 | ] 2387 | ], 2388 | [ 2389 | [ 2390 | NaN 2391 | ] 2392 | ], 2393 | [ 2394 | [ 2395 | NaN 2396 | ] 2397 | ], 2398 | [ 2399 | [ 2400 | NaN 2401 | ] 2402 | ], 2403 | [ 2404 | [ 2405 | NaN 2406 | ] 2407 | ], 2408 | [ 2409 | [ 2410 | NaN 2411 | ] 2412 | ] 2413 | ], 2414 | "yhat": [ 2415 | [ 2416 | 2.887287, 2417 | 1.923436, 2418 | 4.7850269, 2419 | 1.5430167, 2420 | 2.6736792, 2421 | 1.7401035, 2422 | 3.3758665, 2423 | 2.1218526, 2424 | 2.6500242, 2425 | 3.2966602, 2426 | 4.2009119, 2427 | 1.4310048, 2428 | 4.8540287, 2429 | 3.4939618, 2430 | 2.8692886, 2431 | 2.5968792, 2432 | 2.0263377, 2433 | 2.5325332, 2434 | 2.2820785, 2435 | 4.2213933, 2436 | 2.2228, 2437 | 2.4114744, 2438 | 2.9423811, 2439 | 3.0795799, 2440 | 1.8167989, 2441 | 1.4512576, 2442 | 3.9122562, 2443 | 3.4126707, 2444 | 2.304231, 2445 | 0.71122812, 2446 | 2.2127585, 2447 | 1.9714833, 2448 | 3.0933081, 2449 | 2.4708058, 2450 | 3.7802449, 2451 | 2.7991853, 2452 | 2.5193914, 2453 | 1.9647379, 2454 | 2.4972656, 2455 | 3.7898902, 2456 | 2.0750024, 2457 | 5.2100353, 2458 | 1.5997638, 2459 | 2.7609026, 2460 | 3.2173407, 2461 | 2.6476147, 2462 | 3.0858755, 2463 | 3.106652, 2464 | 2.8029688, 2465 | 4.3469339, 2466 | 3.345174, 2467 | 3.5576444, 2468 | 2.4047148, 2469 | 3.7328322, 2470 | 4.3312527, 2471 | 2.2699671, 2472 | 2.5142442, 2473 | 4.0769142, 2474 | 0.40464258, 2475 | 3.1669282, 2476 | 4.0389596, 2477 | 1.1914261, 2478 | 3.7586409, 2479 | 2.4998872, 2480 | 0.65570684, 2481 | 2.8920823, 2482 | 1.4972123, 2483 | 3.4887787, 2484 | 2.4468215, 2485 | 4.0575075, 2486 | 2.1306821, 2487 | 2.9249089, 2488 | 2.892845, 2489 | 5.098822, 2490 | 2.6594621, 2491 | 2.4720949, 2492 | 1.7053832, 2493 | 3.214099, 2494 | 4.1262696, 2495 | 3.4015358, 2496 | 3.7655853, 2497 | 1.6468725, 2498 | 3.5738585, 2499 | 2.7798629, 2500 | 3.645189, 2501 | 1.9952807, 2502 | 4.8997386, 2503 | 3.2168608, 2504 | 3.4999704, 2505 | 2.3734089, 2506 | 1.9971158, 2507 | 2.2186898, 2508 | 3.2042777, 2509 | 3.536092, 2510 | 1.9250025, 2511 | 0.78214524, 2512 | 3.6469302, 2513 | 2.6776642, 2514 | 
2.8274358, 2515 | 0.7115782, 2516 | 2.8879562, 2517 | 3.9553138, 2518 | 3.4547382, 2519 | 5.0619275, 2520 | 3.0624605, 2521 | 3.0028424, 2522 | 2.5464462, 2523 | 3.3734825, 2524 | 2.186884, 2525 | 2.8984425, 2526 | 3.4457132, 2527 | 1.7248714, 2528 | 3.1078318, 2529 | 3.8522369, 2530 | 2.8431479, 2531 | 4.1888193, 2532 | 1.5277366, 2533 | 3.8660408, 2534 | 2.1066105, 2535 | 2.4510294, 2536 | 3.0014629, 2537 | 2.0985776, 2538 | 2.4944514, 2539 | 4.2318542, 2540 | 4.7581761, 2541 | 4.5381618, 2542 | 3.7946417, 2543 | 2.0930331, 2544 | 2.9088236, 2545 | 0.90801297, 2546 | 2.7318874, 2547 | 2.0180658, 2548 | 2.4060368, 2549 | 4.2251766, 2550 | 2.1664384, 2551 | 2.7729114, 2552 | 3.6441245, 2553 | 4.3901638, 2554 | 2.218241, 2555 | 0.17054201, 2556 | 3.3051992, 2557 | 3.4320711, 2558 | 2.5019285, 2559 | 3.1191784, 2560 | 2.9445662, 2561 | 4.5889308, 2562 | 1.1876853, 2563 | 1.5366716, 2564 | 2.6208023, 2565 | 2.1983178, 2566 | 2.9688998, 2567 | 2.5824523, 2568 | 3.2701496, 2569 | 2.3778831, 2570 | 2.7390326, 2571 | 1.9266674, 2572 | 1.6739233, 2573 | 2.7957954, 2574 | 3.2579315, 2575 | 1.4710137, 2576 | 1.850234, 2577 | 2.7887086, 2578 | 3.8311648, 2579 | 0.81591503, 2580 | 3.7311102, 2581 | 2.2472652, 2582 | 6.2592559, 2583 | 4.6553632, 2584 | 0.94849135, 2585 | 2.1408505, 2586 | 2.767983, 2587 | 3.992758, 2588 | 2.1161663, 2589 | 2.9395931, 2590 | 4.0669382, 2591 | 3.2116575, 2592 | 3.7648749, 2593 | 2.5507757, 2594 | 2.4836656, 2595 | 1.3792137, 2596 | 2.8861545, 2597 | 4.8155267, 2598 | 2.5734428, 2599 | 2.2165562, 2600 | 3.2759071, 2601 | 3.2824438, 2602 | 4.2115137, 2603 | 2.1770417, 2604 | 1.5499045, 2605 | 3.3128261, 2606 | 3.4763531, 2607 | 1.4348939, 2608 | 2.5712672, 2609 | 4.3635856, 2610 | 4.0394513, 2611 | 3.3959077, 2612 | 3.3718047, 2613 | 3.4513006, 2614 | 3.6219548, 2615 | 3.842496 2616 | ] 2617 | ] 2618 | } 2619 | -------------------------------------------------------------------------------- /output/heaviside/graphs.json: -------------------------------------------------------------------------------- 1 | { 2 | "exj_indivs": [ 3 | [ 4 | -2.0, 5 | -2.0, 6 | 1.0, 7 | 2.0, 8 | 3.0, 9 | 4.0 10 | ] 11 | ], 12 | "eyj_indivs": [ 13 | [ 14 | -3.0, 15 | -3.0, 16 | 0.0, 17 | 1.0, 18 | 2.0, 19 | 3.0 20 | ] 21 | ], 22 | "eyx_indivs": [ 23 | [ 24 | [ 25 | -2.0 26 | ] 27 | ], 28 | [ 29 | [ 30 | -2.0 31 | ] 32 | ], 33 | [ 34 | [ 35 | 1.0 36 | ] 37 | ], 38 | [ 39 | [ 40 | 2.0 41 | ] 42 | ], 43 | [ 44 | [ 45 | 3.0 46 | ] 47 | ], 48 | [ 49 | [ 50 | 4.0 51 | ] 52 | ] 53 | ], 54 | "eyy_indivs": [ 55 | [ 56 | [ 57 | NaN 58 | ] 59 | ], 60 | [ 61 | [ 62 | NaN 63 | ] 64 | ], 65 | [ 66 | [ 67 | NaN 68 | ] 69 | ], 70 | [ 71 | [ 72 | NaN 73 | ] 74 | ], 75 | [ 76 | [ 77 | NaN 78 | ] 79 | ], 80 | [ 81 | [ 82 | NaN 83 | ] 84 | ] 85 | ], 86 | "yhat": [ 87 | [ 88 | 0.0, 89 | 0.0, 90 | 3.0, 91 | 4.0, 92 | 5.0, 93 | 6.0 94 | ] 95 | ] 96 | } 97 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | with open("README.md", "r", encoding="utf-8") as fh: 4 | long_description = fh.read() 5 | 6 | setuptools.setup( 7 | name="causing", 8 | version="2.4.3", 9 | author="Dr. 
Holger Bartel", 10 | author_email="holger.bartel@realrate.ai", 11 | description="Causing: CAUSal INterpretation using Graphs", 12 | long_description=long_description, 13 | long_description_content_type="text/markdown", 14 | url="https://github.com/realrate/Causing", 15 | packages=setuptools.find_packages(), 16 | classifiers=[ 17 | "Programming Language :: Python :: 3", 18 | "License :: OSI Approved :: MIT License", 19 | "Operating System :: OS Independent", 20 | ], 21 | install_requires=[ 22 | "numpy~=1.23", 23 | "pandas~=1.3", 24 | "scipy~=1.9", 25 | "sympy~=1.5", 26 | "networkx~=2.7", 27 | "pre-commit", # TODO: move to dev-requirements 28 | ], 29 | python_requires=">=3.9", 30 | setup_requires=["wheel"], 31 | ) 32 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/realrate/Causing/dd40a17061384763eaca7cfa6298dc25650aa4c4/tests/__init__.py -------------------------------------------------------------------------------- /tests/examples/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/realrate/Causing/dd40a17061384763eaca7cfa6298dc25650aa4c4/tests/examples/__init__.py -------------------------------------------------------------------------------- /tests/examples/models.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import numpy as np 3 | 4 | from causing.examples.models import example, education 5 | 6 | 7 | class TestExampleModels(unittest.TestCase): 8 | def test_example(self): 9 | """Checks coefficient matrices for direct, total and final effects of example.""" 10 | m, xdat, _, _ = example() 11 | generated_theo = m.theo(xdat.mean(axis=1)) 12 | 13 | # direct effects 14 | mx_theo = np.array([[1, "NaN"], ["NaN", 1], ["NaN", "NaN"]]).astype(np.float64) 15 | my_theo = np.array( 16 | [["NaN", "NaN", "NaN"], [11.92914837, "NaN", "NaN"], [1, 1, "NaN"]] 17 | ).astype(np.float64) 18 | 19 | # total effects 20 | ex_theo = np.array([[1, "NaN"], [11.92914837, 1], [12.92914837, 1]]).astype( 21 | np.float64 22 | ) 23 | ey_theo = np.array( 24 | [[1, "NaN", "NaN"], [11.92914837, 1, "NaN"], [12.92914837, 1, 1]] 25 | ).astype(np.float64) 26 | 27 | # final effects 28 | exj_theo = np.array([12.92914837, 1]).astype(np.float64) 29 | eyj_theo = np.array([12.92914837, 1, 1]).astype(np.float64) 30 | eyx_theo = np.array([[12.92914837, "NaN"], ["NaN", 1], ["NaN", "NaN"]]).astype( 31 | np.float64 32 | ) 33 | eyy_theo = np.array( 34 | [["NaN", "NaN", "NaN"], [11.92914837, "NaN", "NaN"], [1, 1, "NaN"]] 35 | ).astype(np.float64) 36 | 37 | expected_theo = dict( 38 | mx_theo=mx_theo, 39 | my_theo=my_theo, 40 | ex_theo=ex_theo, 41 | ey_theo=ey_theo, 42 | exj_theo=exj_theo, 43 | eyj_theo=eyj_theo, 44 | eyx_theo=eyx_theo, 45 | eyy_theo=eyy_theo, 46 | ) 47 | 48 | for k in expected_theo.keys(): 49 | self.assertIsNone( 50 | np.testing.assert_array_almost_equal( 51 | generated_theo[k], expected_theo[k] 52 | ) 53 | ) 54 | 55 | def test_education(self): 56 | """Checks coefficient matrices for direct, total and final effects of education example.""" 57 | m, xdat, _, _ = education() 58 | generated_theo = m.theo(xdat.mean(axis=1)) 59 | 60 | # direct effects 61 | mx_theo = np.array( 62 | [ 63 | [0.1, 0.1, -0.1, -0.5, "NaN", "NaN"], 64 | ["NaN", "NaN", "NaN", "NaN", "NaN", 1.0], 65 | ["NaN", "NaN", "NaN", "NaN", 1, "NaN"], 66 | ] 67 | 
).astype(np.float64) 68 | my_theo = np.array( 69 | [["NaN", "NaN", "NaN"], [-1, "NaN", "NaN"], [1.0, 0.5, "NaN"]] 70 | ).astype(np.float64) 71 | 72 | # total effects 73 | ex_theo = np.array( 74 | [ 75 | [0.1, 0.1, -0.1, -0.5, "NaN", "NaN"], 76 | [-0.1, -0.1, 0.1, 0.5, "NaN", 1], 77 | [0.05, 0.05, -0.05, -0.25, 1, 0.5], 78 | ] 79 | ).astype(np.float64) 80 | ey_theo = np.array([[1, "NaN", "NaN"], [-1, 1, "NaN"], [0.5, 0.5, 1]]).astype( 81 | np.float64 82 | ) 83 | 84 | # final effects 85 | exj_theo = np.array([0.05, 0.05, -0.05, -0.25, 1, 0.5]).astype(np.float64) 86 | eyj_theo = np.array([0.5, 0.5, 1]).astype(np.float64) 87 | eyx_theo = np.array( 88 | [ 89 | [0.05, 0.05, -0.05, -0.25, "NaN", "NaN"], 90 | ["NaN", "NaN", "NaN", "NaN", "NaN", 0.5], 91 | ["NaN", "NaN", "NaN", "NaN", 1, "NaN"], 92 | ] 93 | ).astype(np.float64) 94 | eyy_theo = np.array( 95 | [["NaN", "NaN", "NaN"], [-0.5, "NaN", "NaN"], [1, 0.5, "NaN"]] 96 | ).astype(np.float64) 97 | 98 | expected_theo = dict( 99 | mx_theo=mx_theo, 100 | my_theo=my_theo, 101 | ex_theo=ex_theo, 102 | ey_theo=ey_theo, 103 | exj_theo=exj_theo, 104 | eyj_theo=eyj_theo, 105 | eyx_theo=eyx_theo, 106 | eyy_theo=eyy_theo, 107 | ) 108 | for k in expected_theo.keys(): 109 | self.assertIsNone( 110 | np.testing.assert_array_almost_equal( 111 | generated_theo[k], expected_theo[k] 112 | ) 113 | ) 114 | -------------------------------------------------------------------------------- /tests/test_estimate.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import numpy as np 4 | from sympy import symbols 5 | 6 | import causing.bias 7 | from causing.model import Model 8 | 9 | 10 | class TestBias(unittest.TestCase): 11 | X1, X2, Y1, Y2, Y3 = symbols(["X1", "X2", "Y1", "Y2", "Y3"]) 12 | equations = ( 13 | X1, 14 | X2 + 2 * Y1, 15 | Y1 + Y2, 16 | ) 17 | m = Model( 18 | xvars=[X1, X2], 19 | yvars=[Y1, Y2, Y3], 20 | equations=equations, 21 | final_var=Y3, 22 | ) 23 | xdat = np.array( 24 | [ 25 | [1, 1, 1.01, 1.02, 0.99], 26 | [1, 1.01, 1, 1.03, 0.98], 27 | ] 28 | ) 29 | ymvars = [Y3] 30 | 31 | def test_no_bias(self): 32 | ymdat = np.array([[4, 4, 4, 3.9, 4.01]]) 33 | biases, biases_std = causing.bias.estimate_biases( 34 | self.m, self.xdat, self.ymvars, ymdat 35 | ) 36 | self.assertAlmostEqual(biases[0], 0, places=1) 37 | self.assertAlmostEqual(biases[1], 0, places=1) 38 | self.assertAlmostEqual(biases[2], 0, places=1) 39 | 40 | def test_bias(self): 41 | ymdat = np.array([[5, 5, 5, 4.9, 5.01]]) 42 | biases, biases_std = causing.bias.estimate_biases( 43 | self.m, self.xdat, self.ymvars, ymdat 44 | ) 45 | self.assertAlmostEqual(biases[0], 0.32, places=2) 46 | self.assertAlmostEqual(biases[1], 0.966, places=3) 47 | self.assertAlmostEqual(biases[2], 0.966, places=3) 48 | 49 | 50 | class TestBiasInvariant(unittest.TestCase): 51 | xdat = np.array( 52 | [ 53 | [1, 1, 1.01, 1.02, 0.99], 54 | [1, 1.01, 1, 1.03, 0.98], 55 | ] 56 | ) 57 | 58 | def test_bias_invariant(self): 59 | X1, X2, Y1, Y2, Y3 = symbols(["X1", "X2", "Y1", "Y2", "Y3"]) 60 | for bias in (0, 10, 100): 61 | with self.subTest(bias=bias): 62 | equations = ( 63 | X1, 64 | bias + X2 + 2 * Y1, 65 | Y1 + Y2, 66 | ) 67 | m = Model( 68 | xvars=[X1, X2], 69 | yvars=[Y1, Y2, Y3], 70 | equations=equations, 71 | final_var=Y3, 72 | ) 73 | ymvars = [Y3] 74 | ymdat = np.array([[4, 4, 4, 3.9, 4.01]]) 75 | biases, biases_std = causing.bias.estimate_biases( 76 | m, self.xdat, ymvars, ymdat 77 | ) 78 | self.assertAlmostEqual(biases[1], -bias, places=1) 79 | 80 | def 
test_bias_invariant_quotient(self): 81 | """This simple estimation fails with the SLSQP method""" 82 | X1, X2, Y1, Y2, Y3 = symbols(["X1", "X2", "Y1", "Y2", "Y3"]) 83 | for bias in (0, 5, 20): 84 | with self.subTest(bias=bias): 85 | equations = ( 86 | bias + 3, 87 | 1 / Y1, 88 | ) 89 | m = Model( 90 | xvars=[X1, X2], 91 | yvars=[Y1, Y2], 92 | equations=equations, 93 | final_var=Y2, 94 | ) 95 | ymvars = [Y2] 96 | ymdat = np.array([[1 / 3, 1 / 3, 1 / 3, 1 / 2.9, 1 / 3.01]]) 97 | biases, biases_std = causing.bias.estimate_biases( 98 | m, self.xdat, ymvars, ymdat 99 | ) 100 | self.assertAlmostEqual(biases[0], -bias, places=1) 101 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from causing.utils import round_sig_recursive 4 | 5 | 6 | class TestRoundSigRecursive(unittest.TestCase): 7 | def test_recursive(self) -> None: 8 | orig = { 9 | "a_list": [111.0, 0.111], 10 | "a_tuple": (111.0, 0.111), 11 | "a_dict": {"a": 111.0, "b": 0.111}, 12 | } 13 | rounded = { 14 | "a_list": [100, 0.1], 15 | "a_tuple": (100, 0.1), 16 | "a_dict": {"a": 100, "b": 0.1}, 17 | } 18 | self.assertEqual(round_sig_recursive(orig, 1), rounded) 19 | --------------------------------------------------------------------------------
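
The test modules above are the clearest usage guide to the package's entry points: they build a `causing.model.Model` from sympy equations, evaluate theoretical direct/total/final effects with `Model.theo`, estimate biases with `causing.bias.estimate_biases`, and round nested results with `causing.utils.round_sig_recursive`. The sketch below combines exactly those calls into one minimal, self-contained example. The equations and data mirror the fixtures in `tests/test_estimate.py` and are illustrative only; anything not exercised by the tests (for instance, how the `graphs.json` outputs shown earlier are produced) is deliberately not assumed here.

```python
# Minimal usage sketch assembled from the calls in tests/ above.
# Equations and data are illustrative, not taken from the repository's examples.
import numpy as np
from sympy import symbols

import causing.bias
from causing.model import Model
from causing.utils import round_sig_recursive

# Two exogenous (X) and three endogenous (Y) variables, as in tests/test_estimate.py.
X1, X2, Y1, Y2, Y3 = symbols(["X1", "X2", "Y1", "Y2", "Y3"])
m = Model(
    xvars=[X1, X2],
    yvars=[Y1, Y2, Y3],
    equations=(
        X1,           # Y1 = X1
        X2 + 2 * Y1,  # Y2 = X2 + 2*Y1
        Y1 + Y2,      # Y3 = Y1 + Y2 (the final variable)
    ),
    final_var=Y3,
)

# Exogenous data: one row per xvar, one column per observation.
xdat = np.array(
    [
        [1, 1, 1.01, 1.02, 0.99],
        [1, 1.01, 1, 1.03, 0.98],
    ]
)

# Theoretical effects at the sample mean, as checked in tests/examples/models.py;
# the result is a dict with keys such as "exj_theo" and "eyx_theo".
theo = m.theo(xdat.mean(axis=1))
final_effects = np.asarray(theo["exj_theo"]).tolist()
print(round_sig_recursive(final_effects, 3))

# Bias estimation against measured data for the final variable,
# following tests/test_estimate.py (one row per measured yvar).
ymvars = [Y3]
ymdat = np.array([[4, 4, 4, 3.9, 4.01]])
biases, biases_std = causing.bias.estimate_biases(m, xdat, ymvars, ymdat)
print("biases:", biases, "std:", biases_std)
```

As in the tests, data matrices follow the convention of one row per variable and one column per observation, so `xdat.mean(axis=1)` yields the per-variable sample means at which the effect matrices are evaluated.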