├── .DS_Store
├── .gitattributes
├── .gitignore
├── .ipynb_checkpoints
│   └── README-checkpoint.md
├── README.md
└── lectures
    ├── .DS_Store
    ├── CH-1-Introduction-to-Causality.ipynb
    ├── CH-2-Activity-Bias.ipynb
    ├── CH-2-Ladder-of-Causality.ipynb
    ├── CH-3-Graphical-Causal-Models.ipynb
    ├── CH-4-Structural-Causal-Models.ipynb
    ├── CH-5-Causal-Model-Discovery.ipynb
    ├── Counterfactual_DAG
    ├── Counterfactual_DAG.png
    ├── causal_model_dag
    ├── causal_model_dag.pdf
    ├── causal_model_dag.png
    ├── chain_dag.png
    ├── collider_dag.png
    ├── data
    │   ├── Simpson_paradox_binary_example.csv
    │   ├── dyspnoea.csv
    │   ├── online_classroom.csv
    │   └── smoke_dataset.pkl
    ├── fork_dag.png
    ├── healthcare_dag
    ├── healthcare_dag.pdf
    ├── img
    │   ├── .DS_Store
    │   ├── Book_of_why.jpg
    │   ├── What_if.jpg
    │   ├── causality_intro.jpg
    │   ├── causality_intro_image.png
    │   ├── ch1
    │   │   ├── .DS_Store
    │   │   ├── Aristotle_and_causal_thinking.png
    │   │   ├── Beuchet_chair_a.png
    │   │   ├── Beuchet_chair_b.png
    │   │   ├── Causal_Ladder.png
    │   │   ├── Causal_Paradigm.png
    │   │   ├── Causality_History.png
    │   │   ├── Cigarette_Commercials.png
    │   │   ├── Courtroom.png
    │   │   ├── DataGeneration.gif
    │   │   ├── David_Hume_theory_of_causality_18th_century.png
    │   │   ├── ML_Animal_Abilities.png
    │   │   ├── Math_Learning.jpeg
    │   │   ├── Oversimplified-model.png
    │   │   ├── Plato_Cave.jpeg
    │   │   ├── Spurious_Correlations_Muzzarella.png
    │   │   ├── Stat_Paradigm.png
    │   │   ├── baby_learning_brain.png
    │   │   ├── causality_intro.jpg
    │   │   ├── chatgpt-icon.png.sb-35134b4b-g2oqmo
    │   │   ├── little-sick-boy.jpeg
    │   │   ├── misinformation.png
    │   │   └── normal-model.png
    │   ├── ch2
    │   │   ├── .DS_Store
    │   │   ├── Earnings_Education_US_2023.png
    │   │   ├── Freia-Melkesjokolade.png
    │   │   ├── Freia_melkesjokolade_2.png
    │   │   ├── Intervention_1.png
    │   │   ├── Intervention_2.png
    │   │   ├── Intervention_3.png
    │   │   ├── Intervention_4.png
    │   │   ├── Obser2Interven.png
    │   │   ├── Observation to Intervention to Counterfactual.pptx
    │   │   ├── Observation_Intervention_Counterfactual.png
    │   │   ├── Pearls-Ladder-of-Causation.png
    │   │   ├── magician.png
    │   │   └── randomized-controlled-trial.png
    │   ├── ch3
    │   │   ├── .DS_Store
    │   │   ├── Berkson_Covid_fracture.png
    │   │   ├── Berkson_Covid_fracture_hospital.png
    │   │   ├── Berkson_handsome_men.png
    │   │   ├── Berkson_paradox_Covid.png
    │   │   ├── Berkson_paradox_Covid_plot.png
    │   │   ├── Causal_Model_Climate.png
    │   │   ├── Causal_Model_Disease_Categories.png
    │   │   ├── DAGs_PDFs.drawio
    │   │   ├── DAGs_PDFs_conditionallyDep.png
    │   │   ├── DAGs_PDFs_conditionallyIndep.png
    │   │   ├── DAGs_PDFs_marginallyDep.png
    │   │   ├── DAGs_PDFs_marginallyIndep.png
    │   │   ├── Fork_paradox_fire.png
    │   │   ├── Fork_paradox_fire_plot.png
    │   │   ├── IceCream-Sunburn.png
    │   │   ├── Italian-man.jpeg
    │   │   ├── Markov_blanket.png
    │   │   ├── Markov_equivalent.png
    │   │   ├── Simpson_firefighter_Injures.png
    │   │   ├── Simpson_firefighter_Injures_severity.png
    │   │   ├── emperor-penguins-family.png
    │   │   ├── graph_Dsep_example.drawio
    │   │   ├── graph_Dsep_example.pdf
    │   │   ├── graph_Dsep_example_case0.png
    │   │   ├── graph_Dsep_example_case1.png
    │   │   ├── graph_Dsep_example_case2.png
    │   │   ├── graph_Dsep_example_case3.png
    │   │   ├── graph_Dsep_example_case4.png
    │   │   ├── graph_Dsep_example_case5.png
    │   │   ├── markovEQ_example.drawio
    │   │   └── markovEQ_example.png
    │   ├── ch4
    │   │   ├── .DS_Store
    │   │   ├── Alexander_mosaic.jpeg
    │   │   ├── Causal-Graphs-Advantages.png
    │   │   ├── Counterfactuals_notation.png
    │   │   ├── Exp_Predictors_Intervention_Targets.png
    │   │   ├── Graph-SEM-Excluded.png
    │   │   ├── Graph-SEM-Independence.png
    │   │   ├── Graph-SEM.png
    │   │   ├── Grpah-SEM-Intervene.png
    │   │   ├── Kidney-stones-graph.png
    │   │   ├── Kidney-stones-xray.png
    │   │   ├── Kidney-stones.png
    │   │   ├── SEM-Observe-Colider.png
    │   │   ├── SEM-Observe-Confounder.png
    │   │   ├── SEM-Observe-Direct-Effects.png
    │   │   ├── SEM-Observe-Independ-blocked.png
    │   │   ├── SEM-Observe-Independ-condition-collider.png
    │   │   ├── SEM-Observe-Independ-condition.png
    │   │   ├── SEM-Observe-Independ-nopath.png
    │   │   ├── SEM-Observe-Independent.png
    │   │   ├── SEM-Observe-Share-Cause.png
    │   │   ├── butterBeer_happiness.ai
    │   │   ├── butterBeer_happiness.png
    │   │   └── do-operator.png
    │   ├── ch5
    │   │   ├── .DS_Store
    │   │   └── kronbar.jpg
    │   ├── ch6
    │   │   ├── .DS_Store
    │   │   ├── PC-Method.png
    │   │   ├── Trick1.png
    │   │   ├── causal_structure_comparison.png
    │   │   ├── dyspnoea.jpeg
    │   │   └── independence_tests.png
    │   ├── ch7
    │   │   └── Hotel.png
    │   └── elements_of_causal_inference_book.jpg
    ├── kidney_treatment_dag
    ├── kidney_treatment_dag.png
    ├── kronbar_scm_dag.png
    ├── requirements.txt
    ├── scm_example_dag
    ├── scm_example_dag.png
    └── utils.py
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/.DS_Store
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | lectures/__pycache__
2 | lectures/.ipynb_checkpoints
3 | lectures/myscript
4 | lectures/img/causality_intro_image.ai
5 | notes.ipynb
6 | assignments
7 | lectures/img/raw_img
8 |
9 | lectures/data/.DS_Store
10 | lectures/img/ch7/.DS_Store
11 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/README-checkpoint.md:
--------------------------------------------------------------------------------
1 | # Applied Causal Inference Course
2 |
3 | 
4 |
5 | This course offers a comprehensive overview of applied causal inference, focusing on developing a deep understanding of how to analyze and model cause-and-effect relationships in various domains.
6 |
7 | The course begins with an introduction to the foundations of causal inference, including core concepts like correlation, association, and the limitations of traditional statistical methods. It then progresses to more advanced topics, such as interventions, counterfactuals, and graphical causal models like Directed Acyclic Graphs (DAGs) and Structural Causal Models (SCMs). Practical examples, real-world case studies, and hands-on activities are used throughout the course to solidify learning.
8 |
9 | By the end of the course, students will be able to build, interpret, and analyze causal models to address a wide range of scientific and real-world problems using modern data science techniques.
10 |
11 | This is an ongoing course, so keep track of updates and feel free to share your feedback.
12 |
13 | Thanks,
14 | [Reza Arghandeh](https://www.hvl.no/en/employee/?user=Reza.Arghandeh)
15 |
16 |
17 | ---
18 |
19 | ### Learning Objectives:
20 |
21 | - Develop an understanding of how to use causal inference to move beyond correlation and address cause-and-effect relationships in data.
22 | - Learn to model causal structures using Directed Acyclic Graphs (DAGs) and apply them to real-world scenarios.
23 | - Understand the transition from observational data to making causal claims through interventions and counterfactual analysis.
24 | - Gain practical experience in using Structural Causal Models (SCMs) to formalize the data generation process and investigate causal effects.
25 | - Explore data-driven causal discovery methods and learn how to discover causal relationships from observational data using modern algorithms.
26 |
27 | ---
28 |
29 | # Lectures
30 |
31 | | | **Chapter** | **Description** |
32 | |---|---------------------------------|---------------------------|
33 | | | 1 - [Introduction to Causality](./lectures/CH-1-Introduction-to-Causality.ipynb) | Overview of causality, correlation vs. causation, and the role of observational data in making causal claims. |
34 | | | 2 - [Ladder of Causality](./lectures/CH-2-Ladder-of-Causality.ipynb) | Introduction to Judea Pearl’s Ladder of Causality: from associations to interventions and counterfactuals. [Activity](./lectures/CH-2-Activity-Bias.ipynb) |
35 | | | 3 - [Graphical Causal Models](./lectures/CH-3-Graphical-Causal-Models.ipynb) | Learning how to represent and analyze causal relationships using Directed Acyclic Graphs (DAGs) and their role in identifying independence and dependence relationships. |
36 | | | 4 - [Structural Causal Models](./lectures/CH-4-Structural-Causal-Models.ipynb) | Introduction to Structural Causal Models (SCMs), including how they capture the data generation process and formalize causal relationships. |
37 | | | 5 - [Causal Model Discovery from Data](./lectures/CH-5-Causal-Model-Discovery.ipynb) | Practical approaches to discovering causal models from observational data, including an overview of constraint-based and score-based methods. |
38 |
39 | ---
40 |
41 | ## Suggested Python Libraries
42 |
43 | - [DoWhy](https://py-why.github.io/dowhy/index.html): A Python library that provides several tools for causal inference, modeling causal assumptions, and validating them. It is user-friendly and widely used for causal inference tasks, including treatment effect estimation and counterfactual analysis.
44 |
45 | - [pgmpy](https://pgmpy.org): Python library for Probabilistic Graphical Models, supporting structure learning, parameter estimation, inference, and causal discovery. It's a more advanced library for those interested in Bayesian networks and probabilistic models.
46 |
47 | - [bnlearn](https://erdogant.github.io/bnlearn/pages/html/index.html): A library for learning the graphical structure of Bayesian networks in Python. It builds on pgmpy but with a simpler and more user-friendly interface, making it a good starting point for Bayesian network tasks.
48 |
49 | - [gCastle](https://github.com/huawei-noah/trustworthyAI/tree/master/gcastle): A powerful library for causal structure learning that supports a variety of algorithms, including constraint-based and score-based methods for discovering causal graphs from observational data.
50 |
51 | - [EconML](https://github.com/microsoft/EconML): Developed by Microsoft, this library is designed for estimating heterogeneous treatment effects using machine learning techniques. It combines econometrics and machine learning, offering advanced models like Double Machine Learning (DML) and Targeted Regularized Learning (TRL).
52 |
53 | - [CausalNex](https://causalnex.readthedocs.io/en/latest/): A library focusing on causal structure learning, particularly for Bayesian networks. It offers an intuitive API for building and visualizing causal graphs and provides support for interventions and counterfactual queries.
54 |
55 |
56 | ---
57 |
58 | ## Suggested Books
59 |
60 | All of these books are open access.
61 |
62 | - **[The Effect: An Introduction to Research Design and Causality](https://theeffectbook.net)** by Nick Huntington-Klein (2023) - A beginner-friendly, open-access book on causality.
63 | - **[Causal Inference, The Mixtape](https://mixtape.scunning.com)** by Scott Cunningham (2023) - A well-written introduction to causal inference.
64 | - **[The Elements of Causal Inference](https://mitpress.mit.edu/books/elements-causal-inference)** by Jonas Peters et al. (2017) - A technical book on causal inference in the context of machine learning.
65 | - **[Applied Causal Inference Powered by ML and AI](https://www.causalml-book.org)** by Victor Chernozhukov et al. (2024) - A comprehensive technical book merging causal inference with modern ML/AI techniques.
66 | - **[Introduction to Causal Inference](https://www.bradyneal.com/Introduction_to_Causal_Inference-Dec17_2020-Neal.pdf)** by Brady Neal (2020) - A structured, open-access textbook explaining the fundamentals of causal inference.
67 | - **[Causal Inference: What If?](https://www.hsph.harvard.edu/miguel-hernan/wp-content/uploads/sites/1268/2024/04/hernanrobins_WhatIf_26apr24.pdf)** by Miguel Hernan and Jamie Robins (2024) - A comprehensive textbook on causal inference with an academic focus.
68 |
69 | ---
70 |
71 | ## Acknowledgments
72 |
73 | This course was developed with partial support from the RCN-INTPART DTRF Project.
74 | https://www.bigdata.vestforsk.no/ongoing/intpart-dtrf
75 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Applied Causal Inference Course
2 |
3 | 
4 |
5 | This course offers a comprehensive overview of applied causal inference, focusing on developing a deep understanding of how to analyze and model cause-and-effect relationships in various domains.
6 |
7 | The course begins with an introduction to the foundations of causal inference, including core concepts like correlation, association, and the limitations of traditional statistical methods. It then progresses to more advanced topics, such as interventions, counterfactuals, and graphical causal models like Directed Acyclic Graphs (DAGs) and Structural Causal Models (SCMs). Practical examples, real-world case studies, and hands-on activities are used throughout the course to solidify learning.
8 |
9 | By the end of the course, students will be able to build, interpret, and analyze causal models to address a wide range of scientific and real-world problems using modern data science techniques.
10 |
11 | This is an ongoing course, so keep track of updates and feel free to share your feedback.
12 |
13 | Thanks,
14 | [Reza Arghandeh](https://www.hvl.no/en/employee/?user=Reza.Arghandeh)
15 |
16 |
17 | ---
18 |
19 | ### Learning Objectives:
20 |
21 | - Develop an understanding of how to use causal inference to move beyond correlation and address cause-and-effect relationships in data.
22 | - Learn to model causal structures using Directed Acyclic Graphs (DAGs) and apply them to real-world scenarios.
23 | - Understand the transition from observational data to making causal claims through interventions and counterfactual analysis.
24 | - Gain practical experience in using Structural Causal Models (SCMs) to formalize the data generation process and investigate causal effects.
25 | - Explore data-driven causal discovery methods and learn how to discover causal relationships from observational data using modern algorithms.
26 |
27 | ---
28 |
29 | # Lectures
30 |
31 | | | **Chapter** | **Description** |
32 | |---|---------------------------------|---------------------------|
33 | | | 1 - [Introduction to Causality](./lectures/CH-1-Introduction-to-Causality.ipynb) | Overview of causality, correlation vs. causation, and the role of observational data in making causal claims. |
34 | | | 2 - [Ladder of Causality](./lectures/CH-2-Ladder-of-Causality.ipynb) | Introduction to Judea Pearl’s Ladder of Causality: from associations to interventions and counterfactuals. [Activity](./lectures/CH-2-Activity-Bias.ipynb) |
35 | | | 3 - [Graphical Causal Models](./lectures/CH-3-Graphical-Causal-Models.ipynb) | Learning how to represent and analyze causal relationships using Directed Acyclic Graphs (DAGs) and their role in identifying independence and dependence relationships. |
36 | | | 4 - [Structural Causal Models](./lectures/CH-4-Structural-Causal-Models.ipynb) | Introduction to Structural Causal Models (SCMs), including how they capture the data generation process and formalize causal relationships. |
37 | | | 5 - [Causal Model Discovery from Data](./lectures/CH-5-Causal-Model-Discovery.ipynb) | Practical approaches to discovering causal models from observational data, including an overview of constraint-based and score-based methods. |
38 |
39 | ---
40 |
41 | ## Suggested Python Libraries
42 |
43 | - [DoWhy](https://py-why.github.io/dowhy/index.html): A Python library that provides several tools for causal inference, modeling causal assumptions, and validating them. It is user-friendly and widely used for causal inference tasks, including treatment effect estimation and counterfactual analysis.
44 |
45 | - [pgmpy](https://pgmpy.org): Python library for Probabilistic Graphical Models, supporting structure learning, parameter estimation, inference, and causal discovery. It's a more advanced library for those interested in Bayesian networks and probabilistic models.
46 |
47 | - [bnlearn](https://erdogant.github.io/bnlearn/pages/html/index.html): A library for learning the graphical structure of Bayesian networks in Python. It builds on pgmpy but with a simpler and more user-friendly interface, making it a good starting point for Bayesian network tasks.
48 |
49 | - [gCastle](https://github.com/huawei-noah/trustworthyAI/tree/master/gcastle): A powerful library for causal structure learning that supports a variety of algorithms, including constraint-based and score-based methods for discovering causal graphs from observational data.
50 |
51 | - [EconML](https://github.com/microsoft/EconML): Developed by Microsoft, this library is designed for estimating heterogeneous treatment effects using machine learning techniques. It combines econometrics and machine learning, offering advanced models like Double Machine Learning (DML) and Targeted Regularized Learning (TRL).
52 |
53 | - [CausalNex](https://causalnex.readthedocs.io/en/latest/): A library focusing on causal structure learning, particularly for Bayesian networks. It offers an intuitive API for building and visualizing causal graphs and provides support for interventions and counterfactual queries.
54 |
55 |
56 | ---
57 |
58 | ## Suggested Books
59 |
60 | All of these books are open access.
61 |
62 | - **[The Effect: An Introduction to Research Design and Causality](https://theeffectbook.net)** by Nick Huntington-Klein (2023) - A beginner-friendly, open-access book on causality.
63 | - **[Causal Inference, The Mixtape](https://mixtape.scunning.com)** by Scott Cunningham (2023) - A well-written introduction to causal inference.
64 | - **[The Elements of Causal Inference](https://mitpress.mit.edu/books/elements-causal-inference)** by Jonas Peters et al. (2017) - A technical book on causal inference in the context of machine learning.
65 | - **[Applied Causal Inference Powered by ML and AI](https://www.causalml-book.org)** by Victor Chernozhukov et al. (2024) - A comprehensive technical book merging causal inference with modern ML/AI techniques.
66 | - **[Introduction to Causal Inference](https://www.bradyneal.com/Introduction_to_Causal_Inference-Dec17_2020-Neal.pdf)** by Brady Neal (2020) - A structured, open-access textbook explaining the fundamentals of causal inference.
67 | - **[Causal Inference: What If?](https://www.hsph.harvard.edu/miguel-hernan/wp-content/uploads/sites/1268/2024/04/hernanrobins_WhatIf_26apr24.pdf)** by Miguel Hernan and Jamie Robins (2024) - A comprehensive textbook on causal inference with an academic focus.
68 |
69 | ---
70 |
71 | ## Acknowledgments
72 |
73 | This course was developed with partial support from the RCN-INTPART DTRF Project.
74 | https://www.bigdata.vestforsk.no/ongoing/intpart-dtrf
75 |
--------------------------------------------------------------------------------
/lectures/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/.DS_Store
--------------------------------------------------------------------------------
/lectures/CH-1-Introduction-to-Causality.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Chapter 1 - Introduction to Causality\n",
8 | "\n",
9 | "## Key Points Learned in the Chapter:\n",
10 | "\n",
11 | "### 1. Difference Between Causal and Statistical Inference:\n",
12 | "* The chapter highlights how causal inference differs from statistical inference by focusing on understanding cause-and-effect relationships rather than just identifying correlations.\n",
13 | "\n",
14 | "### 2. Historical Development of Causal Thinking:\n",
15 | "* The chapter covers the evolution of causal thinking, from **Aristotle's Four Causes** to **David Hume's empiricism**, and their impact on modern causal inference methods.\n",
16 | "\n",
17 | "### 3. Limitations of Observational Data:\n",
18 | "* It emphasizes the limits of using observational data alone to infer causal relationships, particularly in the presence of confounding variables.\n",
19 | "\n",
20 | "### 4. Importance of Interventions:\n",
21 | "* The role of **interventions** in distinguishing between correlation and causation is explored, with references to **Randomized Controlled Trials (RCTs)** as the gold standard for experimental design.\n",
22 | "\n",
23 | "### 5. Ladder of Causality:\n",
24 | "* The chapter introducesthe Judea Peral's **Ladder of Causality** and how we move from observations, to intervention to counterfactual."
25 | ]
26 | },
27 | {
28 | "cell_type": "markdown",
29 | "metadata": {
30 | "jp-MarkdownHeadingCollapsed": true
31 | },
32 | "source": [
33 | "\n",
34 | "## 1.1. Introduction\n",
35 | "\n",
36 | "Our journey into the world of Applied Causal Inference begins here. In this chapter, we deal with fundamental questions about causality: \n",
37 | "\n",
38 | "\n",
39 | "- **What is causality?** \n",
40 | "- **How does causal inference differ from statistical inference?** \n",
41 | "- **In an age of remarkable machine learning achievements, why do we need causality?**\n",
42 | "\n",
43 | "\n",
44 | "Over the past decade, the landscape of data science and artificial intelligence has been transformed by the \"unreasonable effectiveness\" of machine learning algorithms. From computer vision systems that outperform humans in image recognition to natural language models capable of generating coherent, context-aware text, the capabilities of AI have grown exponentially. Models like *Claude Sonnet* and *ChatGPT* have not only revolutionized research but have also captured the public imagination, leading some to question the need for alternative approaches to data analysis.\n",
45 | "\n",
46 | "\n",
47 | "Indeed, if you've been following the rapid evolution of machine learning, you've likely encountered numerous examples of its prowess across various domains. This might prompt you to ask: *If these algorithms work so well, why should we bother looking into something else?*\n",
48 | "\n",
49 | "The answer lies in the unique insights and capabilities that causal inference offers. Despite the power of modern machine learning, there are scenarios where understanding the underlying causal mechanisms becomes crucial. \n",
50 | "\n",
51 | "In this chapter, we'll explore:\n",
52 | "\n",
53 | "- The historical development of causal thinking and its impact on scientific inquiry\n",
54 | "- Specific cases where causal models provide advantages over purely statistical methods\n",
55 | "- Common misconceptions and oversimplifications in causal reasoning\n",
56 | "\n",
57 | "By examining these aspects, we'll uncover why causal inference remains a critical tool in the data scientist's arsenal, complementing rather than competing with machine learning approaches."
58 | ]
59 | },
60 | {
61 | "cell_type": "markdown",
62 | "metadata": {},
63 | "source": [
64 | "
\n",
65 | "## 1.2. A Brief History of Causality\n",
66 | "\n",
67 | "The concept of causality has been a cornerstone of human understanding across civilizations and throughout history. Its study has evolved significantly, shaping our approach to scientific inquiry and our understanding of the world around us.\n",
68 | "\n",
69 | "### Ancient Foundations: Aristotle's Four Causes\n",
70 | "\n",
71 | "In ancient Greece, Aristotle laid the groundwork for causal thinking that would influence Western philosophy for centuries. He posited that true knowledge of any process necessitates an understanding of its causal structure. Aristotle's framework included four types of causes:\n",
72 | "\n",
73 | "- **Material Cause**: The substance from which something is made\n",
74 | "- **Formal Cause**: The essential nature or form of the thing\n",
75 | "- **Efficient Cause**: The agent of change or the maker\n",
76 | "- **Final Cause**: The purpose or end for which something exists\n",
77 | "\n",
78 | "While this categorization may appear counterintuitive to modern scientists, it represents one of the earliest systematic attempts to categorize different aspects of causation. Aristotle argued that answering **why questions** forms the essence of scientific explanation.\n",
79 | "\n",
80 | "
\n",
81 | "\n",
82 | "\n",
83 | "BTW, what do you think about Aristotle picture?\n",
84 | "\n",
85 | "### The Enlightenment Shift: David Hume's Empiricism\n",
86 | "\n",
87 | "Fast forward to the 18th century, and we encounter David Hume, a Scottish philosopher who revolutionized causal thinking. Hume's approach marked a significant departure from Aristotelian ideas, focusing instead on empirical observation and human psychology.\n",
88 | "Hume's key insight was that we never directly observe cause-effect relationships in the world. Instead, we only experience the conjunction of events. As he famously wrote:\n",
89 | "\n",
90 | "\n",
91 | "\"We only find, that the one does actually, in fact, follow the other. The impulse of one billiard-ball is attended with motion in the second. This is the whole that appears to the outward senses.\" *(original spelling; Hume & Millican, 2007; originally published in 1739)*.\n",
92 | "\n",
93 | "\n",
94 | "Hume's theory of causality can be summarized as follows:\n",
95 | "\n",
96 | "- We observe sequences of events (e.g., object $A$ moves, then object $B$ moves.\n",
97 | "- Repeated observations of such sequences create an expectation in our minds.\n",
98 | "- This feeling of expectation is what we call **causality**.\n",
99 | "\n",
100 | "In essence, Hume argued that causality is not an inherent property of the world, but a psychological construct arising from our experiences.\n",
101 | "\n",
102 | "
\n",
103 | "\n",
104 | "\n",
105 | "### Implications for Modern Causal Inference\n",
106 | "The historical evolution of causal thinking, from Aristotle to Hume, set the stage for modern approaches to causal inference. These early philosophers wrestled with fundamental questions that still resonate today:\n",
107 | "\n",
108 | "- How can we distinguish genuine causal relationships from mere correlations?\n",
109 | "- To what extent can we infer causal structures from observational data alone?\n",
110 | "- What role does human cognition play in our understanding of causality?\n",
111 | "\n",
112 | "As we delve deeper into methods of causal inference, it's crucial to remember that we're building upon centuries of philosophical and scientific thought. The challenges we face in identifying and quantifying causal relationships echo those pondered by thinkers throughout history.\n",
113 | "\n",
114 | "Here are some of the main contemporary theorists of causal inference, along with their key contributions and primary academic affiliations:\n",
115 | "\n",
116 | "**[Judea Pearl](https://bayes.cs.ucla.edu/jp_home.html)**, Professor Emeritus, Department of Computer Science, University of California, Los Angeles (UCLA)\n",
117 | "*Key Contribution*: Developed causal diagrams (DAGs) and do-calculus, laying the groundwork for modern approaches to causal inference, particularly in distinguishing correlation from causation.\n",
118 | "\n",
119 | "**[Donald Rubin](https://statistics.fas.harvard.edu/people/donald-b-rubin)**, Professor Emeritus, Department of Statistics, Harvard University\n",
120 | "*Key Contribution*: Creator of the Rubin Causal Model (RCM) or potential outcomes framework, which is central to the analysis of causal effects, particularly in the context of randomized and observational studies.\n",
121 | "\n",
122 | "**[James Heckman](https://cehd.uchicago.edu/?page_id=71)**, Professor of Economics, University of Chicago\n",
123 | "*Key Contribution*: Significant contributions to econometrics, including the Heckman correction for addressing selection bias and the estimation of treatment effects in observational data.\n",
124 | "\n",
125 | "**[Guido Imbens](https://www.gsb.stanford.edu/faculty-research/faculty/guido-w-imbens)**, Professor of Economics, Stanford University\n",
126 | "*Key Contribution*: Known for work on instrumental variables and local average treatment effects (LATE), providing practical methods for estimating causal effects in both observational and quasi-experimental contexts.\n",
127 | "\n",
128 | "
"
129 | ]
130 | },
131 | {
132 | "cell_type": "markdown",
133 | "metadata": {},
134 | "source": [
135 | "## 1.3 What babies are telling us about Causality\n",
136 | "\n",
137 | "While Hume's theory of causation provided a foundational understanding of cause and effect, it left some questions unanswered. To address these gaps, we turn to an unexpected source: human babies. The study of how babies develop their understanding of the world offers profound insights into the nature of causal reasoning and its importance in human cognition.\n",
138 | "\n",
139 | "
\n",
140 | "\n",
141 | "\n",
142 | "### Beyond Passive Observation: The Active Learner\n",
143 | "\n",
144 | "Alison Gopnik, a developmental psychologist, has made significant contributions to our understanding of how children construct their models of the world. Her work, bridging developmental psychology and computer science, reveals that children are far more than passive observers of their environment [Gopnik,(2012)](https://doi.org/10.1126/science.1223416).\n",
145 | "\n",
146 | "Key insights from Gopnik's research include:\n",
147 | "\n",
148 | "1. **Children as Scientists**: Babies and young children engage in behaviors that, while sometimes interpreted as disruptive or random, are actually systematic experiments to understand their environment [Gopnik,(2009)](https://books.google.no/books/about/The_Philosophical_Baby.html).\n",
149 | "\n",
150 | "2. **Preference for the Unpredictable**: Infants as young as 11 months show a preference for objects that behave in unpredictable ways. This preference drives them to explore and learn about novel phenomena efficiently [Stahl,(2015)](https://doi.org/10.1126/science.aaa3799).\n",
151 | "\n",
152 | "3. **Active Interaction**: Unlike Hume's theory, which focuses on passive observation, babies actively interact with their environment to test hypotheses and build causal models.\n",
153 | "\n",
154 | "\n",
155 | "### The Power of Intervention\n",
156 | "\n",
157 | "What sets the infant's approach apart from Hume's conception is the crucial element of intervention. In the context of causal inference, these interactions are termed **interventions**, and they form the backbone of modern experimental design [Pearl,(2009)](https://doi.org/10.1017/CBO9780511803161).\n",
158 | "\n",
159 | "\n",
160 | "Interventions allow us to:\n",
161 | "\n",
162 | "- Distinguish between correlation and causation\n",
163 | "- Test hypotheses about causal relationships\n",
164 | "- Build more robust and accurate models of the world\n",
165 | "\n",
166 | "This concept of intervention is not just a part of infant behavior; it's at the heart of scientific inquiry. The gold standard of scientific experimentation, the **Randomized Controlled Trial (RCT)**, is essentially a formalized, rigorous application of the same principle that drives a baby to repeatedly drop a spoon from their high chair [5].\n",
167 | "\n",
168 | "\n",
169 | "### Implications for Causal Inference\n",
170 | "\n",
171 | "The insights from developmental psychology have profound implications for how we approach causal inference:\n",
172 | "\n",
173 | "1. **Active Learning**: We should design algorithms and studies that don't just passively observe data but actively interact with systems to uncover causal structures [6].\n",
174 | "\n",
175 | "2. **Embracing Uncertainty**: Like infants who are drawn to unpredictable phenomena, our causal inference methods should be capable of identifying and exploring areas of uncertainty [7].\n",
176 | "\n",
177 | "3. **Iterative Experimentation**: The scientific process, mirroring a child's repeated experiments, should involve iterative interventions and observations to refine our causal models [8].\n",
178 | "\n",
179 | "As we delve deeper into the methods and applications of causal inference in subsequent chapters, keep in mind this fundamental insight: *true understanding of Causality comes not just from observing the world, but from interacting with it*. This principle, so naturally embodied in the behavior of infants, forms the cornerstone of modern causal inference techniques and experimental design.\n",
180 | "\n",
181 | "
\n",
182 | "How can we, as researchers, approach the world of science with the same curiosity, experimentation, and openness to uncertainty that babies use to understand their world, and how might this change the way we design our studies and algorithms?\n",
183 | "
\n",
184 | " \n",
185 | "
"
186 | ]
187 | },
188 | {
189 | "cell_type": "markdown",
190 | "metadata": {
191 | "cell_id": "fa524251-c07c-44c7-89a2-0fbaa27f6617",
192 | "deepnote_cell_height": 6039.234375,
193 | "deepnote_cell_type": "markdown"
194 | },
195 | "source": [
196 | "## 1.4. Anything wrong with data science?\n",
197 | "\n",
198 | "Let's start with a simple image (Courtesy of Markus Elsholz). \n",
199 | "What is the object in the image below? A chair, right?\n",
200 | "\n",
201 | "\n",
202 | "\n",
203 | "\n",
204 | "
\n",
205 | "What if we look at the object from a different angle? \n",
206 | "\n",
207 | "\n",
208 | "\n",
209 | "\n",
210 | "The object is not a chair! We just had an illusion of a chair if the parts are viewed from a single and specific angle. This is the **Beuchet chair experiment** on changing perception in observations.\n",
211 | "\n",
212 | "\n",
213 | "\n",
214 | "Do you have any other example?\n",
215 | "
\n",
216 | "\n",
217 | "\n",
218 | "
\n",
219 | "Unfortunately, most of our works in statistics, data science, and machine learning are based on **observations**! However, we can not just rely on observations to model and understand our world. \n",
220 | "\n",
221 | "\n",
222 | "\n",
223 | "\n",
224 | "Saying that, let's see what Causality is or is NOT.\n",
225 | "\n",
226 | "\n",
227 | "
\n",
228 | "## 1.5. What is NOT Causality?\n",
229 | "\n",
230 | "
\n",
231 | "### Causality is not Algebra\n",
232 | "\n",
233 | "Dario is a little boy that feels fever, so his Mom measures his temperature with a thermometer. Then, hopefully, he can skip school today. \n",
234 | "From an algebra point of view, the height of mercury $X$ in the pipe is related to Dario's body temperature $Y$ with a constant $k$. \n",
235 | "\n",
236 | "$Y = k * X$\n",
237 | "\n",
238 | "
\n",
239 | "\n",
240 | "For our algebra equation, it does not matter if Dario's body temperature increases the mercury column height or the other way.\n",
241 | "\n",
242 | "
\n",
243 | "### Causality is not Statistics\n",
244 | "\n",
245 | "- Most scientific inquiry/data analyses have one of the two goals:\n",
246 | "\n",
247 | " - **Association/prediction**, i.e., determine predictors or variables associated with the outcome of interest.\n",
248 | " - **Causality**, i.e., understanding factors that cause or influence the outcome of interest.\n",
249 | "\n",
250 | "- Statistical concepts are those expressible in terms of the joint distribution of observed variables.\n",
251 | "\n",
252 | "- We are often told that **association is not causation!** However, we forget about it. Therefore, we see numerous spurious/funny correlations like examples in the [Spurious Correlations collection](https://tylervigen.com/spurious-correlations). \n",
253 | "\n",
254 | " \n",
255 | "\n",
256 | "
\n",
257 | "Another example is related to cigarette commercials in the USA in the 50th that claim smoking is helpful for coughs treatment and even helps you have a more fit body!\n",
258 | "\n",
259 | " \n",
260 | "\n",
261 | "
\n",
262 | "### Causality is not Machine Learning\n",
263 | "\n",
264 | "* We hear about rapid advances in machine learning systems every day, such as deep-learning algorithms in self-driving cars, speech-recognition systems, image processing, virtual reality, and LLMs. Nevertheless, deep learning has succeeded primarily by performing repeatable tasks to answer specific questions that we thought were difficult. But, those questions are not that difficult. \n",
265 | "\n",
266 | "\n",
267 | "* Machine learning has not addressed the tough questions that prevent us from achieving human-level AI. The public believes that AI machines can think like humans. In reality, computers don't even have animal-like cognitive abilities yet. See [Gary Marcus's paper, The Next Decade in AI: Four Steps Towards Robust Artificial Intelligence.](https://arxiv.org/pdf/2002.06177). \n",
268 | "\n",
269 | "\n",
270 | "* The field of artificial intelligence is **bursting with micro discoveries**—the sort of things that make good press releases—but machines are still disappointingly far from human-like cognition. See [Gary Marcus's book, Rebooting AI: Building Machines We Can Trust](http://garymarcus.com/index.html)\n",
271 | "\n",
272 | " \n",
273 | "\n",
274 | "
\n",
275 | "\n",
276 | "\n",
277 | "Machine learning is trapped in the **Plato Cave**. See [Judea Pearl's book, the Book of Why](http://bayes.cs.ucla.edu/WHY/). \n",
278 | "\n",
279 | "\n",
280 | " \n",
281 | "\n",
282 | "\n",
283 | "
\n",
284 | "Followings are some shortcomings of machine learning when it comes to causal inference. \n",
285 | "\n",
286 | "- Machine learning is limited to transferability to new problems and any form of generalization to data with a different distribution. \n",
287 | "\n",
288 | "- Machine learning often disregards information that even animals use heavily, e.g., interventions, domain shifts, and temporal structure. \n",
289 | "\n",
290 | "- Most current successes of machine learning boil down to large-scale pattern recognition on suitably collected independent and identically distributed (i.i.d.) data which is not the case in reality!\n",
291 | "\n",
292 | "\n",
293 | "\n",
294 | "
\n",
295 | "## 1.6. Going Beyond Machine Learning to Answer a Different Kind of Questions\n",
296 | "\n",
297 | "Now, we explore three types of questions that machine learning and causal inference address: prediction, counterfactual (what-if), and causal questions.\n",
298 | "\n",
299 | "### 1. Prediction Questions:\n",
300 | "Machine Learning excels at answering prediction-based questions. As highlighted in the book *Prediction Machines*, \"the new wave of artificial intelligence does not bring us intelligence but instead a critical component of intelligence—prediction.\" We can do remarkable things with machine learning, provided that we frame our problems as prediction problems, where the focus is on predicting outcomes based on historical data.\n",
301 | "\n",
302 | "- **Example Questions**:\n",
303 | " \n",
304 | " - Can we predict customers choices based on their previous behavior?
\n",
305 | " - Can we forecast next quarter’s sales using historical sales data?\n",
306 | "
\n",
307 | "\n",
308 | "### 2. Counterfactual (What-If) Questions:\n",
309 | "Machine Learning, while powerful in prediction tasks, struggles with counterfactual reasoning—answering \"what if\" questions that require exploring alternate realities. Machine learning models tend to fail when data deviates from the training distribution, making them poor at handling inverse causality or what-if scenarios.\n",
310 | "\n",
311 | "- **Example Questions**:\n",
312 | " \n",
313 | " - What would happen to website traffic if we redesigned our homepage?
\n",
314 | " - What if we reduced product prices by 10%—how would sales change?\n",
315 | "
\n",
316 | "\n",
317 | "### 3. Causal Questions:\n",
318 | "At the heart of these types of questions is a causal inquiry—we want to know not just *what* happens but *why* it happens. Causal questions are central to real-world decision-making, from business strategies to personal life choices. Unfortunately, we can't rely on machine learning's correlations to answer these causal questions; instead, we need causal inference techniques to uncover these deeper insights.\n",
319 | "\n",
320 | "- **Example Questions**:\n",
321 | " \n",
322 | " - Does increasing training hours improve employee performance?
\n",
323 | " - Does advertising on social media lead to more product sales?\n",
324 | "
\n",
325 | "\n"
326 | ]
327 | },
328 | {
329 | "cell_type": "markdown",
330 | "metadata": {},
331 | "source": [
332 | "## 1.7. What is Causality?\n",
333 | "\n",
334 | "To understand causality, let's first revisit the fundamentals of **statistical inference** and then contrast it with **causal inference**.\n",
335 | "\n",
336 | "### Traditional Statistical Inference Paradigm\n",
337 | "\n",
338 | "Statistics summarize a population/set/observation into a distribution based on samples drawn from that population. Remember that we cannot derive causal claims from observational data alone.\n",
339 | "\n",
340 | "Causal inference is the scientific process in which **cause-and-effect** relationships are inferred from observational data, but only after assuming a **causal model** that drives the relationships between random variables. \n",
341 | "\n",
342 | "We use an analogy proposed initially by [Judea Pearl, 2016](http://bayes.cs.ucla.edu/jsm-august2016-bw.pdf) and later used by [Camilo Hurtado, 2017](https://repositorio.unal.edu.co/handle/unal/59495) to better explain causal inference. \n",
343 | "\n",
344 | "- We assume an unknown, invariant, and true data-generating process, $M$, that produces a set of observed random variables (data), $D$, and associated multivariate probability distribution, $P$. \n",
345 | "- The target of scientific inquiry in traditional statistical analysis is a probabilistic quantity, $Q(P)$, which summarizes some attribute of $D$ that is of interest.\n",
346 | "- $Q(P)$ can be estimated from $P$ and $D$ alone. \n",
347 | "\n",
348 | "\n",
349 | "\n",
350 | "### Causal Inference Paradigm\n",
351 | "\n",
352 | "Causal analysis differs from statistical analysis. Causal inference is interested in the effect of an **intervention (treatment)** on the causal system $M$ when experimental conditions change. \n",
353 | "\n",
354 | "- This **intervention** acts as a specific modification to the data-generating model $M$, leading to an **unobserved (counterfactual) set of data $D'$ and a distribution $P'$**. This change is known as the **causal effect of an intervention**. \n",
355 | "- In other words, changes in the data-generating process $M$ generate hypothetical (unobserved) $D'$ and $P'$. \n",
356 | "- Then, a causal target parameter $Q(P')$ is computed, which summarizes the causal effect of the given intervention (or treatment). \n",
357 | "\n",
358 | "\n",
359 | "\n",
360 | "### The Challenge:\n",
361 | "\n",
362 | "- The problem is that we only have access to $D$ and therefore $P$ in observational studies, while $D'$ and $P'$ remain unknown. Thus, $D$ or $P$ alone cannot answer the causal quantity of interest. \n",
363 | "- We use a set of **(un)testable causal assumptions** to estimate $Q(P')$ from $D$ and $P$.\n",
364 | "- With these assumptions at hand, we can mathematically express $Q(P')$ in terms of both $D$ and $P$, leaving $D'$ and $P'$ out.\n",
365 | "\n",
366 | "### Causality Goes Beyond Statistics\n",
367 | "\n",
368 | "- Causal inference requires extra information. The distribution of the data alone cannot tell us how it would change under new conditions.\n",
369 | "- To make causal inferences, we must make **causal assumptions** about the processes that generated the data. These assumptions are not statistical.\n",
370 | "\n",
371 | "\n",
372 | "### Two Schools of Thought for Inference:\n",
373 | "\n",
374 | "- **Associational Inference**: Includes any relationship that can be defined in terms of the joint distribution of observed variables\n",
375 | " - Correlation, conditional independence, dependence, likelihood, confidence level...\n",
376 | " - Testable in principle\n",
377 | "\n",
378 | "- **Causal Inference**: Includes any relationship that cannot be defined in terms of the joint distribution alone\n",
379 | " - Randomization, confounding, mediation, attribution, effect...\n",
380 | " - Not testable in principle (without experimental control)\n",
381 | " - Only testable if we can intervene and see what happens.\n",
382 | "\n",
383 | "### Key Takeaways:\n",
384 | "- **Statistical inference** focuses on identifying patterns and relationships within the data but does not allow for causal claims.\n",
385 | "- **Causal inference** requires making assumptions about how interventions would alter the data-generating process, going beyond statistical relationships.\n",
386 | "- Causal models help us predict what would happen under hypothetical scenarios, providing insights into the \"why\" behind the data.\n"
387 | ]
388 | },
389 | {
390 | "cell_type": "markdown",
391 | "metadata": {},
392 | "source": [
393 | "## 1.8. Causality Ladder\n",
394 | "\n",
395 | "In the [Book of Why](http://bayes.cs.ucla.edu/WHY/), Judea Pearl suggested the **Ladder of Causation**, which represents three levels of causality with different organisms at each level. \n",
396 | "\n",
397 | "* **Association (Rung One)**: This level deals with observing relationships between variables. Here, we can predict how one event affects our belief in another. For example, observing a SpaceX launch may increase our belief that SpaceX stock will rise. The primary activity here is **observation**.\n",
398 | "\n",
399 | "* **Intervention (Rung Two)**: At this level, we move beyond observation to actively intervening in a system. By changing one variable, we can see its effect on another, much like performing an experiment. For instance, going to bed earlier could result in more energy the next day. The activity here is **doing** or **intervening**.\n",
400 | "\n",
401 | "* **Counterfactual Reasoning (Rung Three)**: This level involves imagining alternative scenarios and understanding what would have happened under different conditions. It allows us to ask \"what if\" questions, such as whether you would have arrived on time had you taken the train instead of the car. The activity here is **imagining** or **understanding alternate outcomes**.\n",
402 | "\n",
403 | "\n",
404 | "\n",
405 | "\n",
406 | " Which rung of the Causality Ladder is your current PhD research addressing?
\n",
407 | "\n",
408 | "
\n"
409 | ]
410 | },
411 | {
412 | "cell_type": "markdown",
413 | "metadata": {
414 | "cell_id": "00009-1b357484-1425-412d-abc8-5a53506c4d3d",
415 | "deepnote_cell_height": 189.5625,
416 | "deepnote_cell_type": "markdown"
417 | },
418 | "source": [
419 | "## 1.9. The Danger of Oversimplification\n",
420 | "\n",
421 | "In our quest to understand causality, it's crucial to address a common pitfall: the temptation to oversimplify.\n",
422 | "\n",
423 | "* **Human Intuition vs. Complex Reality**: While simple models are more appealing to human intuition, they often fail to capture the intricacies of complex systems. This is precisely why we rely on statistics and advanced causal inference methods.\n",
424 | " \n",
425 | "* **The Need for Justified Models**: Before adopting simpler models, we must ensure they are justified by expert knowledge or thorough analysis. Simplicity should not come at the cost of accuracy or completeness.\n",
426 | " \n",
427 | "* **Misinformation Risk**: Oversimplification can lead to misinformation. In causal inference, this is particularly dangerous as it may result in incorrect conclusions about cause-effect relationships.\n",
428 | " \n",
429 | "* **Balancing Simplicity and Accuracy**: The challenge lies in finding the right balance between model simplicity and accurate representation of causal relationships. This balance is crucial for both understanding and practical application.\n",
430 | "\n",
431 | "Oversimplification in causal inference can lead to misleading conclusions. Always strive for models that are as simple as possible, but no simpler than the complexity of the system under study requires.\n",
432 | "\n",
433 | "\n",
434 | "Further Reading and Media on Oversimplification:\n",
435 | "\n",
436 | "**Book**: [Thinking, Fast and Slow](https://www.goodreads.com/book/show/11468377-thinking-fast-and-slow) by Daniel Kahneman, 2011, explores various cognitive biases, including our tendency to prefer simple explanations over complex ones.\n",
437 | "\n",
438 | "**Book**: [How Not To Be Wrong, The Power Of Mathematical Thinking](https://www.jordanellenberg.com/book/how-not-to-be-wrong/) by Jordan Ellenberg, 2014, connects various economic and societal philosophies with basic mathematics and statistical principles. \n",
439 | "\n",
440 | "**Podcast**: The \"[How we transferred our biases into our machines and what we can do about it](https://youarenotsosmart.com/2017/11/20/yanss-115-how-we-transferred-our-biases-into-our-machines-and-what-we-can-do-about-it/)\" episode from the podcast \"You Are Not So Smart\". This episode discusses how our tendency to oversimplify can lead to biases in machine learning and AI systems.\n",
441 | "\n",
442 | "\n"
443 | ]
444 | },
445 | {
446 | "cell_type": "markdown",
447 | "metadata": {},
448 | "source": [
449 | "## 1.10. Causal Inference in Business: The Promise of Causal AI\n",
450 | "This section inspired by the [Causal Artificial Intelligence](https://www.oreilly.com/library/view/causal-artificial-intelligence/9781394184132/) book by Hurwitz & Thompson, 2023.\n",
451 | "\n",
452 | "\n",
453 | "### The Limitations of Traditional Data Analysis\n",
454 | "Over the past decade, the prevailing trend in data science has been the mantra of \"more data is better.\" Organizations invested heavily in data collection, assuming that the sheer volume of information would inevitably lead to more profound insights and improvements in business performance. However, this data-centric approach has its limitations:\n",
455 | "\n",
456 | "* **Correlation vs. Causation**: While traditional analysis can identify correlations, it often struggles to differentiate them from true causal relationships.\n",
457 | "* **Data Overload**: Simply gathering more data does not guarantee better insights, especially if the underlying business questions are not well-posed.\n",
458 | "* **Lack of Context**: Data-driven approaches often miss the critical contextual knowledge that domain experts can provide, leading to incomplete or misleading conclusions.\n",
459 | "\n",
460 | "### The Causal AI Advantage\n",
461 | "Causal AI offers a solution to these limitations by combining causal inference techniques with the predictive power of AI models. The key benefits of Causal AI for businesses include:\n",
462 | "\n",
463 | "* **Understanding 'Why'**: Instead of merely forecasting what may happen, Causal AI seeks to uncover why events occur, leading to interventions that target root causes rather than symptoms.\n",
464 | "* **Enhanced Decision Making**: By understanding causal relationships, organizations can make better decisions about resource allocation and strategic planning.\n",
465 | "* **Robust Predictions**: Causal models tend to be more resilient to changes in external conditions, making them particularly useful in dynamic and uncertain business environments.\n"
466 | ]
467 | },
468 | {
469 | "cell_type": "markdown",
470 | "metadata": {
471 | "cell_id": "768600bfea1a4cbb81ed3db277469651",
472 | "deepnote_cell_height": 301.734375,
473 | "deepnote_cell_type": "markdown",
474 | "tags": []
475 | },
476 | "source": [
477 | "## Acknowledgement\n",
478 | "\n",
479 | "Most of the ideas in this chapter are taken from Judea Pearl Books. \n",
480 | "\n",
481 | "* [Causality, 2nd Edition](http://bayes.cs.ucla.edu/BOOK-2K/)\n",
482 | "* [The Book of Why](http://bayes.cs.ucla.edu/WHY/)\n",
483 | "\n",
484 | "We also like to reference the open-source book on causality by Matheus Facure Alves. He did a great job in explaining causal concepts with examples and fuuny memes.\n",
485 | "\n",
486 | "* [Causal Inference for The Brave and True](https://matheusfacure.github.io/python-causality-handbook/landing-page.html)\n"
487 | ]
488 | },
489 | {
490 | "cell_type": "code",
491 | "execution_count": null,
492 | "metadata": {},
493 | "outputs": [],
494 | "source": []
495 | }
496 | ],
497 | "metadata": {
498 | "celltoolbar": "Tags",
499 | "deepnote": {},
500 | "deepnote_execution_queue": [],
501 | "deepnote_notebook_id": "ca97ebbe-c21a-477a-b9d8-e5669467789a",
502 | "kernelspec": {
503 | "display_name": "Python 3 (ipykernel)",
504 | "language": "python",
505 | "name": "python3"
506 | },
507 | "language_info": {
508 | "codemirror_mode": {
509 | "name": "ipython",
510 | "version": 3
511 | },
512 | "file_extension": ".py",
513 | "mimetype": "text/x-python",
514 | "name": "python",
515 | "nbconvert_exporter": "python",
516 | "pygments_lexer": "ipython3",
517 | "version": "3.10.9"
518 | }
519 | },
520 | "nbformat": 4,
521 | "nbformat_minor": 4
522 | }
523 |
--------------------------------------------------------------------------------
/lectures/Counterfactual_DAG:
--------------------------------------------------------------------------------
1 | digraph {
2 | X1 [label="X_1 (Marketing Investment)"]
3 | Y [label="Y (Revenue)"]
4 | X2 [label="X_2 (Market Share)"]
5 | UX1 [label="U_{X1} (Noise)"]
6 | UY [label="U_Y (Noise)"]
7 | UX2 [label="U_{X2} (Noise)"]
8 | UX1 -> X1
9 | X1 -> Y
10 | UY -> Y
11 | Y -> X2
12 | UX2 -> X2
13 | }
14 |
--------------------------------------------------------------------------------
/lectures/Counterfactual_DAG.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/Counterfactual_DAG.png
--------------------------------------------------------------------------------
/lectures/causal_model_dag:
--------------------------------------------------------------------------------
1 | digraph {
2 | X [label="X (Exogenous Variable)"]
3 | T [label="T (Intervened)"]
4 | Y [label="Y (Outcome)"]
5 | UT [label="U_T (Noise for T)"]
6 | UY [label="U_Y (Noise for Y)"]
7 | X -> T
8 | X -> Y
9 | T -> Y
10 | UT -> T
11 | UY -> Y
12 | }
13 |
--------------------------------------------------------------------------------
/lectures/causal_model_dag.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/causal_model_dag.pdf
--------------------------------------------------------------------------------
/lectures/causal_model_dag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/causal_model_dag.png
--------------------------------------------------------------------------------
/lectures/chain_dag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/chain_dag.png
--------------------------------------------------------------------------------
/lectures/collider_dag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/collider_dag.png
--------------------------------------------------------------------------------
/lectures/data/Simpson_paradox_binary_example.csv:
--------------------------------------------------------------------------------
1 | count,treatment,recovery,group
2 | 1.0,0,1,0
3 | 21.0,0,0,0
4 | 1.0,0,1,1
5 | 4.0,0,0,1
6 | 2.0,0,1,2
7 | 2.0,0,0,2
8 | 61.0,0,1,3
9 | 30.0,0,0,3
10 | 112.0,0,1,4
11 | 16.0,0,0,4
12 | 18.0,1,1,0
13 | 91.0,1,0,0
14 | 13.0,1,1,1
15 | 23.0,1,0,1
16 | 3.0,1,1,2
17 | 2.0,1,0,2
18 | 51.0,1,1,3
19 | 21.0,1,0,3
20 | 25.0,1,1,4
21 | 3.0,1,0,4
22 |
--------------------------------------------------------------------------------
/lectures/data/online_classroom.csv:
--------------------------------------------------------------------------------
1 | gender,asian,black,hawaiian,hispanic,unknown,white,format_ol,format_blended,falsexam
2 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,63.299969999999995
3 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,79.96
4 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,83.37
5 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,90.01994
6 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,83.3
7 | 0,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,88.34996
8 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,78.4
9 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,90.0
10 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,88.31
11 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,73.37
12 | 1,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,83.3
13 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,86.64
14 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,83.37
15 | 1,,,,,,,0,1.0,76.64
16 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,89.98
17 | 0,,,,,,,0,1.0,66.64
18 | 1,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,80.05
19 | 1,,,,,,,0,0.0,78.38
20 | 0,,,,,,,0,1.0,86.64
21 | 1,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,74.97
22 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,63.35
23 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,71.73899999999999
24 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,81.65
25 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,88.33
26 | 0,0.0,1.0,0.0,0.0,0.0,0.0,1,0.0,59.94
27 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,80.04997
28 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,84.99
29 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,71.68
30 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,71.63
31 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,73.29899999999999
32 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,68.27
33 | 0,,,,,,,0,1.0,83.3
34 | 0,0.0,1.0,0.0,0.0,0.0,0.0,0,1.0,56.7
35 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,73.3
36 | 1,,,,,,,0,0.0,73.3
37 | 1,0.0,1.0,0.0,0.0,0.0,0.0,0,0.0,68.27
38 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,74.97
39 | 1,,,,,,,1,0.0,66.62
40 | 0,,,,,,,0,0.0,66.69
41 | 0,,,,,,,0,1.0,81.63
42 | 1,,,,,,,0,1.0,56.6
43 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,76.64
44 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,81.74
45 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,65.04
46 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,84.99
47 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,96.66
48 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,88.33
49 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,88.31
50 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,76.64
51 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,68.31
52 | 1,0.0,1.0,0.0,0.0,0.0,0.0,0,1.0,71.61
53 | 0,,,,,,,0,0.0,60.01
54 | 0,,,,,,,0,1.0,61.72
55 | 1,0.0,1.0,0.0,0.0,0.0,0.0,0,0.0,79.96
56 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,88.31
57 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,63.26
58 | 1,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,56.63998
59 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,83.32
60 | 0,,,,,,,1,0.0,78.31
61 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,69.95998
62 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,83.32
63 | 0,,,,,,,0,0.0,96.68
64 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,76.63996
65 | 0,,,,,,,1,0.0,78.36
66 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,65.04
67 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,84.99
68 | 0,0.0,1.0,0.0,0.0,0.0,0.0,0,1.0,68.34
69 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,88.38
70 | 1,0.0,0.0,0.0,1.0,0.0,0.0,0,0.0,79.959
71 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,63.35
72 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,70.08
73 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,68.34
74 | 1,,,,,,,0,1.0,90.0
75 | 0,,,,,,,1,0.0,86.66
76 | 0,,,,,,,0,1.0,59.99
77 | 0,0.0,1.0,0.0,0.0,0.0,0.0,0,1.0,55.07
78 | 1,,,,,,,0,1.0,88.33
79 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,81.76
80 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,66.69
81 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,85.04
82 | 1,,,,,,,1,0.0,90.0
83 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,73.3
84 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,73.28
85 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,73.3
86 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,91.69
87 | 1,0.0,0.0,1.0,0.0,0.0,0.0,0,1.0,69.98
88 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,83.32
89 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,54.949980000000004
90 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,70.0
91 | 1,,,,,,,0,1.0,51.66
92 | 1,,,,,,,0,0.0,81.63
93 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,81.72
94 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,90.05
95 | 0,,,,,,,1,0.0,51.67998
96 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,73.3
97 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,76.64
98 | 0,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,58.34
99 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,78.31
100 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,83.3
101 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,51.678999999999995
102 | 0,,,,,,,0,0.0,61.67997
103 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,76.66
104 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,56.70997
105 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,79.99996999999999
106 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,79.98
107 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,73.3
108 | 0,,,,,,,0,0.0,73.39
109 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,71.64996
110 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,79.97995
111 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,71.62997
112 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,79.98
113 | 0,,,,,,,0,0.0,86.64
114 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,83.39
115 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,64.979
116 | 1,,,,,,,1,0.0,93.39
117 | 0,,,,,,,0,0.0,70.03
118 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,84.97
119 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,89.98
120 | 1,,,,,,,1,0.0,56.65
121 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,78.34995
122 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,74.90996
123 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,78.31
124 | 0,0.0,0.0,1.0,0.0,0.0,0.0,1,0.0,81.7
125 | 0,0.0,1.0,0.0,0.0,0.0,0.0,0,0.0,66.64
126 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,86.64
127 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,79.98
128 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,81.65
129 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,84.97
130 | 0,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,76.71
131 | 0,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,60.08
132 | 0,,,,,,,0,1.0,91.65
133 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,75.06
134 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,80.05
135 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,81.67
136 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,86.65994
137 | 1,,,,,,,0,1.0,83.3
138 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,74.95997
139 | 0,,,,,,,0,0.0,69.95996
140 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,88.32994000000001
141 | 0,,,,,,,1,0.0,64.95
142 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,66.64
143 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,76.64
144 | 0,,,,,,,1,0.0,74.99
145 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,59.94
146 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,75.02
147 | 0,,,,,,,1,0.0,58.31
148 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,76.76
149 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,71.68
150 | 1,,,,,,,0,0.0,89.98
151 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,84.99
152 | 1,,,,,,,0,0.0,81.63
153 | 1,0.0,1.0,0.0,0.0,0.0,0.0,0,1.0,83.32
154 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,73.31997
155 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,73.27996999999999
156 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,88.38
157 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,68.36
158 | 0,,,,,,,0,1.0,68.29
159 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,71.63
160 | 1,,,,,,,0,1.0,71.63
161 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,91.65
162 | 1,,,,,,,0,0.0,68.31
163 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,75.02
164 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,73.44
165 | 0,0.0,1.0,0.0,0.0,0.0,0.0,0,1.0,50.1
166 | 1,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,63.31998000000001
167 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,70.03
168 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,73.32
169 | 0,,,,,,,1,0.0,83.3
170 | 1,,,,,,,1,0.0,75.06
171 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,86.66
172 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,68.32996
173 | 0,,,,,,,0,1.0,78.329
174 | 0,,,,,,,1,0.0,68.31
175 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,75.02
176 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,78.33
177 | 0,,,,,,,0,1.0,73.28
178 | 0,,,,,,,0,0.0,74.97
179 | 0,0.0,1.0,0.0,0.0,0.0,0.0,0,1.0,70.05
180 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,76.63996999999999
181 | 1,,,,,,,0,1.0,84.99
182 | 0,,,,,,,0,1.0,71.74
183 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,88.33
184 | 0,,,,,,,1,0.0,63.28
185 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,91.74
186 | 1,,,,,,,1,0.0,0.0
187 | 1,0.0,1.0,0.0,0.0,0.0,0.0,0,0.0,79.98
188 | 1,,,,,,,0,0.0,69.94
189 | 1,0.0,0.0,0.0,1.0,0.0,0.0,0,1.0,63.27997
190 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,84.99
191 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,73.3
192 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,88.33
193 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,83.29997
194 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,84.97
195 | 1,,,,,,,1,0.0,81.65
196 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,83.3
197 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,89.99994000000001
198 | 0,0.0,0.0,0.0,1.0,0.0,0.0,1,0.0,79.93996
199 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,78.29
200 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,86.64
201 | 1,,,,,,,0,0.0,91.67
202 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,76.75
203 | 0,0.0,1.0,0.0,0.0,0.0,0.0,0,1.0,73.28
204 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,70.03
205 | 1,,,,,,,0,0.0,83.32
206 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,90.0
207 | 1,,,,,,,0,1.0,83.32
208 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,83.3
209 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,78.31
210 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,88.33
211 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,83.37
212 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,86.66
213 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,86.66
214 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,84.99
215 | 1,0.0,1.0,0.0,0.0,0.0,0.0,0,0.0,88.33
216 | 1,0.0,0.0,1.0,0.0,0.0,0.0,0,1.0,88.33
217 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,81.65
218 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,78.229
219 | 1,,,,,,,0,1.0,76.73
220 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,89.98
221 | 1,,,,,,,0,1.0,74.95
222 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,75.02
223 | 0,,,,,,,0,1.0,56.759980000000006
224 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,81.669
225 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,58.27
226 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,81.63
227 | 0,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,58.299969999999995
228 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,78.38
229 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,81.72
230 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,84.99
231 | 0,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,81.67
232 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,86.66
233 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,88.31
234 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,65.02
235 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,80.0
236 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,78.37995
237 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,86.66
238 | 1,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,83.34
239 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,84.99
240 | 0,,,,,,,0,1.0,89.98
241 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,66.69
242 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,83.37
243 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,88.33
244 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,86.64
245 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,90.0
246 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,84.97
247 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,73.32
248 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,85.01
249 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,75.06
250 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,81.65
251 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,68.27
252 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,83.34
253 | 1,,,,,,,1,0.0,80.05
254 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,86.64
255 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,81.65
256 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,68.36
257 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,80.0
258 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,73.35
259 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,68.36
260 | 0,,,,,,,0,1.0,61.63
261 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,56.71
262 | 1,,,,,,,0,1.0,78.31
263 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,90.0
264 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,70.03
265 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,68.28996
266 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,76.73
267 | 1,,,,,,,1,0.0,86.65996
268 | 0,0.0,0.0,0.0,0.0,1.0,0.0,0,1.0,48.33998
269 | 1,0.0,1.0,0.0,0.0,0.0,0.0,1,0.0,81.7
270 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,81.72
271 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,56.59996999999999
272 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,81.65
273 | 1,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,94.99
274 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,59.92
275 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,61.68
276 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,86.64
277 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,86.66
278 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,88.31
279 | 1,,,,,,,1,0.0,81.7
280 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,68.34
281 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,78.4
282 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,74.97
283 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,83.32
284 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,84.99
285 | 1,0.0,0.0,0.0,1.0,0.0,0.0,1,0.0,76.62
286 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,68.36
287 | 1,,,,,,,0,1.0,76.62
288 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,66.62
289 | 0,,,,,,,1,0.0,74.90997
290 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,79.96
291 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,88.33
292 | 0,,,,,,,0,1.0,76.71
293 | 1,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,65.02
294 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,83.32
295 | 1,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,86.75
296 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,83.44
297 | 1,0.0,1.0,0.0,0.0,0.0,0.0,0,0.0,95.01
298 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,69.99996999999999
299 | 0,1.0,0.0,0.0,0.0,0.0,0.0,1,0.0,86.68
300 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,66.65899999999999
301 | 1,,,,,,,0,1.0,76.62
302 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,71.61
303 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,90.0
304 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,60.01
305 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,1.0,94.99
306 | 0,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,83.3
307 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,83.32
308 | 1,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,70.0
309 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,73.37
310 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,83.39
311 | 0,0.0,0.0,0.0,0.0,0.0,1.0,1,0.0,0.0
312 | 1,,,,,,,0,1.0,69.96
313 | 1,1.0,0.0,0.0,0.0,0.0,0.0,0,0.0,86.71
314 | 1,,,,,,,0,0.0,90.0
315 | 0,0.0,1.0,0.0,0.0,0.0,0.0,0,0.0,61.68
316 | 0,,,,,,,0,0.0,73.37
317 | 1,,,,,,,0,1.0,88.31
318 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,0.0,91.65
319 | 1,,,,,,,0,0.0,84.99
320 | 0,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,68.36
321 | 1,,,,,,,1,0.0,70.05
322 | 0,,,,,,,1,0.0,66.69
323 | 1,,,,,,,1,0.0,83.29997
324 | 1,0.0,0.0,0.0,0.0,0.0,1.0,0,1.0,96.68
325 |
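A hedged pandas sketch (not a repository file) of how this dataset is typically explored: a purely observational comparison of falsexam scores between online-only students (format_ol == 1) and the rest. Column names come from the CSV header above; the relative path is an assumption.

    import pandas as pd

    df = pd.read_csv("lectures/data/online_classroom.csv")

    # Naive difference in mean exam score - an association, not yet a causal effect.
    means = df.groupby("format_ol")["falsexam"].agg(["mean", "count"])
    print(means)
    print("Naive difference:", means.loc[1, "mean"] - means.loc[0, "mean"])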
--------------------------------------------------------------------------------
/lectures/data/smoke_dataset.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/data/smoke_dataset.pkl
--------------------------------------------------------------------------------
/lectures/fork_dag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/fork_dag.png
--------------------------------------------------------------------------------
/lectures/healthcare_dag:
--------------------------------------------------------------------------------
1 | digraph {
2 | Age
3 | Treatment
4 | Survival
5 | Age -> Treatment
6 | Age -> Survival
7 | Treatment -> Survival
8 | }
9 |
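The confounding triangle above can also be built programmatically with graphviz.Digraph instead of a raw DOT string; a small sketch, with the nodes and edges taken from the file above and everything else assumed:

    import graphviz

    # Age is a common cause of both Treatment and Survival (a confounder).
    g = graphviz.Digraph()
    for node in ["Age", "Treatment", "Survival"]:
        g.node(node)
    g.edge("Age", "Treatment")
    g.edge("Age", "Survival")
    g.edge("Treatment", "Survival")
    g.render("healthcare_dag", format="pdf", cleanup=True)  # writes healthcare_dag.pdf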
--------------------------------------------------------------------------------
/lectures/healthcare_dag.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/healthcare_dag.pdf
--------------------------------------------------------------------------------
/lectures/img/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/.DS_Store
--------------------------------------------------------------------------------
/lectures/img/Book_of_why.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/Book_of_why.jpg
--------------------------------------------------------------------------------
/lectures/img/What_if.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/What_if.jpg
--------------------------------------------------------------------------------
/lectures/img/causality_intro.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/causality_intro.jpg
--------------------------------------------------------------------------------
/lectures/img/causality_intro_image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/causality_intro_image.png
--------------------------------------------------------------------------------
/lectures/img/ch1/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/.DS_Store
--------------------------------------------------------------------------------
/lectures/img/ch1/Aristotle_and_causal_thinking.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Aristotle_and_causal_thinking.png
--------------------------------------------------------------------------------
/lectures/img/ch1/Beuchet_chair_a.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Beuchet_chair_a.png
--------------------------------------------------------------------------------
/lectures/img/ch1/Beuchet_chair_b.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Beuchet_chair_b.png
--------------------------------------------------------------------------------
/lectures/img/ch1/Causal_Ladder.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Causal_Ladder.png
--------------------------------------------------------------------------------
/lectures/img/ch1/Causal_Paradigm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Causal_Paradigm.png
--------------------------------------------------------------------------------
/lectures/img/ch1/Causality_History.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Causality_History.png
--------------------------------------------------------------------------------
/lectures/img/ch1/Cigarette_Commercials.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Cigarette_Commercials.png
--------------------------------------------------------------------------------
/lectures/img/ch1/Courtroom.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Courtroom.png
--------------------------------------------------------------------------------
/lectures/img/ch1/DataGeneration.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/DataGeneration.gif
--------------------------------------------------------------------------------
/lectures/img/ch1/David_Hume_theory_of_causality_18th_century.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/David_Hume_theory_of_causality_18th_century.png
--------------------------------------------------------------------------------
/lectures/img/ch1/ML_Animal_Abilities.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/ML_Animal_Abilities.png
--------------------------------------------------------------------------------
/lectures/img/ch1/Math_Learning.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Math_Learning.jpeg
--------------------------------------------------------------------------------
/lectures/img/ch1/Oversimplified-model.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Oversimplified-model.png
--------------------------------------------------------------------------------
/lectures/img/ch1/Plato_Cave.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Plato_Cave.jpeg
--------------------------------------------------------------------------------
/lectures/img/ch1/Spurious_Correlations_Muzzarella.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Spurious_Correlations_Muzzarella.png
--------------------------------------------------------------------------------
/lectures/img/ch1/Stat_Paradigm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/Stat_Paradigm.png
--------------------------------------------------------------------------------
/lectures/img/ch1/baby_learning_brain.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/baby_learning_brain.png
--------------------------------------------------------------------------------
/lectures/img/ch1/causality_intro.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/causality_intro.jpg
--------------------------------------------------------------------------------
/lectures/img/ch1/chatgpt-icon.png.sb-35134b4b-g2oqmo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/chatgpt-icon.png.sb-35134b4b-g2oqmo
--------------------------------------------------------------------------------
/lectures/img/ch1/little-sick-boy.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/little-sick-boy.jpeg
--------------------------------------------------------------------------------
/lectures/img/ch1/misinformation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/misinformation.png
--------------------------------------------------------------------------------
/lectures/img/ch1/normal-model.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch1/normal-model.png
--------------------------------------------------------------------------------
/lectures/img/ch2/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/.DS_Store
--------------------------------------------------------------------------------
/lectures/img/ch2/Earnings_Education_US_2023.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/Earnings_Education_US_2023.png
--------------------------------------------------------------------------------
/lectures/img/ch2/Freia-Melkesjokolade.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/Freia-Melkesjokolade.png
--------------------------------------------------------------------------------
/lectures/img/ch2/Freia_melkesjokolade_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/Freia_melkesjokolade_2.png
--------------------------------------------------------------------------------
/lectures/img/ch2/Intervention_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/Intervention_1.png
--------------------------------------------------------------------------------
/lectures/img/ch2/Intervention_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/Intervention_2.png
--------------------------------------------------------------------------------
/lectures/img/ch2/Intervention_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/Intervention_3.png
--------------------------------------------------------------------------------
/lectures/img/ch2/Intervention_4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/Intervention_4.png
--------------------------------------------------------------------------------
/lectures/img/ch2/Obser2Interven.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/Obser2Interven.png
--------------------------------------------------------------------------------
/lectures/img/ch2/Observation to Intervention to Counterfactual.pptx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/Observation to Intervention to Counterfactual.pptx
--------------------------------------------------------------------------------
/lectures/img/ch2/Observation_Intervention_Counterfactual.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/Observation_Intervention_Counterfactual.png
--------------------------------------------------------------------------------
/lectures/img/ch2/Pearls-Ladder-of-Causation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/Pearls-Ladder-of-Causation.png
--------------------------------------------------------------------------------
/lectures/img/ch2/magician.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/magician.png
--------------------------------------------------------------------------------
/lectures/img/ch2/randomized-controlled-trial.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch2/randomized-controlled-trial.png
--------------------------------------------------------------------------------
/lectures/img/ch3/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/.DS_Store
--------------------------------------------------------------------------------
/lectures/img/ch3/Berkson_Covid_fracture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Berkson_Covid_fracture.png
--------------------------------------------------------------------------------
/lectures/img/ch3/Berkson_Covid_fracture_hospital.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Berkson_Covid_fracture_hospital.png
--------------------------------------------------------------------------------
/lectures/img/ch3/Berkson_handsome_men.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Berkson_handsome_men.png
--------------------------------------------------------------------------------
/lectures/img/ch3/Berkson_paradox_Covid.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Berkson_paradox_Covid.png
--------------------------------------------------------------------------------
/lectures/img/ch3/Berkson_paradox_Covid_plot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Berkson_paradox_Covid_plot.png
--------------------------------------------------------------------------------
/lectures/img/ch3/Causal_Model_Climate.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Causal_Model_Climate.png
--------------------------------------------------------------------------------
/lectures/img/ch3/Causal_Model_Disease_Categories.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Causal_Model_Disease_Categories.png
--------------------------------------------------------------------------------
/lectures/img/ch3/DAGs_PDFs.drawio:
--------------------------------------------------------------------------------
1 | 7V1db6M4FP01edwIf8Njk87s7EorVerD7DxScBI0JGQJnabz69cEmwQDJY0IJoGqUuGCDdx74B4fX+gEzdf7P2N3u/on8nk4gZa/n6DHCYSAMVv8SS3v0mLZILMs48CXtqPhOfjN1Y7S+hr4fFfYMYmiMAm2RaMXbTbcSwo2N46jt+JuiygsHnXrLnnJ8Oy5Ydn6PfCTlbQC6hw3fOPBciUPbUOWbVi7+c6ZYbdy/egtMx0uDn2ZoHkcRUm2tN7PeZh6T/kl88DXmq35icV8k5zTgDn/Lfz9X9++r//e7n8+Re9x9OMP2csvN3yVFzyBWPw+ZH/kmSfvyh2iS+F5sTJ7WwUJf966XrrlTURf2FbJOhRrQCy6u20WjkWw5+IMZotok8jwihMV60EYzqMwig8do4Xtcc8T9l0SRz/5yZYXm2CStpCnyuOE72t9AHLPCkzyaM2T+F3sIhvYElUSjdCS62/H0KpdVqdBlTZXgmmZd3z0t1iQLv+E+2GN+2ddu98n3PZxlftt+IIobcf9EPfM/6jG//Or+78FbwJa9CaChr3JKrypO3DjP6RPZbHmhe5uF3hFv8XR68ZPHfZoVTiN74PkX7lnuvxD7pcuP+5PV97VykZcWNrEmkJGlOHQbupQpAzHxoe1QusnHgfCPzyWxuwOUckA5rHkfimTaJEUnoheY483AzJx4yVPmh4cZWScRJ5URF7ZYh66SfCreLpVcJBHeIoCcSE58JhVBB5mGqKyy5StTlOS3hHROoJaR5kfSh0d0Jlf9uWAtbsCrHUJYBnUAUtuFq81T7Ju8CqS2BTaGDIgoIopBKj43MRgygBh0GYEEsRsdhmaIYPF7GazqYWc4w/tFNzOsKgd7hu3UCOYoZA7avUtAHWDm5tgdwT2jN2BqsHKSO8+lS4VJJvzpVUNjm4SJrBoSwwPAGCW4oGqIV49ajfRht8+wRMX9DVI/SS3u3FSvi0PZrkbMARzxyTKiaPRwquwQop6xQoBHhYttHvHSsiwaKFD+xYAesu00Lb7Rgs/p/pdmmB7xgnPzKnFRNxmhqXnEklilEhi2BaRzFuaIpKjWNgdaLFJ0DqkC17oOP3ihR/IhdaEzDdRki1sebwVS3VsRWS2RMuKYbDcpHeEwEuKs1ma/wLPDR/khnXg+2nzWcx3wW/35dBVCsZterGHyyezCXlM+3pNol12y4AScZGp5ZTlKNPJnUZakhu1qQsASSkPqxn5AkjxtRKxOthJBN3BhgdoNAmrcc9JeFCn0SmLkS+DjY4Y3vUsOmVt0xtsdByrb9GpK9O4W82i4H9ieogH6zSjO5Us9DIl8wGo04xuQrLQ65QoNu3OKgVolCyuLFkoEDfXNhkd/enFTcxpqbiJ6fnxyiM62I0w1y+14nZAjkyCvKEiitKixuFcdgfoBVHUYlMLd3sTlFW7cVCcP5GQaWLfIDqNWlO2Va+hYGUCBViXgVMoGcqIDMC+TSOjuuK2Ox2TAdS3eWRV12FgUKbfA4sFrL4HfPpCSVsR0HUJ41PP6HO1XWNF4gcobiSsyGgJP7Bbm0i2DU8kK9I1TiR3AFqzZbSok5lkgPo1lYzq1Mp7JYdEoybAKc8Fd5sY60rc7pUcMti3CFSJb/dMDqk2Z4KA6Qh0Vq91x+SQnZtnqck8W672oOSyVHrUv/K5b62na+fOKkWsHrX3Idqbfl3lfJibLaZl4EM6iWg7dFI01+gkNEknVWIZDJ10NDJjXOnCA9MaodW37wRhOCw6KRzeM60Rd/Meac+4pOnpdIX7xsyMzX5ghGijn4vVSUgcraeO1Uk8qpPdgdaoOgktVqSTxQduWx/FAaDYsWFxEn8gThZLDqwJE4ew6jLsYIoPCCuWg4OKVJx/rrOT6gNcljcHXNSD9K+xmS7Xx2Xtc7ivugBKtbcpLMdweMrC6HDfdYEW7NnLLnhg320rlVbls2mmhnqKXw1F7iiVVpmPgLkvt/WjtIrapiNQJTiNcseV5Q6F+8aRIzFb16IXY9lIw+HFxVhMR/SVh4NkiJ+HuxmYY6Pfh2so32JWKy/JlKq3KO78LRlSFv3GAfUx0MzwkIA0aFYV32YZxatsK7CLwztSMZMHaTvBFKvH/+aS3ZrHf4qDvvwP
--------------------------------------------------------------------------------
/lectures/img/ch3/DAGs_PDFs_conditionallyDep.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/DAGs_PDFs_conditionallyDep.png
--------------------------------------------------------------------------------
/lectures/img/ch3/DAGs_PDFs_conditionallyIndep.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/DAGs_PDFs_conditionallyIndep.png
--------------------------------------------------------------------------------
/lectures/img/ch3/DAGs_PDFs_marginallyDep.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/DAGs_PDFs_marginallyDep.png
--------------------------------------------------------------------------------
/lectures/img/ch3/DAGs_PDFs_marginallyIndep.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/DAGs_PDFs_marginallyIndep.png
--------------------------------------------------------------------------------
/lectures/img/ch3/Fork_paradox_fire.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Fork_paradox_fire.png
--------------------------------------------------------------------------------
/lectures/img/ch3/Fork_paradox_fire_plot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Fork_paradox_fire_plot.png
--------------------------------------------------------------------------------
/lectures/img/ch3/IceCream-Sunburn.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/IceCream-Sunburn.png
--------------------------------------------------------------------------------
/lectures/img/ch3/Italian-man.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Italian-man.jpeg
--------------------------------------------------------------------------------
/lectures/img/ch3/Markov_blanket.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Markov_blanket.png
--------------------------------------------------------------------------------
/lectures/img/ch3/Markov_equivalent.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Markov_equivalent.png
--------------------------------------------------------------------------------
/lectures/img/ch3/Simpson_firefighter_Injures.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Simpson_firefighter_Injures.png
--------------------------------------------------------------------------------
/lectures/img/ch3/Simpson_firefighter_Injures_severity.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/Simpson_firefighter_Injures_severity.png
--------------------------------------------------------------------------------
/lectures/img/ch3/emperor-penguins-family.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/emperor-penguins-family.png
--------------------------------------------------------------------------------
/lectures/img/ch3/graph_Dsep_example.drawio:
--------------------------------------------------------------------------------
1 | 5Vptk5s2EP41/lgGvaOPvcs1mUwyk/baOedTh7FlmwRbHiyf7f76CiNeJIOPEvCRKzdzRishxO7z7GoXJuh+fXyfhNvVZzkX8QT68+MEvZtACAjD+ieVnIwEIppJlkk0N7JS8Bj9I4zQN9J9NBc7a6CSMlbR1hbO5GYjZsqShUkiD/awhYztu27DpbgQPM7C+FL6FM3VykgB5WXHBxEtV+bWAWRZxzosBmeC3Sqcy0MmOj8cepig+0RKlZ2tj/ciTrWX6yXTwG8NvcXCErFRbS74+Mfm+3Rx97RfPUYP337/C8oP+18CszZ1yh9YzPXzm6ZM1Eou5SaMH0rpXSL3m7lIZ/V1qxzzScqtFgIt/CaUOhljhnsltWil1rHpFcdITSvnX9OpPGJa745m5nPjlDc2KjlNq43KVWmzvOzcyq9byI0yCwFct3cqkd8LU2oj3F3qMTeY3CczcUV5OR7DZCnUlXEwG5dqtnIDY6X3Qq6FXrEekIg4VNGzjbzQAHhZjCttrE+Mmf+Dyc28z2G8N3eaQM1S/Gf2cwmIONZkSw1/WEVKPG7Ds1IOmvC2WV1VNyr2WSRKHK+qIu+lxg0Y94GxaR8qXDS491dVGvoDaY//LIRxrdGVQH0SBrYkDOqbMObSLzLSSy7QRYiDLsY9BjAMSPY/sCfMlm3mcGBULKo7smADLz//Dfpm5iKK43sZy+Q8D1qQ9M+Mq8izo8BApYeej344jv2xcRzAn5XkfZIVtSQrHlV0Q40sgiOMbxSPDfu4QX9fR6i9IBib9kiD9p769+E96A9yW3+Qvbb+aKP+emfvqGIggcSyBHh1S7BGS6A3bQnGxsYJUOdUXN1v5r+mxQ7dmsXhbhfNbJXbm5PG1ABYqcELiUG+X/E9n1pbFkaD66m8bnwRSaTVI5JJ/1kGbblxIS03LhXLkxrD57IfTEaw64yhA6jsuS/Sj4uJqJsz+85EA+cxoM6J94vXAnkQEAt6HPLr0CuSYI8yG+2ej18CfNpygTs0mElLMINRgZkQ6BGGCEcg3aUxZEMbBx4MMGRAp9yYAt4N6NiHHk630DRgAHCY15rzu3Cgs3nCYMB0eEUBui0L6gLoMF7b7+a1eQBtt+2/wJ2RuG02LqRj6AFeHoGDdOBhVOl1QPg2oB7cDuoewsxx26A14pkdLAL2wiuHGsDXxoA+WcBasqBt1eU2LKDa3wcVnHMbnoh6DPrl0ZEFjLNrUcUhgfMQQ5OA15AgzZOmb7x86xYRKHzthCm/WY0p/l9VBPb6pmh62Tjtv4zQgwLd5H8EWIaDx9YiPFJuhUdO6fXwWAZlvTNxgjItwvaYkqm2L+xzZz6S6Iog8VgluHKb5ohgD1X3oLhbeNXL9UjzXhYT5FWDuE9uGmBh3ZudQZhQfQEH2lLAx045AWLahQK9wp23xHvu0kaCd4ypB0Aj3gnT6q4csBvc3WhJWeBBhzpDY7rubdsw3r0LpjmHNqYR6lQj6/UTktYfXY0L04Rwrc9KBm9jz08h/cMVMaoTJGznXpQAD1fIctusCA7/7qLYvpjKQLl/4a33LwwSG+jjrAXjttAfVzH4etpOuV0MZh2hrzdBvl0VYyQtO/SEd90sP9XNhpdfPKOHfwE=
--------------------------------------------------------------------------------
/lectures/img/ch3/graph_Dsep_example.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/graph_Dsep_example.pdf
--------------------------------------------------------------------------------
/lectures/img/ch3/graph_Dsep_example_case0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/graph_Dsep_example_case0.png
--------------------------------------------------------------------------------
/lectures/img/ch3/graph_Dsep_example_case1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/graph_Dsep_example_case1.png
--------------------------------------------------------------------------------
/lectures/img/ch3/graph_Dsep_example_case2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/graph_Dsep_example_case2.png
--------------------------------------------------------------------------------
/lectures/img/ch3/graph_Dsep_example_case3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/graph_Dsep_example_case3.png
--------------------------------------------------------------------------------
/lectures/img/ch3/graph_Dsep_example_case4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/graph_Dsep_example_case4.png
--------------------------------------------------------------------------------
/lectures/img/ch3/graph_Dsep_example_case5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/graph_Dsep_example_case5.png
--------------------------------------------------------------------------------
/lectures/img/ch3/markovEQ_example.drawio:
--------------------------------------------------------------------------------
1 | 7Ztbc5s6EIB/jR/rQTcQj7n1ZHLamXby0OaRGsXmHIw8WI7t/voKI24CHHwDedKMZ2KtkECrT7urFR6hu/nmn9hbzL5yn4UjaPmbEbofQQgwhKPkY/nbVEJtkgqmceCriwrBc/CbKaGlpKvAZ8vKhYLzUASLqnDCo4hNREXmxTFfVy975WH1rgtvymqC54kX1qU/Al/MlBTYblHxyILpTN2aQietmHv5xalgOfN8vk5Fu8GhhxG6izkX6bf55o6FifIyvaQa+NxSmz9YzCLRpcHXpx9Pjy/43+j7zSv4/vTF9V8fPwE1jKXYZiNmvlSAKvJYzPiUR174UEhvY76KfJZ0a8lScc0XzhdSCKTwPybEVs2mtxJcimZiHqpatgnEz6T5mKjSS6nmfqN63hW2WSES8bbUKCm+lOuKZrtS1m4pYv5/PndS67fpiJNhtmoymzK+iidsj/oU3MKLp0zsuQ7l8y0XCuNzJp9QtotZ6IngrfocniJ2ml9XTKr8oub1gDlWD/nmhSt1pxHE8nOT/qsDEIZydSUTvZ4Fgj0vvJ0O1nKBV6fRWy7SJfcabBIclGbfWCzYZr9u67rIGthq4St7Aakqr4vVh5VoVlp3mezs2kMt2ru9Bu1ha2Dt4Rbt3ZmoPVpVHoEDK4+0KO/eROXlajBFe/Y1mT2MDTN7TQo6IDAA5wwM8ljgoMAADBkYOB0DA3vIwMC5Jtemr5DBXRu9JteGbMOss3tNvi3Xlinay3bH1+HcHGiacyN/ndsJzg1kiZj3vBuwmrnox71lj3kd/k1fJIP7N9CWNzDSwdmmbT9AW+LASA9HLdPU15Y5MNLDAcu4/Zv918Wd4uJIVxeHB3VxbSkiI11cbZUM7+PakkRG+jjXtE0caMsgGOnjADBtGwebIlRdZZF/k5xjytIk9JbLYFLVVNVml4wpTHRROicbW7b9jkndlb6xOJDDY3HJWP8s2ff3j+ouaplRR8NMmlkoTTVpmOpM1tl8qzt844EcSJEMR5qhIxpC6TBVq4KiWkcO0TqCWkepHmod7XDMh30CoU2bgNMIzUKEzjiVmHYQ1pgm8BimjSC0JXLoiVALjwGGABEbIgcBy65yhpurD+UXAI1f2jO/TVnas/Db3RyWbTLWbTKhxvHb9ezCHZJf7GpgoSMtLNETB6BnQpsy4QNaWEg0Qo8KGowAlA4KKLbH1HWojZFjQ5u6moFFYIxoYWC1/rvSi1xQpdfql95s1V2AXunsYTmJIEmEHSk+LY2QdKrzfEZ6s7zq+6mFlu1MT/zSQwIEyyVHml/kjm1q5X8azgiOEXCLv37hvuj2DAHd0B6B6r4Nm1xBtlNZQS6k+1fQhcnvfG7U8r5kP+Q72ntg2Dl284bImJLcBbh47wqiuF+4z7+zy+AGp9vfLFx2DvMAF+a3a+ABBk0+AEva1JLNdKmGMx7bTqtN7byzs1w6JlWnQOyeo4+mM7ZzGWjHQfpezT2FZUSqthihg1luNPfnBLzzqUfLe2s9AQ4k4LRKdRIpYHosylqsc1Ckc2nG6wehXg1ywTZCS6CHwTRKmJcQJKjcJqnzYOKFN6piHvh+eujHlsFv79euq4SnRTKS3djI7YjcJ32tBF+mR34gp/yOhzxBMOJR0strEIa6iEdCnRTCAtSzvt9OMtdcAg81gKcnB87364D6AdyvDzs7GBPDZqd+vDf5sLPjQNNmp3566H/Y2QEW6c20yWLxs8PUTxU/3kQPfwA=
--------------------------------------------------------------------------------
/lectures/img/ch3/markovEQ_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch3/markovEQ_example.png
--------------------------------------------------------------------------------
/lectures/img/ch4/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/.DS_Store
--------------------------------------------------------------------------------
/lectures/img/ch4/Alexander_mosaic.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/Alexander_mosaic.jpeg
--------------------------------------------------------------------------------
/lectures/img/ch4/Causal-Graphs-Advantages.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/Causal-Graphs-Advantages.png
--------------------------------------------------------------------------------
/lectures/img/ch4/Counterfactuals_notation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/Counterfactuals_notation.png
--------------------------------------------------------------------------------
/lectures/img/ch4/Exp_Predictors_Intervention_Targets.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/Exp_Predictors_Intervention_Targets.png
--------------------------------------------------------------------------------
/lectures/img/ch4/Graph-SEM-Excluded.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/Graph-SEM-Excluded.png
--------------------------------------------------------------------------------
/lectures/img/ch4/Graph-SEM-Independence.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/Graph-SEM-Independence.png
--------------------------------------------------------------------------------
/lectures/img/ch4/Graph-SEM.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/Graph-SEM.png
--------------------------------------------------------------------------------
/lectures/img/ch4/Grpah-SEM-Intervene.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/Grpah-SEM-Intervene.png
--------------------------------------------------------------------------------
/lectures/img/ch4/Kidney-stones-graph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/Kidney-stones-graph.png
--------------------------------------------------------------------------------
/lectures/img/ch4/Kidney-stones-xray.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/Kidney-stones-xray.png
--------------------------------------------------------------------------------
/lectures/img/ch4/Kidney-stones.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/Kidney-stones.png
--------------------------------------------------------------------------------
/lectures/img/ch4/SEM-Observe-Colider.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/SEM-Observe-Colider.png
--------------------------------------------------------------------------------
/lectures/img/ch4/SEM-Observe-Confounder.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/SEM-Observe-Confounder.png
--------------------------------------------------------------------------------
/lectures/img/ch4/SEM-Observe-Direct-Effects.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/SEM-Observe-Direct-Effects.png
--------------------------------------------------------------------------------
/lectures/img/ch4/SEM-Observe-Independ-blocked.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/SEM-Observe-Independ-blocked.png
--------------------------------------------------------------------------------
/lectures/img/ch4/SEM-Observe-Independ-condition-collider.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/SEM-Observe-Independ-condition-collider.png
--------------------------------------------------------------------------------
/lectures/img/ch4/SEM-Observe-Independ-condition.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/SEM-Observe-Independ-condition.png
--------------------------------------------------------------------------------
/lectures/img/ch4/SEM-Observe-Independ-nopath.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/SEM-Observe-Independ-nopath.png
--------------------------------------------------------------------------------
/lectures/img/ch4/SEM-Observe-Independent.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/SEM-Observe-Independent.png
--------------------------------------------------------------------------------
/lectures/img/ch4/SEM-Observe-Share-Cause.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/SEM-Observe-Share-Cause.png
--------------------------------------------------------------------------------
/lectures/img/ch4/butterBeer_happiness.ai:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/butterBeer_happiness.ai
--------------------------------------------------------------------------------
/lectures/img/ch4/butterBeer_happiness.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/butterBeer_happiness.png
--------------------------------------------------------------------------------
/lectures/img/ch4/do-operator.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch4/do-operator.png
--------------------------------------------------------------------------------
/lectures/img/ch5/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch5/.DS_Store
--------------------------------------------------------------------------------
/lectures/img/ch5/kronbar.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch5/kronbar.jpg
--------------------------------------------------------------------------------
/lectures/img/ch6/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch6/.DS_Store
--------------------------------------------------------------------------------
/lectures/img/ch6/PC-Method.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch6/PC-Method.png
--------------------------------------------------------------------------------
/lectures/img/ch6/Trick1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch6/Trick1.png
--------------------------------------------------------------------------------
/lectures/img/ch6/causal_structure_comparison.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch6/causal_structure_comparison.png
--------------------------------------------------------------------------------
/lectures/img/ch6/dyspnoea.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch6/dyspnoea.jpeg
--------------------------------------------------------------------------------
/lectures/img/ch6/independence_tests.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch6/independence_tests.png
--------------------------------------------------------------------------------
/lectures/img/ch7/Hotel.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/ch7/Hotel.png
--------------------------------------------------------------------------------
/lectures/img/elements_of_causal_inference_book.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/img/elements_of_causal_inference_book.jpg
--------------------------------------------------------------------------------
/lectures/kidney_treatment_dag:
--------------------------------------------------------------------------------
1 | digraph {
2 | Z [label="Stone Size (Z)"]
3 | T [label="Treatment (T)"]
4 | R [label="Recovery (R)"]
5 | Z -> T
6 | Z -> R
7 | T -> R
8 | }
9 |
--------------------------------------------------------------------------------
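
The DOT source above encodes the kidney-stone example (stone size Z influences both the choice of treatment T and recovery R, and T influences R). As a hedged illustration that is not part of the repository, the same graph could be rebuilt and rendered with the graphviz Python package pinned in lectures/requirements.txt; only the node labels and edges are taken from the file, and the output filename is a hypothetical choice.

# Sketch only: rebuild lectures/kidney_treatment_dag with the graphviz Python API.
# Node labels and edges are copied from the DOT source above; the output name
# "kidney_treatment_dag_rebuilt" is illustrative. Rendering also needs the
# system Graphviz binaries, not just the Python bindings.
import graphviz

dag = graphviz.Digraph()
dag.node("Z", label="Stone Size (Z)")
dag.node("T", label="Treatment (T)")
dag.node("R", label="Recovery (R)")
dag.edges([("Z", "T"), ("Z", "R"), ("T", "R")])

dag.render("kidney_treatment_dag_rebuilt", format="png", cleanup=True)
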
/lectures/kidney_treatment_dag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/kidney_treatment_dag.png
--------------------------------------------------------------------------------
/lectures/kronbar_scm_dag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/kronbar_scm_dag.png
--------------------------------------------------------------------------------
/lectures/requirements.txt:
--------------------------------------------------------------------------------
1 | graphviz==0.19.1
2 |
--------------------------------------------------------------------------------
/lectures/scm_example_dag:
--------------------------------------------------------------------------------
1 | digraph {
2 | X1 [label=X_1]
3 | X2 [label=X_2]
4 | Y [label=Y]
5 | UX1 [label="U_{X1}"]
6 | UY [label=U_Y]
7 | UX2 [label="U_{X2}"]
8 | UX1 -> X1
9 | X1 -> Y
10 | UY -> Y
11 | Y -> X2
12 | UX2 -> X2
13 | }
14 |
--------------------------------------------------------------------------------
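
The graph above (U_{X1} -> X1 -> Y <- U_Y, and Y -> X2 <- U_{X2}) fixes only the structure of the SCM, not its structural equations. As a hedged sketch that is not taken from the course material, one SCM consistent with this DAG could be sampled as follows; the linear functional forms, coefficients, and sample size are illustrative assumptions.

# Sketch only: a hypothetical SCM whose causal graph matches lectures/scm_example_dag.
import numpy as np
import pandas as pd

rng = np.random.default_rng(0)
n = 1000

u_x1 = rng.normal(size=n)   # exogenous noise U_{X1}
u_y  = rng.normal(size=n)   # exogenous noise U_Y
u_x2 = rng.normal(size=n)   # exogenous noise U_{X2}

x1 = u_x1                   # X1 := U_{X1}
y  = 2.0 * x1 + u_y         # Y  := f_Y(X1, U_Y), assumed linear
x2 = -1.0 * y + u_x2        # X2 := f_{X2}(Y, U_{X2}), assumed linear

samples = pd.DataFrame({"x1": x1, "y": y, "x2": x2})
print(samples.corr())       # X1 and X2 are correlated only through Y
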
/lectures/scm_example_dag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ci2Lab/Applied_Causal_Inference_Course/5a2c1764209034628384512b5f666d59fa82bb26/lectures/scm_example_dag.png
--------------------------------------------------------------------------------
/lectures/utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 | import bnlearn as bn
4 | import graphviz as gr
5 | import networkx as nx
6 | import matplotlib.pyplot as plt
7 |
8 | from sklearn.preprocessing import PolynomialFeatures
9 |
10 | def create_table(df):
11 |
12 |     wrap = lambda s, tag, option="": "<" + tag + " " + option + ">" + s + "</" + tag + ">"
13 | wrap_list = lambda lst, tag: "".join(map(lambda s: wrap(str(s), tag), lst))
14 |
15 | table = ""
16 |
17 | table += wrap(wrap("group", "th") +
18 | wrap("treatment=0","th","colspan='2'") +
19 | wrap("treatment=1","th","colspan='2'"), "tr")
20 |
21 |
22 |
23 | for group in sorted(df.group.unique()):
24 | d = df[df.group == group].set_index(["treatment", "recovery"])["count"].to_dict()
25 |
26 | n0 = int(d[(0,0)] + d[(0,1)])
27 | p0 = d[(0,1)] / n0 if n0 != 0 else 0
28 | n1 = int(d[(1,0)] + d[(1,1)])
29 | p1 = d[(1,1)] / n1 if n1 != 0 else 0
30 |
31 | f0 = "{}/{}".format(int(d[(0,1)]), n0)
32 | f1 = "{}/{}".format(int(d[(1,1)]), n1)
33 | p0s = "{:.2f}".format(p0)
34 | p1s = "{:.2f}".format(p1)
35 |
36 | if p0 >= p1:
37 | f0 = wrap(f0, "b")
38 | p0s = wrap(p0s, "b")
39 | else:
40 | f1 = wrap(f1, "b")
41 | p1s = wrap(p1s, "b")
42 |
43 | table += wrap(wrap_list([group, f0, p0s, f1, p1s], "td"), "tr")
44 |
45 | d = df.groupby(["treatment", "recovery"])["count"].sum().to_dict()
46 |
47 | n0 = int(d[(0,0)] + d[(0,1)])
48 | p0 = d[(0,1)] / n0 if n0 != 0 else 0
49 | n1 = int(d[(1,0)] + d[(1,1)])
50 | p1 = d[(1,1)] / n1 if n1 != 0 else 0
51 |
52 | f0 = "{}/{}".format(int(d[(0,1)]), n0)
53 | f1 = "{}/{}".format(int(d[(1,1)]), n1)
54 | p0s = "{:.2f}".format(p0)
55 | p1s = "{:.2f}".format(p1)
56 |
57 | if p0 >= p1:
58 | f0 = wrap(f0, "b")
59 | p0s = wrap(p0s, "b")
60 | else:
61 | f1 = wrap(f1, "b")
62 | p1s = wrap(p1s, "b")
63 |
64 | table += wrap(wrap_list(["total:", f0, p0s, f1, p1s], "td"), "tr")
65 | table = wrap(table, "table")
66 |
67 | return table
68 |
69 |
70 |
71 |
72 |
73 | def generate_dataset_0(n_samples=500, set_X=None, show_z=False):
74 | """
75 |     Generate samples from the SCM:
76 | Nodes: (X,Y,Z)
77 | Edges: (Z -> X, Z-> Y, X -> Y)
78 |
79 | All variables are binary.
80 |
81 |     Designed to generate Simpson's paradox.
82 |
83 | Args
84 | ----
85 | n_samples: int, the number of samples to generate
86 |
87 | set_X: array, values to set x
88 |     show_z: bool, if True also include the confounder z in the returned frame
89 | Returns
90 | -------
91 |     samples: pandas.DataFrame
92 |
93 | """
94 | p_z = 0.5
95 | p_x_z = [0.9, 0.1]
96 | p_y_xz = [0.2, 0.4, 0.6, 0.8]
97 |
98 | z = np.random.binomial(n=1, p=p_z, size=n_samples)
99 |
100 | if set_X is not None:
101 | assert(len(set_X) == n_samples)
102 | x = set_X
103 | else:
104 | p_x = np.choose(z, p_x_z)
105 | x = np.random.binomial(n=1, p=p_x, size=n_samples)
106 |
107 | p_y = np.choose(x+2*z, p_y_xz)
108 | y = np.random.binomial(n=1, p=p_y, size=n_samples)
109 |
110 | if show_z:
111 | return pd.DataFrame({"x":x, "y":y, "z":z})
112 |
113 | return pd.DataFrame({"x":x, "y":y})
114 |
115 |
116 | def generate_dataset_1(n_samples=500, set_X=None):
117 | """
118 |     Generate samples from the SCM:
119 | Nodes: (X,Y,Z)
120 | Edges: (Z -> X, Z-> Y, X -> Y)
121 |
122 | X is binary, Z and Y are continuous.
123 |
124 | Args
125 | ----
126 | n_samples: int, the number of samples to generate
127 |
128 | set_X: array, values to set x
129 |
130 | Returns
131 | -------
132 |     samples: pandas.DataFrame
133 |
134 | """
135 |
136 | z = np.random.uniform(size=n_samples)
137 |
138 | if set_X is not None:
139 | assert(len(set_X) == n_samples)
140 | x = set_X
141 | else:
142 | p_x = np.minimum(np.maximum(z,0.1), 0.9)
143 | x = np.random.binomial(n=1, p=p_x, size=n_samples)
144 |
145 | y0 = 2 * z
146 | y1 = y0 - 0.5
147 |
148 | y = np.where(x == 0, y0, y1) + 0.3 * np.random.normal(size=n_samples)
149 |
150 | return pd.DataFrame({"x":x, "y":y, "z":z})
151 |
152 |
153 |
154 | def generate_dataset_2(n_samples=500, set_X=None):
155 | """
156 |     Generate samples from the SCM:
157 | Nodes: (X,Y,Z)
158 | Edges: (Z -> X, Z-> Y, X -> Y)
159 |
160 | X is binary, Z and Y are continuous.
161 |
162 | Args
163 | ----
164 | n_samples: int, the number of samples to generate
165 |
166 | set_X: array, values to set x
167 |
168 | Returns
169 | -------
170 |     samples: pandas.DataFrame
171 |
172 | """
173 |
174 | z = np.random.uniform(size=n_samples)
175 |
176 | if set_X is not None:
177 | assert(len(set_X) == n_samples)
178 | x = set_X
179 | else:
180 | p_x = np.minimum(np.maximum(z,0.1), 0.8)
181 | x = np.random.binomial(n=1, p=p_x, size=n_samples)
182 |
183 | y0 = 2 * z
184 | y1 = np.where(z < 0.2, 3, y0)
185 |
186 | y = np.where(x == 0, y0, y1) + 0.3 * np.random.normal(size=n_samples)
187 |
188 | return pd.DataFrame({"x":x, "y":y, "z":z})
189 |
190 |
191 | def generate_dataset_3(n_samples=500, set_X=None):
192 | """
193 |     Generate samples from the SCM:
194 | Nodes: (X,Y,Z)
195 | Edges: (Z -> X, Z-> Y, X -> Y)
196 |
197 | X is binary, Z and Y are continuous.
198 |
199 | Args
200 | ----
201 | n_samples: int, the number of samples to generate
202 |
203 | set_X: array, values to set x
204 |
205 | Returns
206 | -------
207 |     samples: pandas.DataFrame
208 |
209 | """
210 |
211 | z = np.random.uniform(size=n_samples)
212 |
213 | if set_X is not None:
214 | assert(len(set_X) == n_samples)
215 | x = set_X
216 | else:
217 | p_x = np.where(z < 0.3, 0, np.where(z > 0.7, 1, 0.7))
218 | x = np.random.binomial(n=1, p=p_x, size=n_samples)
219 |
220 | y0 = np.where(z >= 0.4, -4*(z - 0.4), 0)
221 | y1 = np.where(z < 0.6, -4*(z - 0.6), 0) + 1
222 |
223 | y = np.where(x == 0, y0, y1) + 0.3 * np.random.normal(size=n_samples)
224 |
225 | return pd.DataFrame({"x":x, "y":y, "z":z})
226 |
227 |
228 | def generate_exercise_dataset_0(n_samples=500, set_X=None):
229 | """
230 |     Generate samples from the SCM:
231 | Nodes: (X,Y,Z)
232 | Edges: (Z -> X, Z-> Y, X -> Y)
233 |
234 |     X is binary; Z is a 5-dimensional vector of continuous covariates and Y is continuous.
235 |
236 | Args
237 | ----
238 | n_samples: int, the number of samples to generate
239 |
240 | set_X: array, values to set x
241 |
242 | Returns
243 | -------
244 |     samples: pandas.DataFrame
245 |
246 | """
247 |
248 | z = np.random.normal(size=(n_samples, 5))
249 | beta_treatment = np.array([0,1,2,0,0])
250 | beta_effect = np.array([1,1,2,0,0])
251 |
252 | if set_X is not None:
253 | assert(len(set_X) == n_samples)
254 | x = set_X
255 | else:
256 | p_x = _sigma(np.dot(z, beta_treatment))
257 | x = np.random.binomial(n=1, p=p_x, size=n_samples)
258 |
259 | y0 = np.dot(z, beta_effect)
260 | y1 = np.dot(z, beta_effect) + 1
261 |
262 | y = np.where(x == 0, y0, y1) + 0.3 * np.random.normal(size=n_samples)
263 |
264 | df = pd.DataFrame({"x":x, "y":y})
265 |
266 | for i in range(z.shape[1]):
267 | df["z_{}".format(i)] = z[:, i]
268 |
269 | return df
270 |
271 |
272 | def generate_exercise_dataset_1(n_samples=500, set_X=None):
273 | """
274 |     Generate samples from the SCM:
275 | Nodes: (X,Y,Z)
276 | Edges: (Z -> X, Z-> Y, X -> Y)
277 |
278 |     X is binary; Z is a 5-dimensional vector of continuous covariates and Y is continuous.
279 |
280 | Args
281 | ----
282 | n_samples: int, the number of samples to generate
283 |
284 | set_X: array, values to set x
285 |
286 | Returns
287 | -------
288 |     samples: pandas.DataFrame
289 |
290 | """
291 |
292 | z = np.random.normal(size=(n_samples, 5))
293 | beta_treatment = np.array([-1,-1,-2,0,0])
294 | beta_effect = np.array([-1,-1,-2,0, 0.5])
295 |
296 | p_x = _sigma(np.dot(z, beta_treatment))
297 |
298 | if set_X is not None:
299 | assert(len(set_X) == n_samples)
300 | x = set_X
301 | else:
302 | x = np.random.binomial(n=1, p=p_x, size=n_samples)
303 |
304 | y0 = np.dot(z, beta_effect)
305 | y1 = np.dot(z, beta_effect) * (1 + p_x)
306 |
307 | y = np.where(x == 0, y0, y1) + 0.3 * np.random.normal(size=n_samples)
308 |
309 | df = pd.DataFrame({"x":x, "y":y})
310 |
311 | for i in range(z.shape[1]):
312 | df["z_{}".format(i)] = z[:, i]
313 |
314 | return df
315 |
316 |
317 |
318 | def generate_exercise_dataset_2(n_samples=500, set_X=None):
319 | """
320 |     Generate samples from the SCM:
321 | Nodes: (X,Y,Z)
322 | Edges: (Z -> X, Z-> Y, X -> Y)
323 |
324 |     X is binary; Z is a 5-dimensional vector of continuous covariates and Y is continuous.
325 |
326 | Args
327 | ----
328 | n_samples: int, the number of samples to generate
329 |
330 | set_X: array, values to set x
331 |
332 | Returns
333 | -------
334 |     samples: pandas.DataFrame
335 |
336 | """
337 | beta_treatment = np.array([ 0.15207176, -0.11653175, -0.34068517, 0.64009405, -0.7243722 ,
338 | -2.7122607 , 2.3021001 , 0.04638091, 1.4096595 , -0.88538833,
339 | -1.27773486, 1.59597409, -1.27020399, 2.07570976, 0.99324477,
340 | -0.53702672, -0.10555752, 1.45058372, -1.80245312, -1.92714373,
341 | 1.65904829])
342 | beta_effect_y0 = np.array([ 0.33313179, -0.04529036, 0.0294476 , -1.57207538, -0.00679557,
343 | 0.87759851, -1.78974391, -0.78558499, -1.50506646, -0.17133791,
344 | 0.7489653 , -0.74583104, 0.79613557, -0.28718545, -1.194678 ,
345 | 0.3952664 , -0.32922775, 0.57037979, 1.19875008, 0.89582566,
346 | -1.34180865])
347 | beta_effect_y1 = np.array([-0.8001777 , 1.16531638, -0.82150055, -0.27853936, 1.74561238,
348 | 0.50031182, -1.74396855, -0.86928906, 0.26423181, 0.01572352,
349 | 1.22709648, -0.08222703, -0.91403023, 0.05014785, -1.34730904,
350 | 0.01790165, -0.60325542, 0.47473682, 0.40199847, 0.49554447,
351 | -0.13907751])
352 |
353 | Z = np.random.normal(size=(n_samples, 5))
354 | Z2 = PolynomialFeatures().fit_transform(Z)
355 |
356 | if set_X is not None:
357 | assert(len(set_X) == n_samples)
358 | x = set_X
359 | else:
360 | p_x = _sigma(np.dot(Z2, beta_treatment))
361 | x = np.random.binomial(n=1, p=p_x, size=n_samples)
362 |
363 | y0 = np.dot(Z2, beta_effect_y0)
364 | y1 = np.dot(Z2, beta_effect_y1) + 5
365 |
366 | y = np.where(x == 0, y0, y1) + np.random.normal(size=n_samples)
367 |
368 | df = pd.DataFrame({"x":x, "y":y})
369 |
370 | for i in range(Z.shape[1]):
371 | df["z_{}".format(i)] = Z[:, i]
372 |
373 | return df
374 |
375 |
376 |
377 | def _sigma(x):
378 | return 1 / (1 + np.exp(-x))
379 |
380 |
381 |
382 |
383 | """ Generate a pandas DataFrame sampled from the 'dyspnoea' dataset """
384 | def generate_dyspnoea_dataset():
385 | """ Generates the dyspnea dataset. It load the data from https://www.bnlearn.com/bnrepository/
386 | but it dropes the 'either' node, which is confusing
387 | """
388 |     # Original dataset from bnlearn (not used directly here):
389 | # DAG = bn.import_DAG('asia')
390 |
391 | # Define the network structure
392 | edges = [('asia', 'tub'),
393 | ('tub', 'dysp'),
394 | ('tub', 'xray'),
395 | ('lung', 'xray'),
396 | ('lung', 'dysp'),
397 | ('smoke', 'lung'),
398 | ('smoke', 'bronc'),
399 | ('bronc', 'dysp')]
400 |
401 | DAG = bn.make_DAG(edges)
402 | df = bn.import_example('asia')
403 | DAG = bn.parameter_learning.fit(DAG, df, methodtype='bayes', verbose = 0)
404 | df = bn.sampling(DAG, n=20000)
405 |
406 | df = df.rename(columns = {'asia' : 'tuberculosis_area',
407 | 'tub' : 'tuberculosis',
408 | 'lung' : 'lung_cancer',
409 | 'bronc' : 'bronchitis',
410 | 'dysp' : 'dyspnea'})
411 |
412 | return df
413 |
414 |
415 |
416 | # %% PLOT functions
417 |
418 | def plot_from_edges(edges):
419 | """ 'edges' is a Python list describing the directions of arrows
420 | """
421 | g = gr.Digraph()
422 | for i in range(0, len(edges)):
423 | g.edge(*edges[i])
424 | return g
425 |
426 |
427 | def plot_dyspnoea_dataset(mode = "graphviz"):
428 | # plot ground truth using bnlearn
429 | # plt.figure();
430 | # params_static = {
431 | # 'layout' : 'graphvix_layout'
432 | # }
433 |
434 | # G = bn.plot(DAG, node_color='red', node_size=5000, params_static = params_static)
435 |
436 | # plot using nx
437 | # plt.figure();
438 |
439 | edges = [('tuberculosis_area', 'tuberculosis'),
440 | ('tuberculosis', 'dyspnea'),
441 | ('tuberculosis', 'xray'),
442 | ('lung_cancer', 'xray'),
443 | ('lung_cancer', 'dyspnea'),
444 | ('smoke', 'lung_cancer'),
445 | ('smoke', 'bronchitis'),
446 | ('bronchitis', 'dyspnea')]
447 |
448 | if mode == "networkx":
449 | DAG = bn.make_DAG(edges)
450 | graph = nx.DiGraph(DAG['adjmat'])
451 | pos = nx.nx_agraph.graphviz_layout(graph, prog="neato")
452 | options = {
453 | 'node_color': 'red',
454 | 'node_size': 1000,
455 | 'width': 1,
456 | 'arrowstyle': '-|>',
457 | 'arrowsize': 12,
458 | }
459 | nx.draw(graph, pos, with_labels=True, **options)
460 |
461 | elif mode == "graphviz":
462 | g = plot_from_edges(edges)
463 | return g
464 |
465 |
466 |
467 | def plot_from_model_bnlearn(model):
468 | # plot
469 | g = gr.Digraph()
470 |
471 | for i in range(0, len(model['model_edges'])):
472 | g.edge(*model['model_edges'][i])
473 | return g
474 |
475 |
476 | def plot_from_model_pgmpy(edges):
477 | # plot
478 |     edges = [el for el in edges]  # materialize the edge view into a plain list
479 | g = gr.Digraph()
480 |
481 | for i in range(0, len(edges)):
482 | g.edge(edges[i][0],edges[i][1])
483 | return g
--------------------------------------------------------------------------------
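
utils.py bundles the data generators and plotting helpers used by the lecture notebooks. As a hedged usage sketch (not part of the repository), the snippet below shows how the Simpson's-paradox generator and the edge-list plotter might be called from a notebook sitting next to utils.py; it assumes the module's dependencies (numpy, pandas, bnlearn, graphviz, networkx, matplotlib, scikit-learn) are installed.

# Sketch only: example calls into lectures/utils.py.
import numpy as np
from utils import generate_dataset_0, plot_from_edges

# Observational data from the Simpson's-paradox generator, keeping the
# confounder z visible so per-stratum recovery rates can be inspected.
df_obs = generate_dataset_0(n_samples=2000, show_z=True)
print(df_obs.groupby(["z", "x"])["y"].mean())

# Simulated intervention: force x = 1 for every sample via set_X and compare
# the resulting mean outcome with the observational one.
df_do1 = generate_dataset_0(n_samples=2000, set_X=np.ones(2000, dtype=int), show_z=True)
print(df_obs["y"].mean(), df_do1["y"].mean())

# Draw a small DAG from an edge list; in a notebook, displaying g renders it
# (this needs the system Graphviz binaries in addition to the Python package).
g = plot_from_edges([("Z", "X"), ("Z", "Y"), ("X", "Y")])
g
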