├── README.md ├── install.sh ├── part1 ├── ch01_02 │ ├── ch01_02_intro.Rmd │ ├── ch01_02_intro.html │ ├── ch01_02_intro.md │ └── figure │ │ ├── ab.png │ │ ├── bayes.png │ │ ├── bayesAB.png │ │ ├── eq2.1.png │ │ ├── eq2.2.png │ │ ├── fig2.1.png │ │ ├── fig2.2.png │ │ ├── fig2.3.png │ │ ├── fig2.4.png │ │ ├── fig2.5.png │ │ ├── fig2.6.png │ │ ├── pp.png │ │ └── whypuppies.png ├── ch03 │ └── Ggplot_Example │ │ ├── Ggplot_Example.Rmd │ │ ├── Ggplot_Example.html │ │ ├── Ggplot_Example.md │ │ ├── Ggplot_Example_files │ │ └── figure-html │ │ │ ├── unnamed-chunk-3-1.png │ │ │ ├── unnamed-chunk-4-1.png │ │ │ └── unnamed-chunk-6-1.png │ │ └── forR.csv ├── ch04 │ ├── ex04_Rbasic.Rmd │ ├── ex04_Rbasic.html │ ├── ex04_Rbasic.md │ └── person.xls ├── ch05 │ ├── ch5(2).pptx │ ├── ch5(3).pptx │ └── ch5.pptx └── ch06 │ ├── .Rproj.user │ ├── 62059814 │ │ └── sdb │ │ │ ├── prop │ │ │ ├── 827F99E8 │ │ │ └── INDEX │ │ │ └── s-49C3ABAE │ │ │ ├── 1F6592A3 │ │ │ ├── 591AD244 │ │ │ └── lock_file │ └── 4D38B2AA │ │ ├── pcs │ │ ├── files-pane.pper │ │ ├── source-pane.pper │ │ ├── windowlayoutstate.pper │ │ └── workbench-pane.pper │ │ ├── persistent-state │ │ └── sdb │ │ ├── prop │ │ ├── 2D2DA549 │ │ ├── 3B7C0482 │ │ ├── AFD7EFCA │ │ └── INDEX │ │ └── s-759A88F7 │ │ ├── C35CAB9B │ │ ├── E143050D │ │ └── lock_file │ ├── BernBeta.R │ ├── BernBetaExample.R │ ├── BernBetaExample.jpg │ ├── BernBetaExample.png │ ├── BernGrid.R │ ├── BernGridExample.R │ ├── DBDA2E-utilities.R │ ├── bayesch6.Rproj │ ├── beta_dist.PNG │ ├── ch6.Rmd │ ├── ch6.html │ ├── ch6.md │ └── figure │ ├── unnamed-chunk-2-1.png │ ├── unnamed-chunk-3-1.png │ ├── unnamed-chunk-4-1.png │ └── unnamed-chunk-5-1.png ├── part2 ├── .DS_Store ├── ch08 │ ├── .DS_Store │ ├── .ipynb_checkpoints │ │ └── 08_JAGS_Exercise-checkpoint.ipynb │ ├── 08_JAGS.ipynb │ ├── 08_JAGS_Exercise.ipynb │ ├── Mcmc.Rdata │ ├── data │ │ ├── .DS_Store │ │ ├── DBDA2E-utilities.R │ │ ├── Exercise.08.1.csv │ │ ├── Exercise.08.1Mcmc.Rdata │ │ ├── Exercise.08.1SummaryInfo.csv │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-A.R │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-B.R │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-C.R │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-Diagtheta[1].eps │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-Diagtheta[2].eps │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-Example.R │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-Mcmc.Rdata │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-Post.eps │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-SummaryInfo.csv │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta.R │ │ ├── TEMPmodel.txt │ │ ├── ThetaDiag.eps │ │ ├── z15N50.csv │ │ └── z6N8z2N7.csv │ └── figures │ │ ├── eq8.1.png │ │ ├── eq8.2.png │ │ ├── fig8.1.png │ │ ├── fig8.2.png │ │ ├── fig8.3.png │ │ ├── fig8.4.png │ │ ├── fig8.5.png │ │ ├── fig8.6.png │ │ └── fig8.7.png ├── ch09 │ ├── BattingAverage.csv │ ├── DBDA2E-utilities.R │ ├── Jags-Ybinom-XnomSsubjCcat-MbinomBetaOmegaKappa.R │ ├── Jags-Ydich-XnomSsubj-MbernBetaOmegaKappa.R │ ├── Jags-Ydich-XnomSsubj-MbinomBetaOmegaKappa.R │ ├── TEMPmodel.txt │ ├── TherapeuticTouchData.csv │ ├── ch09_HierarchicalModels.Rmd │ ├── ch09_HierarchicalModels.html │ ├── ch09_HierarchicalModels.md │ ├── data │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-A.R │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-B.R │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-C.R │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-Diagtheta[1].eps │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-Diagtheta[2].eps │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-Example.R │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-Mcmc.Rdata │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta-Post.eps │ │ ├── 
Jags-Ydich-XnomSsubj-MbernBeta-SummaryInfo.csv │ │ ├── Jags-Ydich-XnomSsubj-MbernBeta.R │ │ ├── ThetaDiag.eps │ │ ├── z15N50.csv │ │ └── z6N8z2N7.csv │ ├── img │ │ ├── fig9.1.png │ │ ├── fig9.10.png │ │ ├── fig9.11.png │ │ ├── fig9.12.png │ │ ├── fig9.13.png │ │ ├── fig9.14.png │ │ ├── fig9.15.png │ │ ├── fig9.16.png │ │ ├── fig9.17.png │ │ ├── fig9.2.png │ │ ├── fig9.3.png │ │ ├── fig9.4.png │ │ ├── fig9.5.png │ │ ├── fig9.6.png │ │ ├── fig9.7.png │ │ ├── fig9.8.png │ │ └── fig9.9.png │ └── outputs │ │ ├── output9.1.png │ │ ├── output9.10.png │ │ ├── output9.11.png │ │ ├── output9.12.png │ │ ├── output9.13.png │ │ ├── output9.14.png │ │ ├── output9.15.png │ │ ├── output9.16.png │ │ ├── output9.2.png │ │ ├── output9.3.png │ │ ├── output9.4.png │ │ ├── output9.5.png │ │ ├── output9.6.png │ │ ├── output9.7.png │ │ ├── output9.8.png │ │ └── output9.9.png ├── ch09_sp │ ├── bayesian_word │ │ ├── .ipynb_checkpoints │ │ │ └── Untitled-checkpoint.ipynb │ │ ├── bayseian_word.pdf │ │ ├── bayseian_word.pptx │ │ ├── rt.jpg │ │ └── wordnet_len.ipynb │ └── deeplearning │ │ ├── Patent_kr.ipynb │ │ └── data │ │ ├── patent_kr.csv │ │ └── wips │ │ ├── A_kr.xlsx │ │ ├── B_kr.xlsx │ │ ├── C_kr.xlsx │ │ ├── D_kr.xlsx │ │ ├── E_kr.xlsx │ │ ├── G_kr.xlsx │ │ └── H_kr.xlsx ├── ch10 │ ├── DBDA2E-utilities.R │ ├── Jags-Ydich-Xnom1subj-MbernBeta.R │ ├── Jags-Ydich-Xnom1subj-MbernBetaModelComp-Diagm.eps │ ├── Jags-Ydich-Xnom1subj-MbernBetaModelComp-Diagtheta.eps │ ├── Jags-Ydich-Xnom1subj-MbernBetaModelComp-Mcmc.Rdata │ ├── Jags-Ydich-Xnom1subj-MbernBetaModelComp-Post.eps │ ├── Jags-Ydich-Xnom1subj-MbernBetaModelComp.R │ ├── Jags-Ydich-Xnom1subj-MbernBetaModelCompPseudoPrior-PSEUDO-Diagm.eps │ ├── Jags-Ydich-Xnom1subj-MbernBetaModelCompPseudoPrior-PSEUDO-Mcmc.Rdata │ ├── Jags-Ydich-Xnom1subj-MbernBetaModelCompPseudoPrior-PSEUDO-Post.eps │ ├── Jags-Ydich-Xnom1subj-MbernBetaModelCompPseudoPrior.R │ ├── Jags-Ydich-Xnom1subj-MbernBeta_ver1.R │ ├── TEMPmodel.txt │ ├── ch10.Rproj │ ├── ch10document.Rmd │ ├── ch10document.html │ ├── ch10document_files │ │ └── figure-html │ │ │ ├── unnamed-chunk-13-1.png │ │ │ └── unnamed-chunk-19-1.png │ └── figures │ │ ├── fig1.png │ │ ├── fig11.png │ │ ├── fig2.png │ │ ├── fig3.png │ │ ├── fig4.png │ │ └── fig5.png └── ch11 │ ├── Ch11(my presentation)-Copy1.ipynb │ ├── Ch11-presentation.ipynb │ ├── Ch11.ipynb │ ├── files │ ├── 1.png │ ├── 2.png │ ├── 3.png │ ├── 5.PNG │ ├── Beta_distribution.png │ ├── Eq1.png │ ├── Fig1.png │ ├── Fiq6.png │ ├── Fiq7.png │ ├── eq2.png │ ├── eq3.png │ ├── eq4.png │ ├── fig2.png │ ├── fig3.png │ ├── fig4.png │ ├── fig5.png │ ├── hist.png │ ├── nb.png │ └── nbinomial.jpg │ └── 캡처.PNG ├── part3 ├── ch13 │ ├── CHAPTER_13_GoalsPowerandSampleSize.html │ ├── CHAPTER_13_GoalsPowerandSampleSize.ipynb │ ├── CHAPTER_13_GoalsPowerandSampleSize.md │ ├── DBDA2E-utilities.R │ ├── Jags-Ydich-Xnom1subj-MbernBeta.R │ ├── Jags-Ydich-XnomSsubj-MbinomBetaOmegaKappa-Power-100-100.eps │ ├── Jags-Ydich-XnomSsubj-MbinomBetaOmegaKappa-Power.R │ └── Jags-Ydich-XnomSsubj-MbinomBetaOmegaKappa.R ├── ch14 │ ├── 14_Stan.ipynb │ ├── data │ │ ├── DBDA2E-utilities.R │ │ ├── Stan-Ydich-Xnom1subj-MbernBeta-Diaglp__.eps │ │ ├── Stan-Ydich-Xnom1subj-MbernBeta-Diaglp__.png │ │ ├── Stan-Ydich-Xnom1subj-MbernBeta-Diagtheta.eps │ │ ├── Stan-Ydich-Xnom1subj-MbernBeta-Diagtheta.png │ │ ├── Stan-Ydich-Xnom1subj-MbernBeta-Example.R │ │ ├── Stan-Ydich-Xnom1subj-MbernBeta-Mcmc.Rdata │ │ ├── Stan-Ydich-Xnom1subj-MbernBeta-Post.eps │ │ ├── Stan-Ydich-Xnom1subj-MbernBeta-Post.png │ │ ├── 
Stan-Ydich-Xnom1subj-MbernBeta-StanDso.Rdata │ │ ├── Stan-Ydich-Xnom1subj-MbernBeta-StanFit.Rdata │ │ ├── Stan-Ydich-Xnom1subj-MbernBeta-SummaryInfo.csv │ │ ├── Stan-Ydich-Xnom1subj-MbernBeta.R │ │ └── z15N50.csv │ └── figures │ │ ├── eq14.1.png │ │ ├── fig14.1.png │ │ ├── fig14.2.png │ │ ├── fig14.3.png │ │ ├── fig8.2.png │ │ └── fig8.3.png ├── ch16 │ ├── 16_Metric-Predicted_Variable_on_One_or_Two_Groups.ipynb │ ├── data │ │ ├── DBDA2E-utilities.R │ │ ├── Jags-Ymet-Xnom1grp-Mnormal-Example.R │ │ ├── Jags-Ymet-Xnom1grp-Mnormal.R │ │ ├── Jags-Ymet-Xnom1grp-Mrobust-Example.R │ │ ├── Jags-Ymet-Xnom1grp-Mrobust.R │ │ ├── OneGroupIQnormal-Diagmu.png │ │ ├── OneGroupIQnormal-Diagsigma.png │ │ ├── OneGroupIQnormal-Mcmc.Rdata │ │ ├── OneGroupIQnormal-Post.png │ │ ├── OneGroupIQnormal-PostPairs.png │ │ ├── OneGroupIQnormal-SummaryInfo.csv │ │ ├── Stan-Ymet-Xnom1grp-Mrobust-Example.R │ │ ├── Stan-Ymet-Xnom1grp-Mrobust.R │ │ ├── Stan-Ymet-Xnom2grp-MrobustHet-Example.R │ │ ├── Stan-Ymet-Xnom2grp-MrobustHet.R │ │ ├── TEMPmodel.txt │ │ └── TwoGroupIQ.csv │ └── figures │ │ ├── cap16.1.png │ │ ├── cap16.2.png │ │ ├── cap16.3.png │ │ ├── cap16.4.png │ │ ├── cap16.5.png │ │ ├── cap16.6.png │ │ ├── eq16.1.png │ │ ├── eq16.2.png │ │ ├── eq16.3.png │ │ ├── eq16.4.png │ │ ├── eq16.5.png │ │ ├── eq16.6.png │ │ ├── fig16.1.png │ │ ├── fig16.10-1.png │ │ ├── fig16.10-2.png │ │ ├── fig16.11.png │ │ ├── fig16.12.png │ │ ├── fig16.13.png │ │ ├── fig16.2.png │ │ ├── fig16.3.png │ │ ├── fig16.4.png │ │ ├── fig16.5.png │ │ ├── fig16.6.png │ │ ├── fig16.7.png │ │ ├── fig16.8.png │ │ ├── fig16.9.png │ │ ├── tbl15.1.png │ │ ├── tbl15.2.png │ │ └── tbl15.3.png └── sp_dist │ └── dist_types.ipynb ├── part4 ├── ch18 │ ├── 18_Metric_Predicted_Variable_with_Multiple_Metric_Predictors.ipynb │ ├── data │ │ ├── DBDA2E-utilities.R │ │ ├── Guber1999data.csv │ │ ├── Jags-Ymet-XmetMulti-Mrobust-Example.R │ │ ├── Jags-Ymet-XmetMulti-Mrobust.R │ │ ├── Jags-Ymet-XmetMulti-MrobustVarSelect-Example.R │ │ └── Jags-Ymet-XmetMulti-MrobustVarSelect.R │ └── figures │ │ ├── cap18.1.png │ │ ├── cap18.2.png │ │ ├── cap18.3.png │ │ ├── cap18.new.1.png │ │ ├── cap18.new.2.png │ │ ├── cap18.new.3.png │ │ ├── cap18.new.4.png │ │ ├── cap18.new.5.png │ │ ├── cap18.new.6.png │ │ ├── cap18.new.7.png │ │ ├── cap_apr.png │ │ ├── cap_fns.png │ │ ├── eq18.1.png │ │ ├── eq18.2-4.png │ │ ├── eq18.5.png │ │ ├── eq18.6.png │ │ ├── fig17.2.png │ │ ├── fig18.1.png │ │ ├── fig18.10.png │ │ ├── fig18.11.1.png │ │ ├── fig18.11.2.png │ │ ├── fig18.12.1.png │ │ ├── fig18.12.2.png │ │ ├── fig18.13.png │ │ ├── fig18.14.1.png │ │ ├── fig18.14.2.png │ │ ├── fig18.15.1.png │ │ ├── fig18.15.2.png │ │ ├── fig18.15.3.png │ │ ├── fig18.2.png │ │ ├── fig18.3.png │ │ ├── fig18.4.png │ │ ├── fig18.5.1.png │ │ ├── fig18.5.2.png │ │ ├── fig18.6.1.png │ │ ├── fig18.6.2.png │ │ ├── fig18.7.1.png │ │ ├── fig18.7.2.png │ │ ├── fig18.8.png │ │ ├── fig18.9.1.png │ │ ├── fig18.9.2.png │ │ ├── tbl15.1.png │ │ ├── tbl15.2.png │ │ └── tbl15.3.png ├── ch24 │ ├── ch.24 Count Predicted Variable.ipynb │ └── figures │ │ ├── equation24.1-.png │ │ ├── equation24.1.png │ │ ├── equation24.2.png │ │ ├── equation24.3.png │ │ ├── figure24.1.png │ │ ├── figure24.2.png │ │ ├── figure24.3.png │ │ ├── figure24.4.png │ │ ├── figure24.5.png │ │ ├── figure24.6.png │ │ ├── table24.1.png │ │ └── table24.2.png └── sp_bayes_coding │ ├── bayes_coding.ipynb │ ├── data │ ├── changepointdata.txt │ └── z15N50.csv │ └── figures │ ├── br.png │ ├── cd.png │ ├── fig8.2.png │ └── gr.png └── pip-requirements.txt /README.md: 
-------------------------------------------------------------------------------- 1 | bayesianR 2 | ========= 3 | -------------------------------------------------------------------------------- /install.sh: -------------------------------------------------------------------------------- 1 | virtualenv --no-site-packages --distribute ~/.venv/bayesianr 2 | source ~/.venv/bayesianr/bin/activate 3 | pip install -r pip-requirements.txt 4 | -------------------------------------------------------------------------------- /part1/ch01_02/figure/ab.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/ab.png -------------------------------------------------------------------------------- /part1/ch01_02/figure/bayes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/bayes.png -------------------------------------------------------------------------------- /part1/ch01_02/figure/bayesAB.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/bayesAB.png -------------------------------------------------------------------------------- /part1/ch01_02/figure/eq2.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/eq2.1.png -------------------------------------------------------------------------------- /part1/ch01_02/figure/eq2.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/eq2.2.png -------------------------------------------------------------------------------- /part1/ch01_02/figure/fig2.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/fig2.1.png -------------------------------------------------------------------------------- /part1/ch01_02/figure/fig2.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/fig2.2.png -------------------------------------------------------------------------------- /part1/ch01_02/figure/fig2.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/fig2.3.png -------------------------------------------------------------------------------- /part1/ch01_02/figure/fig2.4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/fig2.4.png -------------------------------------------------------------------------------- /part1/ch01_02/figure/fig2.5.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/fig2.5.png -------------------------------------------------------------------------------- /part1/ch01_02/figure/fig2.6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/fig2.6.png -------------------------------------------------------------------------------- /part1/ch01_02/figure/pp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/pp.png -------------------------------------------------------------------------------- /part1/ch01_02/figure/whypuppies.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch01_02/figure/whypuppies.png -------------------------------------------------------------------------------- /part1/ch03/Ggplot_Example/Ggplot_Example.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Ggplot_Example" 3 | output: 4 | html_document: 5 | keep_md: yes 6 | --- 7 | 8 | 9 | 10 | 11 | 1. 목적론적 설명 : 대상의 기능이나, 의도 목적에 근거한 설명 12 | 13 | 14 | 예-박쥐들은 모기의 개체수를 조절하기 위해 사냥을 한다. 15 | 16 | 17 | 18 | 19 | 2. 기계론적 설명: 대상의 근접 기제에 근거한 설명 20 | 21 | 22 | 예-수소원자와 산소원자가 결합해서 물이 된다. 23 | 24 | 25 | 26 | 27 | 28 | 29 | 1.ggplot2 packages 30 | ```{r} 31 | #install.packages("ggplot2") 32 | library(ggplot2) 33 | ``` 34 | 35 | Read data 36 | ```{r} 37 | Replicate <- read.csv("C:/Users/Yoo/Documents/GitHub/bayesianR/part1/ch03/forR.csv", header=T) 38 | Replicate 39 | ``` 40 | 41 | dv - 전체 목적론적 설명 중, 틀린 목적론적 설명이 옳다고 판단한 비율 42 | 43 | classnum - 대학에서 수강한 과학 관련 수업의 개수 44 | 45 | condition- 실험 처치 조건 46 | 47 | 48 | Scatter Plot 49 | ```{r} 50 | scatter<-ggplot(Replicate,aes(dv,classnum)) 51 | scatter+geom_point(aes(size=1))+labs(x="Correct Response to Teleological Explanation",y="Number of Science Class") 52 | ``` 53 | 54 | Scatter Plot with Smooth Line 55 | ```{r} 56 | scatter+geom_point(aes(size=1, color=as.factor(condition)),position='jitter')+geom_smooth(method='lm',aes(fill=as.factor(condition)),alpha=0.2)+labs(x="Correct Response to Teleological Explanation",y="Number of Science Class") 57 | ``` 58 | 59 | Correlation Coefficient 60 | ```{r} 61 | cor.test(Replicate$dv, Replicate$classnum, method= "pearson") 62 | ``` 63 | 64 | Boxplot 65 | ```{r} 66 | Boxplot <- ggplot(Replicate, aes(as.factor(condition), dv)) 67 | Boxplot+geom_boxplot() +labs(x="condition",y="Correct Response to TE") 68 | ``` 69 | 70 | -------------------------------------------------------------------------------- /part1/ch03/Ggplot_Example/Ggplot_Example.md: -------------------------------------------------------------------------------- 1 | # Ggplot_Example 2 | 3 | 4 | 5 | 6 | 1. 목적론적 설명 : 대상의 기능이나, 의도 목적에 근거한 설명 7 | 8 | 9 | 예-박쥐들은 모기의 개체수를 조절하기 위해 사냥을 한다. 10 | 11 | 12 | 13 | 14 | 2. 기계론적 설명: 대상의 근접 기제에 근거한 설명 15 | 16 | 17 | 예-수소원자와 산소원자가 결합해서 물이 된다. 
18 | 19 | 20 | 21 | 22 | 23 | 24 | 1.ggplot2 packages 25 | 26 | ```r 27 | #install.packages("ggplot2") 28 | library(ggplot2) 29 | ``` 30 | 31 | Read data 32 | 33 | ```r 34 | Replicate <- read.csv("C:/Users/Yoo/Documents/GitHub/bayesianR/part1/ch03/forR.csv", header=T) 35 | Replicate 36 | ``` 37 | 38 | ``` 39 | ## subject dv classnum condition 40 | ## 1 1 25.0000 0 1 41 | ## 2 2 60.0000 0 1 42 | ## 3 3 20.0000 0 1 43 | ## 4 4 70.0000 8 1 44 | ## 5 5 65.0000 9 2 45 | ## 6 6 40.0000 30 2 46 | ## 7 7 47.3684 7 2 47 | ## 8 8 38.8889 1 2 48 | ## 9 9 52.6316 20 2 49 | ## 10 10 78.9474 8 2 50 | ## 11 11 58.8235 7 2 51 | ## 12 12 32.2587 0 3 52 | ## 13 13 12.9032 2 3 53 | ## 14 14 19.3548 0 3 54 | ## 15 15 29.0323 17 3 55 | ## 16 16 41.9355 1 3 56 | ## 17 17 51.6129 33 3 57 | ``` 58 | 59 | dv - 전체 목적론적 설명 중, 틀린 목적론적 설명이 옳다고 판단한 비율 60 | 61 | classnum - 대학에서 수강한 과학 관련 수업의 개수 62 | 63 | condition- 실험 처치 조건 64 | 65 | 66 | Scatter Plot 67 | 68 | ```r 69 | scatter<-ggplot(Replicate,aes(dv,classnum)) 70 | scatter+geom_point(aes(size=1))+labs(x="Correct Response to Teleological Explanation",y="Number of Science Class") 71 | ``` 72 | 73 | ![](Ggplot_Example_files/figure-html/unnamed-chunk-3-1.png) 74 | 75 | Scatter Plot with Smooth Line 76 | 77 | ```r 78 | scatter+geom_point(aes(size=1, color=as.factor(condition)),position='jitter')+geom_smooth(method='lm',aes(fill=as.factor(condition)),alpha=0.2)+labs(x="Correct Response to Teleological Explanation",y="Number of Science Class") 79 | ``` 80 | 81 | ![](Ggplot_Example_files/figure-html/unnamed-chunk-4-1.png) 82 | 83 | Correlation Coefficient 84 | 85 | ```r 86 | cor.test(Replicate$dv, Replicate$classnum, method= "pearson") 87 | ``` 88 | 89 | ``` 90 | ## 91 | ## Pearson's product-moment correlation 92 | ## 93 | ## data: Replicate$dv and Replicate$classnum 94 | ## t = 1.0122, df = 15, p-value = 0.3275 95 | ## alternative hypothesis: true correlation is not equal to 0 96 | ## 95 percent confidence interval: 97 | ## -0.2593038 0.6540151 98 | ## sample estimates: 99 | ## cor 100 | ## 0.252855 101 | ``` 102 | 103 | Boxplot 104 | 105 | ```r 106 | Boxplot <- ggplot(Replicate, aes(as.factor(condition), dv)) 107 | Boxplot+geom_boxplot() +labs(x="condition",y="Correct Response to TE") 108 | ``` 109 | 110 | ![](Ggplot_Example_files/figure-html/unnamed-chunk-6-1.png) 111 | 112 | -------------------------------------------------------------------------------- /part1/ch03/Ggplot_Example/Ggplot_Example_files/figure-html/unnamed-chunk-3-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch03/Ggplot_Example/Ggplot_Example_files/figure-html/unnamed-chunk-3-1.png -------------------------------------------------------------------------------- /part1/ch03/Ggplot_Example/Ggplot_Example_files/figure-html/unnamed-chunk-4-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch03/Ggplot_Example/Ggplot_Example_files/figure-html/unnamed-chunk-4-1.png -------------------------------------------------------------------------------- /part1/ch03/Ggplot_Example/Ggplot_Example_files/figure-html/unnamed-chunk-6-1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch03/Ggplot_Example/Ggplot_Example_files/figure-html/unnamed-chunk-6-1.png -------------------------------------------------------------------------------- /part1/ch03/Ggplot_Example/forR.csv: -------------------------------------------------------------------------------- 1 | subject,dv,classnum,condition 2 | 1,25,0,1 3 | 2,60,0,1 4 | 3,20,0,1 5 | 4,70,8,1 6 | 5,65,9,2 7 | 6,40,30,2 8 | 7,47.3684,7,2 9 | 8,38.8889,1,2 10 | 9,52.6316,20,2 11 | 10,78.9474,8,2 12 | 11,58.8235,7,2 13 | 12,32.2587,0,3 14 | 13,12.9032,2,3 15 | 14,19.3548,0,3 16 | 15,29.0323,17,3 17 | 16,41.9355,1,3 18 | 17,51.6129,33,3 19 | -------------------------------------------------------------------------------- /part1/ch04/ex04_Rbasic.Rmd: -------------------------------------------------------------------------------- 1 | R 기초 문제 2 | ======================================================== 3 | 4 | ## 연습문제 1. 다음의 데이터를 사용하시오 5 | 6 | ``` 7 | 2 3 5 6 7 10 8 | ``` 9 | 10 | #### a) 데이터 벡터 x를 만드시오 11 | 12 | ```{r} 13 | # 코드를 입력하세요 14 | ``` 15 | 16 | #### b) 각 데이터의 제곱으로 구성된 벡터 x2를 만드시오 17 | 18 | ```{r} 19 | # 코드를 입력하세요 20 | ``` 21 | 22 | #### c) 각 데이터의 제곱의 합을 구하시오 23 | 24 | ```{r} 25 | # 코드를 입력하세요 26 | ``` 27 | 28 | #### d) 각 데이터에서 2를 뺀 값을 구하시오 29 | 30 | ```{r} 31 | # 코드를 입력하세요 32 | ``` 33 | 34 | #### e) 최대값과 최소값을 구하시오 35 | 36 | ```{r} 37 | # 코드를 입력하세요 38 | ``` 39 | 40 | #### f) 5보다 큰 값들로만 구성된 데이터 벡터 x_up을 만드시오 41 | 42 | ```{r} 43 | # 코드를 입력하세요 44 | ``` 45 | 46 | #### g) 벡터 x의 길이를 구하시오 47 | 48 | ```{r} 49 | # 코드를 입력하세요 50 | ``` 51 | 52 | #### h) x'x를 구하시오 53 | 54 | ```{r} 55 | # 코드를 입력하세요 56 | ``` 57 | 58 | #### i) xx'를 구하시오 59 | 60 | ```{r} 61 | # 코드를 입력하세요 62 | ``` 63 | 64 | #### j) 벡터 x와 x2를 열결합(column bind)하여 xc에 저장하시오 65 | 66 | ```{r} 67 | # 코드를 입력하세요 68 | ``` 69 | 70 | #### k) 벡터 x와 x2를 행결합(row bind)하여 xr에 저장하시오 71 | 72 | ```{r} 73 | # 코드를 입력하세요 74 | ``` 75 | 76 | ---------------------------------------- 77 | 78 | ## 연습문제 2. 다음과 같이 UsingR 패키지를 인스톨한 후 내장되어 있는 데이터셋 primes를 이용하여 답하시오. primes는 1부터 2003 까지의 소수(prime number)들의 집합이다. 79 | 80 | ``` 81 | > library(UsingR) 82 | > data(primes) 83 | ``` 84 | 85 | #### a) 1부터 2003까지 몇 개의 소수가 있는가? 86 | 87 | ```{r} 88 | # 코드를 입력하세요 89 | ``` 90 | 91 | #### b) 1부터 200까지 몇 개의 소수가 있는가? 92 | 93 | ```{r} 94 | # 코드를 입력하세요 95 | ``` 96 | 97 | #### c) 평균은 얼마인가? 98 | 99 | ```{r} 100 | # 코드를 입력하세요 101 | ``` 102 | 103 | #### d) 1000 이상의 소수는 몇 개인가? 104 | 105 | ```{r} 106 | # 코드를 입력하세요 107 | ``` 108 | 109 | #### e) 500 부터 1000 사이의 소수만을 포함한 벡터 pp를 만드시오 110 | 111 | ```{r} 112 | # 코드를 입력하세요 113 | ``` 114 | 115 | ---------------------------------------- 116 | 117 | ## 연습문제 3. 다음과 같이 UsingR 패키지를 인스톨한 후 내장되어 있는 데이터셋 primes를 이용하여 답하시오. primes는 1부터 2003 까지의 소수(prime number)들의 집합이다. 118 | 119 | ``` 120 | > library(UsingR) 121 | > data(primes) 122 | ``` 123 | 124 | #### 적절한 히스토그램을 그리시오. 125 | 126 | ```{r} 127 | # 코드를 입력하세요 128 | ``` 129 | 130 | ----------------------------------------- 131 | 132 | ## 연습문제 4. 벡터를 입력을 받아 그 원소들의 값을 모두 더해서 결과를 반환하는 mysum 함수를 작성하시오. 133 | 134 | ```{r} 135 | # 코드를 입력하세요 136 | ``` 137 | 138 | ----------------------------------------- 139 | 140 | ## 연습문제 5. 다음과 같은 data가 있다. 141 | 142 | ``` 143 | x = 144 | [ 1, 5, 9 145 | 2, 6, 10 146 | 3, 7, 11 147 | 4, 8, 12] 148 | ``` 149 | 150 | #### a) 행렬(matrix) x를 만드시오. 151 | 152 | ```{r} 153 | # 코드를 입력하세요 154 | ``` 155 | 156 | #### b) x의 전치행렬 xt를 만드시오. 
157 | 158 | ```{r} 159 | # 코드를 입력하세요 160 | ``` 161 | 162 | #### c) x의 첫번째 행(row)만 뽑아낸 xr1을 만드시오 163 | 164 | ```{r} 165 | # 코드를 입력하세요 166 | ``` 167 | 168 | #### d) x의 세번째 열(col)만 뽑아낸 xc3을 만드시오 169 | 170 | ```{r} 171 | # 코드를 입력하세요 172 | ``` 173 | 174 | #### e) x에서 6,7,10,11을 원소로 가지는 부분행렬 xs를 만드시오 175 | 176 | ```{r} 177 | # 코드를 입력하세요 178 | ``` 179 | 180 | #### f) x의 두번째 열(col)의 원소가 홀수인 행(row)들만 뽑아서 부분행렬 xs2를 만드시오 181 | 182 | ```{r} 183 | # 코드를 입력하세요 184 | ``` 185 | 186 | -------------------------------------- 187 | 188 | ## 연습문제6. 연습문제 4번의 행렬 x에 대해 189 | 190 | #### a) apply 함수와 mean 함수를 사용해서, 각 행(row)의 평균을 구하시오 191 | 192 | ```{r} 193 | # 코드를 입력하세요 194 | ``` 195 | 196 | #### b) apply 함수와 mean 함수를 사용해서, 각 열(col)의 평균을 구하시오 197 | 198 | ```{r} 199 | # 코드를 입력하세요 200 | ``` 201 | 202 | -------------------------------------- 203 | 204 | ### 연습문제 7. 학생 3명이 수학 쪽지시험과 영어 쪽지시험, 국어 쪽지시험을 3번 치렀다. 205 | 206 | ``` 207 | [첫번째 시험] 208 | 수학 209 | 학생 1 : 90 210 | 학생 2 : 85 211 | 학생 3 : 100 212 | 213 | 영어 214 | 학생 1 : 70 215 | 학생 2 : 80 216 | 학생 3 : 100 217 | 218 | 국어 219 | 학생 1 : 85 220 | 학생 2 : 100 221 | 학생 3 : 90 222 | 223 | [두번째 시험] 224 | 수학 225 | 학생 1 : 95 226 | 학생 2 : 90 227 | 학생 3 : 100 228 | 229 | 영어 230 | 학생 1 : 75 231 | 학생 2 : 85 232 | 학생 3 : 85 233 | 234 | 국어 235 | 학생 1 : 80 236 | 학생 2 : 100 237 | 학생 3 : 90 238 | 239 | [세번째 시험] 240 | 수학 241 | 학생 1 : 85 242 | 학생 2 : 75 243 | 학생 3 : 100 244 | 245 | 영어 246 | 학생 1 : 90 247 | 학생 2 : 95 248 | 학생 3 : 90 249 | 250 | 국어 251 | 학생 1 : 85 252 | 학생 2 : 95 253 | 학생 3 : 100 254 | ``` 255 | 256 | #### a) 위 결과를 다차원 배열로 나타내시오 257 | 258 | ```{r} 259 | # 코드를 입력하세요 260 | ``` 261 | 262 | #### b) 모든 3번의 테스트에서 채점된, 국어 시험 평균을 구하시오 263 | 264 | ```{r} 265 | # 코드를 입력하세요 266 | ``` 267 | 268 | #### c) 모든 3번의 테스트에서, 2번째 학생의 영어 시험 평균을 구하시오 269 | 270 | ```{r} 271 | # 코드를 입력하세요 272 | ``` 273 | 274 | #### d) 각 학생들이 가장 잘하는 과목은 무엇인가? 가장 못하는 과목은 무엇인가? 가장 실력이 불안정한 과목은 무엇인가? 275 | 276 | ```{r} 277 | # 코드를 입력하세요 278 | ``` 279 | 280 | -------------------------------------------- 281 | 282 | ## 연습문제 8. 연습문제 7번의 첫번째 시험에 대해서만. 283 | 284 | #### a) 리스트(list)로 표현하시오 285 | 286 | ```{r} 287 | # 코드를 입력하세요 288 | ``` 289 | 290 | #### b) 인덱스 접근으로 수학 평균을 구하시오 291 | 292 | ```{r} 293 | # 코드를 입력하세요 294 | ``` 295 | 296 | #### c) 인덱스 접근으로 국어 평균을 구하시오 297 | 298 | ```{r} 299 | # 코드를 입력하세요 300 | ``` 301 | 302 | #### d) 물리시험 결과를 추가하시오(80,90,100) 303 | 304 | ```{r} 305 | # 코드를 입력하세요 306 | ``` 307 | 308 | #### e) 학생 2의 영어점수를 100점으로 정정하시오 309 | 310 | ```{r} 311 | # 코드를 입력하세요 312 | ``` 313 | 314 | #### f) 모든 학생의 국어점수를 삭제하시오 315 | 316 | ```{r} 317 | # 코드를 입력하세요 318 | ``` 319 | 320 | #### g) lapply 함수를 사용하여 각 과목의 평균을 구하시오 321 | 322 | ```{r} 323 | # 코드를 입력하세요 324 | ``` 325 | 326 | 327 | #### h) sapply 함수를 사용하여 각 과목의 평균을 구하시오 328 | 329 | ```{r} 330 | # 코드를 입력하세요 331 | ``` 332 | 333 | -------------------------------------------- 334 | 335 | 336 | ## 연습문제 9. 연습문제 7번의 세번째 시험에 대해서만, 337 | 338 | ```{r} 339 | # 코드를 입력하세요 340 | ``` 341 | 342 | #### a) data.frame으로 표현하시오 343 | 344 | ```{r} 345 | # 코드를 입력하세요 346 | ``` 347 | 348 | #### b) 성별 필드를 추가하시오. 학생1은 여학생, 나머지는 남학생. 349 | 350 | ```{r} 351 | # 코드를 입력하세요 352 | ``` 353 | 354 | #### c) 여학생인 학생 4의 데이터를 추가하시오. 수학 90, 영어 80, 국어 90. 355 | 356 | ```{r} 357 | # 코드를 입력하세요 358 | ``` 359 | 360 | #### d) 수학점수가 80점 이하인 학생의 데이터는 제거하시오 361 | 362 | ```{r} 363 | # 코드를 입력하세요 364 | ``` 365 | 366 | #### e) 각 과목의 평균을 구하시오 367 | 368 | ```{r} 369 | # 코드를 입력하세요 370 | ``` 371 | 372 | -------------------------------------------- 373 | 374 | ## 연습문제 10. 
375 | 376 | ### DMwR라는 패키지를 설치후, 패키지에 포함된 데이터셋인 algae를 로딩하시오. algae의 속성 중 NH4 의 값들에 대해, 377 | 378 | #### a) NA(결측치)가 몇개인지 구하시오 379 | 380 | ```{r} 381 | # 코드를 입력하세요 382 | ``` 383 | 384 | #### b) 결측치를 제거하고 평균을 구하시오. 385 | 386 | ```{r} 387 | # 코드를 입력하세요 388 | ``` 389 | 390 | ------------------------------------------- 391 | 392 | # 응용문제 393 | 394 | ## 1. 다음에 있는 엑셀파일을 R로 분석한다. 395 | 396 | ``` 397 | person.xls 398 | ``` 399 | 400 | #### 1) score의 평균을 구하기 401 | 402 | ```{r} 403 | # 코드를 입력하세요 404 | ``` 405 | 406 | #### 2) 남,녀별 score 평균 구하기 407 | 408 | ```{r} 409 | # 코드를 입력하세요 410 | ``` 411 | 412 | #### 3) 각 email의 값은 비워져있다. 채워넣어보자. 이메일은 '아이디@회사이름.com' 규칙을 따른다. 예를들어 id가 user1이고 회사가 naver이면 그 사람의 email은 user1@naver.com 이다. 413 | 414 | ```{r} 415 | # 코드를 입력하세요 416 | ``` 417 | 418 | -------------------------------------------------------------------------------- /part1/ch04/ex04_Rbasic.md: -------------------------------------------------------------------------------- 1 | R 기초 문제 2 | ======================================================== 3 | 4 | ## 연습문제 1. 다음의 데이터를 사용하시오 5 | 6 | ``` 7 | 2 3 5 6 7 10 8 | ``` 9 | 10 | #### a) 데이터 벡터 x를 만드시오 11 | 12 | 13 | ```r 14 | # 코드를 입력하세요 15 | ``` 16 | 17 | #### b) 각 데이터의 제곱으로 구성된 벡터 x2를 만드시오 18 | 19 | 20 | ```r 21 | # 코드를 입력하세요 22 | ``` 23 | 24 | #### c) 각 데이터의 제곱의 합을 구하시오 25 | 26 | 27 | ```r 28 | # 코드를 입력하세요 29 | ``` 30 | 31 | #### d) 각 데이터에서 2를 뺀 값을 구하시오 32 | 33 | 34 | ```r 35 | # 코드를 입력하세요 36 | ``` 37 | 38 | #### e) 최대값과 최소값을 구하시오 39 | 40 | 41 | ```r 42 | # 코드를 입력하세요 43 | ``` 44 | 45 | #### f) 5보다 큰 값들로만 구성된 데이터 벡터 x_up을 만드시오 46 | 47 | 48 | ```r 49 | # 코드를 입력하세요 50 | ``` 51 | 52 | #### g) 벡터 x의 길이를 구하시오 53 | 54 | 55 | ```r 56 | # 코드를 입력하세요 57 | ``` 58 | 59 | #### h) x'x를 구하시오 60 | 61 | 62 | ```r 63 | # 코드를 입력하세요 64 | ``` 65 | 66 | #### i) xx'를 구하시오 67 | 68 | 69 | ```r 70 | # 코드를 입력하세요 71 | ``` 72 | 73 | #### j) 벡터 x와 x2를 열결합(column bind)하여 xc에 저장하시오 74 | 75 | 76 | ```r 77 | # 코드를 입력하세요 78 | ``` 79 | 80 | #### k) 벡터 x와 x2를 행결합(row bind)하여 xr에 저장하시오 81 | 82 | 83 | ```r 84 | # 코드를 입력하세요 85 | ``` 86 | 87 | ---------------------------------------- 88 | 89 | ## 연습문제 2. 다음과 같이 UsingR 패키지를 인스톨한 후 내장되어 있는 데이터셋 primes를 이용하여 답하시오. primes는 1부터 2003 까지의 소수(prime number)들의 집합이다. 90 | 91 | ``` 92 | > library(UsingR) 93 | > data(primes) 94 | ``` 95 | 96 | #### a) 1부터 2003까지 몇 개의 소수가 있는가? 97 | 98 | 99 | ```r 100 | # 코드를 입력하세요 101 | ``` 102 | 103 | #### b) 1부터 200까지 몇 개의 소수가 있는가? 104 | 105 | 106 | ```r 107 | # 코드를 입력하세요 108 | ``` 109 | 110 | #### c) 평균은 얼마인가? 111 | 112 | 113 | ```r 114 | # 코드를 입력하세요 115 | ``` 116 | 117 | #### d) 1000 이상의 소수는 몇 개인가? 118 | 119 | 120 | ```r 121 | # 코드를 입력하세요 122 | ``` 123 | 124 | #### e) 500 부터 1000 사이의 소수만을 포함한 벡터 pp를 만드시오 125 | 126 | 127 | ```r 128 | # 코드를 입력하세요 129 | ``` 130 | 131 | ---------------------------------------- 132 | 133 | ## 연습문제 3. 다음과 같이 UsingR 패키지를 인스톨한 후 내장되어 있는 데이터셋 primes를 이용하여 답하시오. primes는 1부터 2003 까지의 소수(prime number)들의 집합이다. 134 | 135 | ``` 136 | > library(UsingR) 137 | > data(primes) 138 | ``` 139 | 140 | #### 적절한 히스토그램을 그리시오. 141 | 142 | 143 | ```r 144 | # 코드를 입력하세요 145 | ``` 146 | 147 | ----------------------------------------- 148 | 149 | ## 연습문제 4. 벡터를 입력을 받아 그 원소들의 값을 모두 더해서 결과를 반환하는 mysum 함수를 작성하시오. 150 | 151 | 152 | ```r 153 | # 코드를 입력하세요 154 | ``` 155 | 156 | ----------------------------------------- 157 | 158 | ## 연습문제 5. 다음과 같은 data가 있다. 159 | 160 | ``` 161 | x = 162 | [ 1, 5, 9 163 | 2, 6, 10 164 | 3, 7, 11 165 | 4, 8, 12] 166 | ``` 167 | 168 | #### a) 행렬(matrix) x를 만드시오. 
169 | 170 | 171 | ```r 172 | # 코드를 입력하세요 173 | ``` 174 | 175 | #### b) x의 전치행렬 xt를 만드시오. 176 | 177 | 178 | ```r 179 | # 코드를 입력하세요 180 | ``` 181 | 182 | #### c) x의 첫번째 행(row)만 뽑아낸 xr1을 만드시오 183 | 184 | 185 | ```r 186 | # 코드를 입력하세요 187 | ``` 188 | 189 | #### d) x의 세번째 열(col)만 뽑아낸 xc3을 만드시오 190 | 191 | 192 | ```r 193 | # 코드를 입력하세요 194 | ``` 195 | 196 | #### e) x에서 6,7,10,11을 원소로 가지는 부분행렬 xs를 만드시오 197 | 198 | 199 | ```r 200 | # 코드를 입력하세요 201 | ``` 202 | 203 | #### f) x의 두번째 열(col)의 원소가 홀수인 행(row)들만 뽑아서 부분행렬 xs2를 만드시오 204 | 205 | 206 | ```r 207 | # 코드를 입력하세요 208 | ``` 209 | 210 | -------------------------------------- 211 | 212 | ## 연습문제6. 연습문제 4번의 행렬 x에 대해 213 | 214 | #### a) apply 함수와 mean 함수를 사용해서, 각 행(row)의 평균을 구하시오 215 | 216 | 217 | ```r 218 | # 코드를 입력하세요 219 | ``` 220 | 221 | #### b) apply 함수와 mean 함수를 사용해서, 각 열(col)의 평균을 구하시오 222 | 223 | 224 | ```r 225 | # 코드를 입력하세요 226 | ``` 227 | 228 | -------------------------------------- 229 | 230 | ### 연습문제 7. 학생 3명이 수학 쪽지시험과 영어 쪽지시험, 국어 쪽지시험을 3번 치렀다. 231 | 232 | ``` 233 | [첫번째 시험] 234 | 수학 235 | 학생 1 : 90 236 | 학생 2 : 85 237 | 학생 3 : 100 238 | 239 | 영어 240 | 학생 1 : 70 241 | 학생 2 : 80 242 | 학생 3 : 100 243 | 244 | 국어 245 | 학생 1 : 85 246 | 학생 2 : 100 247 | 학생 3 : 90 248 | 249 | [두번째 시험] 250 | 수학 251 | 학생 1 : 95 252 | 학생 2 : 90 253 | 학생 3 : 100 254 | 255 | 영어 256 | 학생 1 : 75 257 | 학생 2 : 85 258 | 학생 3 : 85 259 | 260 | 국어 261 | 학생 1 : 80 262 | 학생 2 : 100 263 | 학생 3 : 90 264 | 265 | [세번째 시험] 266 | 수학 267 | 학생 1 : 85 268 | 학생 2 : 75 269 | 학생 3 : 100 270 | 271 | 영어 272 | 학생 1 : 90 273 | 학생 2 : 95 274 | 학생 3 : 90 275 | 276 | 국어 277 | 학생 1 : 85 278 | 학생 2 : 95 279 | 학생 3 : 100 280 | ``` 281 | 282 | #### a) 위 결과를 다차원 배열로 나타내시오 283 | 284 | 285 | ```r 286 | # 코드를 입력하세요 287 | ``` 288 | 289 | #### b) 모든 3번의 테스트에서 채점된, 국어 시험 평균을 구하시오 290 | 291 | 292 | ```r 293 | # 코드를 입력하세요 294 | ``` 295 | 296 | #### c) 모든 3번의 테스트에서, 2번째 학생의 영어 시험 평균을 구하시오 297 | 298 | 299 | ```r 300 | # 코드를 입력하세요 301 | ``` 302 | 303 | #### d) 각 학생들이 가장 잘하는 과목은 무엇인가? 가장 못하는 과목은 무엇인가? 가장 실력이 불안정한 과목은 무엇인가? 304 | 305 | 306 | ```r 307 | # 코드를 입력하세요 308 | ``` 309 | 310 | -------------------------------------------- 311 | 312 | ## 연습문제 8. 연습문제 7번의 첫번째 시험에 대해서만. 313 | 314 | #### a) 리스트(list)로 표현하시오 315 | 316 | 317 | ```r 318 | # 코드를 입력하세요 319 | ``` 320 | 321 | #### b) 인덱스 접근으로 수학 평균을 구하시오 322 | 323 | 324 | ```r 325 | # 코드를 입력하세요 326 | ``` 327 | 328 | #### c) 인덱스 접근으로 국어 평균을 구하시오 329 | 330 | 331 | ```r 332 | # 코드를 입력하세요 333 | ``` 334 | 335 | #### d) 물리시험 결과를 추가하시오(80,90,100) 336 | 337 | 338 | ```r 339 | # 코드를 입력하세요 340 | ``` 341 | 342 | #### e) 학생 2의 영어점수를 100점으로 정정하시오 343 | 344 | 345 | ```r 346 | # 코드를 입력하세요 347 | ``` 348 | 349 | #### f) 모든 학생의 국어점수를 삭제하시오 350 | 351 | 352 | ```r 353 | # 코드를 입력하세요 354 | ``` 355 | 356 | #### g) lapply 함수를 사용하여 각 과목의 평균을 구하시오 357 | 358 | 359 | ```r 360 | # 코드를 입력하세요 361 | ``` 362 | 363 | 364 | #### h) sapply 함수를 사용하여 각 과목의 평균을 구하시오 365 | 366 | 367 | ```r 368 | # 코드를 입력하세요 369 | ``` 370 | 371 | -------------------------------------------- 372 | 373 | 374 | ## 연습문제 9. 연습문제 7번의 세번째 시험에 대해서만, 375 | 376 | 377 | ```r 378 | # 코드를 입력하세요 379 | ``` 380 | 381 | #### a) data.frame으로 표현하시오 382 | 383 | 384 | ```r 385 | # 코드를 입력하세요 386 | ``` 387 | 388 | #### b) 성별 필드를 추가하시오. 학생1은 여학생, 나머지는 남학생. 389 | 390 | 391 | ```r 392 | # 코드를 입력하세요 393 | ``` 394 | 395 | #### c) 여학생인 학생 4의 데이터를 추가하시오. 수학 90, 영어 80, 국어 90. 
396 | 397 | 398 | ```r 399 | # 코드를 입력하세요 400 | ``` 401 | 402 | #### d) 수학점수가 80점 이하인 학생의 데이터는 제거하시오 403 | 404 | 405 | ```r 406 | # 코드를 입력하세요 407 | ``` 408 | 409 | #### e) 각 과목의 평균을 구하시오 410 | 411 | 412 | ```r 413 | # 코드를 입력하세요 414 | ``` 415 | 416 | -------------------------------------------- 417 | 418 | ## 연습문제 10. 419 | 420 | ### DMwR라는 패키지를 설치후, 패키지에 포함된 데이터셋인 algae를 로딩하시오. algae의 속성 중 NH4 의 값들에 대해, 421 | 422 | #### a) NA(결측치)가 몇개인지 구하시오 423 | 424 | 425 | ```r 426 | # 코드를 입력하세요 427 | ``` 428 | 429 | #### b) 결측치를 제거하고 평균을 구하시오. 430 | 431 | 432 | ```r 433 | # 코드를 입력하세요 434 | ``` 435 | 436 | ------------------------------------------- 437 | 438 | # 응용문제 439 | 440 | ## 1. 다음에 있는 엑셀파일을 R로 분석한다. 441 | 442 | ``` 443 | person.xls 444 | ``` 445 | 446 | #### 1) score의 평균을 구하기 447 | 448 | 449 | ```r 450 | # 코드를 입력하세요 451 | ``` 452 | 453 | #### 2) 남,녀별 score 평균 구하기 454 | 455 | 456 | ```r 457 | # 코드를 입력하세요 458 | ``` 459 | 460 | #### 3) 각 email의 값은 비워져있다. 채워넣어보자. 이메일은 '아이디@회사이름.com' 규칙을 따른다. 예를들어 id가 user1이고 회사가 naver이면 그 사람의 email은 user1@naver.com 이다. 461 | 462 | 463 | ```r 464 | # 코드를 입력하세요 465 | ``` 466 | 467 | -------------------------------------------------------------------------------- /part1/ch04/person.xls: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch04/person.xls -------------------------------------------------------------------------------- /part1/ch05/ch5(2).pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch05/ch5(2).pptx -------------------------------------------------------------------------------- /part1/ch05/ch5(3).pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch05/ch5(3).pptx -------------------------------------------------------------------------------- /part1/ch05/ch5.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch05/ch5.pptx -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/4D38B2AA/pcs/files-pane.pper: -------------------------------------------------------------------------------- 1 | { 2 | "path" : "~/bayesch6", 3 | "sortOrder" : [ 4 | { 5 | "ascending" : true, 6 | "columnIndex" : 2 7 | } 8 | ] 9 | } -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/4D38B2AA/pcs/source-pane.pper: -------------------------------------------------------------------------------- 1 | { 2 | "activeTab" : 0 3 | } -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/4D38B2AA/pcs/windowlayoutstate.pper: -------------------------------------------------------------------------------- 1 | { 2 | "left" : { 3 | "panelheight" : 719, 4 | "splitterpos" : 302, 5 | "topwindowstate" : "NORMAL", 6 | "windowheight" : 757 7 | }, 8 | "right" : { 9 | "panelheight" : 719, 10 | "splitterpos" : 454, 11 | "topwindowstate" : "NORMAL", 12 | "windowheight" : 757 13 | } 14 | } -------------------------------------------------------------------------------- 
/part1/ch06/.Rproj.user/4D38B2AA/pcs/workbench-pane.pper: -------------------------------------------------------------------------------- 1 | { 2 | "TabSet1" : 0, 3 | "TabSet2" : 0 4 | } -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/4D38B2AA/persistent-state: -------------------------------------------------------------------------------- 1 | build-last-errors="[]" 2 | build-last-errors-base-dir="" 3 | build-last-outputs="[]" 4 | compile_pdf_state="{\"errors\":[],\"output\":\"\",\"running\":false,\"tab_visible\":false,\"target_file\":\"\"}" 5 | console_procs="[]" 6 | files.monitored-path="" 7 | find-in-files-state="{\"handle\":\"\",\"input\":\"\",\"path\":\"\",\"regex\":true,\"results\":{\"file\":[],\"line\":[],\"lineValue\":[],\"matchOff\":[],\"matchOn\":[]},\"running\":false}" 8 | imageDirtyState="1" 9 | saveActionState="-1" 10 | -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/4D38B2AA/sdb/prop/2D2DA549: -------------------------------------------------------------------------------- 1 | { 2 | "tempName" : "Untitled1" 3 | } -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/4D38B2AA/sdb/prop/3B7C0482: -------------------------------------------------------------------------------- 1 | { 2 | "tempName" : "Untitled1" 3 | } -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/4D38B2AA/sdb/prop/AFD7EFCA: -------------------------------------------------------------------------------- 1 | { 2 | } -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/4D38B2AA/sdb/prop/INDEX: -------------------------------------------------------------------------------- 1 | ~%2Fbayesch6%2FBernBetaExample.R="AFD7EFCA" 2 | ~%2Fbayesch6%2Fch6.Rmd="2D2DA549" 3 | ~%2Fbayesch6%2Fch6.rdf="3B7C0482" 4 | -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/4D38B2AA/sdb/s-759A88F7/C35CAB9B: -------------------------------------------------------------------------------- 1 | { 2 | "contents" : "Ch6. Inferring a Binomial Probability via Exact Mathematical Analysis\n========================================================\n- Date: Mar 18th, 2015\n- Created by: Jungwon Choi\n\n*program needed in the book: https://sites.google.com/site/doingbayesiandataanalysis/software-installation/DBDA2Eprograms.zip?attredirects=0&d=1\n\n0. 이거 왜 하나? \n-------------------------\n#### *Pure analytical mathematics* 을 이용한 베이지안 추론\n- 일부 단순하고, 이상적인 상황에만 적용가능\n- 따라서, 복잡한 분포 가정에서는 사용하기 힘들다 (approximation 필요. 예, MCMC)\n- 그러나, \"연속형 파라미터에 대해서 베이지안 추론\"이 무엇인지에 대한 감을 잡는 데에는 유용함\n- likelihood 가 _conjugate_ prior distrubution을 가지고 있는 경우 에대해서만 논의하겠다. (특히 이 챕터에서는 __beta distribution__의 경우만)\n\n#### recall:\nBayesian Framework\nprior + data(observation) -> posterior distribution\n\n\n\n'동전 던지는 상황'으로 내용을 이해해 보자.\n\n\n\n\n\n\n\n1. 
Likelihood Fuction\n-------------------------\nrecall:\n\n__Bernoulli distribution__\n$$latex p(y|\\theta) = \\theta ^{y}(1-\\theta) ^ {(1-y)}$$\n\ntwo way of interpreting this\n- a __probability distrubution__: y a random variable, and $\\theta$ a fixed parameter\n- a __likelihood function__ : y a fixed observation, and $\\theta$ a random variable\n$$p(\\left \\{ y_{i} \\right \\}|\\theta) = \\prod_{i}p(y_{i}|\\theta) \\\\=\\prod_{i}\\theta^{y_{i}}(1-\\theta)^{(1-y_{i})}\\\\=\\theta^{\\sum_{i}y_{i}}(1-\\theta)^{\\sum_{i}(1-y_{i})} \\\\= \\theta^{z}(1-\\theta)^{N-z}$$\n\n\n\n\n\n\n\n2. Prior나타내기: Beta Distribution\n-------------------------\n\n\n- interval (0,1) 사이의 값을 가지는 $\\theta$대한 prior probability를 나타낼 수있는 식이 필요함\n- 이 챕터에서처럼 수학적으로 계산이 가능하려면:\n다음 베이즈정리의 식에서\n$$p(\\theta|y) = \\frac{p(y|\\theta)p(\\theta)}{\\int p(y|\\theta)p(\\theta)d\\theta }$$\n\n1. 분자부분: 서로같은 형태의 prior $p(\\theta)$와 likelihood인 $p(y|\\theta)$가 곱해지면 같은 형태의 결과가 나올 것이다 \n$p(\\theta)$는 $p(y|\\theta)$에 대한_conjugate prior_의 형태여야함\n2. 분모부분: 적분 가능해야 함\n\n\n다시 Bernoulli 분포 상황으로 돌아가서, 우리는 prior가 $\\theta^{a}(1-\\theta)^{b}$와 같이 생겼을 때, Bernoulli 형태의 likelihood를 곱하면, 다시 posterior 분포도 Bernoulli의 형태를띌 것이다. \n이러한 특성을 가지는 분포가 Beta 분포!\n\n\n**Beta _distribution_**\n$$latex p(y|a,b) = beta(y|a,b) = \\theta ^{(a-1)}(1-\\theta) ^ {(b-1)}/B(a,b)$$\nwhere $B(a,b)$ a normalizing constant\n\n**cf. Beta _function_**\n$$latex B(a,b) = \\int_{0}^{1}\\theta ^{(a-1)}(1-\\theta) ^ {(b-1)}d\\theta$$ where$0\\leq\\theta\\leq1,\\\\ a>0\\;and\\;b>0$\n\n\ncompare with r function\n```{r}\nx <- seq(0, 1, length = 21)\ndbeta(x, 2, 2)\nbeta(2,2)\n```\n\n\n\n\n### (1) beta prior 지정하기\n- $\\theta$에 대한 우리의 믿음(belief) 반영하기\n- 동전던지기 상황을 다시 떠올려서...\n - 사전적으로 a=앞면의 개수 b=뒷면의 개수로 이루어진 총 n번의 시행이 있었다고 가정하자.\n \n\nbeta distribution의특성 \n\nmean: $\\mu = a/(a+b)$ \nmode: $\\omega = (a-1)/(a+b-2)$ \nfor a>1 and b>1 \n![alt text](beta_dist.PNG)\n\n\n\nconcentration \n$\\kappa = a+b$ \n\na and b, in terms of kappa, mean and mode \n$a=\\mu\\kappa\\;\\;and\\;\\;b=(1-\\mu)\\kappa\\\\a=\\omega(\\kappa-2)+1\\;\\;and\\;\\;b=(1-\\omega)(\\kappa-2)+1\\;\\;for\\;\\;\\kappa>2$ \n\nshape parameters \n$a = \\mu (\\frac{\\mu(1-\\mu)}{\\sigma^{2}})$ and $b = (1-\\mu) (\\frac{\\mu(1-\\mu)}{\\sigma^{2}})$ \n\n```{r}\nsource(\"DBDA2E-utilities.R\")\nbetaABfromMeanKappa(mean=0.25,kappa=4)\n\nbetaABfromModeKappa(mode=0.25,kappa=4)\n\nbetaABfromMeanSD(mean=0.5,sd=0.1)\n\nbetaParam = betaABfromModeKappa(mode=0.25,kappa=4)\nbetaParam$a\nbetaParam$b\n```\n\n\n\n\n\n\n\n\n\n\n\n\n3. The Posterior Beta\n-------------------------\nprior를 지정하였으면 Bayes Rule로 posterior가 어떠할지 파악해보자\n$N$번의 동전던지기 시행 중 $z$번의 앞면이 나왔다고 하면, 베이즈 정리에 따라\n\n$$p(\\theta|z,N) = p(z,N|\\theta)p(\\theta)/p(z,N)\\\\=\\theta^{z}(1-\\theta)^{(N-z)}\\frac{\\theta^{(a-1)}(b-1)^{(b-1)}}{B(a,b)}/p(z,N)\\\\=\\theta^{z}(1-\\theta)^{(N-z)}\\theta^{(a-1)}(b-1)^{(b-1)}/\\left [ B(a,b)p(z,N)) \\right ]\\\\=\\theta^{((z+a)-1)}(1-\\theta)^{((N-z+b)-1)}/\\left [ B(a,b)p(z,N)) \\right ]\\\\=\\theta^{((z+a)-1)}(1-\\theta)^{((N-z+b)-1)}/B(z+a,N-z+b)$$\n\n동전던지기의 예로 생각해보면: \n- prior는 beta($\\theta$|1,1)\n- 동전을 던졌더니 앞면이 나왔다\n- posterior는 beta($\\theta$|2,1)\n- 동전을 또 던졌더니 뒷면이 나왔다\n- 업데이트된 posterior는 beta($\\theta$|2,2)\n...\nprior가 beta distribution이면 prior도 항상 beta distribution임을 확인할 수 있다.\n\n\n### (1) posterior distribution은 prior와 likelihood의 타협이다\n동전의 예로 mean을 계산하는 경우를 생각해보면,\n$$\\frac{z+a}{N+a+b} = \\frac{z}{N}\\frac{N}{N+a+b}+\\frac{a}{a+b}\\frac{a+b}{N+a+b}$$\n\n\n\n\n\n\n\n\n\n\n4. 
Example\n-------------------------\n### (1) 베타분포로 prior를 나타낼 수 있을 때\n<상황>\n- 갓만든 신선한 정상 동전\n- 나의 굳은 믿음으로 mode는 0.5이고, 유효한 샘플 수는 500이라고 가정한다\n- 20번 동전을 던져서 17번(85%) 앞면이나옴\n- 이 때, 앞면이 나올 확률의 기대값은?\n\n```{r}\nsource(\"DBDA2E-utilities.R\") # Load definitions of graphics functions etc.\nsource(\"BernBeta.R\") # Load the definition of the BernBeta function\n\n# Specify the prior:\nt = 0.5 # Specify the prior MODE.\nn = 500 # Specify the effective prior sample size.\na = t*(n-2) + 1 # Convert to beta shape parameter a.\nb = (1-t)*(n-2) + 1 # Convert to beta shape parameter b.\n\nPrior = c(a,b) # Specify Prior as vector with the two shape parameters.\n\n# Specify the data:\nN = 20 # The total number of flips.\nz = 17 # The number of heads.\nData = c(rep(0,N-z),rep(1,z)) # Convert N and z into vector of 0's and 1's.\n\nopenGraph(width=5,height=7)\nposterior = BernBeta( priorBetaAB=Prior, Data=Data , plotType=\"Bars\" , \n showCentTend=\"Mode\" , showHDI=TRUE , showpD=FALSE )\nsaveGraph(file=\"BernBetaExample\",type=\"png\")\n\n```\n\n위의 예제를 활용해서 다양한 상황에 응용할 수 있다.\n\n### (2) 베타분포로 prior를 나타낼 수 없는 경우\n<상황>\n- 이상한 회사에서 2가지 동전을 생산한다: 앞면이 나올 확률이 25%인 동전과, 앞면이 나올 확률이 75%인 동전\n- 이 때, prior distribution은 bimodal한 형태를 띠게 됨\n\n\n(다시 grid approxomation으로...)\n```{r}\nsource(\"BernBeta.R\")\nsource(\"BernGrid.R\")\n\nTheta <- seq(0,1,length=1000)\npTheta <- c(rep(1,200),seq(1,100,length=50),seq(100,1,length=50),rep(1,200),\n rep(1,200),seq(1,100,length=50),seq(100,1,length=50),rep(1,200))\npTheta <- pTheta/sum(pTheta)\n\n\n\nData <- c(rep(0,13),rep(1,14))\nposterior <- BernGrid(Theta, pTheta,Data,plotType=\"Bars\",\n showCentTend=\"None\", showHDI=FALSE, showpD=FALSE)\n```\n\n", 3 | "created" : 1426603551585.000, 4 | "dirty" : false, 5 | "encoding" : "UTF-8", 6 | "folds" : "", 7 | "hash" : "266195321", 8 | "id" : "C35CAB9B", 9 | "lastKnownWriteTime" : 1426676457, 10 | "path" : "~/bayesch6/ch6.Rmd", 11 | "project_path" : "ch6.Rmd", 12 | "properties" : { 13 | "tempName" : "Untitled1" 14 | }, 15 | "source_on_save" : false, 16 | "type" : "r_markdown" 17 | } -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/4D38B2AA/sdb/s-759A88F7/E143050D: -------------------------------------------------------------------------------- 1 | { 2 | "contents" : "source(\"DBDA2E-utilities.R\") # Load definitions of graphics functions etc.\nsource(\"BernBeta.R\") # Load the definition of the BernBeta function\n\n# Specify the prior:\nt = 0.75 # Specify the prior MODE.\nn = 25 # Specify the effective prior sample size.\na = t*(n-2) + 1 # Convert to beta shape parameter a.\nb = (1-t)*(n-2) + 1 # Convert to beta shape parameter b.\n\nPrior = c(a,b) # Specify Prior as vector with the two shape parameters.\n\n# Specify the data:\nN = 20 # The total number of flips.\nz = 17 # The number of heads.\nData = c(rep(0,N-z),rep(1,z)) # Convert N and z into vector of 0's and 1's.\n\nopenGraph(width=5,height=7)\nposterior = BernBeta( priorBetaAB=Prior, Data=Data , plotType=\"Bars\" , \n showCentTend=\"Mode\" , showHDI=TRUE , showpD=FALSE )\nsaveGraph(file=\"BernBetaExample\",type=\"png\")\n", 3 | "created" : 1426673926935.000, 4 | "dirty" : false, 5 | "encoding" : "UTF-8", 6 | "folds" : "", 7 | "hash" : "685145296", 8 | "id" : "E143050D", 9 | "lastKnownWriteTime" : 1421922078, 10 | "path" : "~/bayesch6/BernBetaExample.R", 11 | "project_path" : "BernBetaExample.R", 12 | "properties" : { 13 | }, 14 | "source_on_save" : false, 15 | "type" : "r_source" 16 | } 
-------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/4D38B2AA/sdb/s-759A88F7/lock_file: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch06/.Rproj.user/4D38B2AA/sdb/s-759A88F7/lock_file -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/62059814/sdb/prop/827F99E8: -------------------------------------------------------------------------------- 1 | { 2 | } -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/62059814/sdb/prop/INDEX: -------------------------------------------------------------------------------- 1 | ~%2FDesktop%2Fbayesch6%2Fch6.Rmd="827F99E8" 2 | -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/62059814/sdb/s-49C3ABAE/1F6592A3: -------------------------------------------------------------------------------- 1 | { 2 | "contents" : "---\ntitle: \"Untitled\"\noutput: html_document\n---\n\nThis is an R Markdown document. Markdown is a simple formatting syntax for authoring HTML, PDF, and MS Word documents. For more details on using R Markdown see .\n\nWhen you click the **Knit** button a document will be generated that includes both content as well as the output of any embedded R code chunks within the document. You can embed an R code chunk like this:\n\n```{r}\nsummary(cars)\n```\n\nYou can also embed plots, for example:\n\n```{r, echo=FALSE}\nplot(cars)\n```\n\nNote that the `echo = FALSE` parameter was added to the code chunk to prevent printing of the R code that generated the plot.\n", 3 | "created" : 1426677606396.000, 4 | "dirty" : false, 5 | "encoding" : "", 6 | "folds" : "", 7 | "hash" : "630123551", 8 | "id" : "1F6592A3", 9 | "lastKnownWriteTime" : 8315177835134542706, 10 | "path" : null, 11 | "project_path" : null, 12 | "properties" : { 13 | "tempName" : "Untitled1" 14 | }, 15 | "source_on_save" : false, 16 | "type" : "r_markdown" 17 | } -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/62059814/sdb/s-49C3ABAE/591AD244: -------------------------------------------------------------------------------- 1 | { 2 | "contents" : "---\ntitle: \"Ch6. Inferring a Binomial Probability via Exact Mathematical Analysis\"\noutput: html_document\n---\n\n- Date: Mar 18th, 2015\n- Created by: Jungwon Choi\n\n*program needed in the book: https://sites.google.com/site/doingbayesiandataanalysis/software-installation/DBDA2Eprograms.zip?attredirects=0&d=1\n\n0. 이거 왜 하나? \n-------------------------\n#### *Pure analytical mathematics* 을 이용한 베이지안 추론\n- 일부 단순하고, 이상적인 상황에만 적용가능\n- 따라서, 복잡한 분포 가정에서는 사용하기 힘들다 (approximation 필요. 예, MCMC)\n- 그러나, \"연속형 파라미터에 대해서 베이지안 추론\"이 무엇인지에 대한 감을 잡는 데에는 유용함\n- likelihood 가 _conjugate_ prior distrubution을 가지고 있는 경우 에대해서만 논의하겠다. (특히 이 챕터에서는 __beta distribution__의 경우만)\n\n#### recall:\nBayesian Framework\nprior + data(observation) -> posterior distribution\n\n\n\n'동전 던지는 상황'으로 내용을 이해해 보자.\n\n\n\n\n\n\n\n1. 
Likelihood Fuction\n-------------------------\nrecall:\n\n__Bernoulli distribution__\n$$latex p(y|\\theta) = \\theta ^{y}(1-\\theta) ^ {(1-y)}$$\n\ntwo way of interpreting this\n- a __probability distrubution__: y a random variable, and $\\theta$ a fixed parameter\n- a __likelihood function__ : y a fixed observation, and $\\theta$ a random variable\n$$p(\\left \\{ y_{i} \\right \\}|\\theta) = \\prod_{i}p(y_{i}|\\theta) \\\\=\\prod_{i}\\theta^{y_{i}}(1-\\theta)^{(1-y_{i})}\\\\=\\theta^{\\sum_{i}y_{i}}(1-\\theta)^{\\sum_{i}(1-y_{i})} \\\\= \\theta^{z}(1-\\theta)^{N-z}$$\n\n\n\n\n\n\n\n2. Prior나타내기: Beta Distribution\n-------------------------\n\n\n- interval (0,1) 사이의 값을 가지는 $\\theta$대한 prior probability를 나타낼 수있는 식이 필요함\n- 이 챕터에서처럼 수학적으로 계산이 가능하려면:\n다음 베이즈정리의 식에서\n$$p(\\theta|y) = \\frac{p(y|\\theta)p(\\theta)}{\\int p(y|\\theta)p(\\theta)d\\theta }$$\n\n1. 분자부분: 서로같은 형태의 prior $p(\\theta)$와 likelihood인 $p(y|\\theta)$가 곱해지면 같은 형태의 결과가 나올 것이다 \n$p(\\theta)$는 $p(y|\\theta)$에 대한_conjugate prior_의 형태여야함\n2. 분모부분: 적분 가능해야 함\n\n\n다시 Bernoulli 분포 상황으로 돌아가서, 우리는 prior가 $\\theta^{a}(1-\\theta)^{b}$와 같이 생겼을 때, Bernoulli 형태의 likelihood를 곱하면, 다시 posterior 분포도 Bernoulli의 형태를띌 것이다. \n이러한 특성을 가지는 분포가 Beta 분포!\n\n\n**Beta _distribution_**\n$$latex p(y|a,b) = beta(y|a,b) = \\theta ^{(a-1)}(1-\\theta) ^ {(b-1)}/B(a,b)$$\nwhere $B(a,b)$ a normalizing constant\n\n**cf. Beta _function_**\n$$latex B(a,b) = \\int_{0}^{1}\\theta ^{(a-1)}(1-\\theta) ^ {(b-1)}d\\theta$$ where$0\\leq\\theta\\leq1,\\\\ a>0\\;and\\;b>0$\n\n\ncompare with r function\n```{r}\nx <- seq(0, 1, length = 21)\ndbeta(x, 2, 2)\nbeta(2,2)\n```\n\n\n\n\n### (1) beta prior 지정하기\n- $\\theta$에 대한 우리의 믿음(belief) 반영하기\n- 동전던지기 상황을 다시 떠올려서...\n - 사전적으로 a=앞면의 개수 b=뒷면의 개수로 이루어진 총 n번의 시행이 있었다고 가정하자.\n \n\nbeta distribution의특성 \n\nmean: $\\mu = a/(a+b)$ \nmode: $\\omega = (a-1)/(a+b-2)$ \nfor a>1 and b>1 \n![alt text](beta_dist.PNG)\n\n\n\nconcentration \n$\\kappa = a+b$ \n\na and b, in terms of kappa, mean and mode \n$a=\\mu\\kappa\\;\\;and\\;\\;b=(1-\\mu)\\kappa\\\\a=\\omega(\\kappa-2)+1\\;\\;and\\;\\;b=(1-\\omega)(\\kappa-2)+1\\;\\;for\\;\\;\\kappa>2$ \n\nshape parameters \n$a = \\mu (\\frac{\\mu(1-\\mu)}{\\sigma^{2}})$ and $b = (1-\\mu) (\\frac{\\mu(1-\\mu)}{\\sigma^{2}})$ \n\n```{r}\nsource(\"DBDA2E-utilities.R\")\nbetaABfromMeanKappa(mean=0.25,kappa=4)\n\nbetaABfromModeKappa(mode=0.25,kappa=4)\n\nbetaABfromMeanSD(mean=0.5,sd=0.1)\n\nbetaParam = betaABfromModeKappa(mode=0.25,kappa=4)\nbetaParam$a\nbetaParam$b\n```\n\n\n\n\n\n\n\n\n\n\n\n\n3. The Posterior Beta\n-------------------------\nprior를 지정하였으면 Bayes Rule로 posterior가 어떠할지 파악해보자\n$N$번의 동전던지기 시행 중 $z$번의 앞면이 나왔다고 하면, 베이즈 정리에 따라\n\n$$p(\\theta|z,N) = p(z,N|\\theta)p(\\theta)/p(z,N)\\\\=\\theta^{z}(1-\\theta)^{(N-z)}\\frac{\\theta^{(a-1)}(b-1)^{(b-1)}}{B(a,b)}/p(z,N)\\\\=\\theta^{z}(1-\\theta)^{(N-z)}\\theta^{(a-1)}(b-1)^{(b-1)}/\\left [ B(a,b)p(z,N)) \\right ]\\\\=\\theta^{((z+a)-1)}(1-\\theta)^{((N-z+b)-1)}/\\left [ B(a,b)p(z,N)) \\right ]\\\\=\\theta^{((z+a)-1)}(1-\\theta)^{((N-z+b)-1)}/B(z+a,N-z+b)$$\n\n동전던지기의 예로 생각해보면: \n- prior는 beta($\\theta$|1,1)\n- 동전을 던졌더니 앞면이 나왔다\n- posterior는 beta($\\theta$|2,1)\n- 동전을 또 던졌더니 뒷면이 나왔다\n- 업데이트된 posterior는 beta($\\theta$|2,2)\n...\nprior가 beta distribution이면 prior도 항상 beta distribution임을 확인할 수 있다.\n\n\n### (1) posterior distribution은 prior와 likelihood의 타협이다\n동전의 예로 mean을 계산하는 경우를 생각해보면,\n$$\\frac{z+a}{N+a+b} = \\frac{z}{N}\\frac{N}{N+a+b}+\\frac{a}{a+b}\\frac{a+b}{N+a+b}$$\n\n\n\n\n\n\n\n\n\n\n4. 
Example\n-------------------------\n### (1) 베타분포로 prior를 나타낼 수 있을 때\n<상황>\n- 갓만든 신선한 정상 동전\n- 나의 굳은 믿음으로 mode는 0.5이고, 유효한 샘플 수는 500이라고 가정한다\n- 20번 동전을 던져서 17번(85%) 앞면이나옴\n- 이 때, 앞면이 나올 확률의 기대값은?\n\n```{r}\nsource(\"DBDA2E-utilities.R\") # Load definitions of graphics functions etc.\nsource(\"BernBeta.R\") # Load the definition of the BernBeta function\n\n# Specify the prior:\nt = 0.5 # Specify the prior MODE.\nn = 500 # Specify the effective prior sample size.\na = t*(n-2) + 1 # Convert to beta shape parameter a.\nb = (1-t)*(n-2) + 1 # Convert to beta shape parameter b.\n\nPrior = c(a,b) # Specify Prior as vector with the two shape parameters.\n\n# Specify the data:\nN = 20 # The total number of flips.\nz = 17 # The number of heads.\nData = c(rep(0,N-z),rep(1,z)) # Convert N and z into vector of 0's and 1's.\n\nopenGraph(width=5,height=7)\nposterior = BernBeta( priorBetaAB=Prior, Data=Data , plotType=\"Bars\" , \n showCentTend=\"Mode\" , showHDI=TRUE , showpD=FALSE )\nsaveGraph(file=\"BernBetaExample\",type=\"png\")\n\n```\n\n위의 예제를 활용해서 다양한 상황에 응용할 수 있다.\n\n### (2) 베타분포로 prior를 나타낼 수 없는 경우\n<상황>\n- 이상한 회사에서 2가지 동전을 생산한다: 앞면이 나올 확률이 25%인 동전과, 앞면이 나올 확률이 75%인 동전\n- 이 때, prior distribution은 bimodal한 형태를 띠게 됨\n\n\n(다시 grid approxomation으로...)\n```{r}\nsource(\"BernBeta.R\")\nsource(\"BernGrid.R\")\n\nTheta <- seq(0,1,length=1000)\npTheta <- c(rep(1,200),seq(1,100,length=50),seq(100,1,length=50),rep(1,200),\n rep(1,200),seq(1,100,length=50),seq(100,1,length=50),rep(1,200))\npTheta <- pTheta/sum(pTheta)\n\n\n\nData <- c(rep(0,13),rep(1,14))\nposterior <- BernGrid(Theta, pTheta,Data,plotType=\"Bars\",\n showCentTend=\"None\", showHDI=FALSE, showpD=FALSE)\n```\n\n", 3 | "created" : 1426677551225.000, 4 | "dirty" : false, 5 | "encoding" : "UTF-8", 6 | "folds" : "", 7 | "hash" : "459330197", 8 | "id" : "591AD244", 9 | "lastKnownWriteTime" : 1426677687, 10 | "path" : "~/Desktop/bayesch6/ch6.Rmd", 11 | "project_path" : "ch6.Rmd", 12 | "properties" : { 13 | }, 14 | "source_on_save" : false, 15 | "type" : "r_markdown" 16 | } -------------------------------------------------------------------------------- /part1/ch06/.Rproj.user/62059814/sdb/s-49C3ABAE/lock_file: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch06/.Rproj.user/62059814/sdb/s-49C3ABAE/lock_file -------------------------------------------------------------------------------- /part1/ch06/BernBetaExample.R: -------------------------------------------------------------------------------- 1 | source("DBDA2E-utilities.R") # Load definitions of graphics functions etc. 2 | source("BernBeta.R") # Load the definition of the BernBeta function 3 | 4 | # Specify the prior: 5 | t = 0.75 # Specify the prior MODE. 6 | n = 25 # Specify the effective prior sample size. 7 | a = t*(n-2) + 1 # Convert to beta shape parameter a. 8 | b = (1-t)*(n-2) + 1 # Convert to beta shape parameter b. 9 | 10 | Prior = c(a,b) # Specify Prior as vector with the two shape parameters. 11 | 12 | # Specify the data: 13 | N = 20 # The total number of flips. 14 | z = 17 # The number of heads. 15 | Data = c(rep(0,N-z),rep(1,z)) # Convert N and z into vector of 0's and 1's. 
16 | 17 | openGraph(width=5,height=7) 18 | posterior = BernBeta( priorBetaAB=Prior, Data=Data , plotType="Bars" , 19 | showCentTend="Mode" , showHDI=TRUE , showpD=FALSE ) 20 | saveGraph(file="BernBetaExample",type="png") 21 | -------------------------------------------------------------------------------- /part1/ch06/BernBetaExample.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch06/BernBetaExample.jpg -------------------------------------------------------------------------------- /part1/ch06/BernBetaExample.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch06/BernBetaExample.png -------------------------------------------------------------------------------- /part1/ch06/bayesch6.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | -------------------------------------------------------------------------------- /part1/ch06/beta_dist.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch06/beta_dist.PNG -------------------------------------------------------------------------------- /part1/ch06/ch6.Rmd: --------------------------------------------------------------------------------
---
title: "Ch6. Inferring a Binomial Probability via Exact Mathematical Analysis"
output: html_document
---

- Date: Mar 18th, 2015
- Created by: Jungwon Choi

*programs needed for the book: https://sites.google.com/site/doingbayesiandataanalysis/software-installation/DBDA2Eprograms.zip?attredirects=0&d=1

0. Why are we doing this?
-------------------------
#### Bayesian inference using *pure analytical mathematics*
- applicable only to a handful of simple, idealized situations
- therefore hard to use under complex distributional assumptions (an approximation such as MCMC is needed)
- still useful for building intuition about what "Bayesian inference for a continuous parameter" means
- we discuss only the case in which the likelihood has a _conjugate_ prior distribution (in this chapter, specifically the __beta distribution__)

#### recall:
Bayesian Framework
prior + data(observation) -> posterior distribution

Let's work through the ideas with a coin-flipping scenario.

1. Likelihood Function
-------------------------
recall:

__Bernoulli distribution__
$$p(y|\theta) = \theta^{y}(1-\theta)^{(1-y)}$$

two ways of interpreting this
- a __probability distribution__: y is a random variable, and $\theta$ is a fixed parameter
- a __likelihood function__: y is a fixed observation, and $\theta$ is a random variable

For $N$ independent flips $\left \{ y_{i} \right \}$ containing $z$ heads, the likelihood is
$$p(\left \{ y_{i} \right \}|\theta) = \prod_{i}p(y_{i}|\theta) \\= \prod_{i}\theta^{y_{i}}(1-\theta)^{(1-y_{i})} \\= \theta^{\sum_{i}y_{i}}(1-\theta)^{\sum_{i}(1-y_{i})} \\= \theta^{z}(1-\theta)^{N-z}$$
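
To see the second reading concretely, here is a small editorial sketch in R (an addition to these notes, not one of the chapter's scripts): it evaluates the Bernoulli likelihood $\theta^{z}(1-\theta)^{N-z}$ over a grid of candidate $\theta$ values for one fixed data set. The names `theta_grid`, `z`, and `N` are chosen only for this illustration.

```{r}
# Editorial sketch, not part of the original chapter code:
# the Bernoulli formula read as a likelihood function of theta for fixed data.
z <- 17                                # observed number of heads (held fixed)
N <- 20                                # observed number of flips (held fixed)
theta_grid <- seq(0, 1, length = 101)  # candidate values of theta
likelihood <- theta_grid^z * (1 - theta_grid)^(N - z)
theta_grid[which.max(likelihood)]      # the likelihood peaks at z/N = 0.85
```
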
2. Expressing the Prior: the Beta Distribution
-------------------------

- we need a formula for the prior probability of $\theta$, which takes values on the interval (0,1)
- for the analysis to stay mathematically tractable, as in this chapter, look at Bayes' rule:
$$p(\theta|y) = \frac{p(y|\theta)p(\theta)}{\int p(y|\theta)p(\theta)d\theta }$$

1. numerator: if the prior $p(\theta)$ has the same functional form as the likelihood $p(y|\theta)$, their product keeps that form;
$p(\theta)$ should be a _conjugate prior_ for $p(y|\theta)$
2. denominator: the integral must be solvable in closed form

Back in the Bernoulli setting: if the prior looks like $\theta^{a}(1-\theta)^{b}$, then multiplying it by the Bernoulli-form likelihood yields a posterior of the same form.
The distribution with this property is the beta distribution!

**Beta _distribution_**
$$p(\theta|a,b) = beta(\theta|a,b) = \theta^{(a-1)}(1-\theta)^{(b-1)}/B(a,b)$$
where $B(a,b)$ is a normalizing constant

**cf. Beta _function_**
$$B(a,b) = \int_{0}^{1}\theta^{(a-1)}(1-\theta)^{(b-1)}d\theta$$ where $0\leq\theta\leq1,\; a>0\;and\;b>0$

compare with the R functions:
```{r}
x <- seq(0, 1, length = 21)
dbeta(x, 2, 2)  # beta density values over the grid
beta(2, 2)      # beta function, the normalizing constant B(a,b)
```

### (1) Specifying a beta prior
- reflect our belief about $\theta$
- thinking back to the coin-flipping setting...
  - suppose our prior knowledge is equivalent to n earlier flips, with a = number of heads and b = number of tails

Properties of the beta distribution

mean: $\mu = a/(a+b)$
mode: $\omega = (a-1)/(a+b-2)$
for a>1 and b>1
![alt text](beta_dist.PNG)

concentration
$\kappa = a+b$

a and b in terms of kappa, mean and mode
$a=\mu\kappa\;\;and\;\;b=(1-\mu)\kappa\\a=\omega(\kappa-2)+1\;\;and\;\;b=(1-\omega)(\kappa-2)+1\;\;for\;\;\kappa>2$

shape parameters from the mean and standard deviation
$a = \mu \left(\frac{\mu(1-\mu)}{\sigma^{2}}-1\right)$ and $b = (1-\mu) \left(\frac{\mu(1-\mu)}{\sigma^{2}}-1\right)$

```{r}
source("DBDA2E-utilities.R")
betaABfromMeanKappa(mean=0.25,kappa=4)

betaABfromModeKappa(mode=0.25,kappa=4)

betaABfromMeanSD(mean=0.5,sd=0.1)

betaParam = betaABfromModeKappa(mode=0.25,kappa=4)
betaParam$a
betaParam$b
```

3. The Posterior Beta
-------------------------
Once the prior is specified, Bayes' rule tells us what the posterior looks like.
If $z$ heads came up in $N$ coin flips, then by Bayes' theorem

$$p(\theta|z,N) = p(z,N|\theta)p(\theta)/p(z,N)\\=\theta^{z}(1-\theta)^{(N-z)}\frac{\theta^{(a-1)}(1-\theta)^{(b-1)}}{B(a,b)}/p(z,N)\\=\theta^{z}(1-\theta)^{(N-z)}\theta^{(a-1)}(1-\theta)^{(b-1)}/\left [ B(a,b)p(z,N) \right ]\\=\theta^{((z+a)-1)}(1-\theta)^{((N-z+b)-1)}/\left [ B(a,b)p(z,N) \right ]\\=\theta^{((z+a)-1)}(1-\theta)^{((N-z+b)-1)}/B(z+a,N-z+b)$$

Thinking through the coin-flipping example:
- the prior is beta($\theta$|1,1)
- we flip the coin and it comes up heads
- the posterior is beta($\theta$|2,1)
- we flip the coin again and it comes up tails
- the updated posterior is beta($\theta$|2,2)
...
If the prior is a beta distribution, the posterior is always a beta distribution as well.

### (1) The posterior distribution is a compromise between the prior and the likelihood
Looking at the posterior mean in the coin example,
$$\frac{z+a}{N+a+b} = \frac{z}{N}\frac{N}{N+a+b}+\frac{a}{a+b}\frac{a+b}{N+a+b}$$
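
As a quick numerical check of the update rule and the compromise identity above, here is a short editorial sketch in R (an addition to these notes, not one of the chapter's scripts); the prior and data values `a`, `b`, `z`, `N` are chosen purely for illustration.

```{r}
# Editorial sketch, not part of the original chapter code:
# conjugate updating beta(a,b) -> beta(z+a, N-z+b), and the posterior mean
# written as a weighted average of the data proportion and the prior mean.
a <- 2; b <- 2     # illustrative beta(2,2) prior
z <- 17; N <- 20   # illustrative data: 17 heads in 20 flips
post_a <- z + a
post_b <- N - z + b
post_mean  <- post_a / (post_a + post_b)
compromise <- (z/N) * (N/(N+a+b)) + (a/(a+b)) * ((a+b)/(N+a+b))
c(post_mean, compromise)  # identical: both equal (z+a)/(N+a+b)
curve(dbeta(x, post_a, post_b), from = 0, to = 1)  # the resulting posterior density
```
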
4. Example
-------------------------
### (1) When the prior can be expressed as a beta distribution
<Scenario>
- a freshly minted, ordinary coin
- from my firm prior belief, the mode is 0.5 and the effective prior sample size is 500
- the coin is flipped 20 times and comes up heads 17 times (85%)
- what, then, is the expected probability of heads?

```{r}
source("DBDA2E-utilities.R") # Load definitions of graphics functions etc.
source("BernBeta.R") # Load the definition of the BernBeta function

# Specify the prior:
t = 0.5 # Specify the prior MODE.
n = 500 # Specify the effective prior sample size.
a = t*(n-2) + 1 # Convert to beta shape parameter a.
b = (1-t)*(n-2) + 1 # Convert to beta shape parameter b.

Prior = c(a,b) # Specify Prior as vector with the two shape parameters.

# Specify the data:
N = 20 # The total number of flips.
z = 17 # The number of heads.
Data = c(rep(0,N-z),rep(1,z)) # Convert N and z into vector of 0's and 1's.

openGraph(width=5,height=7)
posterior = BernBeta( priorBetaAB=Prior, Data=Data , plotType="Bars" , 
 showCentTend="Mode" , showHDI=TRUE , showpD=FALSE )
saveGraph(file="BernBetaExample",type="png")

```

The example above can be adapted to a variety of situations.

### (2) When the prior cannot be expressed as a beta distribution
<Scenario>
- an odd company mints two kinds of coins: coins whose probability of heads is 25% and coins whose probability of heads is 75%
- the prior distribution is then bimodal

(back to grid approximation...)
```{r}
source("BernBeta.R")
source("BernGrid.R")

Theta <- seq(0,1,length=1000)
pTheta <- c(rep(1,200),seq(1,100,length=50),seq(100,1,length=50),rep(1,200),
 rep(1,200),seq(1,100,length=50),seq(100,1,length=50),rep(1,200))
pTheta <- pTheta/sum(pTheta)

Data <- c(rep(0,13),rep(1,14))
posterior <- BernGrid(Theta, pTheta,Data,plotType="Bars",
 showCentTend="None", showHDI=FALSE, showpD=FALSE)
```

-------------------------------------------------------------------------------- /part1/ch06/figure/unnamed-chunk-2-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch06/figure/unnamed-chunk-2-1.png -------------------------------------------------------------------------------- /part1/ch06/figure/unnamed-chunk-3-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch06/figure/unnamed-chunk-3-1.png -------------------------------------------------------------------------------- /part1/ch06/figure/unnamed-chunk-4-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch06/figure/unnamed-chunk-4-1.png -------------------------------------------------------------------------------- /part1/ch06/figure/unnamed-chunk-5-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part1/ch06/figure/unnamed-chunk-5-1.png -------------------------------------------------------------------------------- /part2/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/.DS_Store -------------------------------------------------------------------------------- /part2/ch08/.DS_Store: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/.DS_Store -------------------------------------------------------------------------------- /part2/ch08/Mcmc.Rdata: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/Mcmc.Rdata -------------------------------------------------------------------------------- /part2/ch08/data/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/data/.DS_Store -------------------------------------------------------------------------------- /part2/ch08/data/Exercise.08.1.csv: -------------------------------------------------------------------------------- 1 | "y","s" 2 | 1,"A" 3 | 1,"A" 4 | 1,"A" 5 | 1,"A" 6 | 1,"A" 7 | 1,"A" 8 | 1,"A" 9 | 1,"A" 10 | 1,"A" 11 | 0,"A" 12 | 0,"A" 13 | 0,"A" 14 | 1,"B" 15 | 1,"B" 16 | 1,"B" 17 | 1,"B" 18 | 1,"B" 19 | 1,"B" 20 | 1,"B" 21 | 1,"B" 22 | 1,"B" 23 | 1,"B" 24 | 1,"B" 25 | 1,"B" 26 | 1,"B" 27 | 1,"B" 28 | 1,"B" 29 | 1,"B" 30 | 1,"B" 31 | 1,"B" 32 | 1,"B" 33 | 1,"B" 34 | 1,"B" 35 | 1,"B" 36 | 1,"B" 37 | 1,"B" 38 | 1,"B" 39 | 1,"B" 40 | 1,"B" 41 | 1,"B" 42 | 1,"B" 43 | 1,"B" 44 | 1,"B" 45 | 1,"B" 46 | 1,"B" 47 | 1,"B" 48 | 1,"B" 49 | 1,"B" 50 | 1,"B" 51 | 1,"B" 52 | 1,"B" 53 | 1,"B" 54 | 1,"B" 55 | 1,"B" 56 | 1,"B" 57 | 1,"B" 58 | 1,"B" 59 | 0,"B" 60 | 0,"B" 61 | 0,"B" 62 | 0,"B" 63 | 0,"B" 64 | 0,"B" 65 | 0,"B" 66 | 0,"B" 67 | 0,"B" 68 | 0,"B" 69 | 0,"B" 70 | 0,"B" 71 | 0,"B" 72 | 0,"B" 73 | 0,"B" 74 | 1,"C" 75 | 1,"C" 76 | 1,"C" 77 | 0,"C" 78 | 0,"C" 79 | 0,"C" 80 | 0,"C" 81 | 0,"C" 82 | 0,"C" 83 | 0,"C" 84 | 0,"C" 85 | 0,"C" 86 | -------------------------------------------------------------------------------- /part2/ch08/data/Exercise.08.1Mcmc.Rdata: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/data/Exercise.08.1Mcmc.Rdata -------------------------------------------------------------------------------- /part2/ch08/data/Exercise.08.1SummaryInfo.csv: -------------------------------------------------------------------------------- 1 | "","Mean","Median","Mode","ESS","HDImass","HDIlow","HDIhigh","CompVal","PcntGtCompVal","ROPElow","ROPEhigh","PcntLtROPE","PcntInROPE","PcntGtROPE" 2 | "theta[1]",0.687448291347434,0.695113681104833,0.702414530008484,51257.7,0.95,0.468208918302519,0.892896940592527,0.5,94.176,0.45,0.55,2.5,9.51600000000001,87.984 3 | "theta[2]",0.734423209302432,0.736960287859088,0.741848256388242,50000,0.95,0.626300709888549,0.838877797656118,0.5,99.99,0.45,0.55,0,0.103999999999999,99.896 4 | "theta[3]",0.31296252597498,0.304848670409325,0.284305471968959,50000,0.95,0.103484538860023,0.531505596472017,0.5,5.912,0.45,0.55,87.824,9.524,2.652 5 | "theta[1]-theta[2]",-0.0469749179549979,-0.0411927568663351,-0.0385905027070802,50000,0.95,-0.29264803913855,0.18999514127024,0,37.18,-0.05,0.05,47.168,29.99,22.842 6 | "theta[1]-theta[3]",0.374485765372453,0.382151683700244,0.406627554094407,50786.7,0.95,0.0565541798717452,0.670298239815757,0,98.624,-0.05,0.05,0.632,2.012,97.356 7 | 
"theta[2]-theta[3]",0.421460683327451,0.428099953111193,0.441586019252548,50000,0.95,0.173712140632724,0.655538168037337,0,99.914,-0.05,0.05,0.018,0.233999999999995,99.748 8 | -------------------------------------------------------------------------------- /part2/ch08/data/Jags-Ydich-XnomSsubj-MbernBeta-Example.R: -------------------------------------------------------------------------------- 1 | # Example for Jags-Ydich-XnomSsubj-Mbernbeta.R 2 | #------------------------------------------------------------------------------- 3 | # Optional generic preliminaries: 4 | graphics.off() # This closes all of R's graphics windows. 5 | rm(list=ls()) # Careful! This clears all of R's memory! 6 | #------------------------------------------------------------------------------- 7 | # Load The data 8 | myData = read.csv("z6N8z2N7.csv") 9 | # N.B.: The functions below expect the data to be a data frame, 10 | # with one component named y being a vector of integer 0,1 values, 11 | # and one component named s being a factor of subject identifiers. 12 | #------------------------------------------------------------------------------- 13 | # Load the relevant model into R's working memory: 14 | source("Jags-Ydich-XnomSsubj-MbernBeta-C.R") 15 | #------------------------------------------------------------------------------- 16 | # Optional: Specify filename root and graphical format for saving output. 17 | # Otherwise specify as NULL or leave saveName and saveType arguments 18 | # out of function calls. 19 | fileNameRoot = "Jags-Ydich-XnomSsubj-MbernBeta-" 20 | graphFileType = "eps" 21 | #------------------------------------------------------------------------------- 22 | # Generate the MCMC chain: 23 | mcmcCoda = genMCMC( data=myData , numSavedSteps=50000 , saveName=fileNameRoot ) 24 | #------------------------------------------------------------------------------- 25 | # Display diagnostics of chain, for specified parameters: 26 | parameterNames = varnames(mcmcCoda) # get all parameter names 27 | for ( parName in parameterNames ) { 28 | diagMCMC( codaObject=mcmcCoda , parName=parName , 29 | saveName=fileNameRoot , saveType=graphFileType ) 30 | } 31 | #------------------------------------------------------------------------------- 32 | # Get summary statistics of chain: 33 | summaryInfo = smryMCMC( mcmcCoda , compVal=NULL , #rope=c(0.45,0.55) , 34 | compValDiff=0.0 , #ropeDiff = c(-0.05,0.05) , 35 | saveName=fileNameRoot ) 36 | # Display posterior information: 37 | plotMCMC( mcmcCoda , data=myData , compVal=NULL , #rope=c(0.45,0.55) , 38 | compValDiff=0.0 , #ropeDiff = c(-0.05,0.05) , 39 | saveName=fileNameRoot , saveType=graphFileType ) 40 | #------------------------------------------------------------------------------- 41 | -------------------------------------------------------------------------------- /part2/ch08/data/Jags-Ydich-XnomSsubj-MbernBeta-Mcmc.Rdata: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/data/Jags-Ydich-XnomSsubj-MbernBeta-Mcmc.Rdata -------------------------------------------------------------------------------- /part2/ch08/data/Jags-Ydich-XnomSsubj-MbernBeta-SummaryInfo.csv: -------------------------------------------------------------------------------- 1 | "","Mean","Median","Mode","ESS","HDImass","HDIlow","HDIhigh","CompVal","PcntGtCompVal","ROPElow","ROPEhigh","PcntLtROPE","PcntInROPE","PcntGtROPE" 2 | 
"theta[1]",0.498332625207085,0.497555094713156,0.0285446066960012,50000,0.95,1.01154187460145e-09,0.993952552815069,NA,NA,NA,NA,NA,NA,NA 3 | "theta[2]",0.499357426815296,0.499681901057481,0.0287558325126463,50000,0.95,7.43031999559052e-11,0.993805901443364,NA,NA,NA,NA,NA,NA,NA 4 | "theta[1]-theta[2]",-0.0010248016082111,-0.00117974992640105,-0.00223624354669671,50000,0.95,-0.897692251589427,0.949009192118228,0,49.798,NA,NA,NA,NA,NA 5 | -------------------------------------------------------------------------------- /part2/ch08/data/TEMPmodel.txt: -------------------------------------------------------------------------------- 1 | 2 | model { 3 | for ( i in 1:Ntotal ) { 4 | y[i] ~ dbern( theta[s[i]] ) 5 | } 6 | for ( s in 1:Nsubj ) { 7 | theta[s] ~ dbeta( 2 , 2 ) # N.B.: 2,2 prior; change as appropriate. 8 | } 9 | } 10 | 11 | -------------------------------------------------------------------------------- /part2/ch08/data/ThetaDiag.eps: -------------------------------------------------------------------------------- 1 | %!PS-Adobe-3.0 EPSF-3.0 2 | %%DocumentNeededResources: font Helvetica 3 | %%+ font Helvetica-Bold 4 | %%+ font Helvetica-Oblique 5 | %%+ font Helvetica-BoldOblique 6 | %%+ font Symbol 7 | %%Title: R Graphics Output 8 | %%Creator: R Software 9 | %%Pages: (atend) 10 | %%BoundingBox: 0 0 504 504 11 | %%EndComments 12 | %%BeginProlog 13 | /bp { gs sRGB gs } def 14 | % begin .ps.prolog 15 | /gs { gsave } bind def 16 | /gr { grestore } bind def 17 | /ep { showpage gr gr } bind def 18 | /m { moveto } bind def 19 | /l { rlineto } bind def 20 | /np { newpath } bind def 21 | /cp { closepath } bind def 22 | /f { fill } bind def 23 | /o { stroke } bind def 24 | /c { newpath 0 360 arc } bind def 25 | /r { 4 2 roll moveto 1 copy 3 -1 roll exch 0 exch rlineto 0 rlineto -1 mul 0 exch rlineto closepath } bind def 26 | /p1 { stroke } bind def 27 | /p2 { gsave bg fill grestore newpath } bind def 28 | /p3 { gsave bg fill grestore stroke } bind def 29 | /p6 { gsave bg eofill grestore newpath } bind def 30 | /p7 { gsave bg eofill grestore stroke } bind def 31 | /t { 5 -2 roll moveto gsave rotate 32 | 1 index stringwidth pop 33 | mul neg 0 rmoveto show grestore } bind def 34 | /ta { 4 -2 roll moveto gsave rotate show } bind def 35 | /tb { 2 -1 roll 0 rmoveto show } bind def 36 | /cl { grestore gsave newpath 3 index 3 index moveto 1 index 37 | 4 -1 roll lineto exch 1 index lineto lineto 38 | closepath clip newpath } bind def 39 | /rgb { setrgbcolor } bind def 40 | /s { scalefont setfont } bind def 41 | % end .ps.prolog 42 | /sRGB { [ /CIEBasedABC 43 | << /DecodeLMN 44 | [ { dup 0.03928 le 45 | {12.92321 div} 46 | {0.055 add 1.055 div 2.4 exp } 47 | ifelse 48 | } bind dup dup 49 | ] 50 | /MatrixLMN [0.412457 0.212673 0.019334 51 | 0.357576 0.715152 0.119192 52 | 0.180437 0.072175 0.950301] 53 | /WhitePoint [0.9505 1.0 1.0890] 54 | >> 55 | ] setcolorspace } bind def 56 | /srgb { setcolor } bind def 57 | %%IncludeResource: font Helvetica 58 | /Helvetica findfont 59 | dup length dict begin 60 | {1 index /FID ne {def} {pop pop} ifelse} forall 61 | /Encoding ISOLatin1Encoding def 62 | currentdict 63 | end 64 | /Font1 exch definefont pop 65 | %%IncludeResource: font Helvetica-Bold 66 | /Helvetica-Bold findfont 67 | dup length dict begin 68 | {1 index /FID ne {def} {pop pop} ifelse} forall 69 | /Encoding ISOLatin1Encoding def 70 | currentdict 71 | end 72 | /Font2 exch definefont pop 73 | %%IncludeResource: font Helvetica-Oblique 74 | /Helvetica-Oblique findfont 75 | dup length dict begin 76 | {1 index /FID 
ne {def} {pop pop} ifelse} forall 77 | /Encoding ISOLatin1Encoding def 78 | currentdict 79 | end 80 | /Font3 exch definefont pop 81 | %%IncludeResource: font Helvetica-BoldOblique 82 | /Helvetica-BoldOblique findfont 83 | dup length dict begin 84 | {1 index /FID ne {def} {pop pop} ifelse} forall 85 | /Encoding ISOLatin1Encoding def 86 | currentdict 87 | end 88 | /Font4 exch definefont pop 89 | %%IncludeResource: font Symbol 90 | /Symbol findfont 91 | dup length dict begin 92 | {1 index /FID ne {def} {pop pop} ifelse} forall 93 | currentdict 94 | end 95 | /Font5 exch definefont pop 96 | %%EndProlog 97 | ep 98 | %%Trailer 99 | %%Pages: 0 100 | %%EOF 101 | -------------------------------------------------------------------------------- /part2/ch08/data/z15N50.csv: -------------------------------------------------------------------------------- 1 | "y" 2 | 0 3 | 1 4 | 0 5 | 0 6 | 0 7 | 0 8 | 0 9 | 0 10 | 0 11 | 0 12 | 1 13 | 0 14 | 0 15 | 0 16 | 1 17 | 1 18 | 1 19 | 0 20 | 0 21 | 1 22 | 0 23 | 0 24 | 0 25 | 0 26 | 1 27 | 1 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 1 34 | 0 35 | 1 36 | 0 37 | 1 38 | 0 39 | 0 40 | 0 41 | 1 42 | 0 43 | 0 44 | 1 45 | 1 46 | 0 47 | 1 48 | 0 49 | 0 50 | 0 51 | 0 52 | -------------------------------------------------------------------------------- /part2/ch08/data/z6N8z2N7.csv: -------------------------------------------------------------------------------- 1 | "y","s" 2 | "1","Reginald" 3 | "0","Reginald" 4 | "1","Reginald" 5 | "1","Reginald" 6 | "1","Reginald" 7 | "1","Reginald" 8 | "1","Reginald" 9 | "0","Reginald" 10 | "0","Tony" 11 | "0","Tony" 12 | "1","Tony" 13 | "0","Tony" 14 | "0","Tony" 15 | "1","Tony" 16 | "0","Tony" 17 | -------------------------------------------------------------------------------- /part2/ch08/figures/eq8.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/figures/eq8.1.png -------------------------------------------------------------------------------- /part2/ch08/figures/eq8.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/figures/eq8.2.png -------------------------------------------------------------------------------- /part2/ch08/figures/fig8.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/figures/fig8.1.png -------------------------------------------------------------------------------- /part2/ch08/figures/fig8.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/figures/fig8.2.png -------------------------------------------------------------------------------- /part2/ch08/figures/fig8.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/figures/fig8.3.png -------------------------------------------------------------------------------- /part2/ch08/figures/fig8.4.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/figures/fig8.4.png -------------------------------------------------------------------------------- /part2/ch08/figures/fig8.5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/figures/fig8.5.png -------------------------------------------------------------------------------- /part2/ch08/figures/fig8.6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/figures/fig8.6.png -------------------------------------------------------------------------------- /part2/ch08/figures/fig8.7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch08/figures/fig8.7.png -------------------------------------------------------------------------------- /part2/ch09/TEMPmodel.txt: -------------------------------------------------------------------------------- 1 | 2 | model { 3 | for ( s in 1:Nsubj ) { 4 | z[s] ~ dbin( theta[s] , N[s] ) 5 | theta[s] ~ dbeta( omega[c[s]]*(kappa[c[s]]-2)+1 , 6 | (1-omega[c[s]])*(kappa[c[s]]-2)+1 ) 7 | } 8 | for ( c in 1:Ncat ) { 9 | omega[c] ~ dbeta( omegaO*(kappaO-2)+1 , 10 | (1-omegaO)*(kappaO-2)+1 ) 11 | kappa[c] <- kappaMinusTwo[c] + 2 12 | kappaMinusTwo[c] ~ dgamma( 0.01 , 0.01 ) # mean=1 , sd=10 (generic vague) 13 | } 14 | omegaO ~ dbeta( 1.0 , 1.0 ) 15 | #omegaO ~ dbeta( 1.025 , 1.075 ) # mode=0.25 , concentration=2.1 16 | kappaO <- kappaMinusTwoO + 2 17 | kappaMinusTwoO ~ dgamma( 0.01 , 0.01 ) # mean=1 , sd=10 (generic vague) 18 | #kappaMinusTwoO ~ dgamma( 1.01005 , 0.01005012 ) # mode=1 , sd=100 19 | #kappaMinusTwoO ~ dgamma( 1.105125 , 0.1051249 ) # mode=1 , sd=10 20 | #kappaMinusTwoO ~ dgamma( 1.105125 , 0.01051249 ) # mode=10 , sd=100 21 | } 22 | 23 | -------------------------------------------------------------------------------- /part2/ch09/TherapeuticTouchData.csv: -------------------------------------------------------------------------------- 1 | "y","s" 2 | "1","S01" 3 | "0","S01" 4 | "0","S01" 5 | "0","S01" 6 | "0","S01" 7 | "0","S01" 8 | "0","S01" 9 | "0","S01" 10 | "0","S01" 11 | "0","S01" 12 | "0","S02" 13 | "0","S02" 14 | "0","S02" 15 | "1","S02" 16 | "0","S02" 17 | "0","S02" 18 | "1","S02" 19 | "0","S02" 20 | "0","S02" 21 | "0","S02" 22 | "0","S03" 23 | "0","S03" 24 | "0","S03" 25 | "0","S03" 26 | "0","S03" 27 | "1","S03" 28 | "0","S03" 29 | "0","S03" 30 | "1","S03" 31 | "1","S03" 32 | "0","S04" 33 | "1","S04" 34 | "1","S04" 35 | "0","S04" 36 | "1","S04" 37 | "0","S04" 38 | "0","S04" 39 | "0","S04" 40 | "0","S04" 41 | "0","S04" 42 | "0","S05" 43 | "1","S05" 44 | "0","S05" 45 | "0","S05" 46 | "0","S05" 47 | "1","S05" 48 | "0","S05" 49 | "0","S05" 50 | "1","S05" 51 | "0","S05" 52 | "0","S06" 53 | "0","S06" 54 | "0","S06" 55 | "1","S06" 56 | "1","S06" 57 | "0","S06" 58 | "0","S06" 59 | "0","S06" 60 | "1","S06" 61 | "0","S06" 62 | "0","S07" 63 | "0","S07" 64 | "1","S07" 65 | "0","S07" 66 | "0","S07" 67 | "1","S07" 68 | "1","S07" 69 | "0","S07" 70 | "0","S07" 71 | "0","S07" 72 | "0","S08" 73 | "1","S08" 74 | "1","S08" 75 | "0","S08" 76 | "0","S08" 77 | "1","S08" 78 | "0","S08" 79 | "0","S08" 80 | "0","S08" 81 | "0","S08" 82 | "1","S09" 83 | 
"0","S09" 84 | "1","S09" 85 | "0","S09" 86 | "0","S09" 87 | "0","S09" 88 | "0","S09" 89 | "0","S09" 90 | "1","S09" 91 | "0","S09" 92 | "0","S10" 93 | "1","S10" 94 | "0","S10" 95 | "0","S10" 96 | "0","S10" 97 | "0","S10" 98 | "1","S10" 99 | "0","S10" 100 | "1","S10" 101 | "0","S10" 102 | "0","S11" 103 | "1","S11" 104 | "0","S11" 105 | "1","S11" 106 | "0","S11" 107 | "0","S11" 108 | "0","S11" 109 | "1","S11" 110 | "1","S11" 111 | "0","S11" 112 | "0","S12" 113 | "1","S12" 114 | "0","S12" 115 | "0","S12" 116 | "0","S12" 117 | "0","S12" 118 | "0","S12" 119 | "1","S12" 120 | "1","S12" 121 | "1","S12" 122 | "1","S13" 123 | "0","S13" 124 | "1","S13" 125 | "1","S13" 126 | "1","S13" 127 | "0","S13" 128 | "0","S13" 129 | "0","S13" 130 | "0","S13" 131 | "0","S13" 132 | "0","S14" 133 | "0","S14" 134 | "0","S14" 135 | "0","S14" 136 | "0","S14" 137 | "0","S14" 138 | "1","S14" 139 | "1","S14" 140 | "1","S14" 141 | "1","S14" 142 | "1","S15" 143 | "0","S15" 144 | "0","S15" 145 | "1","S15" 146 | "0","S15" 147 | "0","S15" 148 | "0","S15" 149 | "1","S15" 150 | "1","S15" 151 | "0","S15" 152 | "0","S16" 153 | "1","S16" 154 | "1","S16" 155 | "1","S16" 156 | "0","S16" 157 | "0","S16" 158 | "0","S16" 159 | "1","S16" 160 | "0","S16" 161 | "1","S16" 162 | "0","S17" 163 | "1","S17" 164 | "1","S17" 165 | "0","S17" 166 | "1","S17" 167 | "0","S17" 168 | "0","S17" 169 | "1","S17" 170 | "0","S17" 171 | "1","S17" 172 | "1","S18" 173 | "1","S18" 174 | "0","S18" 175 | "1","S18" 176 | "1","S18" 177 | "0","S18" 178 | "1","S18" 179 | "0","S18" 180 | "0","S18" 181 | "0","S18" 182 | "1","S19" 183 | "0","S19" 184 | "1","S19" 185 | "1","S19" 186 | "1","S19" 187 | "1","S19" 188 | "0","S19" 189 | "0","S19" 190 | "0","S19" 191 | "0","S19" 192 | "0","S20" 193 | "0","S20" 194 | "1","S20" 195 | "0","S20" 196 | "1","S20" 197 | "1","S20" 198 | "1","S20" 199 | "0","S20" 200 | "0","S20" 201 | "1","S20" 202 | "1","S21" 203 | "0","S21" 204 | "0","S21" 205 | "1","S21" 206 | "1","S21" 207 | "1","S21" 208 | "0","S21" 209 | "0","S21" 210 | "1","S21" 211 | "0","S21" 212 | "0","S22" 213 | "1","S22" 214 | "1","S22" 215 | "0","S22" 216 | "0","S22" 217 | "1","S22" 218 | "0","S22" 219 | "1","S22" 220 | "1","S22" 221 | "0","S22" 222 | "1","S23" 223 | "1","S23" 224 | "1","S23" 225 | "0","S23" 226 | "0","S23" 227 | "0","S23" 228 | "1","S23" 229 | "1","S23" 230 | "1","S23" 231 | "0","S23" 232 | "1","S24" 233 | "0","S24" 234 | "0","S24" 235 | "1","S24" 236 | "1","S24" 237 | "1","S24" 238 | "0","S24" 239 | "0","S24" 240 | "1","S24" 241 | "1","S24" 242 | "1","S25" 243 | "0","S25" 244 | "0","S25" 245 | "1","S25" 246 | "0","S25" 247 | "1","S25" 248 | "1","S25" 249 | "1","S25" 250 | "1","S25" 251 | "1","S25" 252 | "1","S26" 253 | "1","S26" 254 | "1","S26" 255 | "1","S26" 256 | "1","S26" 257 | "0","S26" 258 | "1","S26" 259 | "0","S26" 260 | "0","S26" 261 | "1","S26" 262 | "0","S27" 263 | "0","S27" 264 | "1","S27" 265 | "1","S27" 266 | "1","S27" 267 | "0","S27" 268 | "1","S27" 269 | "1","S27" 270 | "1","S27" 271 | "1","S27" 272 | "1","S28" 273 | "1","S28" 274 | "1","S28" 275 | "1","S28" 276 | "0","S28" 277 | "1","S28" 278 | "1","S28" 279 | "1","S28" 280 | "0","S28" 281 | "1","S28" 282 | -------------------------------------------------------------------------------- /part2/ch09/data/Jags-Ydich-XnomSsubj-MbernBeta-Example.R: -------------------------------------------------------------------------------- 1 | # Example for Jags-Ydich-XnomSsubj-Mbernbeta.R 2 | #------------------------------------------------------------------------------- 3 | # Optional generic 
preliminaries: 4 | graphics.off() # This closes all of R's graphics windows. 5 | rm(list=ls()) # Careful! This clears all of R's memory! 6 | #------------------------------------------------------------------------------- 7 | # Load The data 8 | myData = read.csv("z6N8z2N7.csv") 9 | # N.B.: The functions below expect the data to be a data frame, 10 | # with one component named y being a vector of integer 0,1 values, 11 | # and one component named s being a factor of subject identifiers. 12 | #------------------------------------------------------------------------------- 13 | # Load the relevant model into R's working memory: 14 | source("Jags-Ydich-XnomSsubj-MbernBeta-C.R") 15 | #------------------------------------------------------------------------------- 16 | # Optional: Specify filename root and graphical format for saving output. 17 | # Otherwise specify as NULL or leave saveName and saveType arguments 18 | # out of function calls. 19 | fileNameRoot = "Jags-Ydich-XnomSsubj-MbernBeta-" 20 | graphFileType = "eps" 21 | #------------------------------------------------------------------------------- 22 | # Generate the MCMC chain: 23 | mcmcCoda = genMCMC( data=myData , numSavedSteps=50000 , saveName=fileNameRoot ) 24 | #------------------------------------------------------------------------------- 25 | # Display diagnostics of chain, for specified parameters: 26 | parameterNames = varnames(mcmcCoda) # get all parameter names 27 | for ( parName in parameterNames ) { 28 | diagMCMC( codaObject=mcmcCoda , parName=parName , 29 | saveName=fileNameRoot , saveType=graphFileType ) 30 | } 31 | #------------------------------------------------------------------------------- 32 | # Get summary statistics of chain: 33 | summaryInfo = smryMCMC( mcmcCoda , compVal=NULL , #rope=c(0.45,0.55) , 34 | compValDiff=0.0 , #ropeDiff = c(-0.05,0.05) , 35 | saveName=fileNameRoot ) 36 | # Display posterior information: 37 | plotMCMC( mcmcCoda , data=myData , compVal=NULL , #rope=c(0.45,0.55) , 38 | compValDiff=0.0 , #ropeDiff = c(-0.05,0.05) , 39 | saveName=fileNameRoot , saveType=graphFileType ) 40 | #------------------------------------------------------------------------------- 41 | -------------------------------------------------------------------------------- /part2/ch09/data/Jags-Ydich-XnomSsubj-MbernBeta-Mcmc.Rdata: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/data/Jags-Ydich-XnomSsubj-MbernBeta-Mcmc.Rdata -------------------------------------------------------------------------------- /part2/ch09/data/Jags-Ydich-XnomSsubj-MbernBeta-SummaryInfo.csv: -------------------------------------------------------------------------------- 1 | "","Mean","Median","Mode","ESS","HDImass","HDIlow","HDIhigh","CompVal","PcntGtCompVal","ROPElow","ROPEhigh","PcntLtROPE","PcntInROPE","PcntGtROPE" 2 | "theta[1]",0.498332625207085,0.497555094713156,0.0285446066960012,50000,0.95,1.01154187460145e-09,0.993952552815069,NA,NA,NA,NA,NA,NA,NA 3 | "theta[2]",0.499357426815296,0.499681901057481,0.0287558325126463,50000,0.95,7.43031999559052e-11,0.993805901443364,NA,NA,NA,NA,NA,NA,NA 4 | "theta[1]-theta[2]",-0.0010248016082111,-0.00117974992640105,-0.00223624354669671,50000,0.95,-0.897692251589427,0.949009192118228,0,49.798,NA,NA,NA,NA,NA 5 | -------------------------------------------------------------------------------- /part2/ch09/data/ThetaDiag.eps: 
-------------------------------------------------------------------------------- 1 | %!PS-Adobe-3.0 EPSF-3.0 2 | %%DocumentNeededResources: font Helvetica 3 | %%+ font Helvetica-Bold 4 | %%+ font Helvetica-Oblique 5 | %%+ font Helvetica-BoldOblique 6 | %%+ font Symbol 7 | %%Title: R Graphics Output 8 | %%Creator: R Software 9 | %%Pages: (atend) 10 | %%BoundingBox: 0 0 504 504 11 | %%EndComments 12 | %%BeginProlog 13 | /bp { gs sRGB gs } def 14 | % begin .ps.prolog 15 | /gs { gsave } bind def 16 | /gr { grestore } bind def 17 | /ep { showpage gr gr } bind def 18 | /m { moveto } bind def 19 | /l { rlineto } bind def 20 | /np { newpath } bind def 21 | /cp { closepath } bind def 22 | /f { fill } bind def 23 | /o { stroke } bind def 24 | /c { newpath 0 360 arc } bind def 25 | /r { 4 2 roll moveto 1 copy 3 -1 roll exch 0 exch rlineto 0 rlineto -1 mul 0 exch rlineto closepath } bind def 26 | /p1 { stroke } bind def 27 | /p2 { gsave bg fill grestore newpath } bind def 28 | /p3 { gsave bg fill grestore stroke } bind def 29 | /p6 { gsave bg eofill grestore newpath } bind def 30 | /p7 { gsave bg eofill grestore stroke } bind def 31 | /t { 5 -2 roll moveto gsave rotate 32 | 1 index stringwidth pop 33 | mul neg 0 rmoveto show grestore } bind def 34 | /ta { 4 -2 roll moveto gsave rotate show } bind def 35 | /tb { 2 -1 roll 0 rmoveto show } bind def 36 | /cl { grestore gsave newpath 3 index 3 index moveto 1 index 37 | 4 -1 roll lineto exch 1 index lineto lineto 38 | closepath clip newpath } bind def 39 | /rgb { setrgbcolor } bind def 40 | /s { scalefont setfont } bind def 41 | % end .ps.prolog 42 | /sRGB { [ /CIEBasedABC 43 | << /DecodeLMN 44 | [ { dup 0.03928 le 45 | {12.92321 div} 46 | {0.055 add 1.055 div 2.4 exp } 47 | ifelse 48 | } bind dup dup 49 | ] 50 | /MatrixLMN [0.412457 0.212673 0.019334 51 | 0.357576 0.715152 0.119192 52 | 0.180437 0.072175 0.950301] 53 | /WhitePoint [0.9505 1.0 1.0890] 54 | >> 55 | ] setcolorspace } bind def 56 | /srgb { setcolor } bind def 57 | %%IncludeResource: font Helvetica 58 | /Helvetica findfont 59 | dup length dict begin 60 | {1 index /FID ne {def} {pop pop} ifelse} forall 61 | /Encoding ISOLatin1Encoding def 62 | currentdict 63 | end 64 | /Font1 exch definefont pop 65 | %%IncludeResource: font Helvetica-Bold 66 | /Helvetica-Bold findfont 67 | dup length dict begin 68 | {1 index /FID ne {def} {pop pop} ifelse} forall 69 | /Encoding ISOLatin1Encoding def 70 | currentdict 71 | end 72 | /Font2 exch definefont pop 73 | %%IncludeResource: font Helvetica-Oblique 74 | /Helvetica-Oblique findfont 75 | dup length dict begin 76 | {1 index /FID ne {def} {pop pop} ifelse} forall 77 | /Encoding ISOLatin1Encoding def 78 | currentdict 79 | end 80 | /Font3 exch definefont pop 81 | %%IncludeResource: font Helvetica-BoldOblique 82 | /Helvetica-BoldOblique findfont 83 | dup length dict begin 84 | {1 index /FID ne {def} {pop pop} ifelse} forall 85 | /Encoding ISOLatin1Encoding def 86 | currentdict 87 | end 88 | /Font4 exch definefont pop 89 | %%IncludeResource: font Symbol 90 | /Symbol findfont 91 | dup length dict begin 92 | {1 index /FID ne {def} {pop pop} ifelse} forall 93 | currentdict 94 | end 95 | /Font5 exch definefont pop 96 | %%EndProlog 97 | ep 98 | %%Trailer 99 | %%Pages: 0 100 | %%EOF 101 | -------------------------------------------------------------------------------- /part2/ch09/data/z15N50.csv: -------------------------------------------------------------------------------- 1 | "y" 2 | 0 3 | 1 4 | 0 5 | 0 6 | 0 7 | 0 8 | 0 9 | 0 10 | 0 11 | 0 12 | 1 13 | 0 14 | 0 15 | 
0 16 | 1 17 | 1 18 | 1 19 | 0 20 | 0 21 | 1 22 | 0 23 | 0 24 | 0 25 | 0 26 | 1 27 | 1 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 1 34 | 0 35 | 1 36 | 0 37 | 1 38 | 0 39 | 0 40 | 0 41 | 1 42 | 0 43 | 0 44 | 1 45 | 1 46 | 0 47 | 1 48 | 0 49 | 0 50 | 0 51 | 0 52 | -------------------------------------------------------------------------------- /part2/ch09/data/z6N8z2N7.csv: -------------------------------------------------------------------------------- 1 | "y","s" 2 | "1","Reginald" 3 | "0","Reginald" 4 | "1","Reginald" 5 | "1","Reginald" 6 | "1","Reginald" 7 | "1","Reginald" 8 | "1","Reginald" 9 | "0","Reginald" 10 | "0","Tony" 11 | "0","Tony" 12 | "1","Tony" 13 | "0","Tony" 14 | "0","Tony" 15 | "1","Tony" 16 | "0","Tony" 17 | -------------------------------------------------------------------------------- /part2/ch09/img/fig9.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.1.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.10.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.11.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.12.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.13.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.13.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.14.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.14.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.15.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.15.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.16.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.17.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.17.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.2.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.3.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.4.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.5.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.6.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.7.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.8.png -------------------------------------------------------------------------------- /part2/ch09/img/fig9.9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/img/fig9.9.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.1.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.10.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.11.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.11.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.12.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.13.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.13.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.14.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.14.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.15.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.15.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.16.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.2.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.3.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.4.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.5.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.6.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.7.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.7.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.8.png -------------------------------------------------------------------------------- /part2/ch09/outputs/output9.9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09/outputs/output9.9.png -------------------------------------------------------------------------------- /part2/ch09_sp/bayesian_word/bayseian_word.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09_sp/bayesian_word/bayseian_word.pdf -------------------------------------------------------------------------------- /part2/ch09_sp/bayesian_word/bayseian_word.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09_sp/bayesian_word/bayseian_word.pptx -------------------------------------------------------------------------------- /part2/ch09_sp/bayesian_word/rt.jpg:
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09_sp/bayesian_word/rt.jpg -------------------------------------------------------------------------------- /part2/ch09_sp/bayesian_word/wordnet_len.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "from nltk.corpus import wordnet as wn" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": 2, 17 | "metadata": { 18 | "collapsed": false 19 | }, 20 | "outputs": [], 21 | "source": [ 22 | "word_vec = ['dump_truck', 'truck', 'motor_vehicle']" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 3, 28 | "metadata": { 29 | "collapsed": false 30 | }, 31 | "outputs": [], 32 | "source": [ 33 | "def len_hyponyms(word):\n", 34 | " print(len(set([s for s in wn.synsets(word)[0].closure(lambda s:s.hyponyms())])) + 1)" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": 4, 40 | "metadata": { 41 | "collapsed": false 42 | }, 43 | "outputs": [ 44 | { 45 | "name": "stdout", 46 | "output_type": "stream", 47 | "text": [ 48 | "1\n", 49 | "24\n", 50 | "78\n" 51 | ] 52 | } 53 | ], 54 | "source": [ 55 | "for word in word_vec:\n", 56 | " len_hyponyms(word)" 57 | ] 58 | } 59 | ], 60 | "metadata": { 61 | "kernelspec": { 62 | "display_name": "Python 2", 63 | "language": "python", 64 | "name": "python2" 65 | }, 66 | "language_info": { 67 | "codemirror_mode": { 68 | "name": "ipython", 69 | "version": 2 70 | }, 71 | "file_extension": ".py", 72 | "mimetype": "text/x-python", 73 | "name": "python", 74 | "nbconvert_exporter": "python", 75 | "pygments_lexer": "ipython2", 76 | "version": "2.7.6" 77 | } 78 | }, 79 | "nbformat": 4, 80 | "nbformat_minor": 0 81 | } 82 | -------------------------------------------------------------------------------- /part2/ch09_sp/deeplearning/data/wips/A_kr.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09_sp/deeplearning/data/wips/A_kr.xlsx -------------------------------------------------------------------------------- /part2/ch09_sp/deeplearning/data/wips/B_kr.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09_sp/deeplearning/data/wips/B_kr.xlsx -------------------------------------------------------------------------------- /part2/ch09_sp/deeplearning/data/wips/C_kr.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09_sp/deeplearning/data/wips/C_kr.xlsx -------------------------------------------------------------------------------- /part2/ch09_sp/deeplearning/data/wips/D_kr.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09_sp/deeplearning/data/wips/D_kr.xlsx -------------------------------------------------------------------------------- /part2/ch09_sp/deeplearning/data/wips/E_kr.xlsx: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09_sp/deeplearning/data/wips/E_kr.xlsx -------------------------------------------------------------------------------- /part2/ch09_sp/deeplearning/data/wips/G_kr.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09_sp/deeplearning/data/wips/G_kr.xlsx -------------------------------------------------------------------------------- /part2/ch09_sp/deeplearning/data/wips/H_kr.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch09_sp/deeplearning/data/wips/H_kr.xlsx -------------------------------------------------------------------------------- /part2/ch10/Jags-Ydich-Xnom1subj-MbernBeta.R: -------------------------------------------------------------------------------- 1 | # Jags-Ydich-Xnom1subj-Mbernbeta.R 2 | # Accompanies the book: 3 | # Kruschke, J. K. (2014). Doing Bayesian Data Analysis: 4 | # A Tutorial with R, JAGS, and Stan. 2nd Edition. Academic Press / Elsevier. 5 | source("DBDA2E-utilities.R") 6 | #=============================================================================== 7 | 8 | genMCMC = function( data , numSavedSteps=50000 , saveName=NULL ) { 9 | require(rjags) 10 | #----------------------------------------------------------------------------- 11 | # THE DATA. 12 | if ( class(data)=="data.frame" ) { # If data is a data.frame 13 | y = myData$y # then pull out the column named y 14 | } else { # else 15 | y = data # rename the data as y. 16 | } 17 | # Do some checking that data make sense: 18 | if ( any( y!=0 & y!=1 ) ) { stop("All y values must be 0 or 1.") } 19 | Ntotal = length(y) 20 | # Specify the data in a list, for later shipment to JAGS: 21 | dataList = list( 22 | y = y , 23 | Ntotal = Ntotal 24 | ) 25 | #----------------------------------------------------------------------------- 26 | # THE MODEL. 27 | modelString = " 28 | model { 29 | for ( i in 1:Ntotal ) { 30 | y[i] ~ dbern( theta ) 31 | } 32 | theta ~ dbeta( 1 , 1 ) 33 | } 34 | " # close quote for modelString 35 | writeLines( modelString , con="TEMPmodel.txt" ) 36 | #----------------------------------------------------------------------------- 37 | # INTIALIZE THE CHAINS. 
38 | # Initial values of MCMC chains based on data: 39 | # Option 1: Use single initial value for all chains: 40 | # thetaInit = sum(y)/length(y) 41 | # initsList = list( theta=thetaInit ) 42 | # Option 2: Use function that generates random values for each chain: 43 | initsList = function() { 44 | resampledY = sample( y , replace=TRUE ) 45 | thetaInit = sum(resampledY)/length(resampledY) 46 | thetaInit = 0.001+0.998*thetaInit # keep away from 0,1 47 | return( list( theta=thetaInit ) ) 48 | } 49 | #----------------------------------------------------------------------------- 50 | # RUN THE CHAINS 51 | parameters = c( "theta") # The parameters to be monitored 52 | adaptSteps = 500 # Number of steps to adapt the samplers 53 | burnInSteps = 500 # Number of steps to burn-in the chains 54 | nChains = 4 # nChains should be 2 or more for diagnostics 55 | thinSteps = 1 56 | nIter = ceiling( ( numSavedSteps * thinSteps ) / nChains ) 57 | # Create, initialize, and adapt the model: 58 | jagsModel = jags.model( "TEMPmodel.txt" , data=dataList , inits=initsList , 59 | n.chains=nChains , n.adapt=adaptSteps ) 60 | # Burn-in: 61 | cat( "Burning in the MCMC chain...\n" ) 62 | update( jagsModel , n.iter=burnInSteps ) 63 | # The saved MCMC chain: 64 | cat( "Sampling final MCMC chain...\n" ) 65 | codaSamples = coda.samples( jagsModel , variable.names=parameters , 66 | n.iter=nIter , thin=thinSteps ) 67 | # resulting codaSamples object has these indices: 68 | # codaSamples[[ chainIdx ]][ stepIdx , paramIdx ] 69 | if ( !is.null(saveName) ) { 70 | save( codaSamples , file=paste(saveName,"Mcmc.Rdata",sep="") ) 71 | } 72 | return( codaSamples ) 73 | } # end function 74 | 75 | #=============================================================================== 76 | 77 | smryMCMC = function( codaSamples , compVal=NULL , rope=NULL , saveName=NULL ) { 78 | summaryInfo = NULL 79 | mcmcMat = as.matrix(codaSamples,chains=TRUE) 80 | summaryInfo = rbind( summaryInfo , 81 | "theta" = summarizePost( mcmcMat[,"theta"] , 82 | compVal=compVal , ROPE=rope ) ) 83 | if ( !is.null(saveName) ) { 84 | write.csv( summaryInfo , file=paste(saveName,"SummaryInfo.csv",sep="") ) 85 | } 86 | show( summaryInfo ) 87 | return( summaryInfo ) 88 | } 89 | 90 | #=============================================================================== 91 | 92 | plotMCMC = function( codaSamples , data , compVal=NULL , rope=NULL , 93 | saveName=NULL , showCurve=FALSE , saveType="jpg" ) { 94 | # showCurve is TRUE or FALSE and indicates whether the posterior should 95 | # be displayed as a histogram (by default) or by an approximate curve. 
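# In addition to the posterior of theta (with its 95% HDI), the function below
# overlays a red "+" at the sample proportion z/N and annotates z and N in the
# upper right, so the plot can be read against the raw data at a glance.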
96 | #----------------------------------------------------------------------------- 97 | mcmcMat = as.matrix(codaSamples,chains=TRUE) 98 | chainLength = NROW( mcmcMat ) 99 | theta = mcmcMat[,"theta"] 100 | #----------------------------------------------------------------------------- 101 | # Set up window and layout: 102 | openGraph(width=4.0,height=3.0) 103 | par( mar=c(3.5,0.5,2.5,0.5) , mgp=c(2.25,0.7,0) ) 104 | #----------------------------------------------------------------------------- 105 | postInfo = plotPost( theta , cex.lab = 1.75 , 106 | showCurve=showCurve , 107 | compVal=compVal , ROPE=rope , cex.main=1.5 , 108 | xlab=bquote(theta) , main=paste("theta") , 109 | col="skyblue" ) 110 | z = sum(data$y) 111 | N = length(data$y) 112 | points( z/N , 0 , pch="+" , col="red" , cex=3 ) 113 | text( max(theta) , 0 , bquote( z==.(z) ) , adj=c(1,-11) ) 114 | text( max(theta) , 0 , bquote( N==.(N) ) , adj=c(1,-9.5) ) 115 | 116 | #----------------------------------------------------------------------------- 117 | if ( !is.null(saveName) ) { 118 | saveGraph( file=paste(saveName,"Post",sep=""), type=saveType) 119 | } 120 | } 121 | 122 | #=============================================================================== 123 | -------------------------------------------------------------------------------- /part2/ch10/Jags-Ydich-Xnom1subj-MbernBetaModelComp-Mcmc.Rdata: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch10/Jags-Ydich-Xnom1subj-MbernBetaModelComp-Mcmc.Rdata -------------------------------------------------------------------------------- /part2/ch10/Jags-Ydich-Xnom1subj-MbernBetaModelComp.R: -------------------------------------------------------------------------------- 1 | # Jags-Ydich-Xnom1subj-MbernBetaModelComp.R 2 | # Accompanies the book: 3 | # Kruschke, J. K. (2014). Doing Bayesian Data Analysis: 4 | # A Tutorial with R, JAGS, and Stan. 2nd Edition. Academic Press / Elsevier. 5 | graphics.off() 6 | rm(list=ls(all=TRUE)) 7 | source("DBDA2E-utilities.R") 8 | require(rjags) 9 | fileNameRoot="Jags-Ydich-Xnom1subj-MbernBetaModelComp-" # for output filenames 10 | 11 | #------------------------------------------------------------------------------ 12 | # THE DATA. 13 | 14 | N=9 15 | z=6 16 | y = c( rep(0,N-z) , rep(1,z) ) 17 | dataList = list( 18 | y = y , 19 | N = N 20 | ) 21 | 22 | #------------------------------------------------------------------------------ 23 | # THE MODEL. 24 | 25 | modelString = " 26 | model { 27 | for ( i in 1:N ) { 28 | y[i] ~ dbern( theta ) 29 | } 30 | theta ~ dbeta( omega[m]*(kappa-2)+1 , (1-omega[m])*(kappa-2)+1 ) 31 | omega[1] <- .25 32 | omega[2] <- .75 33 | kappa <- 12 34 | m ~ dcat( mPriorProb[] ) 35 | mPriorProb[1] <- .5 36 | mPriorProb[2] <- .5 37 | } 38 | " # close quote for modelString 39 | writeLines( modelString , con="TEMPmodel.txt" ) 40 | 41 | #------------------------------------------------------------------------------ 42 | # INTIALIZE THE CHAINS. 43 | 44 | # Specific initialization is not necessary in this case, 45 | # but here is a lazy version if wanted: 46 | # initsList = list( theta=0.5 , m=1 ) 47 | 48 | #------------------------------------------------------------------------------ 49 | # RUN THE CHAINS. 50 | 51 | parameters = c("theta","m") 52 | adaptSteps = 1000 # Number of steps to "tune" the samplers. 53 | burnInSteps = 1000 # Number of steps to "burn-in" the samplers. 
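# Aside (not part of the original listing): adaptSteps only lets JAGS tune its
# samplers and those draws are discarded, while burnInSteps additionally drops
# early, possibly pre-convergence samples from the monitored chain. With the
# settings just below (numSavedSteps = 50000, thinSteps = 1, nChains = 4),
# nPerChain works out to ceiling(50000*1/4) = 12500 saved steps per chain.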
54 | nChains = 4 # Number of chains to run. 55 | numSavedSteps=50000 # Total number of steps in chains to save. 56 | thinSteps=1 # Number of steps to "thin" (1=keep every step). 57 | nPerChain = ceiling( ( numSavedSteps * thinSteps ) / nChains ) # Steps per chain. 58 | # Create, initialize, and adapt the model: 59 | jagsModel = jags.model( "TEMPmodel.txt" , data=dataList , # inits=initsList , 60 | n.chains=nChains , n.adapt=adaptSteps ) 61 | # Burn-in: 62 | cat( "Burning in the MCMC chain...\n" ) 63 | update( jagsModel , n.iter=burnInSteps ) 64 | # The saved MCMC chain: 65 | cat( "Sampling final MCMC chain...\n" ) 66 | codaSamples = coda.samples( jagsModel , variable.names=parameters , 67 | n.iter=nPerChain , thin=thinSteps ) 68 | # resulting codaSamples object has these indices: 69 | # codaSamples[[ chainIdx ]][ stepIdx , paramIdx ] 70 | 71 | save( codaSamples , file=paste0(fileNameRoot,"Mcmc.Rdata") ) 72 | 73 | #------------------------------------------------------------------------------- 74 | # Display diagnostics of chain: 75 | 76 | parameterNames = varnames(codaSamples) # get all parameter names 77 | for ( parName in parameterNames ) { 78 | diagMCMC( codaSamples , parName=parName , 79 | saveName=fileNameRoot , saveType="eps" ) 80 | } 81 | 82 | #------------------------------------------------------------------------------ 83 | # EXAMINE THE RESULTS. 84 | 85 | # Convert coda-object codaSamples to matrix object for easier handling. 86 | mcmcMat = as.matrix( codaSamples , chains=TRUE ) 87 | m = mcmcMat[,"m"] 88 | theta = mcmcMat[,"theta"] 89 | 90 | # Compute the proportion of m at each index value: 91 | pM1 = sum( m == 1 ) / length( m ) 92 | pM2 = 1 - pM1 93 | 94 | # Extract theta values for each model index: 95 | thetaM1 = theta[ m == 1 ] 96 | thetaM2 = theta[ m == 2 ] 97 | 98 | # Plot histograms of sampled theta values for each model, 99 | # with pM displayed. 100 | openGraph(width=7,height=5) 101 | par( mar=0.5+c(3,1,2,1) , mgp=c(2.0,0.7,0) ) 102 | layout( matrix(c(1,1,2,3),nrow=2,byrow=FALSE) , widths=c(1,2) ) 103 | plotPost( m , breaks=seq(0.9,2.1,0.2) , cenTend="mean" , xlab="m" , main="Model Index" ) 104 | plotPost( thetaM1 , 105 | main=bquote( theta*" when m=1" * " ; p(m=1|D)" == .(signif(pM1,3)) ) , 106 | cex.main=1.75 , xlab=bquote(theta) , xlim=c(0,1) ) 107 | plotPost( thetaM2 , 108 | main=bquote( theta*" when m=2" * " ; p(m=2|D)" == .(signif(pM2,3)) ) , 109 | cex.main=1.75 , xlab=bquote(theta) , xlim=c(0,1) ) 110 | saveGraph( file=paste0(fileNameRoot,"Post") , type="eps" ) 111 | -------------------------------------------------------------------------------- /part2/ch10/Jags-Ydich-Xnom1subj-MbernBetaModelCompPseudoPrior-PSEUDO-Mcmc.Rdata: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch10/Jags-Ydich-Xnom1subj-MbernBetaModelCompPseudoPrior-PSEUDO-Mcmc.Rdata -------------------------------------------------------------------------------- /part2/ch10/Jags-Ydich-Xnom1subj-MbernBetaModelCompPseudoPrior.R: -------------------------------------------------------------------------------- 1 | # Jags-Ydich-Xnom1subj-MbernBetaModelComp.R 2 | # Accompanies the book: 3 | # Kruschke, J. K. (2014). Doing Bayesian Data Analysis: 4 | # A Tutorial with R, JAGS, and Stan. 2nd Edition. Academic Press / Elsevier. 
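# Aside (a sketch, not part of Kruschke's original script): this file revisits
# the model comparison above using pseudo-priors. In the plain
# Jags-Ydich-Xnom1subj-MbernBetaModelComp.R run, a single theta is shared and
# only its prior switches with the model index m; here each model keeps its own
# theta1/theta2, and whenever model m does not "own" a parameter, that
# parameter is sampled from a pseudo-prior chosen to resemble its posterior.
# That keeps the idle parameter from wandering over its whole prior and lets m
# mix much better; only theta1 when m=1 and theta2 when m=2 are interpreted as
# true posterior samples. Two small reading aids:
#   * The dbeta arguments use the mode/concentration form
#       a = omega*(kappa-2)+1 , b = (1-omega)*(kappa-2)+1 ,
#     so e.g. omega1[1]=.10, kappa1[1]=20 corresponds to dbeta(2.8, 17.2).
#   * With equal prior model probabilities (0.5 each), the Bayes factor can be
#     read off the model-index samples as posterior odds over prior odds, e.g.
#       BF12 = (pM1/pM2) / (0.5/0.5)
#     using pM1 and pM2 as computed near the end of this script.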
5 | graphics.off() 6 | rm(list=ls(all=TRUE)) 7 | source("DBDA2E-utilities.R") 8 | require(rjags) 9 | fileNameRoot="Jags-Ydich-Xnom1subj-MbernBetaModelCompPseudoPrior-PSEUDO-" 10 | 11 | #------------------------------------------------------------------------------ 12 | # THE DATA. 13 | 14 | N=30 15 | z=ceiling(.55*N) 16 | y = c( rep(0,N-z) , rep(1,z) ) 17 | dataList = list( 18 | y = y , 19 | N = N 20 | ) 21 | 22 | #------------------------------------------------------------------------------ 23 | # THE MODEL. 24 | 25 | modelString = " 26 | model { 27 | for ( i in 1:N ) { 28 | y[i] ~ dbern( theta ) 29 | } 30 | theta <- equals(m,1)*theta1 + equals(m,2)*theta2 31 | theta1 ~ dbeta( omega1[m]*(kappa1[m]-2)+1 , (1-omega1[m])*(kappa1[m]-2)+1 ) 32 | omega1[1] <- .10 # true prior value 33 | omega1[2] <- .40 # pseudo prior value 34 | kappa1[1] <- 20 # true prior value 35 | kappa1[2] <- 50 # pseudo prior value 36 | theta2 ~ dbeta( omega2[m]*(kappa2[m]-2)+1 , (1-omega2[m])*(kappa2[m]-2)+1 ) 37 | omega2[1] <- .70 # pseudo prior value 38 | omega2[2] <- .90 # true prior value 39 | kappa2[1] <- 50 # pseudo prior value 40 | kappa2[2] <- 20 # true prior value 41 | m ~ dcat( mPriorProb[] ) 42 | mPriorProb[1] <- .5 43 | mPriorProb[2] <- .5 44 | } 45 | " # close quote for modelString 46 | writeLines( modelString , con="TEMPmodel.txt" ) 47 | 48 | #------------------------------------------------------------------------------ 49 | # INTIALIZE THE CHAINS. 50 | 51 | # Specific initialization is not necessary in this case, 52 | # but here is a lazy version if wanted: 53 | # initsList = list( theta1=0.5 , theta2=0.5 , m=1 ) 54 | 55 | #------------------------------------------------------------------------------ 56 | # RUN THE CHAINS. 57 | 58 | parameters = c("m","theta1","theta2") 59 | adaptSteps = 1000 # Number of steps to "tune" the samplers. 60 | burnInSteps = 1000 # Number of steps to "burn-in" the samplers. 61 | nChains = 4 # Number of chains to run. 62 | numSavedSteps=10000 # Total number of steps in chains to save. 63 | thinSteps=1 # Number of steps to "thin" (1=keep every step). 64 | nPerChain = ceiling( ( numSavedSteps * thinSteps ) / nChains ) # Steps per chain. 65 | # Create, initialize, and adapt the model: 66 | jagsModel = jags.model( "TEMPmodel.txt" , data=dataList , # inits=initsList , 67 | n.chains=nChains , n.adapt=adaptSteps ) 68 | # Burn-in: 69 | cat( "Burning in the MCMC chain...\n" ) 70 | update( jagsModel , n.iter=burnInSteps ) 71 | # The saved MCMC chain: 72 | cat( "Sampling final MCMC chain...\n" ) 73 | codaSamples = coda.samples( jagsModel , variable.names=parameters , 74 | n.iter=nPerChain , thin=thinSteps ) 75 | # resulting codaSamples object has these indices: 76 | # codaSamples[[ chainIdx ]][ stepIdx , paramIdx ] 77 | 78 | save( codaSamples , file=paste0(fileNameRoot,"Mcmc.Rdata") ) 79 | 80 | #------------------------------------------------------------------------------- 81 | # Display diagnostics of chain: 82 | 83 | parameterNames = varnames(codaSamples) # get all parameter names 84 | for ( parName in c("m") ) { # parameterNames ) { 85 | diagMCMC( codaSamples , parName=parName , 86 | saveName=fileNameRoot , saveType="eps" ) 87 | } 88 | 89 | #------------------------------------------------------------------------------ 90 | # EXAMINE THE RESULTS. 91 | 92 | # Convert coda-object codaSamples to matrix object for easier handling. 
93 | mcmcMat = as.matrix( codaSamples , chains=TRUE ) 94 | m = mcmcMat[,"m"] 95 | # Compute the proportion of m at each index value: 96 | pM1 = sum( m == 1 ) / length( m ) 97 | pM2 = 1 - pM1 98 | # Extract theta values for each model index: 99 | theta1M1 = mcmcMat[,"theta1"][ m == 1 ] # true theta1 100 | theta1M2 = mcmcMat[,"theta1"][ m == 2 ] # pseudo theta1 101 | theta2M1 = mcmcMat[,"theta2"][ m == 1 ] # pseudo theta2 102 | theta2M2 = mcmcMat[,"theta2"][ m == 2 ] # true theta2 103 | 104 | # Plot histograms of sampled theta values for each model, 105 | # with pM displayed. 106 | openGraph(width=7,height=7) 107 | par( mar=0.5+c(3,1,2,1) , mgp=c(2.0,0.7,0) ) 108 | layout( matrix(c(1,1,2,3,4,5),nrow=3,byrow=TRUE) ) 109 | plotPost( m , breaks=seq(0.95,2.05,0.1) , xlim=c(0.75,2.25) , 110 | cenTend="mean" , xlab="m" , cex.main=1.75 , 111 | main=bquote( "Model Index." * 112 | " p(m=1|D) =" * .(signif(pM1,3)) * 113 | ", p(m=2|D) =" * .(signif(pM2,3)) ) ) 114 | plotPost( theta1M1 , 115 | main=bquote( theta[1]*" when m=1 (using true prior)" ) , 116 | cex.main=1.75 , xlab=bquote(theta[1]) , xlim=c(0,1) ) 117 | plotPost( theta2M1 , 118 | main=bquote( theta[2]*" when m=1; pseudo-prior" ) , 119 | cex.main=1.75 , xlab=bquote(theta[2]) , xlim=c(0,1) ) 120 | plotPost( theta1M2 , 121 | main=bquote( theta[1]*" when m=2; pseudo-prior" ) , 122 | cex.main=1.75 , xlab=bquote(theta[1]) , xlim=c(0,1) ) 123 | plotPost( theta2M2 , 124 | main=bquote( theta[2]*" when m=2 (using true prior)" ) , 125 | cex.main=1.75 , xlab=bquote(theta[2]) , xlim=c(0,1) ) 126 | saveGraph( file=paste0(fileNameRoot,"Post") , type="eps" ) 127 | -------------------------------------------------------------------------------- /part2/ch10/Jags-Ydich-Xnom1subj-MbernBeta_ver1.R: -------------------------------------------------------------------------------- 1 | # Jags-Ydich-Xnom1subj-Mbernbeta.R 2 | # Accompanies the book: 3 | # Kruschke, J. K. (2014). Doing Bayesian Data Analysis: 4 | # A Tutorial with R, JAGS, and Stan. 2nd Edition. Academic Press / Elsevier. 5 | source("DBDA2E-utilities.R") 6 | #=============================================================================== 7 | 8 | genMCMC = function( data , numSavedSteps=50000 , saveName=NULL ) { 9 | require(rjags) 10 | #----------------------------------------------------------------------------- 11 | # THE DATA. 12 | if ( class(data)=="data.frame" ) { # If data is a data.frame 13 | y = myData$y # then pull out the column named y 14 | } else { # else 15 | y = data # rename the data as y. 16 | } 17 | # Do some checking that data make sense: 18 | if ( any( y!=0 & y!=1 ) ) { stop("All y values must be 0 or 1.") } 19 | Ntotal = length(y) 20 | # Specify the data in a list, for later shipment to JAGS: 21 | dataList = list( 22 | y = y , 23 | Ntotal = Ntotal 24 | ) 25 | #----------------------------------------------------------------------------- 26 | # THE MODEL. 27 | modelString = " 28 | model { 29 | for (i in 1:Ntotal) { 30 | y[i] ~ dbern(theta) 31 | } 32 | theta ~ dbeta( 0.75*(12-2)+1, (1-0.75)*(12-2)+1) 33 | } 34 | " # close quote for modelString 35 | writeLines( modelString , con="TEMPmodel.txt" ) 36 | #----------------------------------------------------------------------------- 37 | # INTIALIZE THE CHAINS. 
38 | # Initial values of MCMC chains based on data: 39 | # Option 1: Use single initial value for all chains: 40 | # thetaInit = sum(y)/length(y) 41 | # initsList = list( theta=thetaInit ) 42 | # Option 2: Use function that generates random values for each chain: 43 | initsList = function() { 44 | resampledY = sample( y , replace=TRUE ) 45 | thetaInit = sum(resampledY)/length(resampledY) 46 | thetaInit = 0.001+0.998*thetaInit # keep away from 0,1 47 | return( list( theta=thetaInit ) ) 48 | } 49 | #----------------------------------------------------------------------------- 50 | # RUN THE CHAINS 51 | parameters = c( "theta") # The parameters to be monitored 52 | adaptSteps = 500 # Number of steps to adapt the samplers 53 | burnInSteps = 500 # Number of steps to burn-in the chains 54 | nChains = 4 # nChains should be 2 or more for diagnostics 55 | thinSteps = 1 56 | nIter = ceiling( ( numSavedSteps * thinSteps ) / nChains ) 57 | # Create, initialize, and adapt the model: 58 | jagsModel = jags.model( "TEMPmodel.txt" , data=dataList , inits=initsList , 59 | n.chains=nChains , n.adapt=adaptSteps ) 60 | # Burn-in: 61 | cat( "Burning in the MCMC chain...\n" ) 62 | update( jagsModel , n.iter=burnInSteps ) 63 | # The saved MCMC chain: 64 | cat( "Sampling final MCMC chain...\n" ) 65 | codaSamples = coda.samples( jagsModel , variable.names=parameters , 66 | n.iter=nIter , thin=thinSteps ) 67 | # resulting codaSamples object has these indices: 68 | # codaSamples[[ chainIdx ]][ stepIdx , paramIdx ] 69 | if ( !is.null(saveName) ) { 70 | save( codaSamples , file=paste(saveName,"Mcmc.Rdata",sep="") ) 71 | } 72 | return( codaSamples ) 73 | } # end function 74 | 75 | #=============================================================================== 76 | 77 | smryMCMC = function( codaSamples , compVal=NULL , rope=NULL , saveName=NULL ) { 78 | summaryInfo = NULL 79 | mcmcMat = as.matrix(codaSamples,chains=TRUE) 80 | summaryInfo = rbind( summaryInfo , 81 | "theta" = summarizePost( mcmcMat[,"theta"] , 82 | compVal=compVal , ROPE=rope ) ) 83 | if ( !is.null(saveName) ) { 84 | write.csv( summaryInfo , file=paste(saveName,"SummaryInfo.csv",sep="") ) 85 | } 86 | show( summaryInfo ) 87 | return( summaryInfo ) 88 | } 89 | 90 | #=============================================================================== 91 | 92 | plotMCMC = function( codaSamples , data , compVal=NULL , rope=NULL , 93 | saveName=NULL , showCurve=FALSE , saveType="jpg" ) { 94 | # showCurve is TRUE or FALSE and indicates whether the posterior should 95 | # be displayed as a histogram (by default) or by an approximate curve. 
96 | #----------------------------------------------------------------------------- 97 | mcmcMat = as.matrix(codaSamples,chains=TRUE) 98 | chainLength = NROW( mcmcMat ) 99 | theta = mcmcMat[,"theta"] 100 | #----------------------------------------------------------------------------- 101 | # Set up window and layout: 102 | openGraph(width=4.0,height=3.0) 103 | par( mar=c(3.5,0.5,2.5,0.5) , mgp=c(2.25,0.7,0) ) 104 | #----------------------------------------------------------------------------- 105 | postInfo = plotPost( theta , cex.lab = 1.75 , 106 | showCurve=showCurve , 107 | compVal=compVal , ROPE=rope , cex.main=1.5 , 108 | xlab=bquote(theta) , main=paste("theta") , 109 | col="skyblue" ) 110 | z = sum(data$y) 111 | N = length(data$y) 112 | points( z/N , 0 , pch="+" , col="red" , cex=3 ) 113 | text( max(theta) , 0 , bquote( z==.(z) ) , adj=c(1,-11) ) 114 | text( max(theta) , 0 , bquote( N==.(N) ) , adj=c(1,-9.5) ) 115 | 116 | #----------------------------------------------------------------------------- 117 | if ( !is.null(saveName) ) { 118 | saveGraph( file=paste(saveName,"Post",sep=""), type=saveType) 119 | } 120 | } 121 | 122 | #=============================================================================== 123 | -------------------------------------------------------------------------------- /part2/ch10/TEMPmodel.txt: -------------------------------------------------------------------------------- 1 | 2 | model { 3 | for ( i in 1:N ) { 4 | y[i] ~ dbern( theta ) 5 | } 6 | theta <- equals(m,1)*theta1 + equals(m,2)*theta2 7 | theta1 ~ dbeta( omega1[m]*(kappa1[m]-2)+1 , (1-omega1[m])*(kappa1[m]-2)+1 ) 8 | omega1[1] <- .10 # true prior value 9 | omega1[2] <- .40 # pseudo prior value 10 | kappa1[1] <- 20 # true prior value 11 | kappa1[2] <- 50 # pseudo prior value 12 | theta2 ~ dbeta( omega2[m]*(kappa2[m]-2)+1 , (1-omega2[m])*(kappa2[m]-2)+1 ) 13 | omega2[1] <- .70 # pseudo prior value 14 | omega2[2] <- .90 # true prior value 15 | kappa2[1] <- 50 # pseudo prior value 16 | kappa2[2] <- 20 # true prior value 17 | m ~ dcat( mPriorProb[] ) 18 | mPriorProb[1] <- .5 19 | mPriorProb[2] <- .5 20 | } 21 | 22 | -------------------------------------------------------------------------------- /part2/ch10/ch10.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | -------------------------------------------------------------------------------- /part2/ch10/ch10document_files/figure-html/unnamed-chunk-13-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch10/ch10document_files/figure-html/unnamed-chunk-13-1.png -------------------------------------------------------------------------------- /part2/ch10/ch10document_files/figure-html/unnamed-chunk-19-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch10/ch10document_files/figure-html/unnamed-chunk-19-1.png -------------------------------------------------------------------------------- /part2/ch10/figures/fig1.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch10/figures/fig1.png -------------------------------------------------------------------------------- /part2/ch10/figures/fig11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch10/figures/fig11.png -------------------------------------------------------------------------------- /part2/ch10/figures/fig2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch10/figures/fig2.png -------------------------------------------------------------------------------- /part2/ch10/figures/fig3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch10/figures/fig3.png -------------------------------------------------------------------------------- /part2/ch10/figures/fig4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch10/figures/fig4.png -------------------------------------------------------------------------------- /part2/ch10/figures/fig5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch10/figures/fig5.png -------------------------------------------------------------------------------- /part2/ch11/files/1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/1.png -------------------------------------------------------------------------------- /part2/ch11/files/2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/2.png -------------------------------------------------------------------------------- /part2/ch11/files/3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/3.png -------------------------------------------------------------------------------- /part2/ch11/files/5.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/5.PNG -------------------------------------------------------------------------------- /part2/ch11/files/Beta_distribution.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/Beta_distribution.png -------------------------------------------------------------------------------- /part2/ch11/files/Eq1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/Eq1.png -------------------------------------------------------------------------------- /part2/ch11/files/Fig1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/Fig1.png -------------------------------------------------------------------------------- /part2/ch11/files/Fiq6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/Fiq6.png -------------------------------------------------------------------------------- /part2/ch11/files/Fiq7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/Fiq7.png -------------------------------------------------------------------------------- /part2/ch11/files/eq2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/eq2.png -------------------------------------------------------------------------------- /part2/ch11/files/eq3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/eq3.png -------------------------------------------------------------------------------- /part2/ch11/files/eq4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/eq4.png -------------------------------------------------------------------------------- /part2/ch11/files/fig2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/fig2.png -------------------------------------------------------------------------------- /part2/ch11/files/fig3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/fig3.png -------------------------------------------------------------------------------- /part2/ch11/files/fig4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/fig4.png -------------------------------------------------------------------------------- /part2/ch11/files/fig5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/fig5.png -------------------------------------------------------------------------------- /part2/ch11/files/hist.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/hist.png 
-------------------------------------------------------------------------------- /part2/ch11/files/nb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/nb.png -------------------------------------------------------------------------------- /part2/ch11/files/nbinomial.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/files/nbinomial.jpg -------------------------------------------------------------------------------- /part2/ch11/캡처.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part2/ch11/캡처.PNG -------------------------------------------------------------------------------- /part3/ch13/Jags-Ydich-Xnom1subj-MbernBeta.R: -------------------------------------------------------------------------------- 1 | # Jags-Ydich-Xnom1subj-Mbernbeta.R 2 | # Accompanies the book: 3 | # Kruschke, J. K. (2014). Doing Bayesian Data Analysis: 4 | # A Tutorial with R, JAGS, and Stan. 2nd Edition. Academic Press / Elsevier. 5 | source("DBDA2E-utilities.R") 6 | #=============================================================================== 7 | 8 | genMCMC = function( data , numSavedSteps=50000 , saveName=NULL ) { 9 | require(rjags) 10 | #----------------------------------------------------------------------------- 11 | # THE DATA. 12 | if ( class(data)=="data.frame" ) { # If data is a data.frame 13 | y = myData$y # then pull out the column named y 14 | } else { # else 15 | y = data # rename the data as y. 16 | } 17 | # Do some checking that data make sense: 18 | if ( any( y!=0 & y!=1 ) ) { stop("All y values must be 0 or 1.") } 19 | Ntotal = length(y) 20 | # Specify the data in a list, for later shipment to JAGS: 21 | dataList = list( 22 | y = y , 23 | Ntotal = Ntotal 24 | ) 25 | #----------------------------------------------------------------------------- 26 | # THE MODEL. 27 | modelString = " 28 | model { 29 | for ( i in 1:Ntotal ) { 30 | y[i] ~ dbern( theta ) 31 | } 32 | theta ~ dbeta( 1 , 1 ) 33 | } 34 | " # close quote for modelString 35 | writeLines( modelString , con="TEMPmodel.txt" ) 36 | #----------------------------------------------------------------------------- 37 | # INTIALIZE THE CHAINS. 
38 | # Initial values of MCMC chains based on data: 39 | # Option 1: Use single initial value for all chains: 40 | # thetaInit = sum(y)/length(y) 41 | # initsList = list( theta=thetaInit ) 42 | # Option 2: Use function that generates random values for each chain: 43 | initsList = function() { 44 | resampledY = sample( y , replace=TRUE ) 45 | thetaInit = sum(resampledY)/length(resampledY) 46 | thetaInit = 0.001+0.998*thetaInit # keep away from 0,1 47 | return( list( theta=thetaInit ) ) 48 | } 49 | #----------------------------------------------------------------------------- 50 | # RUN THE CHAINS 51 | parameters = c( "theta") # The parameters to be monitored 52 | adaptSteps = 500 # Number of steps to adapt the samplers 53 | burnInSteps = 500 # Number of steps to burn-in the chains 54 | nChains = 4 # nChains should be 2 or more for diagnostics 55 | thinSteps = 1 56 | nIter = ceiling( ( numSavedSteps * thinSteps ) / nChains ) 57 | # Create, initialize, and adapt the model: 58 | jagsModel = jags.model( "TEMPmodel.txt" , data=dataList , inits=initsList , 59 | n.chains=nChains , n.adapt=adaptSteps ) 60 | # Burn-in: 61 | cat( "Burning in the MCMC chain...\n" ) 62 | update( jagsModel , n.iter=burnInSteps ) 63 | # The saved MCMC chain: 64 | cat( "Sampling final MCMC chain...\n" ) 65 | codaSamples = coda.samples( jagsModel , variable.names=parameters , 66 | n.iter=nIter , thin=thinSteps ) 67 | # resulting codaSamples object has these indices: 68 | # codaSamples[[ chainIdx ]][ stepIdx , paramIdx ] 69 | if ( !is.null(saveName) ) { 70 | save( codaSamples , file=paste(saveName,"Mcmc.Rdata",sep="") ) 71 | } 72 | return( codaSamples ) 73 | } # end function 74 | 75 | #=============================================================================== 76 | 77 | smryMCMC = function( codaSamples , compVal=NULL , rope=NULL , saveName=NULL ) { 78 | summaryInfo = NULL 79 | mcmcMat = as.matrix(codaSamples,chains=TRUE) 80 | summaryInfo = rbind( summaryInfo , 81 | "theta" = summarizePost( mcmcMat[,"theta"] , 82 | compVal=compVal , ROPE=rope ) ) 83 | if ( !is.null(saveName) ) { 84 | write.csv( summaryInfo , file=paste(saveName,"SummaryInfo.csv",sep="") ) 85 | } 86 | show( summaryInfo ) 87 | return( summaryInfo ) 88 | } 89 | 90 | #=============================================================================== 91 | 92 | plotMCMC = function( codaSamples , data , compVal=NULL , rope=NULL , 93 | saveName=NULL , showCurve=FALSE , saveType="jpg" ) { 94 | # showCurve is TRUE or FALSE and indicates whether the posterior should 95 | # be displayed as a histogram (by default) or by an approximate curve. 
96 | #----------------------------------------------------------------------------- 97 | mcmcMat = as.matrix(codaSamples,chains=TRUE) 98 | chainLength = NROW( mcmcMat ) 99 | theta = mcmcMat[,"theta"] 100 | #----------------------------------------------------------------------------- 101 | # Set up window and layout: 102 | openGraph(width=4.0,height=3.0) 103 | par( mar=c(3.5,0.5,2.5,0.5) , mgp=c(2.25,0.7,0) ) 104 | #----------------------------------------------------------------------------- 105 | postInfo = plotPost( theta , cex.lab = 1.75 , 106 | showCurve=showCurve , 107 | compVal=compVal , ROPE=rope , cex.main=1.5 , 108 | xlab=bquote(theta) , main=paste("theta") , 109 | col="skyblue" ) 110 | z = sum(data$y) 111 | N = length(data$y) 112 | points( z/N , 0 , pch="+" , col="red" , cex=3 ) 113 | text( max(theta) , 0 , bquote( z==.(z) ) , adj=c(1,-11) ) 114 | text( max(theta) , 0 , bquote( N==.(N) ) , adj=c(1,-9.5) ) 115 | 116 | #----------------------------------------------------------------------------- 117 | if ( !is.null(saveName) ) { 118 | saveGraph( file=paste(saveName,"Post",sep=""), type=saveType) 119 | } 120 | } 121 | 122 | #=============================================================================== 123 | -------------------------------------------------------------------------------- /part3/ch13/Jags-Ydich-XnomSsubj-MbinomBetaOmegaKappa-Power.R: -------------------------------------------------------------------------------- 1 | # Jags-Ydich-XnomSsubj-MbinomBetaOmegaKappa-Power.R 2 | graphics.off() # This closes all of R's graphics windows. 3 | rm(list=ls()) # Careful! This clears all of R's memory! 4 | set.seed(47405) # Optional, merely for replicability. 5 | 6 | # Load the functions genMCMC, smryMCMC, and plotMCMC: 7 | # (This also sources DBDA2E-utilities.R) 8 | source("Jags-Ydich-XnomSsubj-MbinomBetaOmegaKappa.R") 9 | 10 | # Specify idealized hypothesis: 11 | idealGroupMean = 0.65 12 | idealGroupSD = 0.07 13 | idealNsubj = 100 # more subjects => higher confidence in hypothesis 14 | idealNtrlPerSubj = 100 # more trials => higher confidence in hypothesis 15 | # Generate random theta values for idealized subjects: 16 | betaAB = betaABfromMeanSD( idealGroupMean , idealGroupSD ) 17 | theta = rbeta( idealNsubj , betaAB$a , betaAB$b ) 18 | # Transform the theta values to exactly match idealized mean, SD: 19 | theta = ((theta-mean(theta))/sd(theta))*idealGroupSD + idealGroupMean 20 | theta[ theta >= 0.999 ] = 0.999 # must be between 0 and 1 21 | theta[ theta <= 0.001 ] = 0.001 # must be between 0 and 1 22 | # Generate idealized data very close to theta's: 23 | z = round( theta*idealNtrlPerSubj ) 24 | # Convert to data format needed by JAGS function: 25 | dataMat=matrix(0,ncol=2,nrow=0,dimnames=list(NULL,c("y","s"))) 26 | for ( sIdx in 1:idealNsubj ) { 27 | yVec = c(rep(1,z[sIdx]),rep(0,idealNtrlPerSubj-z[sIdx])) 28 | dataMat = rbind( dataMat , cbind( yVec , rep(sIdx,idealNtrlPerSubj) ) ) 29 | } 30 | idealDatFrm = data.frame(dataMat) 31 | # Run Bayesian analysis on idealized data: 32 | mcmcCoda = genMCMC( data=idealDatFrm , saveName=NULL , 33 | numSavedSteps=2000 , thinSteps=20 ) 34 | # Convert coda object to matrix for convenience: 35 | mcmcMat = as.matrix(mcmcCoda) 36 | 37 | # # Examine idealized parameter values: 38 | # effectiveSize(mcmcCoda[,"omega"]) 39 | # effectiveSize(mcmcCoda[,"kappa"]) 40 | openGraph(width=7,height=3) 41 | layout(matrix(1:2,ncol=2)) 42 | par( mar=c(3,1,1,1) , mgp=c(2.0,0.7,0) , oma=0.1+c(0,0,2,0) , 43 | cex.lab=1.75 , cex.main=1.5 , pch=20 ) 44 | 
plotPost( mcmcMat[,"omega"] , xlab="omega" , cenTend="mean" , xlim=c(.45,.85) , 45 | border="skyblue" , HDItextPlace=0.9 ) 46 | plotPost( mcmcMat[,"kappa"] , xlab="kappa" , cenTend="mode" , xlim=c(0,250) ) 47 | mtext( text=bquote(list( idealNsubj==.(idealNsubj) , 48 | idealNtrlPerSubj==.(idealNtrlPerSubj) )) , 49 | outer=TRUE , adj=c(0.5,0.5) , cex=1.5 ) 50 | saveGraph( file=paste0("Jags-Ydich-XnomSsubj-MbinomBetaOmegaKappa-Power-", 51 | idealNsubj,"-",idealNtrlPerSubj) , type="eps" ) 52 | 53 | # Define function that assays goal achievement for a single set of data: 54 | goalAchievedForSample = function( data ) { 55 | # Generate the MCMC chain: 56 | mcmcCoda = genMCMC( data=data , saveName=NULL , 57 | numSavedSteps=5000 , thinSteps=2 ) 58 | #numSavedSteps=11000 , thinSteps=20 59 | # Convert coda object to matrix for convenience: 60 | mcmcMat = as.matrix(mcmcCoda) 61 | # Specify criteria for goals: 62 | nullROPE = c(0.48,0.52) 63 | HDImaxWid = 0.2 64 | # Compute HDIs: 65 | HDImat = apply( mcmcMat , 2 , "HDIofMCMC" ) 66 | show( HDImat[,1:5] ) 67 | # Define list for recording results: 68 | goalAchieved = list() 69 | # Goal: omega greater than ROPE: 70 | goalAchieved = c( goalAchieved , 71 | "omegaAboveROPE"=unname( HDImat[1,"omega"] > nullROPE[2] ) ) 72 | # Goal: omega HDI width less than max width: 73 | goalAchieved = c( goalAchieved , 74 | "omegaNarrowHDI"=unname( HDImat[2,"omega"]-HDImat[1,"omega"] 75 | < HDImaxWid ) ) 76 | # Goal: at least one theta greater than ROPE with none below: 77 | thetaCols = grep("theta",colnames(HDImat)) # column indices of thetas 78 | goalAchieved = c( goalAchieved , 79 | "thetasAboveROPE"= (any(HDImat[1,thetaCols] > nullROPE[2]) 80 | & !any(HDImat[2,thetaCols] < nullROPE[1]))) 81 | # Goal: all theta's HDI width less than max width: 82 | goalAchieved = c( goalAchieved , 83 | "thetasNarrowHDI"= all( HDImat[2,thetaCols] 84 | - HDImat[1,thetaCols] 85 | < HDImaxWid ) ) 86 | # More goals can be inserted here if wanted... 87 | # Return list of goal results: 88 | return(goalAchieved) 89 | } 90 | 91 | # Specify sample size for each simulated data set: 92 | Nsubj = 2*7 ; NtrlPerSubj = 47 # 658 flips total 93 | #Nsubj = 7 ; NtrlPerSubj = 2*47 # 658 flips total 94 | # Specify the number of simulated experiments: 95 | nSimulatedDataSets = min(500,NROW(mcmcMat)) # An arbitrary large number. 
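# Aside (not part of the original listing): the two sample-size plans above
# spend the same total effort, 2*7*47 = 658 flips and 7*(2*47) = 658 flips, so
# comparing them isolates how trials are allocated across subjects rather than
# how many trials are collected overall. The loop below draws each simulated
# data set from one row of mcmcMat (one credible omega,kappa pair from the
# idealized posterior), and estimated power for each goal is simply the
# proportion of simulated data sets that achieve it; the HDI bounds reported at
# the end come from a Beta(1 + hits, 1 + misses) posterior over that proportion.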
96 | # Run the simulated experiments: 97 | simCount=0 98 | if (exists("goalTally")) rm(goalTally) # in case previously run from here down 99 | for ( simIdx in ceiling(seq(1,NROW(mcmcMat),length=nSimulatedDataSets)) ) { 100 | simCount=simCount+1 101 | cat( "\n\n==================== Simulation",simCount,"of",nSimulatedDataSets, 102 | "====================\n\n" ) 103 | # Generate random omega and kappa for group distribution: 104 | genOmega = mcmcMat[simIdx,"omega"] 105 | genKappa = mcmcMat[simIdx,"kappa"] 106 | # Generate random theta's for individuals: 107 | genTheta = rbeta( Nsubj , genOmega*(genKappa-2)+1 , (1-genOmega)*(genKappa-2)+1 ) 108 | # Generate random data based on parameter value: 109 | dataMat=matrix(0,ncol=2,nrow=0,dimnames=list(NULL,c("y","s"))) 110 | for ( sIdx in 1:Nsubj ) { 111 | z = rbinom( 1 , size=NtrlPerSubj , prob=genTheta[sIdx] ) 112 | yVec = c(rep(1,z),rep(0,NtrlPerSubj-z)) 113 | dataMat = rbind( dataMat , cbind( yVec , rep(sIdx,NtrlPerSubj) ) ) 114 | } 115 | # Do Bayesian analysis on simulated data: 116 | goalAchieved = goalAchievedForSample( data.frame(dataMat) ) 117 | # Tally the results: 118 | if (!exists("goalTally")) { # if goalTally does not exist, create it 119 | goalTally=matrix( nrow=0 , ncol=length(goalAchieved) ) 120 | } 121 | goalTally = rbind( goalTally , goalAchieved ) 122 | # save( goalTally , 123 | # file="Jags-Ydich-XnomSsubj-MbinomBetaOmegaKappa-Power-goalTally.Rdata" ) 124 | } 125 | 126 | # For each goal... 127 | for ( goalIdx in 1:NCOL(goalTally) ) { 128 | # Extract the goal name for subsequent display: 129 | goalName = colnames(goalTally)[goalIdx] 130 | # Compute number of successes: 131 | goalHits = sum(unlist(goalTally[,goalIdx])) 132 | # Compute number of attempts: 133 | goalAttempts = NROW(goalTally) 134 | # Compute proportion of successes: 135 | goalEst = goalHits/goalAttempts 136 | # Compute HDI around proportion: 137 | goalEstHDI = HDIofICDF( qbeta , 138 | shape1=1+goalHits , 139 | shape2=1+goalAttempts-goalHits ) 140 | # Display the result: 141 | show( paste0( goalName, 142 | ": Est.Power=" , round(goalEst,3) , 143 | "; Low Bound=" , round(goalEstHDI[1],3) , 144 | "; High Bound=" , round(goalEstHDI[2],3) ) ) 145 | } 146 | -------------------------------------------------------------------------------- /part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-Diaglp__.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-Diaglp__.png -------------------------------------------------------------------------------- /part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-Diagtheta.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-Diagtheta.png -------------------------------------------------------------------------------- /part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-Example.R: -------------------------------------------------------------------------------- 1 | # Example for Stan-Ydich-Xnom1subj-MbernBeta.R 2 | #------------------------------------------------------------------------------- 3 | # Optional generic preliminaries: 4 | graphics.off() # This closes all of R's graphics windows. 5 | rm(list=ls()) # Careful! This clears all of R's memory! 
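# Aside (not part of the original listing): z15N50.csv, loaded below, contains
# 15 ones among 50 Bernoulli trials. Under the beta(1,1) prior in
# Stan-Ydich-Xnom1subj-MbernBeta.R the exact posterior is therefore
# Beta(16, 36), with mean 16/52 ≈ 0.308, matching the MCMC summary reported in
# Stan-Ydich-Xnom1subj-MbernBeta-SummaryInfo.csv (Mean ≈ 0.3079).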
6 | #------------------------------------------------------------------------------- 7 | # Load The data 8 | myData = read.csv("z15N50.csv") 9 | #------------------------------------------------------------------------------- 10 | # Load the functions genMCMC, smryMCMC, and plotMCMC: 11 | source("Stan-Ydich-Xnom1subj-MbernBeta.R") 12 | #------------------------------------------------------------------------------- 13 | # Optional: Specify filename root and graphical format for saving output. 14 | # Otherwise specify as NULL or leave saveName and saveType arguments 15 | # out of function calls. 16 | fileNameRoot = "Stan-Ydich-Xnom1subj-MbernBeta-" 17 | graphFileType = "eps" 18 | #------------------------------------------------------------------------------- 19 | # Generate the MCMC chain: 20 | mcmcCoda = genMCMC( data=myData , numSavedSteps=10000 , saveName=fileNameRoot ) 21 | #------------------------------------------------------------------------------- 22 | # Display diagnostics of chain, for specified parameters: 23 | parameterNames = varnames(mcmcCoda) # get all parameter names 24 | for ( parName in parameterNames ) { 25 | diagMCMC( mcmcCoda , parName=parName , 26 | saveName=fileNameRoot , saveType=graphFileType ) 27 | } 28 | #------------------------------------------------------------------------------- 29 | # Get summary statistics of chain: 30 | summaryInfo = smryMCMC( mcmcCoda , compVal=0.5 , rope=c(0.45,0.55) , 31 | saveName=fileNameRoot ) 32 | # Display posterior information: 33 | plotMCMC( mcmcCoda , data=myData , # compVal=0.5 , rope=c(0.45,0.55) , 34 | saveName=fileNameRoot , saveType=graphFileType ) 35 | #------------------------------------------------------------------------------- 36 | # Use Stan display functions instead of DBDA2E functions: 37 | # Load the stanFit object that was saved by genMCMC: 38 | load("Stan-Ydich-Xnom1subj-MbernBeta-StanFit.Rdata") 39 | # Display information: 40 | show(stanFit) 41 | openGraph() 42 | traceplot(stanFit,pars=c("theta")) 43 | openGraph() 44 | plot(stanFit,pars=c("theta")) 45 | -------------------------------------------------------------------------------- /part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-Mcmc.Rdata: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-Mcmc.Rdata -------------------------------------------------------------------------------- /part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-Post.eps: -------------------------------------------------------------------------------- 1 | %!PS-Adobe-3.0 EPSF-3.0 2 | %%DocumentNeededResources: font Helvetica 3 | %%+ font Helvetica-Bold 4 | %%+ font Helvetica-Oblique 5 | %%+ font Helvetica-BoldOblique 6 | %%+ font Symbol 7 | %%Title: R Graphics Output 8 | %%Creator: R Software 9 | %%Pages: (atend) 10 | %%BoundingBox: 0 0 288 216 11 | %%EndComments 12 | %%BeginProlog 13 | /bp { gs sRGB gs } def 14 | % begin .ps.prolog 15 | /gs { gsave } bind def 16 | /gr { grestore } bind def 17 | /ep { showpage gr gr } bind def 18 | /m { moveto } bind def 19 | /l { rlineto } bind def 20 | /np { newpath } bind def 21 | /cp { closepath } bind def 22 | /f { fill } bind def 23 | /o { stroke } bind def 24 | /c { newpath 0 360 arc } bind def 25 | /r { 4 2 roll moveto 1 copy 3 -1 roll exch 0 exch rlineto 0 rlineto -1 mul 0 exch rlineto closepath } bind def 26 | /p1 { stroke } bind def 27 | /p2 { gsave bg fill grestore newpath } 
bind def 28 | /p3 { gsave bg fill grestore stroke } bind def 29 | /p6 { gsave bg eofill grestore newpath } bind def 30 | /p7 { gsave bg eofill grestore stroke } bind def 31 | /t { 5 -2 roll moveto gsave rotate 32 | 1 index stringwidth pop 33 | mul neg 0 rmoveto show grestore } bind def 34 | /ta { 4 -2 roll moveto gsave rotate show } bind def 35 | /tb { 2 -1 roll 0 rmoveto show } bind def 36 | /cl { grestore gsave newpath 3 index 3 index moveto 1 index 37 | 4 -1 roll lineto exch 1 index lineto lineto 38 | closepath clip newpath } bind def 39 | /rgb { setrgbcolor } bind def 40 | /s { scalefont setfont } bind def 41 | % end .ps.prolog 42 | /sRGB { [ /CIEBasedABC 43 | << /DecodeLMN 44 | [ { dup 0.03928 le 45 | {12.92321 div} 46 | {0.055 add 1.055 div 2.4 exp } 47 | ifelse 48 | } bind dup dup 49 | ] 50 | /MatrixLMN [0.412457 0.212673 0.019334 51 | 0.357576 0.715152 0.119192 52 | 0.180437 0.072175 0.950301] 53 | /WhitePoint [0.9505 1.0 1.0890] 54 | >> 55 | ] setcolorspace } bind def 56 | /srgb { setcolor } bind def 57 | %%IncludeResource: font Helvetica 58 | /Helvetica findfont 59 | dup length dict begin 60 | {1 index /FID ne {def} {pop pop} ifelse} forall 61 | /Encoding ISOLatin1Encoding def 62 | currentdict 63 | end 64 | /Font1 exch definefont pop 65 | %%IncludeResource: font Helvetica-Bold 66 | /Helvetica-Bold findfont 67 | dup length dict begin 68 | {1 index /FID ne {def} {pop pop} ifelse} forall 69 | /Encoding ISOLatin1Encoding def 70 | currentdict 71 | end 72 | /Font2 exch definefont pop 73 | %%IncludeResource: font Helvetica-Oblique 74 | /Helvetica-Oblique findfont 75 | dup length dict begin 76 | {1 index /FID ne {def} {pop pop} ifelse} forall 77 | /Encoding ISOLatin1Encoding def 78 | currentdict 79 | end 80 | /Font3 exch definefont pop 81 | %%IncludeResource: font Helvetica-BoldOblique 82 | /Helvetica-BoldOblique findfont 83 | dup length dict begin 84 | {1 index /FID ne {def} {pop pop} ifelse} forall 85 | /Encoding ISOLatin1Encoding def 86 | currentdict 87 | end 88 | /Font4 exch definefont pop 89 | %%IncludeResource: font Symbol 90 | /Symbol findfont 91 | dup length dict begin 92 | {1 index /FID ne {def} {pop pop} ifelse} forall 93 | currentdict 94 | end 95 | /Font5 exch definefont pop 96 | %%EndProlog 97 | %%Page: 1 1 98 | bp 99 | 0.00 0.00 287.53 215.69 cl 100 | /Font2 findfont 24 s 101 | 0 0 0 srgb 102 | 143.77 189.07 (theta) .5 0 t 103 | /Font5 findfont 28 s 104 | 136.47 6.96 (q) 0 0 t 105 | 0.75 setlinewidth 106 | [] 0 setdash 107 | 1 setlinecap 108 | 1 setlinejoin 109 | 10.00 setmiterlimit 110 | np 111 | 10.76 50.40 m 112 | 231.77 0 l 113 | o 114 | np 115 | 10.76 50.40 m 116 | 0 -7.20 l 117 | o 118 | np 119 | 68.70 50.40 m 120 | 0 -7.20 l 121 | o 122 | np 123 | 126.65 50.40 m 124 | 0 -7.20 l 125 | o 126 | np 127 | 184.59 50.40 m 128 | 0 -7.20 l 129 | o 130 | np 131 | 242.53 50.40 m 132 | 0 -7.20 l 133 | o 134 | /Font1 findfont 16 s 135 | 10.76 28.80 (0.1) .5 0 t 136 | 68.70 28.80 (0.2) .5 0 t 137 | 126.65 28.80 (0.3) .5 0 t 138 | 184.59 28.80 (0.4) .5 0 t 139 | 242.53 28.80 (0.5) .5 0 t 140 | /bg { 0.5294 0.8078 0.9216 srgb } def 141 | 1 1 1 srgb 142 | 17.32 55.19 7.88 0.39 r p3 143 | 25.19 55.19 7.88 0.77 r p3 144 | 33.07 55.19 7.88 2.06 r p3 145 | 40.95 55.19 7.88 5.01 r p3 146 | 48.83 55.19 7.88 8.35 r p3 147 | 56.70 55.19 7.88 17.21 r p3 148 | 64.58 55.19 7.88 26.85 r p3 149 | 72.46 55.19 7.88 41.87 r p3 150 | 80.33 55.19 7.88 56.39 r p3 151 | 88.21 55.19 7.88 66.79 r p3 152 | 96.09 55.19 7.88 84.52 r p3 153 | 103.97 55.19 7.88 90.43 r p3 154 | 111.84 55.19 7.88 103.78 r p3 155 
| 119.72 55.19 7.88 119.71 r p3 156 | 127.60 55.19 7.88 107.64 r p3 157 | 135.48 55.19 7.88 106.87 r p3 158 | 143.35 55.19 7.88 92.10 r p3 159 | 151.23 55.19 7.88 82.21 r p3 160 | 159.11 55.19 7.88 67.05 r p3 161 | 166.99 55.19 7.88 58.96 r p3 162 | 174.86 55.19 7.88 40.72 r p3 163 | 182.74 55.19 7.88 32.11 r p3 164 | 190.62 55.19 7.88 24.15 r p3 165 | 198.49 55.19 7.88 15.67 r p3 166 | 206.37 55.19 7.88 11.95 r p3 167 | 214.25 55.19 7.88 9.89 r p3 168 | 222.13 55.19 7.88 3.85 r p3 169 | 230.00 55.19 7.88 3.21 r p3 170 | 237.88 55.19 7.88 1.67 r p3 171 | 245.76 55.19 7.88 0.90 r p3 172 | 253.64 55.19 7.88 0.90 r p3 173 | 261.51 55.19 7.88 0.39 r p3 174 | 269.39 55.19 0.83 1.22 r p3 175 | /Font1 findfont 22 s 176 | 0 0 0 srgb 177 | 58.98 163.35 (m) 0 0 t 178 | 77.31 163.35 (o) 0 0 t 179 | 89.54 163.35 (d) 0 0 t 180 | 101.77 163.35 (e) 0 0 t 181 | /Font5 findfont 22 s 182 | 119.09 163.35 (=) 0 0 t 183 | /Font1 findfont 22 s 184 | 136.26 163.35 (0.297) 0 0 t 185 | 3.00 setlinewidth 186 | [] 0 setdash 187 | np 188 | 61.05 55.19 m 189 | 141.79 0 l 190 | o 191 | 87.93 83.17 (95) 0 0 t 192 | 112.40 83.17 (% HDI) 0 0 t 193 | 61.05 63.09 (0.187) 0.70 0 t 194 | 202.84 63.09 (0.431) 0.30 0 t 195 | 7.20 50.40 280.33 179.69 cl 196 | /Font1 findfont 48 s 197 | 1 0 0 srgb 198 | 126.65 43.07 (+) .5 0 t 199 | /Font1 findfont 16 s 200 | 0 0 0 srgb 201 | 228.24 182.56 (z) 0 0 t 202 | /Font5 findfont 16 s 203 | 239.94 182.56 (=) 0 0 t 204 | /Font1 findfont 16 s 205 | 252.42 182.56 (15) 0 0 t 206 | 224.68 167.52 (N) 0 0 t 207 | /Font5 findfont 16 s 208 | 239.94 167.52 (=) 0 0 t 209 | /Font1 findfont 16 s 210 | 252.42 167.52 (50) 0 0 t 211 | ep 212 | %%Trailer 213 | %%Pages: 1 214 | %%EOF 215 | -------------------------------------------------------------------------------- /part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-Post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-Post.png -------------------------------------------------------------------------------- /part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-StanDso.Rdata: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-StanDso.Rdata -------------------------------------------------------------------------------- /part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-StanFit.Rdata: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-StanFit.Rdata -------------------------------------------------------------------------------- /part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta-SummaryInfo.csv: -------------------------------------------------------------------------------- 1 | "","Mean","Median","Mode","ESS","HDImass","HDIlow","HDIhigh","CompVal","PcntGtCompVal","ROPElow","ROPEhigh","PcntLtROPE","PcntInROPE","PcntGtROPE" 2 | "theta",0.307892739619136,0.305393887557315,0.310976478325437,8995.5,0.95,0.183067423123637,0.427742883311279,0.5,0.19,0.45,0.55,98.35,1.64,0.01 3 | -------------------------------------------------------------------------------- /part3/ch14/data/Stan-Ydich-Xnom1subj-MbernBeta.R: 
-------------------------------------------------------------------------------- 1 | # Stan-Ydich-Xnom1subj-MbernBeta.R 2 | # Accompanies the book: 3 | # Kruschke, J. K. (2014). Doing Bayesian Data Analysis: 4 | # A Tutorial with R, JAGS, and Stan. 2nd Edition. Academic Press / Elsevier. 5 | source("DBDA2E-utilities.R") 6 | #=============================================================================== 7 | 8 | genMCMC = function( data , numSavedSteps=50000 , saveName=NULL ) { 9 | require(rstan) 10 | #----------------------------------------------------------------------------- 11 | # THE DATA. 12 | if ( class(data)=="data.frame" ) { # If data is a data.frame 13 | y = myData$y # then pull out the column named y 14 | } else { # else 15 | y = data # rename the data as y. 16 | } 17 | # Do some checking that data make sense: 18 | if ( any( y!=0 & y!=1 ) ) { stop("All y values must be 0 or 1.") } 19 | Ntotal = length(y) 20 | # Specify the data in a list, for later shipment to JAGS: 21 | dataList = list( 22 | y = y , 23 | Ntotal = Ntotal 24 | ) 25 | #----------------------------------------------------------------------------- 26 | # THE MODEL. 27 | modelString = " 28 | data { 29 | int Ntotal ; 30 | int y[Ntotal] ; 31 | } 32 | parameters { 33 | real theta ; 34 | } 35 | model { 36 | theta ~ beta(1,1) ; 37 | y ~ bernoulli(theta) ; // implicitly vectorized 38 | } 39 | " # close quote for modelString 40 | #----------------------------------------------------------------------------- 41 | # INTIALIZE THE CHAINS. 42 | # Initial values of MCMC chains based on data: 43 | # Option 1: Use single initial value for all chains: 44 | # thetaInit = sum(y)/length(y) 45 | # initsList = list( theta=thetaInit ) 46 | # Option 2: Use function that generates random values for each chain: 47 | initsList = function() { 48 | resampledY = sample( y , replace=TRUE ) 49 | thetaInit = sum(resampledY)/length(resampledY) 50 | thetaInit = 0.001+0.998*thetaInit # keep away from 0,1 51 | return( list( theta=thetaInit ) ) 52 | } 53 | #----------------------------------------------------------------------------- 54 | # RUN THE CHAINS 55 | parameters = c( "theta") # The parameters to be monitored 56 | burnInSteps = 500 # Stan defaults to iter/2 for overdispersed inits 57 | nChains = 4 # nChains should be 2 or more for diagnostics 58 | thinSteps = 4 # In Stan there is autocorrelation, so thin 59 | 60 | # stanCpp <- stanc( model_code = modelString ) # Translate to C++ 61 | # stanDso <- stan_model( stanc_ret = stanCpp ) # Compile Stan DSO 62 | 63 | # Translate to C++ and compile to DSO: 64 | stanDso <- stan_model( model_code=modelString ) 65 | # Get MC sample of posterior: 66 | stanFit <- sampling( object=stanDso , 67 | data = dataList , 68 | pars = parameters , # optional 69 | chains = nChains , 70 | iter = ( ceiling(numSavedSteps/nChains)*thinSteps 71 | +burnInSteps ) , 72 | warmup = burnInSteps , 73 | thin = thinSteps , 74 | init = initsList ) # optional 75 | # Or, accomplish above in one "stan" command; note stanDso is not separate. 
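# For example (a sketch; the argument values simply mirror those used above):
# stanFit <- stan( model_code = modelString ,
#                  data = dataList ,
#                  pars = parameters ,
#                  chains = nChains ,
#                  iter = ceiling(numSavedSteps/nChains)*thinSteps + burnInSteps ,
#                  warmup = burnInSteps ,
#                  thin = thinSteps ,
#                  init = initsList )
# This compiles and samples in a single call, at the cost of not having the
# compiled DSO available separately for saving or reuse.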
76 | 77 | # For consistency with JAGS-oriented functions in DBDA2E collection, 78 | # convert stan format to coda format: 79 | codaSamples = mcmc.list( lapply( 1:ncol(stanFit) , 80 | function(x) { mcmc(as.array(stanFit)[,x,]) } ) ) 81 | # resulting codaSamples object has these indices: 82 | # codaSamples[[ chainIdx ]][ stepIdx , paramIdx ] 83 | if ( !is.null(saveName) ) { 84 | save( codaSamples , file=paste(saveName,"Mcmc.Rdata",sep="") ) 85 | save( stanFit , file=paste(saveName,"StanFit.Rdata",sep="") ) 86 | save( stanDso , file=paste(saveName,"StanDso.Rdata",sep="") ) 87 | } 88 | return( codaSamples ) 89 | } # end function 90 | 91 | #=============================================================================== 92 | 93 | smryMCMC = function( codaSamples , compVal=NULL , rope=NULL , saveName=NULL ) { 94 | summaryInfo = NULL 95 | mcmcMat = as.matrix(codaSamples,chains=TRUE) 96 | summaryInfo = rbind( summaryInfo , 97 | "theta" = summarizePost( mcmcMat[,"theta"] , 98 | compVal=compVal , ROPE=rope ) ) 99 | if ( !is.null(saveName) ) { 100 | write.csv( summaryInfo , file=paste(saveName,"SummaryInfo.csv",sep="") ) 101 | } 102 | show( summaryInfo ) 103 | return( summaryInfo ) 104 | } 105 | 106 | #=============================================================================== 107 | 108 | plotMCMC = function( codaSamples , data , compVal=NULL , rope=NULL , 109 | saveName=NULL , showCurve=FALSE , saveType="jpg" ) { 110 | # showCurve is TRUE or FALSE and indicates whether the posterior should 111 | # be displayed as a histogram (by default) or by an approximate curve. 112 | #----------------------------------------------------------------------------- 113 | mcmcMat = as.matrix(codaSamples,chains=TRUE) 114 | chainLength = NROW( mcmcMat ) 115 | theta = mcmcMat[,"theta"] 116 | #----------------------------------------------------------------------------- 117 | # Set up window and layout: 118 | openGraph(width=4.0,height=3.0) 119 | par( mar=c(3.5,0.5,2.5,0.5) , mgp=c(2.25,0.7,0) ) 120 | #----------------------------------------------------------------------------- 121 | postInfo = plotPost( theta , cex.lab = 1.75 , 122 | showCurve=showCurve , 123 | compVal=compVal , ROPE=rope , cex.main=1.5 , 124 | xlab=bquote(theta) , main=paste("theta") , 125 | col="skyblue" ) 126 | z = sum(data$y) 127 | N = length(data$y) 128 | points( z/N , 0 , pch="+" , col="red" , cex=3 ) 129 | text( max(theta) , 0 , bquote( z==.(z) ) , adj=c(1,-11) ) 130 | text( max(theta) , 0 , bquote( N==.(N) ) , adj=c(1,-9.5) ) 131 | 132 | #----------------------------------------------------------------------------- 133 | if ( !is.null(saveName) ) { 134 | saveGraph( file=paste(saveName,"Post",sep=""), type=saveType) 135 | } 136 | } 137 | 138 | #=============================================================================== 139 | -------------------------------------------------------------------------------- /part3/ch14/data/z15N50.csv: -------------------------------------------------------------------------------- 1 | "y" 2 | 0 3 | 1 4 | 0 5 | 0 6 | 0 7 | 0 8 | 0 9 | 0 10 | 0 11 | 0 12 | 1 13 | 0 14 | 0 15 | 0 16 | 1 17 | 1 18 | 1 19 | 0 20 | 0 21 | 1 22 | 0 23 | 0 24 | 0 25 | 0 26 | 1 27 | 1 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 1 34 | 0 35 | 1 36 | 0 37 | 1 38 | 0 39 | 0 40 | 0 41 | 1 42 | 0 43 | 0 44 | 1 45 | 1 46 | 0 47 | 1 48 | 0 49 | 0 50 | 0 51 | 0 52 | -------------------------------------------------------------------------------- /part3/ch14/figures/eq14.1.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch14/figures/eq14.1.png -------------------------------------------------------------------------------- /part3/ch14/figures/fig14.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch14/figures/fig14.1.png -------------------------------------------------------------------------------- /part3/ch14/figures/fig14.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch14/figures/fig14.2.png -------------------------------------------------------------------------------- /part3/ch14/figures/fig14.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch14/figures/fig14.3.png -------------------------------------------------------------------------------- /part3/ch14/figures/fig8.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch14/figures/fig8.2.png -------------------------------------------------------------------------------- /part3/ch14/figures/fig8.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch14/figures/fig8.3.png -------------------------------------------------------------------------------- /part3/ch16/data/Jags-Ymet-Xnom1grp-Mnormal-Example.R: -------------------------------------------------------------------------------- 1 | # Example for Jags-Ymet-Xnom1grp-Mnormal.R 2 | #------------------------------------------------------------------------------- 3 | # Optional generic preliminaries: 4 | graphics.off() # This closes all of R's graphics windows. 5 | rm(list=ls()) # Careful! This clears all of R's memory! 6 | #------------------------------------------------------------------------------- 7 | # Load The data file 8 | myDataFrame = read.csv( file="TwoGroupIQ.csv" ) 9 | # For purposes of this one-group example, use data from Smart Drug group: 10 | myData = myDataFrame$Score[myDataFrame$Group=="Smart Drug"] 11 | #------------------------------------------------------------------------------- 12 | # Optional: Specify filename root and graphical format for saving output. 13 | # Otherwise specify as NULL or leave saveName and saveType arguments 14 | # out of function calls. 
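# [Editor's sketch, not in the original script: per the comment above, a run that
# writes nothing to disk can simply omit the saveName/saveType arguments, e.g.
#   mcmcCoda = genMCMC( data=myData , numSavedSteps=20000 )
#   diagMCMC( codaObject=mcmcCoda , parName="mu" )
# This assumes the functions in Jags-Ymet-Xnom1grp-Mnormal.R default saveName to
# NULL, as the ch14 genMCMC shown earlier does.]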
15 | fileNameRoot = "OneGroupIQnormal-" 16 | graphFileType = "eps" 17 | #------------------------------------------------------------------------------- 18 | # Load the relevant model into R's working memory: 19 | source("Jags-Ymet-Xnom1grp-Mnormal.R") 20 | #------------------------------------------------------------------------------- 21 | # Generate the MCMC chain: 22 | mcmcCoda = genMCMC( data=myData , numSavedSteps=20000 , saveName=fileNameRoot ) 23 | #------------------------------------------------------------------------------- 24 | # Display diagnostics of chain, for specified parameters: 25 | parameterNames = varnames(mcmcCoda) # get all parameter names 26 | for ( parName in parameterNames ) { 27 | diagMCMC( codaObject=mcmcCoda , parName=parName , 28 | saveName=fileNameRoot , saveType=graphFileType ) 29 | } 30 | #------------------------------------------------------------------------------- 31 | # Get summary statistics of chain: 32 | summaryInfo = smryMCMC( mcmcCoda , 33 | compValMu=100.0 , ropeMu=c(99.0,101.0) , 34 | compValSigma=15.0 , ropeSigma=c(14,16) , 35 | compValEff=0.0 , ropeEff=c(-0.1,0.1) , 36 | saveName=fileNameRoot ) 37 | show(summaryInfo) 38 | # Display posterior information: 39 | plotMCMC( mcmcCoda , data=myData , 40 | compValMu=100.0 , ropeMu=c(99.0,101.0) , 41 | compValSigma=15.0 , ropeSigma=c(14,16) , 42 | compValEff=0.0 , ropeEff=c(-0.1,0.1) , 43 | pairsPlot=TRUE , showCurve=FALSE , 44 | saveName=fileNameRoot , saveType=graphFileType ) 45 | #------------------------------------------------------------------------------- 46 | -------------------------------------------------------------------------------- /part3/ch16/data/Jags-Ymet-Xnom1grp-Mrobust-Example.R: -------------------------------------------------------------------------------- 1 | # Example for Jags-Ymet-Xnom1grp-Mrobust.R 2 | #------------------------------------------------------------------------------- 3 | # Optional generic preliminaries: 4 | graphics.off() # This closes all of R's graphics windows. 5 | rm(list=ls()) # Careful! This clears all of R's memory! 6 | #------------------------------------------------------------------------------- 7 | # Load The data file 8 | myDataFrame = read.csv( file="TwoGroupIQ.csv" ) 9 | # For purposes of this one-group example, use data from Smart Drug group: 10 | myData = myDataFrame$Score[myDataFrame$Group=="Smart Drug"] 11 | #------------------------------------------------------------------------------- 12 | # Optional: Specify filename root and graphical format for saving output. 13 | # Otherwise specify as NULL or leave saveName and saveType arguments 14 | # out of function calls. 
15 | fileNameRoot = "OneGroupIQrobust-Jags-" 16 | graphFileType = "eps" 17 | #------------------------------------------------------------------------------- 18 | # Load the relevant model into R's working memory: 19 | source("Jags-Ymet-Xnom1grp-Mrobust.R") 20 | #------------------------------------------------------------------------------- 21 | # Generate the MCMC chain: 22 | mcmcCoda = genMCMC( data=myData , numSavedSteps=20000 , saveName=fileNameRoot ) 23 | #------------------------------------------------------------------------------- 24 | # Display diagnostics of chain, for specified parameters: 25 | parameterNames = varnames(mcmcCoda) # get all parameter names 26 | for ( parName in parameterNames ) { 27 | diagMCMC( codaObject=mcmcCoda , parName=parName , 28 | saveName=fileNameRoot , saveType=graphFileType ) 29 | } 30 | #------------------------------------------------------------------------------- 31 | # Get summary statistics of chain: 32 | summaryInfo = smryMCMC( mcmcCoda , 33 | compValMu=100.0 , ropeMu=c(99.0,101.0) , 34 | compValSigma=15.0 , ropeSigma=c(14,16) , 35 | compValEff=0.0 , ropeEff=c(-0.1,0.1) , 36 | saveName=fileNameRoot ) 37 | show(summaryInfo) 38 | # Display posterior information: 39 | plotMCMC( mcmcCoda , data=myData , 40 | compValMu=100.0 , ropeMu=c(99.0,101.0) , 41 | compValSigma=15.0 , ropeSigma=c(14,16) , 42 | compValEff=0.0 , ropeEff=c(-0.1,0.1) , 43 | pairsPlot=TRUE , showCurve=FALSE , 44 | saveName=fileNameRoot , saveType=graphFileType ) 45 | #------------------------------------------------------------------------------- 46 | -------------------------------------------------------------------------------- /part3/ch16/data/OneGroupIQnormal-Diagmu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/data/OneGroupIQnormal-Diagmu.png -------------------------------------------------------------------------------- /part3/ch16/data/OneGroupIQnormal-Diagsigma.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/data/OneGroupIQnormal-Diagsigma.png -------------------------------------------------------------------------------- /part3/ch16/data/OneGroupIQnormal-Mcmc.Rdata: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/data/OneGroupIQnormal-Mcmc.Rdata -------------------------------------------------------------------------------- /part3/ch16/data/OneGroupIQnormal-Post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/data/OneGroupIQnormal-Post.png -------------------------------------------------------------------------------- /part3/ch16/data/OneGroupIQnormal-PostPairs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/data/OneGroupIQnormal-PostPairs.png -------------------------------------------------------------------------------- /part3/ch16/data/OneGroupIQnormal-SummaryInfo.csv: -------------------------------------------------------------------------------- 1 | 
"","Mean","Median","Mode","ESS","HDImass","HDIlow","HDIhigh","CompVal","PcntGtCompVal","ROPElow","ROPEhigh","PcntLtROPE","PcntInROPE","PcntGtROPE" 2 | "mu",107.82782707244,107.811466729288,107.089433112928,20097.3,0.95,101.437884968467,114.433468341364,100,98.95,99,101,0.41,1.625,97.965 3 | "sigma",25.9588548582424,25.7791320399593,25.0124458176058,11475.4,0.95,21.4644910108347,30.6368276827641,15,100,14,16,0,0,100 4 | "effSz",0.303906164483642,0.303473733261899,0.280792225196521,20000,0.95,0.0481049154537304,0.554016344009955,0,98.95,-0.1,0.1,0.07,5.65000000000001,94.28 5 | -------------------------------------------------------------------------------- /part3/ch16/data/Stan-Ymet-Xnom1grp-Mrobust-Example.R: -------------------------------------------------------------------------------- 1 | # Example for Stan-Ymet-Xnom1grp-Mrobust.R 2 | #------------------------------------------------------------------------------- 3 | # Optional generic preliminaries: 4 | graphics.off() # This closes all of R's graphics windows. 5 | rm(list=ls()) # Careful! This clears all of R's memory! 6 | #------------------------------------------------------------------------------- 7 | # Load The data file 8 | myDataFrame = read.csv( file="TwoGroupIQ.csv" ) 9 | # For purposes of this one-group example, use data from Smart Drug group: 10 | myData = myDataFrame$Score[myDataFrame$Group=="Smart Drug"] 11 | #------------------------------------------------------------------------------- 12 | # Load the relevant model into R's working memory: 13 | source("Stan-Ymet-Xnom1grp-Mrobust.R") 14 | #------------------------------------------------------------------------------- 15 | # Optional: Specify filename root and graphical format for saving output. 16 | # Otherwise specify as NULL or leave saveName and saveType arguments 17 | # out of function calls. 
18 | fileNameRoot = "OneGroupIQrobust-Stan-" 19 | graphFileType = "eps" 20 | #------------------------------------------------------------------------------- 21 | # Generate the MCMC chain: 22 | mcmcCoda = genMCMC( data=myData , numSavedSteps=20000 , saveName=fileNameRoot ) 23 | #------------------------------------------------------------------------------- 24 | # Display diagnostics of chain, for specified parameters: 25 | parameterNames = varnames(mcmcCoda) # get all parameter names 26 | for ( parName in parameterNames ) { 27 | diagMCMC( codaObject=mcmcCoda , parName=parName , 28 | saveName=fileNameRoot , saveType=graphFileType ) 29 | } 30 | #------------------------------------------------------------------------------- 31 | # Get summary statistics of chain: 32 | summaryInfo = smryMCMC( mcmcCoda , 33 | compValMu=100.0 , ropeMu=c(99.0,101.0) , 34 | compValSigma=15.0 , ropeSigma=c(14,16) , 35 | compValEff=0.0 , ropeEff=c(-0.1,0.1) , 36 | saveName=fileNameRoot ) 37 | show(summaryInfo) 38 | # Display posterior information: 39 | plotMCMC( mcmcCoda , data=myData , 40 | compValMu=100.0 , ropeMu=c(99.0,101.0) , 41 | compValSigma=15.0 , ropeSigma=c(14,16) , 42 | compValEff=0.0 , ropeEff=c(-0.1,0.1) , 43 | pairsPlot=TRUE , showCurve=FALSE , 44 | saveName=fileNameRoot , saveType=graphFileType ) 45 | #------------------------------------------------------------------------------- 46 | -------------------------------------------------------------------------------- /part3/ch16/data/Stan-Ymet-Xnom2grp-MrobustHet-Example.R: -------------------------------------------------------------------------------- 1 | # Example for Stan-Ymet-Xnom2grp-MrobustHet.R 2 | #------------------------------------------------------------------------------- 3 | # Optional generic preliminaries: 4 | graphics.off() # This closes all of R's graphics windows. 5 | rm(list=ls()) # Careful! This clears all of R's memory! 6 | #------------------------------------------------------------------------------- 7 | # Load The data file 8 | myDataFrame = read.csv( file="TwoGroupIQ.csv" ) 9 | #------------------------------------------------------------------------------- 10 | # Load the relevant model into R's working memory: 11 | source("Stan-Ymet-Xnom2grp-MrobustHet.R") 12 | #------------------------------------------------------------------------------- 13 | # Optional: Specify filename root and graphical format for saving output. 14 | # Otherwise specify as NULL or leave saveName and saveType arguments 15 | # out of function calls. 
16 | fileNameRoot = "TwoGroupIQrobustHet-Stan-" 17 | graphFileType = "eps" 18 | #------------------------------------------------------------------------------- 19 | # Generate the MCMC chain: 20 | mcmcCoda = genMCMC( datFrm=myDataFrame , yName="Score" , xName="Group" , 21 | numSavedSteps=20000 , saveName=fileNameRoot ) 22 | #------------------------------------------------------------------------------- 23 | # Display diagnostics of chain, for specified parameters: 24 | parameterNames = varnames(mcmcCoda) # get all parameter names 25 | for ( parName in parameterNames ) { 26 | diagMCMC( codaObject=mcmcCoda , parName=parName , 27 | saveName=fileNameRoot , saveType=graphFileType ) 28 | } 29 | #------------------------------------------------------------------------------- 30 | # Get summary statistics of chain: 31 | summaryInfo = smryMCMC( mcmcCoda , RopeMuDiff=c(-0.5,0.5) , 32 | RopeSdDiff=c(-0.5,0.5) , RopeEff=c(-0.1,0.1) , 33 | saveName=fileNameRoot ) 34 | show(summaryInfo) 35 | # Display posterior information: 36 | plotMCMC( mcmcCoda , datFrm=myDataFrame , yName="Score" , xName="Group" , 37 | RopeMuDiff=c(-0.5,0.5) , RopeSdDiff=c(-0.5,0.5), RopeEff=c(-0.1,0.1) , 38 | pairsPlot=TRUE , saveName=fileNameRoot , saveType=graphFileType ) 39 | #------------------------------------------------------------------------------- 40 | -------------------------------------------------------------------------------- /part3/ch16/data/TEMPmodel.txt: -------------------------------------------------------------------------------- 1 | 2 | model { 3 | for ( i in 1:Ntotal ) { 4 | y[i] ~ dnorm( mu , 1/sigma^2 ) 5 | } 6 | mu ~ dnorm( meanY , 1/(100*sdY)^2 ) 7 | sigma ~ dunif( sdY/1000 , sdY*1000 ) 8 | } 9 | 10 | -------------------------------------------------------------------------------- /part3/ch16/data/TwoGroupIQ.csv: -------------------------------------------------------------------------------- 1 | "Score","Group" 2 | 102,"Smart Drug" 3 | 107,"Smart Drug" 4 | 92,"Smart Drug" 5 | 101,"Smart Drug" 6 | 110,"Smart Drug" 7 | 68,"Smart Drug" 8 | 119,"Smart Drug" 9 | 106,"Smart Drug" 10 | 99,"Smart Drug" 11 | 103,"Smart Drug" 12 | 90,"Smart Drug" 13 | 93,"Smart Drug" 14 | 79,"Smart Drug" 15 | 89,"Smart Drug" 16 | 137,"Smart Drug" 17 | 119,"Smart Drug" 18 | 126,"Smart Drug" 19 | 110,"Smart Drug" 20 | 71,"Smart Drug" 21 | 114,"Smart Drug" 22 | 100,"Smart Drug" 23 | 95,"Smart Drug" 24 | 91,"Smart Drug" 25 | 99,"Smart Drug" 26 | 97,"Smart Drug" 27 | 106,"Smart Drug" 28 | 106,"Smart Drug" 29 | 129,"Smart Drug" 30 | 115,"Smart Drug" 31 | 124,"Smart Drug" 32 | 137,"Smart Drug" 33 | 73,"Smart Drug" 34 | 69,"Smart Drug" 35 | 95,"Smart Drug" 36 | 102,"Smart Drug" 37 | 116,"Smart Drug" 38 | 111,"Smart Drug" 39 | 134,"Smart Drug" 40 | 102,"Smart Drug" 41 | 110,"Smart Drug" 42 | 139,"Smart Drug" 43 | 112,"Smart Drug" 44 | 122,"Smart Drug" 45 | 84,"Smart Drug" 46 | 129,"Smart Drug" 47 | 112,"Smart Drug" 48 | 127,"Smart Drug" 49 | 106,"Smart Drug" 50 | 113,"Smart Drug" 51 | 109,"Smart Drug" 52 | 208,"Smart Drug" 53 | 114,"Smart Drug" 54 | 107,"Smart Drug" 55 | 50,"Smart Drug" 56 | 169,"Smart Drug" 57 | 133,"Smart Drug" 58 | 50,"Smart Drug" 59 | 97,"Smart Drug" 60 | 139,"Smart Drug" 61 | 72,"Smart Drug" 62 | 100,"Smart Drug" 63 | 144,"Smart Drug" 64 | 112,"Smart Drug" 65 | 109,"Placebo" 66 | 98,"Placebo" 67 | 106,"Placebo" 68 | 101,"Placebo" 69 | 100,"Placebo" 70 | 111,"Placebo" 71 | 117,"Placebo" 72 | 104,"Placebo" 73 | 106,"Placebo" 74 | 89,"Placebo" 75 | 84,"Placebo" 76 | 88,"Placebo" 77 | 94,"Placebo" 78 | 78,"Placebo" 
79 | 108,"Placebo" 80 | 102,"Placebo" 81 | 95,"Placebo" 82 | 99,"Placebo" 83 | 90,"Placebo" 84 | 116,"Placebo" 85 | 97,"Placebo" 86 | 107,"Placebo" 87 | 102,"Placebo" 88 | 91,"Placebo" 89 | 94,"Placebo" 90 | 95,"Placebo" 91 | 86,"Placebo" 92 | 108,"Placebo" 93 | 115,"Placebo" 94 | 108,"Placebo" 95 | 88,"Placebo" 96 | 102,"Placebo" 97 | 102,"Placebo" 98 | 120,"Placebo" 99 | 112,"Placebo" 100 | 100,"Placebo" 101 | 105,"Placebo" 102 | 105,"Placebo" 103 | 88,"Placebo" 104 | 82,"Placebo" 105 | 111,"Placebo" 106 | 96,"Placebo" 107 | 92,"Placebo" 108 | 109,"Placebo" 109 | 91,"Placebo" 110 | 92,"Placebo" 111 | 123,"Placebo" 112 | 61,"Placebo" 113 | 59,"Placebo" 114 | 105,"Placebo" 115 | 184,"Placebo" 116 | 82,"Placebo" 117 | 138,"Placebo" 118 | 99,"Placebo" 119 | 93,"Placebo" 120 | 93,"Placebo" 121 | 72,"Placebo" 122 | -------------------------------------------------------------------------------- /part3/ch16/figures/cap16.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/cap16.1.png -------------------------------------------------------------------------------- /part3/ch16/figures/cap16.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/cap16.2.png -------------------------------------------------------------------------------- /part3/ch16/figures/cap16.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/cap16.3.png -------------------------------------------------------------------------------- /part3/ch16/figures/cap16.4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/cap16.4.png -------------------------------------------------------------------------------- /part3/ch16/figures/cap16.5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/cap16.5.png -------------------------------------------------------------------------------- /part3/ch16/figures/cap16.6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/cap16.6.png -------------------------------------------------------------------------------- /part3/ch16/figures/eq16.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/eq16.1.png -------------------------------------------------------------------------------- /part3/ch16/figures/eq16.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/eq16.2.png -------------------------------------------------------------------------------- /part3/ch16/figures/eq16.3.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/eq16.3.png -------------------------------------------------------------------------------- /part3/ch16/figures/eq16.4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/eq16.4.png -------------------------------------------------------------------------------- /part3/ch16/figures/eq16.5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/eq16.5.png -------------------------------------------------------------------------------- /part3/ch16/figures/eq16.6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/eq16.6.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.1.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.10-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.10-1.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.10-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.10-2.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.11.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.12.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.13.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.13.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.2.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.3.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.3.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.4.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.5.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.6.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.7.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.8.png -------------------------------------------------------------------------------- /part3/ch16/figures/fig16.9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/fig16.9.png -------------------------------------------------------------------------------- /part3/ch16/figures/tbl15.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/tbl15.1.png -------------------------------------------------------------------------------- /part3/ch16/figures/tbl15.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/tbl15.2.png -------------------------------------------------------------------------------- /part3/ch16/figures/tbl15.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part3/ch16/figures/tbl15.3.png -------------------------------------------------------------------------------- /part3/sp_dist/dist_types.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Types of Probability" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "###1. 
Random Variables and Probability Distributions\n", 15 | "\n", 16 | "####1) Random variables\n", 17 | "\n", 18 | "- A function that assigns a real number to each event in the sample space\n", 19 | "- A numerical representation of an event\n", 20 | "\n", 21 | "####2) Probability distributions\n", 22 | "\n", 23 | "- The values a random variable can take, together with how likely each value is\n", 24 | "- Discrete probability distribution & continuous probability distribution\n", 25 | "- Probability mass function (PMF) & probability density function (PDF)\n", 26 | "\n", 27 | "###2. Probability Mass Functions and Probability Density Functions\n", 28 | "\n", 29 | "####1) Properties of a PMF\n", 30 | "\n", 31 | "- P(X = $x_{\rm i}$) $\dashrightarrow$ the probability that X takes the particular value $x_{\rm i}$\n", 32 | "- 0 $\le$ P($x_{\rm i}$) $\le$ 1, i = 1,2,3,... n\n", 33 | "- $\sum_{i=1}^n$P($x_{\rm i}$) = 1\n", 34 | "\n", 35 | "####2) Properties of a PDF\n", 36 | "\n", 37 | "- $f(x)\ge0$, $-\infty\lt x \lt \infty$\n", 38 | "- $\int_{-\infty}^{\infty}f(x)dx = 1$\n", 39 | "- For any numbers c and d, $P(c\le x \le d) = \int_c^df(x)dx$\n", 40 | "\n", 41 | "####3) Differences between a PMF and a PDF\n", 42 | "\n", 43 | "- $f(x)$ is a density, not a probability, so values greater than 1 are possible.\n", 44 | "- The probability that the random variable X lies between c and d is the area under $f(x)$ over that interval\n", 45 | "- $P(x = a) = 0 \dashrightarrow P(a \le x \le b) = p(a \lt x \lt b) = p(a \le x \lt b)$ \n", 46 | "\n",
47 | "###3. Kinds of Distributions\n", 48 | "\n", 49 | "####1) Cumulative Distribution Function (CDF)\n", 50 | "\n", 51 | "- For a given distribution, the probability that the random variable is at most a particular value, {$X \le x$}\n", 52 | "- $F(x) = P(X \le x)$ : the cumulative distribution function of the random variable X\n", 53 | "- Applies in the same way to discrete and continuous variables\n", 54 | "- $F(x)$ is a non-decreasing function of $x$ (it never decreases)\n", 55 | "- $\lim_{x\rightarrow\infty}F(x) = F(\infty) = 1$\n", 56 | "- $\lim_{x\rightarrow-\infty}F(x) = F(-\infty) = 0$\n", 57 | "\n", 58 | "\n", 59 | "####2) Joint Probability Distribution\n", 60 | "\n", 61 | "- The probabilities attached to two or more random variables observed together\n", 62 | "- Marginal probability: the probability of a single event, obtained from the joint distribution\n", 63 | "- $P(X = x | Y = y) = \frac{P(X=x, Y=y)}{P(Y=y)} = \frac{P(x, y)}{P_{Y}(y)}$\n", 64 | "- If X and Y are independent, $P(X=x, Y=y) = P(X=x) * P(Y=y)$ \n", 65 | "\n", 66 | "####3) Poisson distribution \n", 67 | "\n", 68 | "- A discrete distribution\n", 69 | "- Counts of independent events with no fixed upper bound on how many can occur\n", 70 | "- For example, if you feed the fish, how many fish will come? $\rightarrow$ Each fish \"coming / not coming\" is an independent event, but there is no set upper limit on how many might come.\n", 71 | "- $\lambda$ (lambda) : the average number of occurrences per interval\n", 72 | "- Poisson probability mass function : $P(X = x) = \frac{\lambda^x e^{-\lambda}}{x!}$\n", 73 | "- Mean of the Poisson distribution : $E(X) = \sum_{x=0}^{\infty}x\frac{\lambda^x e^{-\lambda}}{x!} = \lambda$\n", 74 | "- Problem $\rightarrow$ In the Amazon, an average of 30 people a month are killed by piranha attacks. $\rightarrow$ the probability that 30 die in a month; the probability of two or more attacks in one day\n", 75 | "\n",
76 | "####4) Normal distribution \n", 77 | "\n", 78 | "- A continuous distribution\n", 79 | "- If the probability density function of the random variable X is $f(x) = \frac{1}{\sqrt{2\pi}\sigma}e^{-\frac{(x-m)^2}{2\sigma^2}}$ (for $-\infty < x < \infty$), the distribution of X is called the normal distribution, written $N(m, \sigma^2)$\n", 80 | "- $f(x) > 0$ for every real number x\n", 81 | "- The curve is symmetric about the line $x = m$\n", 82 | "- The area between the curve and the x-axis is 1.\n", 83 | "- The mean of the distribution of x is m, and its variance is $\sigma^2$.\n", 84 | "- The probability that X lies in [a, b], $P(a \le x \le b)$, equals the area between the curve and the x-axis over the interval [a, b].\n", 85 | "\n", 86 | "####5) Exponential distribution\n", 87 | "\n", 88 | "- When independent events occur according to a Poisson process, it measures the time elapsed from a given moment until the event occurs\n", 89 | "- Exponential density $f(x) = \lambda * e^{-\lambda x}$\n", 90 | "- Mean and variance (omitted)\n", 91 | "- Problem $\rightarrow$ a fish bites on average 5 times per 10 minutes; the time from the first bite until the next bite (on average 0.5 bites per minute, $\lambda$ = 0.5)\n", 92 | "- It follows the distribution $f(t) = 0.5e^{-0.5t}$, t>0\n", 93 | "\n", 94 | "####6) Uniform distribution\n", 95 | "\n", 96 | "- A continuous distribution in which every value within a specified interval is equally likely\n", 97 | "\n", 98 | "####7) Z - distribution\n", 99 | "\n", 100 | "- The standard normal distribution\n", 101 | "- A normal distribution after standardization\n", 102 | "- $Z = \frac{X-\mu}{\sigma}$\n", 103 | "\n", 104 | "####8) $\chi^2$ distribution\n", 105 | "\n", 106 | "- $Z^2 = \frac{(X-\mu)^2}{\sigma^2}, \nu = 1$\n", 107 | "- Degrees of freedom : n - 1 $\rightarrow$ the 1 is the estimated parameter\n", 108 | "- The sample mean can stand in for the population mean because it is an unbiased estimate (unbiased estimated value) of the population mean\n", 109 | "- Because the population mean is equated with the sample mean, the nth observation is determined once the others are fixed, so the degrees of freedom are n-1\n", 110 | "\n", 111 | "####9) t - distribution\n", 112 | "\n", 113 | "- W.S.Gosset\n", 114 | "- A random variable following the standard normal distribution divided by the square root of an independent $\chi^2/\nu$\n", 115 | "- t ~$\frac{N(0,1)}{\sqrt{\chi^2/\nu}}$\n", 116 | "- If the population random variable X follows a normal distribution, and the sample therefore does as well, then $\frac{\bar X - \mu}{S/\sqrt{n}}$ follows a t distribution with n-1 degrees of freedom." 117 | ] 118 | },
119 | { 120 | "cell_type": "code", 121 | "execution_count": null, 122 | "metadata": { 123 | "collapsed": true 124 | }, 125 | "outputs": [], 126 | "source": [] 127 | } 128 | ], 129 | "metadata": { 130 | "kernelspec": { 131 | "display_name": "Python 2", 132 | "language": "python", 133 | "name": "python2" 134 | }, 135 | "language_info": { 136 | "codemirror_mode": { 137 | "name": "ipython", 138 | "version": 2 139 | }, 140 | "file_extension": ".py", 141 | "mimetype": "text/x-python", 142 | "name": "python", 143 | "nbconvert_exporter": "python", 144 | "pygments_lexer": "ipython2", 145 | "version": "2.7.6" 146 | } 147 | }, 148 | "nbformat": 4, 149 | "nbformat_minor": 0 150 | } 151 | -------------------------------------------------------------------------------- /part4/ch18/data/Guber1999data.csv: -------------------------------------------------------------------------------- 1 | State,Spend,StuTeaRat,Salary,PrcntTake,SATV,SATM,SATT 2 | Alabama,4.405,17.2,31.144,8,491,538,1029 3 | Alaska,8.963,17.6,47.951,47,445,489,934 4 | Arizona,4.778,19.3,32.175,27,448,496,944 5 | Arkansas,4.459,17.1,28.934,6,482,523,1005 6 | California,4.992,24,41.078,45,417,485,902 7 | Colorado,5.443,18.4,34.571,29,462,518,980 8 | Connecticut,8.817,14.4,50.045,81,431,477,908 9 | Delaware,7.03,16.6,39.076,68,429,468,897 10 | Florida,5.718,19.1,32.588,48,420,469,889 11 | Georgia,5.193,16.3,32.291,65,406,448,854 12 | Hawaii,6.078,17.9,38.518,57,407,482,889 13 | Idaho,4.21,19.1,29.783,15,468,511,979 14 | Illinois,6.136,17.3,39.431,13,488,560,1048 15 | Indiana,5.826,17.5,36.785,58,415,467,882 16 | Iowa,5.483,15.8,31.511,5,516,583,1099 17 | Kansas,5.817,15.1,34.652,9,503,557,1060 18 | Kentucky,5.217,17,32.257,11,477,522,999 19 | Louisiana,4.761,16.8,26.461,9,486,535,1021 20 | 
Maine,6.428,13.8,31.972,68,427,469,896 21 | Maryland,7.245,17,40.661,64,430,479,909 22 | Massachusetts,7.287,14.8,40.795,80,430,477,907 23 | Michigan,6.994,20.1,41.895,11,484,549,1033 24 | Minnesota,6,17.5,35.948,9,506,579,1085 25 | Mississippi,4.08,17.5,26.818,4,496,540,1036 26 | Missouri,5.383,15.5,31.189,9,495,550,1045 27 | Montana,5.692,16.3,28.785,21,473,536,1009 28 | Nebraska,5.935,14.5,30.922,9,494,556,1050 29 | Nevada,5.16,18.7,34.836,30,434,483,917 30 | New Hampshire,5.859,15.6,34.72,70,444,491,935 31 | New Jersey,9.774,13.8,46.087,70,420,478,898 32 | New Mexico,4.586,17.2,28.493,11,485,530,1015 33 | New York,9.623,15.2,47.612,74,419,473,892 34 | North Carolina,5.077,16.2,30.793,60,411,454,865 35 | North Dakota,4.775,15.3,26.327,5,515,592,1107 36 | Ohio,6.162,16.6,36.802,23,460,515,975 37 | Oklahoma,4.845,15.5,28.172,9,491,536,1027 38 | Oregon,6.436,19.9,38.555,51,448,499,947 39 | Pennsylvania,7.109,17.1,44.51,70,419,461,880 40 | Rhode Island,7.469,14.7,40.729,70,425,463,888 41 | South Carolina,4.797,16.4,30.279,58,401,443,844 42 | South Dakota,4.775,14.4,25.994,5,505,563,1068 43 | Tennessee,4.388,18.6,32.477,12,497,543,1040 44 | Texas,5.222,15.7,31.223,47,419,474,893 45 | Utah,3.656,24.3,29.082,4,513,563,1076 46 | Vermont,6.75,13.8,35.406,68,429,472,901 47 | Virginia,5.327,14.6,33.987,65,428,468,896 48 | Washington,5.906,20.2,36.151,48,443,494,937 49 | West Virginia,6.107,14.8,31.944,17,448,484,932 50 | Wisconsin,6.93,15.9,37.746,9,501,572,1073 51 | Wyoming,6.16,14.9,31.285,10,476,525,1001 52 | -------------------------------------------------------------------------------- /part4/ch18/data/Jags-Ymet-XmetMulti-Mrobust-Example.R: -------------------------------------------------------------------------------- 1 | # Example for Jags-Ymet-XmetMulti-Mrobust.R 2 | #------------------------------------------------------------------------------- 3 | # Optional generic preliminaries: 4 | graphics.off() # This closes all of R's graphics windows. 5 | rm(list=ls()) # Careful! This clears all of R's memory! 6 | # UNCOMMENT ONE OF THE FOLLOWING SECTIONS (In RStudio, select and ctrl-shift-C) 7 | #............................................................................. 8 | # # Two predictors: 9 | myData = read.csv( file="Guber1999data.csv" ) 10 | yName = "SATT" ; xName = c("Spend","PrcntTake") 11 | fileNameRoot = "Guber1999data-Jags-" 12 | numSavedSteps=15000 ; thinSteps=5 13 | #............................................................................. 14 | # # Single predictor: 15 | # myData = read.csv( file="Guber1999data.csv" ) 16 | # yName = "SATT" ; xName = c("Spend") 17 | # fileNameRoot = "Guber1999data-SinglePred-" 18 | # numSavedSteps=15000 ; thinSteps=5 19 | #............................................................................. 20 | # # Two predictors with redundant predictor: 21 | # myData = read.csv( file="Guber1999data.csv" ) 22 | # PropNotTake = (100-myData[,"PrcntTake"])/100 23 | # myData = cbind( myData , PropNotTake ) 24 | # yName = "SATT" ; xName = c("Spend","PrcntTake","PropNotTake") 25 | # fileNameRoot = "Guber1999data-Jags-Redund-" 26 | # numSavedSteps=15000 ; thinSteps=15 27 | #............................................................................. 
28 | # # Two predictors with two redundant predictors: 29 | # myData = read.csv( file="Guber1999data.csv" ) 30 | # PropNotTake = (100-myData[,"PrcntTake"])/100 31 | # Partic = myData[,"PrcntTake"]/10 32 | # myData = cbind( myData , PropNotTake , Partic ) 33 | # yName = "SATT" ; xName = c("Spend","PrcntTake","PropNotTake","Partic") 34 | # fileNameRoot = "Guber1999data-Jags-Redund2-" 35 | # numSavedSteps=15000 ; thinSteps=15 36 | #............................................................................. 37 | # # Four predictors: 38 | # myData = read.csv( file="Guber1999data.csv" ) 39 | # yName = "SATT" ; xName = c("Spend","PrcntTake","StuTeaRat","Salary") 40 | # fileNameRoot = "Guber1999data-Jags-4X-" 41 | # numSavedSteps=15000 ; thinSteps=20 42 | #............................................................................. 43 | # # Append columns of random predictors: 44 | # myData = read.csv( file="Guber1999data.csv" ) 45 | # standardize = function(v){(v-mean(v))/sd(v)} 46 | # Ny=nrow(myData) 47 | # NxRand = 12 48 | # set.seed(47405) 49 | # for ( xIdx in 1:NxRand ) { 50 | # xRand = standardize(rnorm(Ny)) 51 | # myData = cbind( myData , xRand ) 52 | # colnames(myData)[ncol(myData)] = paste0("xRand",xIdx) 53 | # } 54 | # yName = "SATT" ; xName = c("Spend","PrcntTake", paste0("xRand",1:NxRand) ) 55 | # fileNameRoot = "Guber1999data-Jags-RandX-" 56 | # numSavedSteps=15000 ; thinSteps=5 57 | #............................................................................. 58 | # # Two predictors with interaction. 59 | # # ** For two predictors with interaction, also uncomment graph command at end. 60 | # # Append named column with interaction product: 61 | # myData = cbind( myData , SpendXPrcnt=myData[,"Spend"]*myData[,"PrcntTake"] ) 62 | # yName = "SATT" ; xName = c("Spend","PrcntTake","SpendXPrcnt") 63 | # fileNameRoot = "Guber1999data-Jags-Inter-" 64 | # numSavedSteps=15000 ; thinSteps=50 65 | #............................................................................. 66 | # # Introductory figures of chapter: 67 | # myData = read.csv( file="MultLinRegrPlotUnif.csv" ) 68 | # myData = myData[101:150,] 69 | # yName = "y" ; xName = c("x1","x2") 70 | # fileNameRoot = "MultLinRegrPlotUnif-" 71 | # numSavedSteps=11000 ; thinSteps=2 72 | #............................................................................. 73 | # myData = read.csv( file="MultLinRegrPlotUnif.csv" ) 74 | # myData = myData[101:150,] 75 | # yName = "y" ; xName = c("x1") 76 | # fileNameRoot = "MultLinRegrPlotUnif-x1only-" 77 | # numSavedSteps=11000 ; thinSteps=2 78 | #............................................................................. 79 | # myData = read.csv( file="MultLinRegrPlotCorr.csv" ) 80 | # yName = "y" ; xName = c("x1","x2") 81 | # fileNameRoot = "MultLinRegrPlotCorr-" 82 | # numSavedSteps=11000 ; thinSteps=2 83 | #............................................................................. 84 | # myData = read.csv( file="MultLinRegrPlotCorr.csv" ) 85 | # yName = "y" ; xName = c("x1") 86 | # fileNameRoot = "MultLinRegrPlotCorr-x1only-" 87 | # numSavedSteps=11000 ; thinSteps=2 88 | #............................................................................. 
89 | 90 | graphFileType = "eps" 91 | #------------------------------------------------------------------------------- 92 | # Load the relevant model into R's working memory: 93 | source("Jags-Ymet-XmetMulti-Mrobust.R") 94 | #------------------------------------------------------------------------------- 95 | # Generate the MCMC chain: 96 | #startTime = proc.time() 97 | mcmcCoda = genMCMC( data=myData , xName=xName , yName=yName , 98 | numSavedSteps=numSavedSteps , thinSteps=thinSteps , 99 | saveName=fileNameRoot ) 100 | #stopTime = proc.time() 101 | #duration = stopTime - startTime 102 | #show(duration) 103 | #------------------------------------------------------------------------------- 104 | # Display diagnostics of chain, for specified parameters: 105 | parameterNames = varnames(mcmcCoda) # get all parameter names 106 | for ( parName in parameterNames ) { 107 | diagMCMC( codaObject=mcmcCoda , parName=parName , 108 | saveName=fileNameRoot , saveType=graphFileType ) 109 | } 110 | #------------------------------------------------------------------------------- 111 | # Get summary statistics of chain: 112 | summaryInfo = smryMCMC( mcmcCoda , 113 | saveName=fileNameRoot ) 114 | show(summaryInfo) 115 | # Display posterior information: 116 | plotMCMC( mcmcCoda , data=myData , xName=xName , yName=yName , 117 | pairsPlot=TRUE , showCurve=FALSE , 118 | saveName=fileNameRoot , saveType=graphFileType ) 119 | # # For two predictors with interaction, make some additional graphs: 120 | # source("Jags-Ymet-XmetMulti-Mrobust-InteractionPostProcess.R") # more graphs 121 | #------------------------------------------------------------------------------- 122 | -------------------------------------------------------------------------------- /part4/ch18/data/Jags-Ymet-XmetMulti-MrobustVarSelect-Example.R: -------------------------------------------------------------------------------- 1 | # Example for Jags-Ymet-XmetMulti-MrobustVarSelect.R 2 | #------------------------------------------------------------------------------- 3 | # Optional generic preliminaries: 4 | graphics.off() # This closes all of R's graphics windows. 5 | rm(list=ls()) # Careful! This clears all of R's memory! 6 | #------------------------------------------------------------------------------- 7 | # Load data file and specity column names of x (predictors) and y (predicted): 8 | myData = read.csv( file="Guber1999data.csv" ) 9 | 10 | # UNCOMMENT ONE OF THE FOLLOWING SECTIONS (In RStudio, select and ctrl-shift-C) 11 | #............................................................................. 12 | # Two predictors: 13 | yName = "SATT" ; xName = c("Spend","PrcntTake") 14 | fileNameRoot = "Guber1999data-Jags-VarSelect-" 15 | numSavedSteps=15000 ; thinSteps=25 16 | #............................................................................. 17 | # # Two predictors with redundant predictor: 18 | # PropNotTake = (100-myData[,"PrcntTake"])/100 19 | # myData = cbind( myData , PropNotTake ) 20 | # yName = "SATT" ; xName = c("Spend","PrcntTake","PropNotTake") 21 | # fileNameRoot = "Guber1999data-Jags-Redund-VarSelect-" 22 | # numSavedSteps=15000 ; thinSteps=30 23 | #............................................................................. 
24 | # # Two predictors with two redundant predictors: 25 | # PropNotTake = (100-myData[,"PrcntTake"])/100 26 | # Partic = myData[,"PrcntTake"]/10 27 | # myData = cbind( myData , PropNotTake , Partic ) 28 | # yName = "SATT" ; xName = c("Spend","PrcntTake","PropNotTake","Partic") 29 | # fileNameRoot = "Guber1999data-Jags-Redund2-VarSelect-" 30 | # numSavedSteps=15000 ; thinSteps=15 31 | #............................................................................. 32 | # # Four predictors: 33 | # yName = "SATT" ; xName = c("Spend","PrcntTake","StuTeaRat","Salary") 34 | # fileNameRoot = "Guber1999data-Jags-4X-VarSelect-" 35 | # numSavedSteps=15000 ; thinSteps=20 36 | #............................................................................. 37 | # # Append columns of random predictors: 38 | # standardize = function(v){(v-mean(v))/sd(v)} 39 | # Ny=nrow(myData) 40 | # NxRand = 12 41 | # set.seed(47405) 42 | # for ( xIdx in 1:NxRand ) { 43 | # xRand = standardize(rnorm(Ny)) 44 | # myData = cbind( myData , xRand ) 45 | # colnames(myData)[ncol(myData)] = paste0("xRand",xIdx) 46 | # } 47 | # yName = "SATT" ; xName = c("Spend","PrcntTake", paste0("xRand",1:NxRand) ) 48 | # fileNameRoot = "Guber1999data-Jags-RandX-VarSelect-" 49 | # numSavedSteps=15000 ; thinSteps=5 50 | #............................................................................. 51 | # # Two predictors with interaction. 52 | # # Append named column with interaction product: 53 | # myData = cbind( myData , SpendXPrcnt=myData[,"Spend"]*myData[,"PrcntTake"] ) 54 | # yName = "SATT" ; xName = c("Spend","PrcntTake","SpendXPrcnt") 55 | # fileNameRoot = "Guber1999data-Jags-Inter-VarSelect-" 56 | # numSavedSteps=15000 ; thinSteps=25 57 | #............................................................................. 
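# [Editor's note, a hedged sketch: the model file sourced below is not reproduced in
# this dump. If, as in the DBDA2E variable-selection example, it monitors one 0/1
# inclusion indicator per predictor with names like "delta[1]", "delta[2]", ..., the
# posterior inclusion probability of each predictor could be read from the chain with
# something like
#   deltaCols = grep( "^delta" , varnames(mcmcCoda) , value=TRUE )
#   colMeans( as.matrix(mcmcCoda)[ , deltaCols , drop=FALSE ] )
# The parameter names are an assumption; check varnames(mcmcCoda) after the run.]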
58 | graphFileType = "eps" 59 | #------------------------------------------------------------------------------- 60 | # Load the relevant model into R's working memory: 61 | source("Jags-Ymet-XmetMulti-MrobustVarSelect.R") 62 | #------------------------------------------------------------------------------- 63 | # Generate the MCMC chain: 64 | #startTime = proc.time() 65 | mcmcCoda = genMCMC( data=myData , xName=xName , yName=yName , 66 | numSavedSteps=numSavedSteps , thinSteps=thinSteps , 67 | saveName=fileNameRoot ) 68 | #stopTime = proc.time() 69 | #duration = stopTime - startTime 70 | #show(duration) 71 | #------------------------------------------------------------------------------- 72 | # Display diagnostics of chain, for specified parameters: 73 | parameterNames = varnames(mcmcCoda) # get all parameter names 74 | for ( parName in parameterNames ) { 75 | diagMCMC( codaObject=mcmcCoda , parName=parName , 76 | saveName=fileNameRoot , saveType=graphFileType ) 77 | } 78 | 79 | #------------------------------------------------------------------------------- 80 | # Get summary statistics of chain: 81 | summaryInfo = smryMCMC( mcmcCoda , 82 | saveName=fileNameRoot ) 83 | show(summaryInfo) 84 | # Display posterior information: 85 | plotMCMC( mcmcCoda , data=myData , xName=xName , yName=yName , 86 | pairsPlot=TRUE , showCurve=FALSE , 87 | saveName=fileNameRoot , saveType=graphFileType ) 88 | #------------------------------------------------------------------------------- 89 | -------------------------------------------------------------------------------- /part4/ch18/figures/cap18.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/cap18.1.png -------------------------------------------------------------------------------- /part4/ch18/figures/cap18.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/cap18.2.png -------------------------------------------------------------------------------- /part4/ch18/figures/cap18.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/cap18.3.png -------------------------------------------------------------------------------- /part4/ch18/figures/cap18.new.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/cap18.new.1.png -------------------------------------------------------------------------------- /part4/ch18/figures/cap18.new.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/cap18.new.2.png -------------------------------------------------------------------------------- /part4/ch18/figures/cap18.new.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/cap18.new.3.png -------------------------------------------------------------------------------- 
/part4/ch18/figures/cap18.new.4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/cap18.new.4.png -------------------------------------------------------------------------------- /part4/ch18/figures/cap18.new.5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/cap18.new.5.png -------------------------------------------------------------------------------- /part4/ch18/figures/cap18.new.6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/cap18.new.6.png -------------------------------------------------------------------------------- /part4/ch18/figures/cap18.new.7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/cap18.new.7.png -------------------------------------------------------------------------------- /part4/ch18/figures/cap_apr.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/cap_apr.png -------------------------------------------------------------------------------- /part4/ch18/figures/cap_fns.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/cap_fns.png -------------------------------------------------------------------------------- /part4/ch18/figures/eq18.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/eq18.1.png -------------------------------------------------------------------------------- /part4/ch18/figures/eq18.2-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/eq18.2-4.png -------------------------------------------------------------------------------- /part4/ch18/figures/eq18.5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/eq18.5.png -------------------------------------------------------------------------------- /part4/ch18/figures/eq18.6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/eq18.6.png -------------------------------------------------------------------------------- /part4/ch18/figures/fig17.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig17.2.png 
-------------------------------------------------------------------------------- /part4/ch18/figures/fig18.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.1.png -------------------------------------------------------------------------------- /part4/ch18/figures/fig18.10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.10.png -------------------------------------------------------------------------------- /part4/ch18/figures/fig18.11.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.11.1.png -------------------------------------------------------------------------------- /part4/ch18/figures/fig18.11.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.11.2.png -------------------------------------------------------------------------------- /part4/ch18/figures/fig18.12.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.12.1.png -------------------------------------------------------------------------------- /part4/ch18/figures/fig18.12.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.12.2.png -------------------------------------------------------------------------------- /part4/ch18/figures/fig18.13.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.13.png -------------------------------------------------------------------------------- /part4/ch18/figures/fig18.14.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.14.1.png -------------------------------------------------------------------------------- /part4/ch18/figures/fig18.14.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.14.2.png -------------------------------------------------------------------------------- /part4/ch18/figures/fig18.15.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.15.1.png -------------------------------------------------------------------------------- /part4/ch18/figures/fig18.15.2.png: -------------------------------------------------------------------------------- 
/part4/ch18/figures/fig18.15.3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.15.3.png
--------------------------------------------------------------------------------
/part4/ch18/figures/fig18.2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.2.png
--------------------------------------------------------------------------------
/part4/ch18/figures/fig18.3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.3.png
--------------------------------------------------------------------------------
/part4/ch18/figures/fig18.4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.4.png
--------------------------------------------------------------------------------
/part4/ch18/figures/fig18.5.1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.5.1.png
--------------------------------------------------------------------------------
/part4/ch18/figures/fig18.5.2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.5.2.png
--------------------------------------------------------------------------------
/part4/ch18/figures/fig18.6.1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.6.1.png
--------------------------------------------------------------------------------
/part4/ch18/figures/fig18.6.2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.6.2.png
--------------------------------------------------------------------------------
/part4/ch18/figures/fig18.7.1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.7.1.png
--------------------------------------------------------------------------------
/part4/ch18/figures/fig18.7.2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.7.2.png
--------------------------------------------------------------------------------
/part4/ch18/figures/fig18.8.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.8.png
--------------------------------------------------------------------------------
/part4/ch18/figures/fig18.9.1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.9.1.png
--------------------------------------------------------------------------------
/part4/ch18/figures/fig18.9.2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/fig18.9.2.png
--------------------------------------------------------------------------------
/part4/ch18/figures/tbl15.1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/tbl15.1.png
--------------------------------------------------------------------------------
/part4/ch18/figures/tbl15.2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/tbl15.2.png
--------------------------------------------------------------------------------
/part4/ch18/figures/tbl15.3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch18/figures/tbl15.3.png
--------------------------------------------------------------------------------
/part4/ch24/figures/equation24.1-.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch24/figures/equation24.1-.png
--------------------------------------------------------------------------------
/part4/ch24/figures/equation24.1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch24/figures/equation24.1.png
--------------------------------------------------------------------------------
/part4/ch24/figures/equation24.2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch24/figures/equation24.2.png
--------------------------------------------------------------------------------
/part4/ch24/figures/equation24.3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch24/figures/equation24.3.png
--------------------------------------------------------------------------------
/part4/ch24/figures/figure24.1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch24/figures/figure24.1.png
--------------------------------------------------------------------------------
/part4/ch24/figures/figure24.2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch24/figures/figure24.2.png
--------------------------------------------------------------------------------
/part4/ch24/figures/figure24.3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch24/figures/figure24.3.png
--------------------------------------------------------------------------------
/part4/ch24/figures/figure24.4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch24/figures/figure24.4.png
--------------------------------------------------------------------------------
/part4/ch24/figures/figure24.5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch24/figures/figure24.5.png
--------------------------------------------------------------------------------
/part4/ch24/figures/figure24.6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch24/figures/figure24.6.png
--------------------------------------------------------------------------------
/part4/ch24/figures/table24.1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch24/figures/table24.1.png
--------------------------------------------------------------------------------
/part4/ch24/figures/table24.2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/ch24/figures/table24.2.png
--------------------------------------------------------------------------------
/part4/sp_bayes_coding/data/z15N50.csv:
--------------------------------------------------------------------------------
"y"
0
1
0
0
0
0
0
0
0
0
1
0
0
0
1
1
1
0
0
1
0
0
0
0
1
1
0
0
0
0
0
1
0
1
0
1
0
0
0
1
0
0
1
1
0
1
0
0
0
0
--------------------------------------------------------------------------------
/part4/sp_bayes_coding/figures/br.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/sp_bayes_coding/figures/br.png
--------------------------------------------------------------------------------
/part4/sp_bayes_coding/figures/cd.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/sp_bayes_coding/figures/cd.png
--------------------------------------------------------------------------------
/part4/sp_bayes_coding/figures/fig8.2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/sp_bayes_coding/figures/fig8.2.png
--------------------------------------------------------------------------------
/part4/sp_bayes_coding/figures/gr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/psygrammer/bayesianR/9b5784ae6ac2ac95071a40ca0e2f57a27c614a85/part4/sp_bayes_coding/figures/gr.png
--------------------------------------------------------------------------------
/pip-requirements.txt:
--------------------------------------------------------------------------------
Jinja2==2.7.3
MarkupSafe==0.23
argparse==1.2.1
backports.ssl-match-hostname==3.4.0.2
certifi==14.05.14
gnureadline==6.3.3
ipython==3.0.0
jsonschema==2.4.0
pyzmq==14.5.0
tornado==4.1
wsgiref==0.1.2
--------------------------------------------------------------------------------
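A note on the two text files listed above. The versions pinned in pip-requirements.txt date from the IPython 3.x notebook era, so anyone trying to rerun the notebooks will probably want to install them into an isolated virtual environment (for example with pip install -r pip-requirements.txt) rather than into a system Python. As for z15N50.csv, the data shown above contain 15 successes in 50 Bernoulli trials, which matches the file name. The following is a minimal, hypothetical Python sketch for confirming that locally; the relative path is an assumption (a local clone of the repository) and the snippet is not part of any script in the repo.

import csv

# Count Bernoulli successes (z) and trials (N) in z15N50.csv.
# Assumption: the path is relative to a local clone of the repository.
with open("part4/sp_bayes_coding/data/z15N50.csv", newline="") as f:
    y = [int(row["y"]) for row in csv.DictReader(f)]

N = len(y)          # number of trials, 50 for this file
z = sum(y)          # number of successes, 15 for this file
print(N, z, z / N)  # prints: 50 15 0.3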