├── assignment1 ├── assignment1.pdf ├── preamble │ ├── preamble_acronyms.tex │ ├── preamble_tikz.tex │ ├── preamble.tex │ └── preamble_math.tex └── yourname_assignment1.tex ├── assignment2 ├── assignment2.pdf ├── preamble │ ├── preamble_acronyms.tex │ ├── preamble_tikz.tex │ ├── preamble.tex │ └── preamble_math.tex └── yourname_assignment2.tex ├── assignment3 ├── assignment3.pdf ├── preamble │ ├── preamble_acronyms.tex │ ├── preamble_tikz.tex │ ├── preamble.tex │ └── preamble_math.tex └── yourname_assignment3.tex ├── assignment4 ├── assignment4.pdf ├── preamble │ ├── preamble_acronyms.tex │ ├── preamble_tikz.tex │ ├── preamble.tex │ └── preamble_math.tex └── yourname_assignment4.tex ├── _config.yml ├── assets └── css │ ├── style.css │ └── github-markdown.css ├── _layouts └── default.html └── README.md /assignment1/assignment1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/slinderman/stats215/HEAD/assignment1/assignment1.pdf -------------------------------------------------------------------------------- /assignment2/assignment2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/slinderman/stats215/HEAD/assignment2/assignment2.pdf -------------------------------------------------------------------------------- /assignment3/assignment3.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/slinderman/stats215/HEAD/assignment3/assignment3.pdf -------------------------------------------------------------------------------- /assignment4/assignment4.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/slinderman/stats215/HEAD/assignment4/assignment4.pdf -------------------------------------------------------------------------------- /_config.yml: 
-------------------------------------------------------------------------------- 1 | theme: jekyll-theme-minimal 2 | plugins: 3 | - jekyll-titles-from-headings 4 | 5 | titles_from_headings: 6 | enabled: true 7 | strip_title: false 8 | collections: false 9 | -------------------------------------------------------------------------------- /assets/css/style.css: -------------------------------------------------------------------------------- 1 | --- 2 | --- 3 | 4 | @import "{{ site.theme }}"; 5 | 12 | -------------------------------------------------------------------------------- /assignment1/preamble/preamble_acronyms.tex: -------------------------------------------------------------------------------- 1 | \newacronym{KL}{kl}{Kullback-Leibler} 2 | \newacronym{ELBO}{elbo}{\emph{evidence lower bound}} 3 | \newacronym{EM}{em}{\emph{expectation-maximization}} 4 | \newacronym{PPCA}{ppca}{probabilistic principal components analysis} 5 | 6 | \newacronym{SVI}{svi}{stochastic variational inference} 7 | \newacronym{GMM}{gmm}{Gaussian mixture model} 8 | \newacronym{HMM}{hmm}{hidden Markov model} 9 | \newacronym{IO-HMM}{io-hmm}{input-output hidden Markov model} 10 | \newacronym{LDS}{lds}{linear dynamical system} 11 | \newacronym{SLDS}{slds}{switching linear dynamical system} 12 | \newacronym{AR-HMM}{ar-hmm}{autoregressive hidden Markov model} 13 | -------------------------------------------------------------------------------- /assignment2/preamble/preamble_acronyms.tex: -------------------------------------------------------------------------------- 1 | \newacronym{KL}{kl}{Kullback-Leibler} 2 | \newacronym{ELBO}{elbo}{\emph{evidence lower bound}} 3 | \newacronym{EM}{em}{\emph{expectation-maximization}} 4 | \newacronym{PPCA}{ppca}{probabilistic principal components analysis} 5 | 6 | \newacronym{SVI}{svi}{stochastic variational inference} 7 | \newacronym{GMM}{gmm}{Gaussian mixture model} 8 | \newacronym{HMM}{hmm}{hidden Markov model} 9 | \newacronym{IO-HMM}{io-hmm}{input-output 
hidden Markov model} 10 | \newacronym{LDS}{lds}{linear dynamical system} 11 | \newacronym{SLDS}{slds}{switching linear dynamical system} 12 | \newacronym{AR-HMM}{ar-hmm}{autoregressive hidden Markov model} 13 | -------------------------------------------------------------------------------- /assignment3/preamble/preamble_acronyms.tex: -------------------------------------------------------------------------------- 1 | \newacronym{KL}{kl}{Kullback-Leibler} 2 | \newacronym{ELBO}{elbo}{\emph{evidence lower bound}} 3 | \newacronym{EM}{em}{\emph{expectation-maximization}} 4 | \newacronym{PPCA}{ppca}{probabilistic principal components analysis} 5 | 6 | \newacronym{SVI}{svi}{stochastic variational inference} 7 | \newacronym{GMM}{gmm}{Gaussian mixture model} 8 | \newacronym{HMM}{hmm}{hidden Markov model} 9 | \newacronym{IO-HMM}{io-hmm}{input-output hidden Markov model} 10 | \newacronym{LDS}{lds}{linear dynamical system} 11 | \newacronym{SLDS}{slds}{switching linear dynamical system} 12 | \newacronym{AR-HMM}{ar-hmm}{autoregressive hidden Markov model} 13 | -------------------------------------------------------------------------------- /assignment4/preamble/preamble_acronyms.tex: -------------------------------------------------------------------------------- 1 | \newacronym{KL}{kl}{Kullback-Leibler} 2 | \newacronym{ELBO}{elbo}{\emph{evidence lower bound}} 3 | \newacronym{EM}{em}{\emph{expectation-maximization}} 4 | \newacronym{PPCA}{ppca}{probabilistic principal components analysis} 5 | 6 | \newacronym{SVI}{svi}{stochastic variational inference} 7 | \newacronym{GMM}{gmm}{Gaussian mixture model} 8 | \newacronym{HMM}{hmm}{hidden Markov model} 9 | \newacronym{IO-HMM}{io-hmm}{input-output hidden Markov model} 10 | \newacronym{LDS}{lds}{linear dynamical system} 11 | \newacronym{SLDS}{slds}{switching linear dynamical system} 12 | \newacronym{AR-HMM}{ar-hmm}{autoregressive hidden Markov model} 13 | -------------------------------------------------------------------------------- 
/_layouts/default.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 15 | 16 | 17 | 18 | 19 | Statistical Models in Biology 20 | 21 | 22 | 23 |
24 | 25 | {{ content }} 26 | 27 |
28 | 29 | 30 | -------------------------------------------------------------------------------- /assignment1/preamble/preamble_tikz.tex: -------------------------------------------------------------------------------- 1 | \usepackage{tikz} 2 | % \usetikzlibrary{bayesnet} 3 | 4 | \pgfdeclarelayer{edgelayer} 5 | \pgfdeclarelayer{nodelayer} 6 | \pgfsetlayers{edgelayer,nodelayer,main} 7 | 8 | \definecolor{hexcolor0xbfbfbf}{rgb}{0.749,0.749,0.749} 9 | 10 | \tikzset{>=latex} 11 | \tikzstyle{none} = [inner sep=0pt] 12 | 13 | \tikzstyle{line} = [ - ] 14 | \tikzstyle{arrow} = [ ->, shorten <=1pt, shorten >=1pt ] 15 | \tikzstyle{ardash} = [ dotted, ->, shorten <=1pt, shorten >=1pt ] 16 | 17 | \tikzstyle{empty}=[circle,opacity=0.0,text opacity=1.0,inner sep=0pt,minimum 18 | width=0pt,minimum height=0pt] 19 | \tikzstyle{box}=[rectangle,fill=White,draw=Black] 20 | \tikzstyle{filled}=[circle,fill=hexcolor0xbfbfbf,draw=Black] 21 | \tikzstyle{hollow}=[circle,fill=White,draw=Black] 22 | \tikzstyle{param}=[rectangle,fill=Black,draw=Black,inner sep=0pt,minimum width=4pt,minimum height=4pt] 23 | -------------------------------------------------------------------------------- /assignment2/preamble/preamble_tikz.tex: -------------------------------------------------------------------------------- 1 | \usepackage{tikz} 2 | % \usetikzlibrary{bayesnet} 3 | 4 | \pgfdeclarelayer{edgelayer} 5 | \pgfdeclarelayer{nodelayer} 6 | \pgfsetlayers{edgelayer,nodelayer,main} 7 | 8 | \definecolor{hexcolor0xbfbfbf}{rgb}{0.749,0.749,0.749} 9 | 10 | \tikzset{>=latex} 11 | \tikzstyle{none} = [inner sep=0pt] 12 | 13 | \tikzstyle{line} = [ - ] 14 | \tikzstyle{arrow} = [ ->, shorten <=1pt, shorten >=1pt ] 15 | \tikzstyle{ardash} = [ dotted, ->, shorten <=1pt, shorten >=1pt ] 16 | 17 | \tikzstyle{empty}=[circle,opacity=0.0,text opacity=1.0,inner sep=0pt,minimum 18 | width=0pt,minimum height=0pt] 19 | \tikzstyle{box}=[rectangle,fill=White,draw=Black] 20 | 
\tikzstyle{filled}=[circle,fill=hexcolor0xbfbfbf,draw=Black] 21 | \tikzstyle{hollow}=[circle,fill=White,draw=Black] 22 | \tikzstyle{param}=[rectangle,fill=Black,draw=Black,inner sep=0pt,minimum width=4pt,minimum height=4pt] 23 | -------------------------------------------------------------------------------- /assignment3/preamble/preamble_tikz.tex: -------------------------------------------------------------------------------- 1 | \usepackage{tikz} 2 | % \usetikzlibrary{bayesnet} 3 | 4 | \pgfdeclarelayer{edgelayer} 5 | \pgfdeclarelayer{nodelayer} 6 | \pgfsetlayers{edgelayer,nodelayer,main} 7 | 8 | \definecolor{hexcolor0xbfbfbf}{rgb}{0.749,0.749,0.749} 9 | 10 | \tikzset{>=latex} 11 | \tikzstyle{none} = [inner sep=0pt] 12 | 13 | \tikzstyle{line} = [ - ] 14 | \tikzstyle{arrow} = [ ->, shorten <=1pt, shorten >=1pt ] 15 | \tikzstyle{ardash} = [ dotted, ->, shorten <=1pt, shorten >=1pt ] 16 | 17 | \tikzstyle{empty}=[circle,opacity=0.0,text opacity=1.0,inner sep=0pt,minimum 18 | width=0pt,minimum height=0pt] 19 | \tikzstyle{box}=[rectangle,fill=White,draw=Black] 20 | \tikzstyle{filled}=[circle,fill=hexcolor0xbfbfbf,draw=Black] 21 | \tikzstyle{hollow}=[circle,fill=White,draw=Black] 22 | \tikzstyle{param}=[rectangle,fill=Black,draw=Black,inner sep=0pt,minimum width=4pt,minimum height=4pt] 23 | -------------------------------------------------------------------------------- /assignment4/preamble/preamble_tikz.tex: -------------------------------------------------------------------------------- 1 | \usepackage{tikz} 2 | % \usetikzlibrary{bayesnet} 3 | 4 | \pgfdeclarelayer{edgelayer} 5 | \pgfdeclarelayer{nodelayer} 6 | \pgfsetlayers{edgelayer,nodelayer,main} 7 | 8 | \definecolor{hexcolor0xbfbfbf}{rgb}{0.749,0.749,0.749} 9 | 10 | \tikzset{>=latex} 11 | \tikzstyle{none} = [inner sep=0pt] 12 | 13 | \tikzstyle{line} = [ - ] 14 | \tikzstyle{arrow} = [ ->, shorten <=1pt, shorten >=1pt ] 15 | \tikzstyle{ardash} = [ dotted, ->, shorten <=1pt, shorten >=1pt ] 16 | 17 | 
\tikzstyle{empty}=[circle,opacity=0.0,text opacity=1.0,inner sep=0pt,minimum 18 | width=0pt,minimum height=0pt] 19 | \tikzstyle{box}=[rectangle,fill=White,draw=Black] 20 | \tikzstyle{filled}=[circle,fill=hexcolor0xbfbfbf,draw=Black] 21 | \tikzstyle{hollow}=[circle,fill=White,draw=Black] 22 | \tikzstyle{param}=[rectangle,fill=Black,draw=Black,inner sep=0pt,minimum width=4pt,minimum height=4pt] 23 | -------------------------------------------------------------------------------- /assignment4/yourname_assignment4.tex: -------------------------------------------------------------------------------- 1 | \documentclass[11pt]{article} 2 | 3 | \input{preamble/preamble.tex} 4 | \input{preamble/preamble_math.tex} 5 | \input{preamble/preamble_acronyms.tex} 6 | 7 | \title{STAT215: Assignment 4} 8 | \author{Your Name} 9 | \date{Due: March 13, 2020} 10 | 11 | \begin{document} 12 | 13 | \maketitle 14 | 15 | \textbf{Problem 1:} Consider a Gaussian linear dynamical system (LDS), 16 | \begin{align*} 17 | p(x_{1:T}, y_{1:T}) &= \cN(x_1 \mid 0, q^2) \left[ \prod_{t=2}^T \cN(x_t \mid a x_{t-1} + b, q^2) \right] 18 | \left[ \prod_{t=1}^T \cN(y_{t} \mid x_t, r^2) \right], 19 | \end{align*} 20 | for~$x_t, y_t \in \reals$ for all~$t$, and parameters~$a, b \in \reals$ and $q^2, r^2 \in \reals_+$. 21 | Compute the forward filtered distribution~$p(x_t \mid y_{1:t})$ in terms of the model parameters 22 | and the filtered distribution~$p(x_{t-1} \mid y_{1:t-1})$. Solve for the base case~$p(x_1 \mid y_1)$. 23 | For reference, consult the state space modeling chapters of either the Bishop or the Murphy textbook. 24 | 25 | \begin{solution} 26 | Your solution here. 27 | \end{solution} 28 | 29 | \clearpage 30 | 31 | \textbf{Problem 2:} Sample a time series of length~$T=30$ from the Gaussian LDS in Problem 1 with parameters~$a=1, b=0, q=0.1, r=0.3$. Plot the sample of~$x_{1:T}$ as a solid line, and plot the observed~$y_{1:T}$ as $\mathsf{+}$'s. 
Write code to compute the filtered distribution~$p(x_t \mid y_{1:t})$ you derived in Problem 1. Then plot the mean of the filtered distribution~$\bbE[x_t \mid y_{1:t}]$ over time as a solid line, and plot a shaded region encompassing the mean $\pm 2$ standard deviations of the filtered distribution. All plots should be on the same axis. Include a legend. 32 | 33 | Write your code in a Colab notebook and include a PDF printout of your notebook as well as the raw .ipynb file. 34 | 35 | \clearpage 36 | 37 | \textbf{Problem 3:} Reproduce Figure 2.5 of Rasmussen and Williams, \textit{Gaussian Processes for Machine Learning}, available at \url{http://www.gaussianprocess.org/gpml/chapters/RW2.pdf}. Use a randomly generated dataset as described in the figure caption and surrounding text. 38 | 39 | Write your code in a Colab notebook and include a PDF printout of your notebook as well as the raw .ipynb file. 40 | 41 | 42 | 43 | \end{document} -------------------------------------------------------------------------------- /assignment1/preamble/preamble.tex: -------------------------------------------------------------------------------- 1 | \RequirePackage[l2tabu, orthodox]{nag} 2 | %\documentclass{article} 3 | 4 | \usepackage[left=1.in, right=1.in, top=1.25in, bottom=1.25in]{geometry} 5 | 6 | % FONTS 7 | %\usepackage[T1]{fontenc} 8 | 9 | % Replace default Latin Modern typewriter with its proportional counterpart 10 | % http://www.tug.dk/FontCatalogue/lmoderntypewriterprop/ 11 | %\renewcommand*\ttdefault{lmvtt} 12 | 13 | 14 | %%% OPTION 1 - Fourier Math + New Century Schoolbook + ParaType Sans 15 | 16 | % % Import Fourier Math (this imposes its own New Century Schoolbook type) 17 | % % http://www.ctan.org/tex-archive/fonts/fouriernc/ 18 | %\usepackage{fouriernc} 19 | %\usepackage{amsmath} 20 | % % Replace with TeX Gyre Schola version of New Century Schoolbook (must scale!) 
21 | % % http://www.tug.dk/FontCatalogue/tgschola/ 22 | %\usepackage[scale=0.92]{tgschola} 23 | %\usepackage[scaled=0.88]{PTSans} 24 | 25 | %% OPTION 2 - MathDesign Math + Bitstream Charter + ParaType Sans 26 | 27 | % Import MathDesign (this brings along Bitstream Charter) 28 | % http://www.ctan.org/tex-archive/fonts/mathdesign/ 29 | \usepackage[bitstream-charter]{mathdesign} 30 | \usepackage{amsmath} 31 | \usepackage[scaled=0.92]{PTSans} 32 | 33 | 34 | % %%% OPTION 3 - MTPRO 2 Math + Termes Times + ParaType Sans 35 | 36 | % \usepackage{tgtermes} 37 | % \usepackage{amsmath} 38 | % \usepackage[subscriptcorrection, 39 | % amssymbols, 40 | % mtpbb, 41 | % mtpcal, 42 | % nofontinfo % suppresses all warnings 43 | % ]{mtpro2} 44 | % \usepackage{scalefnt,letltxmacro} 45 | % \LetLtxMacro{\oldtextsc}{\textsc} 46 | % \renewcommand{\textsc}[1]{\oldtextsc{\scalefont{1.10}#1}} 47 | % \usepackage[scaled=0.92]{PTSans} 48 | 49 | % Use default fonts here 50 | % \usepackage{amsmath} 51 | % \usepackage{amssymb} 52 | 53 | \usepackage{titling} 54 | 55 | % COLOR 56 | \usepackage[table,usenames,dvipsnames]{xcolor} 57 | \definecolor{shadecolor}{gray}{0.9} 58 | 59 | % SPACING and TEXT 60 | \usepackage[final,expansion=alltext]{microtype} 61 | \usepackage[english]{babel} 62 | \usepackage[parfill]{parskip} 63 | \usepackage{afterpage} 64 | \usepackage{framed} 65 | \usepackage{verbatim} 66 | \usepackage{setspace} 67 | 68 | %redefine the leftbar environment to accept a width and coloring options 69 | \renewenvironment{leftbar}[1][\hsize] 70 | {% 71 | \def\FrameCommand 72 | {% 73 | {\color{Gray}\vrule width 3pt}% 74 | \hspace{10pt}% 75 | %\hspace{0pt}\fboxsep=\FrameSep\colorbox{black!10}% 76 | }% 77 | \MakeFramed{\hsize#1\advance\hsize-\width\FrameRestore}% 78 | }% 79 | {\endMakeFramed} 80 | 81 | % define a paragraph header function 82 | \DeclareRobustCommand{\parhead}[1]{\textbf{#1}~} 83 | 84 | % EDITING 85 | % line numbering in left margin 86 | \usepackage{lineno} 87 | 
\renewcommand\linenumberfont{\normalfont 88 | \footnotesize 89 | \sffamily 90 | \color{SkyBlue}} 91 | % ragged paragraphs in right margin 92 | \usepackage{ragged2e} 93 | \DeclareRobustCommand{\sidenote}[1]{\marginpar{ 94 | \RaggedRight 95 | \textcolor{Plum}{\textsf{#1}}}} 96 | % paragraph counter in right margin 97 | \newcommand{\parnum}{\bfseries\P\arabic{parcount}} 98 | \newcounter{parcount} 99 | \newcommand\p{% 100 | \stepcounter{parcount}% 101 | \leavevmode\marginpar[\hfill\parnum]{\parnum}% 102 | } 103 | % paragraph helper 104 | %\DeclareRobustCommand{\PP}{\textcolor{Plum}{\P} } 105 | 106 | % \usepackage[bottom]{footmisc} 107 | \usepackage[symbol]{footmisc} 108 | \renewcommand{\thefootnote}{\arabic{footnote}} 109 | 110 | % COUNTERS 111 | \usepackage[inline]{enumitem} 112 | \renewcommand{\labelenumi}{\color{black!67}{\arabic{enumi}.}} 113 | \renewcommand{\labelenumii}{{\color{black!67}(\alph{enumii})}} 114 | \renewcommand{\labelitemi}{{\color{black!67}\textbullet}} 115 | 116 | % FIGURES 117 | \usepackage{graphicx} 118 | \usepackage[labelfont={it, small}, font=small]{caption} 119 | \usepackage[format=hang]{subcaption} 120 | % \usepackage{ccaption} 121 | 122 | % APPENDIX FIGURES 123 | \usepackage{chngcntr} 124 | 125 | % TABLES 126 | \usepackage{booktabs} 127 | \usepackage{longtable} 128 | \usepackage{hhline} 129 | 130 | % ALGORITHMS 131 | \usepackage[algoruled]{algorithm2e} 132 | \usepackage{listings} 133 | \usepackage{fancyvrb} 134 | \fvset{fontsize=\normalsize} 135 | 136 | % THEOREMS 137 | \usepackage{amsthm} 138 | \newtheorem{proposition}{Proposition} 139 | \newtheorem{lemma}{Lemma} 140 | 141 | % BIBLIOGRAPHY 142 | \usepackage[numbers]{natbib} 143 | 144 | % HYPERREF 145 | \usepackage[colorlinks,linktoc=all]{hyperref} 146 | \usepackage[all]{hypcap} 147 | \hypersetup{citecolor=MidnightBlue} 148 | \hypersetup{linkcolor=black} 149 | \hypersetup{urlcolor=MidnightBlue} 150 | 151 | % CLEVEREF must come after HYPERREF 152 | \usepackage[nameinlink]{cleveref} 153 | 154 
| % ACRONYMS 155 | \usepackage[acronym,smallcaps,nowarn]{glossaries} 156 | % \makeglossaries 157 | 158 | % COLOR DEFINITIONS 159 | \newcommand{\red}[1]{\textcolor{BrickRed}{#1}} 160 | \newcommand{\orange}[1]{\textcolor{BurntOrange}{#1}} 161 | \newcommand{\green}[1]{\textcolor{OliveGreen}{#1}} 162 | \newcommand{\blue}[1]{\textcolor{MidnightBlue}{#1}} 163 | \newcommand{\gray}[1]{\textcolor{black!60}{#1}} 164 | 165 | % LISTINGS DEFINTIONS 166 | \lstdefinestyle{mystyle}{ 167 | commentstyle=\color{OliveGreen}, 168 | keywordstyle=\color{BurntOrange}, 169 | numberstyle=\tiny\color{black!60}, 170 | stringstyle=\color{MidnightBlue}, 171 | basicstyle=\ttfamily, 172 | breakatwhitespace=false, 173 | breaklines=true, 174 | captionpos=b, 175 | keepspaces=true, 176 | numbers=left, 177 | numbersep=5pt, 178 | showspaces=false, 179 | showstringspaces=false, 180 | showtabs=false, 181 | tabsize=2 182 | } 183 | \lstset{style=mystyle} 184 | 185 | \usepackage[colorinlistoftodos, 186 | prependcaption, 187 | textsize=small, 188 | backgroundcolor=yellow, 189 | linecolor=lightgray, 190 | bordercolor=lightgray]{todonotes} 191 | -------------------------------------------------------------------------------- /assignment2/preamble/preamble.tex: -------------------------------------------------------------------------------- 1 | \RequirePackage[l2tabu, orthodox]{nag} 2 | %\documentclass{article} 3 | 4 | \usepackage[left=1.in, right=1.in, top=1.25in, bottom=1.25in]{geometry} 5 | 6 | % FONTS 7 | %\usepackage[T1]{fontenc} 8 | 9 | % Replace default Latin Modern typewriter with its proportional counterpart 10 | % http://www.tug.dk/FontCatalogue/lmoderntypewriterprop/ 11 | %\renewcommand*\ttdefault{lmvtt} 12 | 13 | 14 | %%% OPTION 1 - Fourier Math + New Century Schoolbook + ParaType Sans 15 | 16 | % % Import Fourier Math (this imposes its own New Century Schoolbook type) 17 | % % http://www.ctan.org/tex-archive/fonts/fouriernc/ 18 | %\usepackage{fouriernc} 19 | %\usepackage{amsmath} 20 | % % Replace 
with TeX Gyre Schola version of New Century Schoolbook (must scale!) 21 | % % http://www.tug.dk/FontCatalogue/tgschola/ 22 | %\usepackage[scale=0.92]{tgschola} 23 | %\usepackage[scaled=0.88]{PTSans} 24 | 25 | %% OPTION 2 - MathDesign Math + Bitstream Charter + ParaType Sans 26 | 27 | % Import MathDesign (this brings along Bitstream Charter) 28 | % http://www.ctan.org/tex-archive/fonts/mathdesign/ 29 | \usepackage[bitstream-charter]{mathdesign} 30 | \usepackage{amsmath} 31 | \usepackage[scaled=0.92]{PTSans} 32 | 33 | 34 | % %%% OPTION 3 - MTPRO 2 Math + Termes Times + ParaType Sans 35 | 36 | % \usepackage{tgtermes} 37 | % \usepackage{amsmath} 38 | % \usepackage[subscriptcorrection, 39 | % amssymbols, 40 | % mtpbb, 41 | % mtpcal, 42 | % nofontinfo % suppresses all warnings 43 | % ]{mtpro2} 44 | % \usepackage{scalefnt,letltxmacro} 45 | % \LetLtxMacro{\oldtextsc}{\textsc} 46 | % \renewcommand{\textsc}[1]{\oldtextsc{\scalefont{1.10}#1}} 47 | % \usepackage[scaled=0.92]{PTSans} 48 | 49 | % Use default fonts here 50 | % \usepackage{amsmath} 51 | % \usepackage{amssymb} 52 | 53 | \usepackage{titling} 54 | 55 | % COLOR 56 | \usepackage[table,usenames,dvipsnames]{xcolor} 57 | \definecolor{shadecolor}{gray}{0.9} 58 | 59 | % SPACING and TEXT 60 | \usepackage[final,expansion=alltext]{microtype} 61 | \usepackage[english]{babel} 62 | \usepackage[parfill]{parskip} 63 | \usepackage{afterpage} 64 | \usepackage{framed} 65 | \usepackage{verbatim} 66 | \usepackage{setspace} 67 | 68 | %redefine the leftbar environment to accept a width and coloring options 69 | \renewenvironment{leftbar}[1][\hsize] 70 | {% 71 | \def\FrameCommand 72 | {% 73 | {\color{Gray}\vrule width 3pt}% 74 | \hspace{10pt}% 75 | %\hspace{0pt}\fboxsep=\FrameSep\colorbox{black!10}% 76 | }% 77 | \MakeFramed{\hsize#1\advance\hsize-\width\FrameRestore}% 78 | }% 79 | {\endMakeFramed} 80 | 81 | % define a paragraph header function 82 | \DeclareRobustCommand{\parhead}[1]{\textbf{#1}~} 83 | 84 | % EDITING 85 | % line numbering 
in left margin 86 | \usepackage{lineno} 87 | \renewcommand\linenumberfont{\normalfont 88 | \footnotesize 89 | \sffamily 90 | \color{SkyBlue}} 91 | % ragged paragraphs in right margin 92 | \usepackage{ragged2e} 93 | \DeclareRobustCommand{\sidenote}[1]{\marginpar{ 94 | \RaggedRight 95 | \textcolor{Plum}{\textsf{#1}}}} 96 | % paragraph counter in right margin 97 | \newcommand{\parnum}{\bfseries\P\arabic{parcount}} 98 | \newcounter{parcount} 99 | \newcommand\p{% 100 | \stepcounter{parcount}% 101 | \leavevmode\marginpar[\hfill\parnum]{\parnum}% 102 | } 103 | % paragraph helper 104 | %\DeclareRobustCommand{\PP}{\textcolor{Plum}{\P} } 105 | 106 | % \usepackage[bottom]{footmisc} 107 | \usepackage[symbol]{footmisc} 108 | \renewcommand{\thefootnote}{\arabic{footnote}} 109 | 110 | % COUNTERS 111 | \usepackage[inline]{enumitem} 112 | \renewcommand{\labelenumi}{\color{black!67}{\arabic{enumi}.}} 113 | \renewcommand{\labelenumii}{{\color{black!67}(\alph{enumii})}} 114 | \renewcommand{\labelitemi}{{\color{black!67}\textbullet}} 115 | 116 | % FIGURES 117 | \usepackage{graphicx} 118 | \usepackage[labelfont={it, small}, font=small]{caption} 119 | \usepackage[format=hang]{subcaption} 120 | % \usepackage{ccaption} 121 | 122 | % APPENDIX FIGURES 123 | \usepackage{chngcntr} 124 | 125 | % TABLES 126 | \usepackage{booktabs} 127 | \usepackage{longtable} 128 | \usepackage{hhline} 129 | 130 | % ALGORITHMS 131 | \usepackage[algoruled]{algorithm2e} 132 | \usepackage{listings} 133 | \usepackage{fancyvrb} 134 | \fvset{fontsize=\normalsize} 135 | 136 | % THEOREMS 137 | \usepackage{amsthm} 138 | \newtheorem{proposition}{Proposition} 139 | \newtheorem{lemma}{Lemma} 140 | 141 | % BIBLIOGRAPHY 142 | \usepackage[numbers]{natbib} 143 | 144 | % HYPERREF 145 | \usepackage[colorlinks,linktoc=all]{hyperref} 146 | \usepackage[all]{hypcap} 147 | \hypersetup{citecolor=MidnightBlue} 148 | \hypersetup{linkcolor=black} 149 | \hypersetup{urlcolor=MidnightBlue} 150 | 151 | % CLEVEREF must come after HYPERREF 152 | 
\usepackage[nameinlink]{cleveref} 153 | 154 | % ACRONYMS 155 | \usepackage[acronym,smallcaps,nowarn]{glossaries} 156 | % \makeglossaries 157 | 158 | % COLOR DEFINITIONS 159 | \newcommand{\red}[1]{\textcolor{BrickRed}{#1}} 160 | \newcommand{\orange}[1]{\textcolor{BurntOrange}{#1}} 161 | \newcommand{\green}[1]{\textcolor{OliveGreen}{#1}} 162 | \newcommand{\blue}[1]{\textcolor{MidnightBlue}{#1}} 163 | \newcommand{\gray}[1]{\textcolor{black!60}{#1}} 164 | 165 | % LISTINGS DEFINTIONS 166 | \lstdefinestyle{mystyle}{ 167 | commentstyle=\color{OliveGreen}, 168 | keywordstyle=\color{BurntOrange}, 169 | numberstyle=\tiny\color{black!60}, 170 | stringstyle=\color{MidnightBlue}, 171 | basicstyle=\ttfamily, 172 | breakatwhitespace=false, 173 | breaklines=true, 174 | captionpos=b, 175 | keepspaces=true, 176 | numbers=left, 177 | numbersep=5pt, 178 | showspaces=false, 179 | showstringspaces=false, 180 | showtabs=false, 181 | tabsize=2 182 | } 183 | \lstset{style=mystyle} 184 | 185 | \usepackage[colorinlistoftodos, 186 | prependcaption, 187 | textsize=small, 188 | backgroundcolor=yellow, 189 | linecolor=lightgray, 190 | bordercolor=lightgray]{todonotes} 191 | 192 | % Define an environment for solutions 193 | \newenvironment{solution} 194 | { 195 | \color{MidnightBlue} 196 | } 197 | { 198 | } 199 | %-------------------------------------------------- -------------------------------------------------------------------------------- /assignment3/preamble/preamble.tex: -------------------------------------------------------------------------------- 1 | \RequirePackage[l2tabu, orthodox]{nag} 2 | %\documentclass{article} 3 | 4 | \usepackage[left=1.in, right=1.in, top=1.25in, bottom=1.25in]{geometry} 5 | 6 | % FONTS 7 | %\usepackage[T1]{fontenc} 8 | 9 | % Replace default Latin Modern typewriter with its proportional counterpart 10 | % http://www.tug.dk/FontCatalogue/lmoderntypewriterprop/ 11 | %\renewcommand*\ttdefault{lmvtt} 12 | 13 | 14 | %%% OPTION 1 - Fourier Math + New Century 
Schoolbook + ParaType Sans 15 | 16 | % % Import Fourier Math (this imposes its own New Century Schoolbook type) 17 | % % http://www.ctan.org/tex-archive/fonts/fouriernc/ 18 | %\usepackage{fouriernc} 19 | %\usepackage{amsmath} 20 | % % Replace with TeX Gyre Schola version of New Century Schoolbook (must scale!) 21 | % % http://www.tug.dk/FontCatalogue/tgschola/ 22 | %\usepackage[scale=0.92]{tgschola} 23 | %\usepackage[scaled=0.88]{PTSans} 24 | 25 | %% OPTION 2 - MathDesign Math + Bitstream Charter + ParaType Sans 26 | 27 | % Import MathDesign (this brings along Bitstream Charter) 28 | % http://www.ctan.org/tex-archive/fonts/mathdesign/ 29 | \usepackage[bitstream-charter]{mathdesign} 30 | \usepackage{amsmath} 31 | \usepackage[scaled=0.92]{PTSans} 32 | 33 | 34 | % %%% OPTION 3 - MTPRO 2 Math + Termes Times + ParaType Sans 35 | 36 | % \usepackage{tgtermes} 37 | % \usepackage{amsmath} 38 | % \usepackage[subscriptcorrection, 39 | % amssymbols, 40 | % mtpbb, 41 | % mtpcal, 42 | % nofontinfo % suppresses all warnings 43 | % ]{mtpro2} 44 | % \usepackage{scalefnt,letltxmacro} 45 | % \LetLtxMacro{\oldtextsc}{\textsc} 46 | % \renewcommand{\textsc}[1]{\oldtextsc{\scalefont{1.10}#1}} 47 | % \usepackage[scaled=0.92]{PTSans} 48 | 49 | % Use default fonts here 50 | % \usepackage{amsmath} 51 | % \usepackage{amssymb} 52 | 53 | \usepackage{titling} 54 | 55 | % COLOR 56 | \usepackage[table,usenames,dvipsnames]{xcolor} 57 | \definecolor{shadecolor}{gray}{0.9} 58 | 59 | % SPACING and TEXT 60 | \usepackage[final,expansion=alltext]{microtype} 61 | \usepackage[english]{babel} 62 | \usepackage[parfill]{parskip} 63 | \usepackage{afterpage} 64 | \usepackage{framed} 65 | \usepackage{verbatim} 66 | \usepackage{setspace} 67 | 68 | %redefine the leftbar environment to accept a width and coloring options 69 | \renewenvironment{leftbar}[1][\hsize] 70 | {% 71 | \def\FrameCommand 72 | {% 73 | {\color{Gray}\vrule width 3pt}% 74 | \hspace{10pt}% 75 | %\hspace{0pt}\fboxsep=\FrameSep\colorbox{black!10}% 
76 | }% 77 | \MakeFramed{\hsize#1\advance\hsize-\width\FrameRestore}% 78 | }% 79 | {\endMakeFramed} 80 | 81 | % define a paragraph header function 82 | \DeclareRobustCommand{\parhead}[1]{\textbf{#1}~} 83 | 84 | % EDITING 85 | % line numbering in left margin 86 | \usepackage{lineno} 87 | \renewcommand\linenumberfont{\normalfont 88 | \footnotesize 89 | \sffamily 90 | \color{SkyBlue}} 91 | % ragged paragraphs in right margin 92 | \usepackage{ragged2e} 93 | \DeclareRobustCommand{\sidenote}[1]{\marginpar{ 94 | \RaggedRight 95 | \textcolor{Plum}{\textsf{#1}}}} 96 | % paragraph counter in right margin 97 | \newcommand{\parnum}{\bfseries\P\arabic{parcount}} 98 | \newcounter{parcount} 99 | \newcommand\p{% 100 | \stepcounter{parcount}% 101 | \leavevmode\marginpar[\hfill\parnum]{\parnum}% 102 | } 103 | % paragraph helper 104 | %\DeclareRobustCommand{\PP}{\textcolor{Plum}{\P} } 105 | 106 | % \usepackage[bottom]{footmisc} 107 | \usepackage[symbol]{footmisc} 108 | \renewcommand{\thefootnote}{\arabic{footnote}} 109 | 110 | % COUNTERS 111 | \usepackage[inline]{enumitem} 112 | \renewcommand{\labelenumi}{\color{black!67}{\arabic{enumi}.}} 113 | \renewcommand{\labelenumii}{{\color{black!67}(\alph{enumii})}} 114 | \renewcommand{\labelitemi}{{\color{black!67}\textbullet}} 115 | 116 | % FIGURES 117 | \usepackage{graphicx} 118 | \usepackage[labelfont={it, small}, font=small]{caption} 119 | \usepackage[format=hang]{subcaption} 120 | % \usepackage{ccaption} 121 | 122 | % APPENDIX FIGURES 123 | \usepackage{chngcntr} 124 | 125 | % TABLES 126 | \usepackage{booktabs} 127 | \usepackage{longtable} 128 | \usepackage{hhline} 129 | 130 | % ALGORITHMS 131 | \usepackage[algoruled]{algorithm2e} 132 | \usepackage{listings} 133 | \usepackage{fancyvrb} 134 | \fvset{fontsize=\normalsize} 135 | 136 | % THEOREMS 137 | \usepackage{amsthm} 138 | \newtheorem{proposition}{Proposition} 139 | \newtheorem{lemma}{Lemma} 140 | 141 | % BIBLIOGRAPHY 142 | \usepackage[numbers]{natbib} 143 | 144 | % HYPERREF 145 | 
\usepackage[colorlinks,linktoc=all]{hyperref} 146 | \usepackage[all]{hypcap} 147 | \hypersetup{citecolor=MidnightBlue} 148 | \hypersetup{linkcolor=black} 149 | \hypersetup{urlcolor=MidnightBlue} 150 | 151 | % CLEVEREF must come after HYPERREF 152 | \usepackage[nameinlink]{cleveref} 153 | 154 | % ACRONYMS 155 | \usepackage[acronym,smallcaps,nowarn]{glossaries} 156 | % \makeglossaries 157 | 158 | % COLOR DEFINITIONS 159 | \newcommand{\red}[1]{\textcolor{BrickRed}{#1}} 160 | \newcommand{\orange}[1]{\textcolor{BurntOrange}{#1}} 161 | \newcommand{\green}[1]{\textcolor{OliveGreen}{#1}} 162 | \newcommand{\blue}[1]{\textcolor{MidnightBlue}{#1}} 163 | \newcommand{\gray}[1]{\textcolor{black!60}{#1}} 164 | 165 | % LISTINGS DEFINTIONS 166 | \lstdefinestyle{mystyle}{ 167 | commentstyle=\color{OliveGreen}, 168 | keywordstyle=\color{BurntOrange}, 169 | numberstyle=\tiny\color{black!60}, 170 | stringstyle=\color{MidnightBlue}, 171 | basicstyle=\ttfamily, 172 | breakatwhitespace=false, 173 | breaklines=true, 174 | captionpos=b, 175 | keepspaces=true, 176 | numbers=left, 177 | numbersep=5pt, 178 | showspaces=false, 179 | showstringspaces=false, 180 | showtabs=false, 181 | tabsize=2 182 | } 183 | \lstset{style=mystyle} 184 | 185 | \usepackage[colorinlistoftodos, 186 | prependcaption, 187 | textsize=small, 188 | backgroundcolor=yellow, 189 | linecolor=lightgray, 190 | bordercolor=lightgray]{todonotes} 191 | 192 | % Define an environment for solutions 193 | \newenvironment{solution} 194 | { 195 | \color{MidnightBlue} 196 | } 197 | { 198 | } 199 | %-------------------------------------------------- -------------------------------------------------------------------------------- /assignment4/preamble/preamble.tex: -------------------------------------------------------------------------------- 1 | \RequirePackage[l2tabu, orthodox]{nag} 2 | %\documentclass{article} 3 | 4 | \usepackage[left=1.in, right=1.in, top=1.25in, bottom=1.25in]{geometry} 5 | 6 | % FONTS 7 | %\usepackage[T1]{fontenc} 
8 | 9 | % Replace default Latin Modern typewriter with its proportional counterpart 10 | % http://www.tug.dk/FontCatalogue/lmoderntypewriterprop/ 11 | %\renewcommand*\ttdefault{lmvtt} 12 | 13 | 14 | %%% OPTION 1 - Fourier Math + New Century Schoolbook + ParaType Sans 15 | 16 | % % Import Fourier Math (this imposes its own New Century Schoolbook type) 17 | % % http://www.ctan.org/tex-archive/fonts/fouriernc/ 18 | %\usepackage{fouriernc} 19 | %\usepackage{amsmath} 20 | % % Replace with TeX Gyre Schola version of New Century Schoolbook (must scale!) 21 | % % http://www.tug.dk/FontCatalogue/tgschola/ 22 | %\usepackage[scale=0.92]{tgschola} 23 | %\usepackage[scaled=0.88]{PTSans} 24 | 25 | %% OPTION 2 - MathDesign Math + Bitstream Charter + ParaType Sans 26 | 27 | % Import MathDesign (this brings along Bitstream Charter) 28 | % http://www.ctan.org/tex-archive/fonts/mathdesign/ 29 | \usepackage[bitstream-charter]{mathdesign} 30 | \usepackage{amsmath} 31 | \usepackage[scaled=0.92]{PTSans} 32 | 33 | 34 | % %%% OPTION 3 - MTPRO 2 Math + Termes Times + ParaType Sans 35 | 36 | % \usepackage{tgtermes} 37 | % \usepackage{amsmath} 38 | % \usepackage[subscriptcorrection, 39 | % amssymbols, 40 | % mtpbb, 41 | % mtpcal, 42 | % nofontinfo % suppresses all warnings 43 | % ]{mtpro2} 44 | % \usepackage{scalefnt,letltxmacro} 45 | % \LetLtxMacro{\oldtextsc}{\textsc} 46 | % \renewcommand{\textsc}[1]{\oldtextsc{\scalefont{1.10}#1}} 47 | % \usepackage[scaled=0.92]{PTSans} 48 | 49 | % Use default fonts here 50 | % \usepackage{amsmath} 51 | % \usepackage{amssymb} 52 | 53 | \usepackage{titling} 54 | 55 | % COLOR 56 | \usepackage[table,usenames,dvipsnames]{xcolor} 57 | \definecolor{shadecolor}{gray}{0.9} 58 | 59 | % SPACING and TEXT 60 | \usepackage[final,expansion=alltext]{microtype} 61 | \usepackage[english]{babel} 62 | \usepackage[parfill]{parskip} 63 | \usepackage{afterpage} 64 | \usepackage{framed} 65 | \usepackage{verbatim} 66 | \usepackage{setspace} 67 | 68 | %redefine the leftbar 
environment to accept a width and coloring options 69 | \renewenvironment{leftbar}[1][\hsize] 70 | {% 71 | \def\FrameCommand 72 | {% 73 | {\color{Gray}\vrule width 3pt}% 74 | \hspace{10pt}% 75 | %\hspace{0pt}\fboxsep=\FrameSep\colorbox{black!10}% 76 | }% 77 | \MakeFramed{\hsize#1\advance\hsize-\width\FrameRestore}% 78 | }% 79 | {\endMakeFramed} 80 | 81 | % define a paragraph header function 82 | \DeclareRobustCommand{\parhead}[1]{\textbf{#1}~} 83 | 84 | % EDITING 85 | % line numbering in left margin 86 | \usepackage{lineno} 87 | \renewcommand\linenumberfont{\normalfont 88 | \footnotesize 89 | \sffamily 90 | \color{SkyBlue}} 91 | % ragged paragraphs in right margin 92 | \usepackage{ragged2e} 93 | \DeclareRobustCommand{\sidenote}[1]{\marginpar{ 94 | \RaggedRight 95 | \textcolor{Plum}{\textsf{#1}}}} 96 | % paragraph counter in right margin 97 | \newcommand{\parnum}{\bfseries\P\arabic{parcount}} 98 | \newcounter{parcount} 99 | \newcommand\p{% 100 | \stepcounter{parcount}% 101 | \leavevmode\marginpar[\hfill\parnum]{\parnum}% 102 | } 103 | % paragraph helper 104 | %\DeclareRobustCommand{\PP}{\textcolor{Plum}{\P} } 105 | 106 | % \usepackage[bottom]{footmisc} 107 | \usepackage[symbol]{footmisc} 108 | \renewcommand{\thefootnote}{\arabic{footnote}} 109 | 110 | % COUNTERS 111 | \usepackage[inline]{enumitem} 112 | \renewcommand{\labelenumi}{\color{black!67}{\arabic{enumi}.}} 113 | \renewcommand{\labelenumii}{{\color{black!67}(\alph{enumii})}} 114 | \renewcommand{\labelitemi}{{\color{black!67}\textbullet}} 115 | 116 | % FIGURES 117 | \usepackage{graphicx} 118 | \usepackage[labelfont={it, small}, font=small]{caption} 119 | \usepackage[format=hang]{subcaption} 120 | % \usepackage{ccaption} 121 | 122 | % APPENDIX FIGURES 123 | \usepackage{chngcntr} 124 | 125 | % TABLES 126 | \usepackage{booktabs} 127 | \usepackage{longtable} 128 | \usepackage{hhline} 129 | 130 | % ALGORITHMS 131 | \usepackage[algoruled]{algorithm2e} 132 | \usepackage{listings} 133 | \usepackage{fancyvrb} 134 | 
\fvset{fontsize=\normalsize} 135 | 136 | % THEOREMS 137 | \usepackage{amsthm} 138 | \newtheorem{proposition}{Proposition} 139 | \newtheorem{lemma}{Lemma} 140 | 141 | % BIBLIOGRAPHY 142 | \usepackage[numbers]{natbib} 143 | 144 | % HYPERREF 145 | \usepackage[colorlinks,linktoc=all]{hyperref} 146 | \usepackage[all]{hypcap} 147 | \hypersetup{citecolor=MidnightBlue} 148 | \hypersetup{linkcolor=black} 149 | \hypersetup{urlcolor=MidnightBlue} 150 | 151 | % CLEVEREF must come after HYPERREF 152 | \usepackage[nameinlink]{cleveref} 153 | 154 | % ACRONYMS 155 | \usepackage[acronym,smallcaps,nowarn]{glossaries} 156 | % \makeglossaries 157 | 158 | % COLOR DEFINITIONS 159 | \newcommand{\red}[1]{\textcolor{BrickRed}{#1}} 160 | \newcommand{\orange}[1]{\textcolor{BurntOrange}{#1}} 161 | \newcommand{\green}[1]{\textcolor{OliveGreen}{#1}} 162 | \newcommand{\blue}[1]{\textcolor{MidnightBlue}{#1}} 163 | \newcommand{\gray}[1]{\textcolor{black!60}{#1}} 164 | 165 | % LISTINGS DEFINTIONS 166 | \lstdefinestyle{mystyle}{ 167 | commentstyle=\color{OliveGreen}, 168 | keywordstyle=\color{BurntOrange}, 169 | numberstyle=\tiny\color{black!60}, 170 | stringstyle=\color{MidnightBlue}, 171 | basicstyle=\ttfamily, 172 | breakatwhitespace=false, 173 | breaklines=true, 174 | captionpos=b, 175 | keepspaces=true, 176 | numbers=left, 177 | numbersep=5pt, 178 | showspaces=false, 179 | showstringspaces=false, 180 | showtabs=false, 181 | tabsize=2 182 | } 183 | \lstset{style=mystyle} 184 | 185 | \usepackage[colorinlistoftodos, 186 | prependcaption, 187 | textsize=small, 188 | backgroundcolor=yellow, 189 | linecolor=lightgray, 190 | bordercolor=lightgray]{todonotes} 191 | 192 | % Define an environment for solutions 193 | \newenvironment{solution} 194 | { 195 | \color{MidnightBlue} 196 | } 197 | { 198 | } 199 | %-------------------------------------------------- -------------------------------------------------------------------------------- /assignment1/preamble/preamble_math.tex: 
-------------------------------------------------------------------------------- 1 | % !TEX root = template.tex 2 | 3 | % \DeclareRobustCommand{\mb}[1]{\ensuremath{\boldsymbol{\mathbf{#1}}}} 4 | \DeclareRobustCommand{\mb}[1]{\boldsymbol{#1}} 5 | 6 | % \newcommand{\KL}[2]{\ensuremath{\textrm{KL}\PARENS{#1\;\|\;#2}}} 7 | \DeclareRobustCommand{\KL}[2]{\ensuremath{\textrm{KL}\left(#1\;\|\;#2\right)}} 8 | 9 | \DeclareMathOperator*{\argmax}{arg\,max} 10 | \DeclareMathOperator*{\argmin}{arg\,min} 11 | 12 | \renewcommand{\mid}{~\vert~} 13 | \newcommand{\given}{\,|\,} 14 | \newcommand{\iid}[1]{\stackrel{\text{iid}}{#1}} 15 | 16 | \newcommand{\mba}{\mb{a}} 17 | \newcommand{\mbb}{\mb{b}} 18 | \newcommand{\mbc}{\mb{c}} 19 | \newcommand{\mbd}{\mb{d}} 20 | \newcommand{\mbe}{\mb{e}} 21 | % \newcommand{\mbf}{\mb{f}} 22 | \newcommand{\mbg}{\mb{g}} 23 | \newcommand{\mbh}{\mb{h}} 24 | \newcommand{\mbi}{\mb{i}} 25 | \newcommand{\mbj}{\mb{j}} 26 | \newcommand{\mbk}{\mb{k}} 27 | \newcommand{\mbl}{\mb{l}} 28 | \newcommand{\mbm}{\mb{m}} 29 | \newcommand{\mbn}{\mb{n}} 30 | \newcommand{\mbo}{\mb{o}} 31 | \newcommand{\mbp}{\mb{p}} 32 | \newcommand{\mbq}{\mb{q}} 33 | \newcommand{\mbr}{\mb{r}} 34 | \newcommand{\mbs}{\mb{s}} 35 | \newcommand{\mbt}{\mb{t}} 36 | \newcommand{\mbu}{\mb{u}} 37 | \newcommand{\mbv}{\mb{v}} 38 | \newcommand{\mbw}{\mb{w}} 39 | \newcommand{\mbx}{\mb{x}} 40 | \newcommand{\mby}{\mb{y}} 41 | \newcommand{\mbz}{\mb{z}} 42 | 43 | \newcommand{\mbA}{\mb{A}} 44 | \newcommand{\mbB}{\mb{B}} 45 | \newcommand{\mbC}{\mb{C}} 46 | \newcommand{\mbD}{\mb{D}} 47 | \newcommand{\mbE}{\mb{E}} 48 | \newcommand{\mbF}{\mb{F}} 49 | \newcommand{\mbG}{\mb{G}} 50 | \newcommand{\mbH}{\mb{H}} 51 | \newcommand{\mbI}{\mb{I}} 52 | \newcommand{\mbJ}{\mb{J}} 53 | \newcommand{\mbK}{\mb{K}} 54 | \newcommand{\mbL}{\mb{L}} 55 | \newcommand{\mbM}{\mb{M}} 56 | \newcommand{\mbN}{\mb{N}} 57 | \newcommand{\mbO}{\mb{O}} 58 | \newcommand{\mbP}{\mb{P}} 59 | \newcommand{\mbQ}{\mb{Q}} 60 | \newcommand{\mbR}{\mb{R}} 61 | 
\newcommand{\mbS}{\mb{S}} 62 | \newcommand{\mbT}{\mb{T}} 63 | \newcommand{\mbU}{\mb{U}} 64 | \newcommand{\mbV}{\mb{V}} 65 | \newcommand{\mbW}{\mb{W}} 66 | \newcommand{\mbX}{\mb{X}} 67 | \newcommand{\mbY}{\mb{Y}} 68 | \newcommand{\mbZ}{\mb{Z}} 69 | 70 | \newcommand{\mbalpha}{\mb{\alpha}} 71 | \newcommand{\mbbeta}{\mb{\beta}} 72 | \newcommand{\mbdelta}{\mb{\delta}} 73 | \newcommand{\mbepsilon}{\mb{\epsilon}} 74 | \newcommand{\mbchi}{\mb{\chi}} 75 | \newcommand{\mbeta}{\mb{\eta}} 76 | \newcommand{\mbgamma}{\mb{\gamma}} 77 | \newcommand{\mbiota}{\mb{\iota}} 78 | \newcommand{\mbkappa}{\mb{\kappa}} 79 | \newcommand{\mblambda}{\mb{\lambda}} 80 | \newcommand{\mbmu}{\mb{\mu}} 81 | \newcommand{\mbnu}{\mb{\nu}} 82 | \newcommand{\mbomega}{\mb{\omega}} 83 | \newcommand{\mbphi}{\mb{\phi}} 84 | \newcommand{\mbpi}{\mb{\pi}} 85 | \newcommand{\mbpsi}{\mb{\psi}} 86 | \newcommand{\mbrho}{\mb{\rho}} 87 | \newcommand{\mbsigma}{\mb{\sigma}} 88 | \newcommand{\mbtau}{\mb{\tau}} 89 | \newcommand{\mbtheta}{\mb{\theta}} 90 | \newcommand{\mbupsilon}{\mb{\upsilon}} 91 | \newcommand{\mbvarepsilon}{\mb{\varepsilon}} 92 | \newcommand{\mbvarphi}{\mb{\varphi}} 93 | \newcommand{\mbvartheta}{\mb{\vartheta}} 94 | \newcommand{\mbvarrho}{\mb{\varrho}} 95 | \newcommand{\mbxi}{\mb{\xi}} 96 | \newcommand{\mbzeta}{\mb{\zeta}} 97 | 98 | \newcommand{\mbDelta}{\mb{\Delta}} 99 | \newcommand{\mbGamma}{\mb{\Gamma}} 100 | \newcommand{\mbLambda}{\mb{\Lambda}} 101 | \newcommand{\mbOmega}{\mb{\Omega}} 102 | \newcommand{\mbPhi}{\mb{\Phi}} 103 | \newcommand{\mbPi}{\mb{\Pi}} 104 | \newcommand{\mbPsi}{\mb{\Psi}} 105 | \newcommand{\mbSigma}{\mb{\Sigma}} 106 | \newcommand{\mbTheta}{\mb{\Theta}} 107 | \newcommand{\mbUpsilon}{\mb{\Upsilon}} 108 | \newcommand{\mbXi}{\mb{\Xi}} 109 | 110 | \newcommand{\dif}{\mathop{}\!\mathrm{d}} 111 | \newcommand{\diag}{\textrm{diag}} 112 | \newcommand{\supp}{\textrm{supp}} 113 | 114 | \newcommand{\E}{\mathbb{E}} 115 | \newcommand{\Var}{\mathbb{V}\textrm{ar}} 116 | 117 | 
\newcommand{\bbA}{\mathbb{A}} 118 | \newcommand{\bbB}{\mathbb{B}} 119 | \newcommand{\bbC}{\mathbb{C}} 120 | \newcommand{\bbD}{\mathbb{D}} 121 | \newcommand{\bbE}{\mathbb{E}} 122 | \newcommand{\bbF}{\mathbb{F}} 123 | \newcommand{\bbG}{\mathbb{G}} 124 | \newcommand{\bbH}{\mathbb{H}} 125 | \newcommand{\bbI}{\mathbb{I}} 126 | \newcommand{\bbJ}{\mathbb{J}} 127 | \newcommand{\bbK}{\mathbb{K}} 128 | \newcommand{\bbL}{\mathbb{L}} 129 | \newcommand{\bbM}{\mathbb{M}} 130 | \newcommand{\bbN}{\mathbb{N}} 131 | \newcommand{\bbO}{\mathbb{O}} 132 | \newcommand{\bbP}{\mathbb{P}} 133 | \newcommand{\bbQ}{\mathbb{Q}} 134 | \newcommand{\bbR}{\mathbb{R}} 135 | \newcommand{\bbS}{\mathbb{S}} 136 | \newcommand{\bbT}{\mathbb{T}} 137 | \newcommand{\bbU}{\mathbb{U}} 138 | \newcommand{\bbV}{\mathbb{V}} 139 | \newcommand{\bbW}{\mathbb{W}} 140 | \newcommand{\bbX}{\mathbb{X}} 141 | \newcommand{\bbY}{\mathbb{Y}} 142 | \newcommand{\bbZ}{\mathbb{Z}} 143 | 144 | \newcommand{\cA}{\mathcal{A}} 145 | \newcommand{\cB}{\mathcal{B}} 146 | \newcommand{\cC}{\mathcal{C}} 147 | \newcommand{\cD}{\mathcal{D}} 148 | \newcommand{\cE}{\mathcal{E}} 149 | \newcommand{\cF}{\mathcal{F}} 150 | \newcommand{\cG}{\mathcal{G}} 151 | \newcommand{\cH}{\mathcal{H}} 152 | \newcommand{\cI}{\mathcal{I}} 153 | \newcommand{\cJ}{\mathcal{J}} 154 | \newcommand{\cK}{\mathcal{K}} 155 | \newcommand{\cL}{\mathcal{L}} 156 | \newcommand{\cM}{\mathcal{M}} 157 | \newcommand{\cN}{\mathcal{N}} 158 | \newcommand{\cO}{\mathcal{O}} 159 | \newcommand{\cP}{\mathcal{P}} 160 | \newcommand{\cQ}{\mathcal{Q}} 161 | \newcommand{\cR}{\mathcal{R}} 162 | \newcommand{\cS}{\mathcal{S}} 163 | \newcommand{\cT}{\mathcal{T}} 164 | \newcommand{\cU}{\mathcal{U}} 165 | \newcommand{\cV}{\mathcal{V}} 166 | \newcommand{\cW}{\mathcal{W}} 167 | \newcommand{\cX}{\mathcal{X}} 168 | \newcommand{\cY}{\mathcal{Y}} 169 | \newcommand{\cZ}{\mathcal{Z}} 170 | 171 | \newcommand{\trans}{\mathsf{T}} 172 | \newcommand{\naturals}{\mathbb{N}} 173 | \newcommand{\reals}{\mathbb{R}} 174 
| 175 | \newcommand{\distNormal}{\mathcal{N}} 176 | \newcommand{\distGamma}{\mathrm{Gamma}} 177 | \newcommand{\distBernoulli}{\mathrm{Bern}} 178 | \newcommand{\distBinomial}{\mathrm{Bin}} 179 | \newcommand{\distCategorical}{\mathrm{Cat}} 180 | \newcommand{\distDirichlet}{\mathrm{Dir}} 181 | \newcommand{\distMultinomial}{\mathrm{Mult}} 182 | \newcommand{\distPolyaGamma}{\mathrm{PG}} 183 | \newcommand{\distMNIW}{\mathrm{MNIW}} 184 | \newcommand{\distPoissonProcess}{\mathrm{PP}} 185 | 186 | \newcommand{\dtmax}{\Delta t_{\mathsf{max}}} 187 | -------------------------------------------------------------------------------- /assignment2/preamble/preamble_math.tex: -------------------------------------------------------------------------------- 1 | % !TEX root = template.tex 2 | 3 | % \DeclareRobustCommand{\mb}[1]{\ensuremath{\boldsymbol{\mathbf{#1}}}} 4 | \DeclareRobustCommand{\mb}[1]{\boldsymbol{#1}} 5 | 6 | % \newcommand{\KL}[2]{\ensuremath{\textrm{KL}\PARENS{#1\;\|\;#2}}} 7 | \DeclareRobustCommand{\KL}[2]{\ensuremath{\textrm{KL}\left(#1\;\|\;#2\right)}} 8 | 9 | \DeclareMathOperator*{\argmax}{arg\,max} 10 | \DeclareMathOperator*{\argmin}{arg\,min} 11 | 12 | \renewcommand{\mid}{~\vert~} 13 | \newcommand{\given}{\,|\,} 14 | \newcommand{\iid}[1]{\stackrel{\text{iid}}{#1}} 15 | 16 | \newcommand{\mba}{\mb{a}} 17 | \newcommand{\mbb}{\mb{b}} 18 | \newcommand{\mbc}{\mb{c}} 19 | \newcommand{\mbd}{\mb{d}} 20 | \newcommand{\mbe}{\mb{e}} 21 | % \newcommand{\mbf}{\mb{f}} 22 | \newcommand{\mbg}{\mb{g}} 23 | \newcommand{\mbh}{\mb{h}} 24 | \newcommand{\mbi}{\mb{i}} 25 | \newcommand{\mbj}{\mb{j}} 26 | \newcommand{\mbk}{\mb{k}} 27 | \newcommand{\mbl}{\mb{l}} 28 | \newcommand{\mbm}{\mb{m}} 29 | \newcommand{\mbn}{\mb{n}} 30 | \newcommand{\mbo}{\mb{o}} 31 | \newcommand{\mbp}{\mb{p}} 32 | \newcommand{\mbq}{\mb{q}} 33 | \newcommand{\mbr}{\mb{r}} 34 | \newcommand{\mbs}{\mb{s}} 35 | \newcommand{\mbt}{\mb{t}} 36 | \newcommand{\mbu}{\mb{u}} 37 | \newcommand{\mbv}{\mb{v}} 38 | 
\newcommand{\mbw}{\mb{w}} 39 | \newcommand{\mbx}{\mb{x}} 40 | \newcommand{\mby}{\mb{y}} 41 | \newcommand{\mbz}{\mb{z}} 42 | 43 | \newcommand{\mbA}{\mb{A}} 44 | \newcommand{\mbB}{\mb{B}} 45 | \newcommand{\mbC}{\mb{C}} 46 | \newcommand{\mbD}{\mb{D}} 47 | \newcommand{\mbE}{\mb{E}} 48 | \newcommand{\mbF}{\mb{F}} 49 | \newcommand{\mbG}{\mb{G}} 50 | \newcommand{\mbH}{\mb{H}} 51 | \newcommand{\mbI}{\mb{I}} 52 | \newcommand{\mbJ}{\mb{J}} 53 | \newcommand{\mbK}{\mb{K}} 54 | \newcommand{\mbL}{\mb{L}} 55 | \newcommand{\mbM}{\mb{M}} 56 | \newcommand{\mbN}{\mb{N}} 57 | \newcommand{\mbO}{\mb{O}} 58 | \newcommand{\mbP}{\mb{P}} 59 | \newcommand{\mbQ}{\mb{Q}} 60 | \newcommand{\mbR}{\mb{R}} 61 | \newcommand{\mbS}{\mb{S}} 62 | \newcommand{\mbT}{\mb{T}} 63 | \newcommand{\mbU}{\mb{U}} 64 | \newcommand{\mbV}{\mb{V}} 65 | \newcommand{\mbW}{\mb{W}} 66 | \newcommand{\mbX}{\mb{X}} 67 | \newcommand{\mbY}{\mb{Y}} 68 | \newcommand{\mbZ}{\mb{Z}} 69 | 70 | \newcommand{\mbalpha}{\mb{\alpha}} 71 | \newcommand{\mbbeta}{\mb{\beta}} 72 | \newcommand{\mbdelta}{\mb{\delta}} 73 | \newcommand{\mbepsilon}{\mb{\epsilon}} 74 | \newcommand{\mbchi}{\mb{\chi}} 75 | \newcommand{\mbeta}{\mb{\eta}} 76 | \newcommand{\mbgamma}{\mb{\gamma}} 77 | \newcommand{\mbiota}{\mb{\iota}} 78 | \newcommand{\mbkappa}{\mb{\kappa}} 79 | \newcommand{\mblambda}{\mb{\lambda}} 80 | \newcommand{\mbmu}{\mb{\mu}} 81 | \newcommand{\mbnu}{\mb{\nu}} 82 | \newcommand{\mbomega}{\mb{\omega}} 83 | \newcommand{\mbphi}{\mb{\phi}} 84 | \newcommand{\mbpi}{\mb{\pi}} 85 | \newcommand{\mbpsi}{\mb{\psi}} 86 | \newcommand{\mbrho}{\mb{\rho}} 87 | \newcommand{\mbsigma}{\mb{\sigma}} 88 | \newcommand{\mbtau}{\mb{\tau}} 89 | \newcommand{\mbtheta}{\mb{\theta}} 90 | \newcommand{\mbupsilon}{\mb{\upsilon}} 91 | \newcommand{\mbvarepsilon}{\mb{\varepsilon}} 92 | \newcommand{\mbvarphi}{\mb{\varphi}} 93 | \newcommand{\mbvartheta}{\mb{\vartheta}} 94 | \newcommand{\mbvarrho}{\mb{\varrho}} 95 | \newcommand{\mbxi}{\mb{\xi}} 96 | \newcommand{\mbzeta}{\mb{\zeta}} 97 | 98 
| \newcommand{\mbDelta}{\mb{\Delta}} 99 | \newcommand{\mbGamma}{\mb{\Gamma}} 100 | \newcommand{\mbLambda}{\mb{\Lambda}} 101 | \newcommand{\mbOmega}{\mb{\Omega}} 102 | \newcommand{\mbPhi}{\mb{\Phi}} 103 | \newcommand{\mbPi}{\mb{\Pi}} 104 | \newcommand{\mbPsi}{\mb{\Psi}} 105 | \newcommand{\mbSigma}{\mb{\Sigma}} 106 | \newcommand{\mbTheta}{\mb{\Theta}} 107 | \newcommand{\mbUpsilon}{\mb{\Upsilon}} 108 | \newcommand{\mbXi}{\mb{\Xi}} 109 | 110 | \newcommand{\dif}{\mathop{}\!\mathrm{d}} 111 | \newcommand{\diag}{\textrm{diag}} 112 | \newcommand{\supp}{\textrm{supp}} 113 | 114 | \newcommand{\E}{\mathbb{E}} 115 | \newcommand{\Var}{\mathbb{V}\textrm{ar}} 116 | 117 | \newcommand{\bbA}{\mathbb{A}} 118 | \newcommand{\bbB}{\mathbb{B}} 119 | \newcommand{\bbC}{\mathbb{C}} 120 | \newcommand{\bbD}{\mathbb{D}} 121 | \newcommand{\bbE}{\mathbb{E}} 122 | \newcommand{\bbF}{\mathbb{F}} 123 | \newcommand{\bbG}{\mathbb{G}} 124 | \newcommand{\bbH}{\mathbb{H}} 125 | \newcommand{\bbI}{\mathbb{I}} 126 | \newcommand{\bbJ}{\mathbb{J}} 127 | \newcommand{\bbK}{\mathbb{K}} 128 | \newcommand{\bbL}{\mathbb{L}} 129 | \newcommand{\bbM}{\mathbb{M}} 130 | \newcommand{\bbN}{\mathbb{N}} 131 | \newcommand{\bbO}{\mathbb{O}} 132 | \newcommand{\bbP}{\mathbb{P}} 133 | \newcommand{\bbQ}{\mathbb{Q}} 134 | \newcommand{\bbR}{\mathbb{R}} 135 | \newcommand{\bbS}{\mathbb{S}} 136 | \newcommand{\bbT}{\mathbb{T}} 137 | \newcommand{\bbU}{\mathbb{U}} 138 | \newcommand{\bbV}{\mathbb{V}} 139 | \newcommand{\bbW}{\mathbb{W}} 140 | \newcommand{\bbX}{\mathbb{X}} 141 | \newcommand{\bbY}{\mathbb{Y}} 142 | \newcommand{\bbZ}{\mathbb{Z}} 143 | 144 | \newcommand{\cA}{\mathcal{A}} 145 | \newcommand{\cB}{\mathcal{B}} 146 | \newcommand{\cC}{\mathcal{C}} 147 | \newcommand{\cD}{\mathcal{D}} 148 | \newcommand{\cE}{\mathcal{E}} 149 | \newcommand{\cF}{\mathcal{F}} 150 | \newcommand{\cG}{\mathcal{G}} 151 | \newcommand{\cH}{\mathcal{H}} 152 | \newcommand{\cI}{\mathcal{I}} 153 | \newcommand{\cJ}{\mathcal{J}} 154 | \newcommand{\cK}{\mathcal{K}} 155 
| \newcommand{\cL}{\mathcal{L}} 156 | \newcommand{\cM}{\mathcal{M}} 157 | \newcommand{\cN}{\mathcal{N}} 158 | \newcommand{\cO}{\mathcal{O}} 159 | \newcommand{\cP}{\mathcal{P}} 160 | \newcommand{\cQ}{\mathcal{Q}} 161 | \newcommand{\cR}{\mathcal{R}} 162 | \newcommand{\cS}{\mathcal{S}} 163 | \newcommand{\cT}{\mathcal{T}} 164 | \newcommand{\cU}{\mathcal{U}} 165 | \newcommand{\cV}{\mathcal{V}} 166 | \newcommand{\cW}{\mathcal{W}} 167 | \newcommand{\cX}{\mathcal{X}} 168 | \newcommand{\cY}{\mathcal{Y}} 169 | \newcommand{\cZ}{\mathcal{Z}} 170 | 171 | \newcommand{\trans}{\mathsf{T}} 172 | \newcommand{\naturals}{\mathbb{N}} 173 | \newcommand{\reals}{\mathbb{R}} 174 | 175 | \newcommand{\distNormal}{\mathcal{N}} 176 | \newcommand{\distGamma}{\mathrm{Gamma}} 177 | \newcommand{\distBernoulli}{\mathrm{Bern}} 178 | \newcommand{\distBinomial}{\mathrm{Bin}} 179 | \newcommand{\distCategorical}{\mathrm{Cat}} 180 | \newcommand{\distDirichlet}{\mathrm{Dir}} 181 | \newcommand{\distMultinomial}{\mathrm{Mult}} 182 | \newcommand{\distPolyaGamma}{\mathrm{PG}} 183 | \newcommand{\distMNIW}{\mathrm{MNIW}} 184 | \newcommand{\distPoissonProcess}{\mathrm{PP}} 185 | 186 | \newcommand{\dtmax}{\Delta t_{\mathsf{max}}} 187 | -------------------------------------------------------------------------------- /assignment3/preamble/preamble_math.tex: -------------------------------------------------------------------------------- 1 | % !TEX root = template.tex 2 | 3 | % \DeclareRobustCommand{\mb}[1]{\ensuremath{\boldsymbol{\mathbf{#1}}}} 4 | \DeclareRobustCommand{\mb}[1]{\boldsymbol{#1}} 5 | 6 | % \newcommand{\KL}[2]{\ensuremath{\textrm{KL}\PARENS{#1\;\|\;#2}}} 7 | \DeclareRobustCommand{\KL}[2]{\ensuremath{\textrm{KL}\left(#1\;\|\;#2\right)}} 8 | 9 | \DeclareMathOperator*{\argmax}{arg\,max} 10 | \DeclareMathOperator*{\argmin}{arg\,min} 11 | 12 | \renewcommand{\mid}{~\vert~} 13 | \newcommand{\given}{\,|\,} 14 | \newcommand{\iid}[1]{\stackrel{\text{iid}}{#1}} 15 | 16 | \newcommand{\mba}{\mb{a}} 17 | 
\newcommand{\mbb}{\mb{b}} 18 | \newcommand{\mbc}{\mb{c}} 19 | \newcommand{\mbd}{\mb{d}} 20 | \newcommand{\mbe}{\mb{e}} 21 | % \newcommand{\mbf}{\mb{f}} 22 | \newcommand{\mbg}{\mb{g}} 23 | \newcommand{\mbh}{\mb{h}} 24 | \newcommand{\mbi}{\mb{i}} 25 | \newcommand{\mbj}{\mb{j}} 26 | \newcommand{\mbk}{\mb{k}} 27 | \newcommand{\mbl}{\mb{l}} 28 | \newcommand{\mbm}{\mb{m}} 29 | \newcommand{\mbn}{\mb{n}} 30 | \newcommand{\mbo}{\mb{o}} 31 | \newcommand{\mbp}{\mb{p}} 32 | \newcommand{\mbq}{\mb{q}} 33 | \newcommand{\mbr}{\mb{r}} 34 | \newcommand{\mbs}{\mb{s}} 35 | \newcommand{\mbt}{\mb{t}} 36 | \newcommand{\mbu}{\mb{u}} 37 | \newcommand{\mbv}{\mb{v}} 38 | \newcommand{\mbw}{\mb{w}} 39 | \newcommand{\mbx}{\mb{x}} 40 | \newcommand{\mby}{\mb{y}} 41 | \newcommand{\mbz}{\mb{z}} 42 | 43 | \newcommand{\mbA}{\mb{A}} 44 | \newcommand{\mbB}{\mb{B}} 45 | \newcommand{\mbC}{\mb{C}} 46 | \newcommand{\mbD}{\mb{D}} 47 | \newcommand{\mbE}{\mb{E}} 48 | \newcommand{\mbF}{\mb{F}} 49 | \newcommand{\mbG}{\mb{G}} 50 | \newcommand{\mbH}{\mb{H}} 51 | \newcommand{\mbI}{\mb{I}} 52 | \newcommand{\mbJ}{\mb{J}} 53 | \newcommand{\mbK}{\mb{K}} 54 | \newcommand{\mbL}{\mb{L}} 55 | \newcommand{\mbM}{\mb{M}} 56 | \newcommand{\mbN}{\mb{N}} 57 | \newcommand{\mbO}{\mb{O}} 58 | \newcommand{\mbP}{\mb{P}} 59 | \newcommand{\mbQ}{\mb{Q}} 60 | \newcommand{\mbR}{\mb{R}} 61 | \newcommand{\mbS}{\mb{S}} 62 | \newcommand{\mbT}{\mb{T}} 63 | \newcommand{\mbU}{\mb{U}} 64 | \newcommand{\mbV}{\mb{V}} 65 | \newcommand{\mbW}{\mb{W}} 66 | \newcommand{\mbX}{\mb{X}} 67 | \newcommand{\mbY}{\mb{Y}} 68 | \newcommand{\mbZ}{\mb{Z}} 69 | 70 | \newcommand{\mbalpha}{\mb{\alpha}} 71 | \newcommand{\mbbeta}{\mb{\beta}} 72 | \newcommand{\mbdelta}{\mb{\delta}} 73 | \newcommand{\mbepsilon}{\mb{\epsilon}} 74 | \newcommand{\mbchi}{\mb{\chi}} 75 | \newcommand{\mbeta}{\mb{\eta}} 76 | \newcommand{\mbgamma}{\mb{\gamma}} 77 | \newcommand{\mbiota}{\mb{\iota}} 78 | \newcommand{\mbkappa}{\mb{\kappa}} 79 | \newcommand{\mblambda}{\mb{\lambda}} 80 | 
\newcommand{\mbmu}{\mb{\mu}} 81 | \newcommand{\mbnu}{\mb{\nu}} 82 | \newcommand{\mbomega}{\mb{\omega}} 83 | \newcommand{\mbphi}{\mb{\phi}} 84 | \newcommand{\mbpi}{\mb{\pi}} 85 | \newcommand{\mbpsi}{\mb{\psi}} 86 | \newcommand{\mbrho}{\mb{\rho}} 87 | \newcommand{\mbsigma}{\mb{\sigma}} 88 | \newcommand{\mbtau}{\mb{\tau}} 89 | \newcommand{\mbtheta}{\mb{\theta}} 90 | \newcommand{\mbupsilon}{\mb{\upsilon}} 91 | \newcommand{\mbvarepsilon}{\mb{\varepsilon}} 92 | \newcommand{\mbvarphi}{\mb{\varphi}} 93 | \newcommand{\mbvartheta}{\mb{\vartheta}} 94 | \newcommand{\mbvarrho}{\mb{\varrho}} 95 | \newcommand{\mbxi}{\mb{\xi}} 96 | \newcommand{\mbzeta}{\mb{\zeta}} 97 | 98 | \newcommand{\mbDelta}{\mb{\Delta}} 99 | \newcommand{\mbGamma}{\mb{\Gamma}} 100 | \newcommand{\mbLambda}{\mb{\Lambda}} 101 | \newcommand{\mbOmega}{\mb{\Omega}} 102 | \newcommand{\mbPhi}{\mb{\Phi}} 103 | \newcommand{\mbPi}{\mb{\Pi}} 104 | \newcommand{\mbPsi}{\mb{\Psi}} 105 | \newcommand{\mbSigma}{\mb{\Sigma}} 106 | \newcommand{\mbTheta}{\mb{\Theta}} 107 | \newcommand{\mbUpsilon}{\mb{\Upsilon}} 108 | \newcommand{\mbXi}{\mb{\Xi}} 109 | 110 | \newcommand{\dif}{\mathop{}\!\mathrm{d}} 111 | \newcommand{\diag}{\textrm{diag}} 112 | \newcommand{\supp}{\textrm{supp}} 113 | 114 | \newcommand{\E}{\mathbb{E}} 115 | \newcommand{\Var}{\mathbb{V}\textrm{ar}} 116 | 117 | \newcommand{\bbA}{\mathbb{A}} 118 | \newcommand{\bbB}{\mathbb{B}} 119 | \newcommand{\bbC}{\mathbb{C}} 120 | \newcommand{\bbD}{\mathbb{D}} 121 | \newcommand{\bbE}{\mathbb{E}} 122 | \newcommand{\bbF}{\mathbb{F}} 123 | \newcommand{\bbG}{\mathbb{G}} 124 | \newcommand{\bbH}{\mathbb{H}} 125 | \newcommand{\bbI}{\mathbb{I}} 126 | \newcommand{\bbJ}{\mathbb{J}} 127 | \newcommand{\bbK}{\mathbb{K}} 128 | \newcommand{\bbL}{\mathbb{L}} 129 | \newcommand{\bbM}{\mathbb{M}} 130 | \newcommand{\bbN}{\mathbb{N}} 131 | \newcommand{\bbO}{\mathbb{O}} 132 | \newcommand{\bbP}{\mathbb{P}} 133 | \newcommand{\bbQ}{\mathbb{Q}} 134 | \newcommand{\bbR}{\mathbb{R}} 135 | 
\newcommand{\bbS}{\mathbb{S}} 136 | \newcommand{\bbT}{\mathbb{T}} 137 | \newcommand{\bbU}{\mathbb{U}} 138 | \newcommand{\bbV}{\mathbb{V}} 139 | \newcommand{\bbW}{\mathbb{W}} 140 | \newcommand{\bbX}{\mathbb{X}} 141 | \newcommand{\bbY}{\mathbb{Y}} 142 | \newcommand{\bbZ}{\mathbb{Z}} 143 | 144 | \newcommand{\cA}{\mathcal{A}} 145 | \newcommand{\cB}{\mathcal{B}} 146 | \newcommand{\cC}{\mathcal{C}} 147 | \newcommand{\cD}{\mathcal{D}} 148 | \newcommand{\cE}{\mathcal{E}} 149 | \newcommand{\cF}{\mathcal{F}} 150 | \newcommand{\cG}{\mathcal{G}} 151 | \newcommand{\cH}{\mathcal{H}} 152 | \newcommand{\cI}{\mathcal{I}} 153 | \newcommand{\cJ}{\mathcal{J}} 154 | \newcommand{\cK}{\mathcal{K}} 155 | \newcommand{\cL}{\mathcal{L}} 156 | \newcommand{\cM}{\mathcal{M}} 157 | \newcommand{\cN}{\mathcal{N}} 158 | \newcommand{\cO}{\mathcal{O}} 159 | \newcommand{\cP}{\mathcal{P}} 160 | \newcommand{\cQ}{\mathcal{Q}} 161 | \newcommand{\cR}{\mathcal{R}} 162 | \newcommand{\cS}{\mathcal{S}} 163 | \newcommand{\cT}{\mathcal{T}} 164 | \newcommand{\cU}{\mathcal{U}} 165 | \newcommand{\cV}{\mathcal{V}} 166 | \newcommand{\cW}{\mathcal{W}} 167 | \newcommand{\cX}{\mathcal{X}} 168 | \newcommand{\cY}{\mathcal{Y}} 169 | \newcommand{\cZ}{\mathcal{Z}} 170 | 171 | \newcommand{\trans}{\mathsf{T}} 172 | \newcommand{\naturals}{\mathbb{N}} 173 | \newcommand{\reals}{\mathbb{R}} 174 | 175 | \newcommand{\distNormal}{\mathcal{N}} 176 | \newcommand{\distGamma}{\mathrm{Gamma}} 177 | \newcommand{\distBernoulli}{\mathrm{Bern}} 178 | \newcommand{\distBinomial}{\mathrm{Bin}} 179 | \newcommand{\distCategorical}{\mathrm{Cat}} 180 | \newcommand{\distDirichlet}{\mathrm{Dir}} 181 | \newcommand{\distMultinomial}{\mathrm{Mult}} 182 | \newcommand{\distPolyaGamma}{\mathrm{PG}} 183 | \newcommand{\distMNIW}{\mathrm{MNIW}} 184 | \newcommand{\distPoissonProcess}{\mathrm{PP}} 185 | 186 | \newcommand{\dtmax}{\Delta t_{\mathsf{max}}} 187 | -------------------------------------------------------------------------------- 
/assignment4/preamble/preamble_math.tex: -------------------------------------------------------------------------------- 1 | % !TEX root = template.tex 2 | 3 | % \DeclareRobustCommand{\mb}[1]{\ensuremath{\boldsymbol{\mathbf{#1}}}} 4 | \DeclareRobustCommand{\mb}[1]{\boldsymbol{#1}} 5 | 6 | % \newcommand{\KL}[2]{\ensuremath{\textrm{KL}\PARENS{#1\;\|\;#2}}} 7 | \DeclareRobustCommand{\KL}[2]{\ensuremath{\textrm{KL}\left(#1\;\|\;#2\right)}} 8 | 9 | \DeclareMathOperator*{\argmax}{arg\,max} 10 | \DeclareMathOperator*{\argmin}{arg\,min} 11 | 12 | \renewcommand{\mid}{~\vert~} 13 | \newcommand{\given}{\,|\,} 14 | \newcommand{\iid}[1]{\stackrel{\text{iid}}{#1}} 15 | 16 | \newcommand{\mba}{\mb{a}} 17 | \newcommand{\mbb}{\mb{b}} 18 | \newcommand{\mbc}{\mb{c}} 19 | \newcommand{\mbd}{\mb{d}} 20 | \newcommand{\mbe}{\mb{e}} 21 | % \newcommand{\mbf}{\mb{f}} 22 | \newcommand{\mbg}{\mb{g}} 23 | \newcommand{\mbh}{\mb{h}} 24 | \newcommand{\mbi}{\mb{i}} 25 | \newcommand{\mbj}{\mb{j}} 26 | \newcommand{\mbk}{\mb{k}} 27 | \newcommand{\mbl}{\mb{l}} 28 | \newcommand{\mbm}{\mb{m}} 29 | \newcommand{\mbn}{\mb{n}} 30 | \newcommand{\mbo}{\mb{o}} 31 | \newcommand{\mbp}{\mb{p}} 32 | \newcommand{\mbq}{\mb{q}} 33 | \newcommand{\mbr}{\mb{r}} 34 | \newcommand{\mbs}{\mb{s}} 35 | \newcommand{\mbt}{\mb{t}} 36 | \newcommand{\mbu}{\mb{u}} 37 | \newcommand{\mbv}{\mb{v}} 38 | \newcommand{\mbw}{\mb{w}} 39 | \newcommand{\mbx}{\mb{x}} 40 | \newcommand{\mby}{\mb{y}} 41 | \newcommand{\mbz}{\mb{z}} 42 | 43 | \newcommand{\mbA}{\mb{A}} 44 | \newcommand{\mbB}{\mb{B}} 45 | \newcommand{\mbC}{\mb{C}} 46 | \newcommand{\mbD}{\mb{D}} 47 | \newcommand{\mbE}{\mb{E}} 48 | \newcommand{\mbF}{\mb{F}} 49 | \newcommand{\mbG}{\mb{G}} 50 | \newcommand{\mbH}{\mb{H}} 51 | \newcommand{\mbI}{\mb{I}} 52 | \newcommand{\mbJ}{\mb{J}} 53 | \newcommand{\mbK}{\mb{K}} 54 | \newcommand{\mbL}{\mb{L}} 55 | \newcommand{\mbM}{\mb{M}} 56 | \newcommand{\mbN}{\mb{N}} 57 | \newcommand{\mbO}{\mb{O}} 58 | \newcommand{\mbP}{\mb{P}} 59 | 
\newcommand{\mbQ}{\mb{Q}} 60 | \newcommand{\mbR}{\mb{R}} 61 | \newcommand{\mbS}{\mb{S}} 62 | \newcommand{\mbT}{\mb{T}} 63 | \newcommand{\mbU}{\mb{U}} 64 | \newcommand{\mbV}{\mb{V}} 65 | \newcommand{\mbW}{\mb{W}} 66 | \newcommand{\mbX}{\mb{X}} 67 | \newcommand{\mbY}{\mb{Y}} 68 | \newcommand{\mbZ}{\mb{Z}} 69 | 70 | \newcommand{\mbalpha}{\mb{\alpha}} 71 | \newcommand{\mbbeta}{\mb{\beta}} 72 | \newcommand{\mbdelta}{\mb{\delta}} 73 | \newcommand{\mbepsilon}{\mb{\epsilon}} 74 | \newcommand{\mbchi}{\mb{\chi}} 75 | \newcommand{\mbeta}{\mb{\eta}} 76 | \newcommand{\mbgamma}{\mb{\gamma}} 77 | \newcommand{\mbiota}{\mb{\iota}} 78 | \newcommand{\mbkappa}{\mb{\kappa}} 79 | \newcommand{\mblambda}{\mb{\lambda}} 80 | \newcommand{\mbmu}{\mb{\mu}} 81 | \newcommand{\mbnu}{\mb{\nu}} 82 | \newcommand{\mbomega}{\mb{\omega}} 83 | \newcommand{\mbphi}{\mb{\phi}} 84 | \newcommand{\mbpi}{\mb{\pi}} 85 | \newcommand{\mbpsi}{\mb{\psi}} 86 | \newcommand{\mbrho}{\mb{\rho}} 87 | \newcommand{\mbsigma}{\mb{\sigma}} 88 | \newcommand{\mbtau}{\mb{\tau}} 89 | \newcommand{\mbtheta}{\mb{\theta}} 90 | \newcommand{\mbupsilon}{\mb{\upsilon}} 91 | \newcommand{\mbvarepsilon}{\mb{\varepsilon}} 92 | \newcommand{\mbvarphi}{\mb{\varphi}} 93 | \newcommand{\mbvartheta}{\mb{\vartheta}} 94 | \newcommand{\mbvarrho}{\mb{\varrho}} 95 | \newcommand{\mbxi}{\mb{\xi}} 96 | \newcommand{\mbzeta}{\mb{\zeta}} 97 | 98 | \newcommand{\mbDelta}{\mb{\Delta}} 99 | \newcommand{\mbGamma}{\mb{\Gamma}} 100 | \newcommand{\mbLambda}{\mb{\Lambda}} 101 | \newcommand{\mbOmega}{\mb{\Omega}} 102 | \newcommand{\mbPhi}{\mb{\Phi}} 103 | \newcommand{\mbPi}{\mb{\Pi}} 104 | \newcommand{\mbPsi}{\mb{\Psi}} 105 | \newcommand{\mbSigma}{\mb{\Sigma}} 106 | \newcommand{\mbTheta}{\mb{\Theta}} 107 | \newcommand{\mbUpsilon}{\mb{\Upsilon}} 108 | \newcommand{\mbXi}{\mb{\Xi}} 109 | 110 | \newcommand{\dif}{\mathop{}\!\mathrm{d}} 111 | \newcommand{\diag}{\textrm{diag}} 112 | \newcommand{\supp}{\textrm{supp}} 113 | 114 | \newcommand{\E}{\mathbb{E}} 115 | 
\newcommand{\Var}{\mathbb{V}\textrm{ar}} 116 | 117 | \newcommand{\bbA}{\mathbb{A}} 118 | \newcommand{\bbB}{\mathbb{B}} 119 | \newcommand{\bbC}{\mathbb{C}} 120 | \newcommand{\bbD}{\mathbb{D}} 121 | \newcommand{\bbE}{\mathbb{E}} 122 | \newcommand{\bbF}{\mathbb{F}} 123 | \newcommand{\bbG}{\mathbb{G}} 124 | \newcommand{\bbH}{\mathbb{H}} 125 | \newcommand{\bbI}{\mathbb{I}} 126 | \newcommand{\bbJ}{\mathbb{J}} 127 | \newcommand{\bbK}{\mathbb{K}} 128 | \newcommand{\bbL}{\mathbb{L}} 129 | \newcommand{\bbM}{\mathbb{M}} 130 | \newcommand{\bbN}{\mathbb{N}} 131 | \newcommand{\bbO}{\mathbb{O}} 132 | \newcommand{\bbP}{\mathbb{P}} 133 | \newcommand{\bbQ}{\mathbb{Q}} 134 | \newcommand{\bbR}{\mathbb{R}} 135 | \newcommand{\bbS}{\mathbb{S}} 136 | \newcommand{\bbT}{\mathbb{T}} 137 | \newcommand{\bbU}{\mathbb{U}} 138 | \newcommand{\bbV}{\mathbb{V}} 139 | \newcommand{\bbW}{\mathbb{W}} 140 | \newcommand{\bbX}{\mathbb{X}} 141 | \newcommand{\bbY}{\mathbb{Y}} 142 | \newcommand{\bbZ}{\mathbb{Z}} 143 | 144 | \newcommand{\cA}{\mathcal{A}} 145 | \newcommand{\cB}{\mathcal{B}} 146 | \newcommand{\cC}{\mathcal{C}} 147 | \newcommand{\cD}{\mathcal{D}} 148 | \newcommand{\cE}{\mathcal{E}} 149 | \newcommand{\cF}{\mathcal{F}} 150 | \newcommand{\cG}{\mathcal{G}} 151 | \newcommand{\cH}{\mathcal{H}} 152 | \newcommand{\cI}{\mathcal{I}} 153 | \newcommand{\cJ}{\mathcal{J}} 154 | \newcommand{\cK}{\mathcal{K}} 155 | \newcommand{\cL}{\mathcal{L}} 156 | \newcommand{\cM}{\mathcal{M}} 157 | \newcommand{\cN}{\mathcal{N}} 158 | \newcommand{\cO}{\mathcal{O}} 159 | \newcommand{\cP}{\mathcal{P}} 160 | \newcommand{\cQ}{\mathcal{Q}} 161 | \newcommand{\cR}{\mathcal{R}} 162 | \newcommand{\cS}{\mathcal{S}} 163 | \newcommand{\cT}{\mathcal{T}} 164 | \newcommand{\cU}{\mathcal{U}} 165 | \newcommand{\cV}{\mathcal{V}} 166 | \newcommand{\cW}{\mathcal{W}} 167 | \newcommand{\cX}{\mathcal{X}} 168 | \newcommand{\cY}{\mathcal{Y}} 169 | \newcommand{\cZ}{\mathcal{Z}} 170 | 171 | \newcommand{\trans}{\mathsf{T}} 172 | 
\newcommand{\naturals}{\mathbb{N}} 173 | \newcommand{\reals}{\mathbb{R}} 174 | 175 | \newcommand{\distNormal}{\mathcal{N}} 176 | \newcommand{\distGamma}{\mathrm{Gamma}} 177 | \newcommand{\distBernoulli}{\mathrm{Bern}} 178 | \newcommand{\distBinomial}{\mathrm{Bin}} 179 | \newcommand{\distCategorical}{\mathrm{Cat}} 180 | \newcommand{\distDirichlet}{\mathrm{Dir}} 181 | \newcommand{\distMultinomial}{\mathrm{Mult}} 182 | \newcommand{\distPolyaGamma}{\mathrm{PG}} 183 | \newcommand{\distMNIW}{\mathrm{MNIW}} 184 | \newcommand{\distPoissonProcess}{\mathrm{PP}} 185 | 186 | \newcommand{\dtmax}{\Delta t_{\mathsf{max}}} 187 | -------------------------------------------------------------------------------- /assignment1/yourname_assignment1.tex: -------------------------------------------------------------------------------- 1 | \documentclass[11pt]{article} 2 | 3 | \input{preamble/preamble.tex} 4 | \input{preamble/preamble_math.tex} 5 | \input{preamble/preamble_acronyms.tex} 6 | 7 | \title{STAT215: Assignment 1} 8 | % \author{Your Name Here} 9 | \date{Due: January 30, 2020 at 11:59pm PT} 10 | 11 | \begin{document} 12 | 13 | \maketitle 14 | 15 | \textbf{Problem 1:} \textit{The negative binomial distribution.} 16 | 17 | Consider a coin with probability~$p$ of coming up heads. The number of coin flips before seeing a `tails' follows a geometric distribution with pmf 18 | \begin{align*} 19 | \Pr(X=k; p) &= p^k \, (1-p). 20 | \end{align*} 21 | The number of coin flips before seeing~$r$ tails follows a \emph{negative binomial} distribution with parameters~$r$ and~$p$. 22 | 23 | \begin{enumerate}[label=(\alph*)] 24 | \item Derive the probability mass function~$\Pr(X=k; r, p)$ of the negative binomial distribution. Explain your reasoning. 25 | 26 | \item The geometric distribution has mean~$p / (1-p)$ and variance~$p / (1-p)^2$. Compute the mean and variance of the negative binomial distribution. Plot the variance as a function of the mean for fixed~$p$ and varying~$r$. 
How does this compare to the Poisson distribution? 27 | 28 | \item Rewrite the negative binomial pmf in terms of the mean~$\mu$ and the dispersion parameter~$r$. Show that as~$r \to \infty$ with~$\mu$ fixed, the negative binomial converges to a Poisson distribution with mean~$\mu$. 29 | 30 | \item The gamma distribution is a continuous distribution on~$(0, \infty)$ with pdf 31 | \begin{align*} 32 | p(x; \alpha, \beta) &= \frac{\beta^\alpha}{\Gamma(\alpha)} x^{\alpha -1} e^{-\beta x}, 33 | \end{align*} 34 | where~$\Gamma(\cdot)$ denotes the gamma function, which has the property that~$\Gamma(n) = (n-1)!$ for positive integers~$n$. Show that the negative binomial is the marginal distribution over~$X$ where~${X \sim \mathrm{Poisson}(\mu)}$ and~${\mu \sim \mathrm{Gamma}(r, (1-p)/p )}$, integrating over~$\mu$. In other words, show that the negative binomial is equivalent to an infinite mixture of Poissons with gamma mixing measure. 35 | 36 | \item Suppose~$X_n \sim \mathrm{NB}(r, p)$ for~$n=1, \ldots, N$ are independent samples of a negative binomial distribution. Write the log likelihood~$\cL(r, p)$. Solve for the maximum likelihood estimate (in closed form) of~$\hat{p}$ for fixed~$r$. Plug this into the log likelihood to obtain the profile likelihood~$\cL(r, \hat{p}(r))$ as a function of~$r$ alone. 37 | 38 | \end{enumerate} 39 | 40 | \clearpage 41 | 42 | 43 | \textbf{Problem 2:} \textit{The multivariate normal distribution.} 44 | 45 | \begin{enumerate}[label=(\alph*)] 46 | 47 | \item In class we introduced a multivariate Gaussian distribution via its representation as a linear transformation~$x = Az + \mu$ where~$z$ is a vector of independent standard normal random variates. 
Using the change of variables formula, derive the multivariate Gaussian pdf, 48 | \begin{align*} 49 | p(x; \mu, \Sigma) &= (2 \pi)^{-D/2} |\Sigma|^{-1/2} \exp \left\{ -\frac{1}{2} (x - \mu)^\trans \Sigma^{-1} (x- \mu) \right\}, 50 | \end{align*} 51 | where~$\mu \in \reals^D$ and $\Sigma = AA^\top \in \reals^{D \times D}$ is a positive semi-definite covariance matrix. 52 | 53 | \item Let~$r = \|z\|_2 = (\sum_{d=1}^D z_d^2)^{1/2}$ where~$z$ is a vector of standard normal variates, as above. We will derive its density function. 54 | \begin{enumerate}[label=(\roman*)] 55 | \item Start by considering the~$D=2$ dimensional case and note that~$p(r) \, \mathrm{d}r$ equals the probability mass assigned by the multivariate normal distribution to the infinitesimal shell at radius~$r$ from the origin. 56 | 57 | \item Generalize your solution to $D > 2$ dimensions, using the fact that the surface area of the $D$-dimensional ball with radius $r$ is $2r^{D-1} \pi^{D/2} / \Gamma(D/2)$. 58 | 59 | \item Plot this density for increasing values of dimension~$D$. What does this tell you about the distribution of high dimensional Gaussian vectors? 60 | \item Now use another change of variables to derive the pdf of~$r^2$, the sum of squares of the Gaussian variables. The squared 2-norm follows a $\chi^2$ distribution with $D$ degrees of freedom. Show that it is a special case of the gamma distribution introduced in Problem 1. 61 | 62 | \end{enumerate} 63 | 64 | 65 | \item Rewrite the multivariate Gaussian density in natural exponential family form with parameters~$J$ and $h$. How do its natural parameters relate to its mean parameters~$\mu$ and~$\Sigma$? What are the sufficient statistics of this exponential family distribution? What is the log normalizer? Show that the derivatives of the log normalizer yield the expected sufficient statistics. 66 | 67 | \item Consider a directed graphical model on a collection of scalar random variables $(x_1, \ldots, x_D)$.
Assume that each variable $x_d$ for~$d > 1$ has exactly one parent in the directed graphical model, and let the index of the parent of~$x_d$ be denoted by~$\mathsf{par}_d \in \{1, \ldots, d-1\}$. The joint distribution is then given by, 68 | \begin{align*} 69 | x_1 &\sim \cN(0, \beta^{-1}), \\ 70 | x_{d} &\sim \cN(x_{\mathsf{par}_d} + b_d, \beta^{-1}) \qquad \text{ for } d=2, \ldots, D. 71 | \end{align*} 72 | The parameters of the model are~$\beta, \{b_d\}_{d=2}^D$. Show that the joint distribution is a multivariate Gaussian and find a closed form expression for the precision matrix, $J$. How does the precision matrix change in the two-dimensional model where each~$x_d \in \reals^2$, $\beta^{-1}$ is replaced by $\beta^{-1}I$, and $b_d \in \reals^2$? 73 | 74 | \end{enumerate} 75 | 76 | \clearpage 77 | 78 | \textbf{Problem 3:} \textit{Bayesian linear regression.} 79 | 80 | Consider a regression problem with datapoints~$(x_n, y_n) \in \reals^{D} \times \reals$. We begin with a linear model, 81 | \begin{align*} 82 | y_n = w^\trans x_n + \epsilon_n; \quad \epsilon_n \sim \cN(0, \beta^{-1}), 83 | \end{align*} 84 | where~$w \in \reals^{D}$ is a vector of regression weights and~$\beta \in \reals_+$ specifies the precision (inverse variance) of the errors~$\epsilon_n$. 85 | 86 | \begin{enumerate}[label=(\alph*)] 87 | 88 | \item Assume a standard normal prior $w_i \sim \cN(0, \alpha^{-1})$. Compute the marginal likelihood 89 | \begin{align*} 90 | p(\{x_n, y_n\}_{n=1}^N; \alpha, \beta) &= \int p(w; \alpha) \, p(\{(x_n, y_n)\}_{n=1}^N \mid w; \beta) \, \mathrm{d}w. 91 | \end{align*} 92 | 93 | \item Now consider a ``spike-and-slab'' prior distribution on the entries of~$w$. Let~$z \in \{0, 1\}^{D}$ be a binary vector specifying whether the corresponding entries in~$w$ are nonzero. That is, if~$z_{i}=0$ then~$w_{i}$ is deterministically zero; otherwise,~$w_{i} \sim \cN(0, \alpha^{-1})$ as above.
We can write this as a degenerate Gaussian prior 94 | \begin{align*} 95 | p(w \mid z) &= \prod_{i=1}^{D} \cN(w_{i} \mid 0, z_{i} \alpha^{-1}). 96 | \end{align*} 97 | Compute the marginal likelihood~$p(\{(x_n, y_n)\}_{n=1}^N \mid z, \alpha, \beta)$. How would you find the value of~$z$ that maximizes this likelihood? 98 | 99 | \item Suppose that each datapoint has its own precision~$\beta_n$. Compute the posterior distribution 100 | \begin{align*} 101 | p(w \mid \{(x_n, y_n, \beta_n)\}_{n=1}^N, \alpha). 102 | \end{align*} 103 | How does the posterior mean compare to the ordinary least squares estimate? 104 | 105 | \item Finally, assume the per-datapoint precisions~$\beta_n$ are not directly observed, but are assumed to be independently sampled from a gamma prior distribution, 106 | \begin{align*} 107 | \beta_n &\sim \mathrm{Gamma}(a, b), 108 | \end{align*} 109 | which has the property that~$\E[\beta_n] = a/b $ and $\E[\ln \beta_n] = \psi(a) - \ln b$ where~$\psi$ is the digamma function. Then, the errors~$\epsilon_n$ are marginally distributed according to the Student's t distribution, which has heavier tails than the Gaussian and hence is more robust to outliers. 110 | 111 | Compute the conditional distribution $p(\beta_n \mid x_n, y_n, w, a, b)$, and compute the expected log joint 112 | \begin{align*} 113 | \cL(w') &= \E_{p(\beta_n \,|\, x_n, y_n, w, a, b)} \left[ \log p(\{(x_n, y_n, \beta_n)\}_{n=1}^N, w'; \alpha, a, b) \right]. 114 | \end{align*} 115 | What value of~$w$ maximizes the expected log joint probability? Describe an EM procedure to search for, 116 | \begin{align*} 117 | w^* &= \argmax p(w \mid \{(x_n, y_n)\}_{n=1}^N, \alpha, a, b). 
118 | \end{align*} 119 | 120 | \end{enumerate} 121 | 122 | \clearpage 123 | 124 | \textbf{Problem 4:} \textit{Multiclass logistic regression applied to larval zebrafish behavior data.} 125 | 126 | Follow the instructions in this Google Colab notebook to implement a multiclass logistic regression model and fit it to larval zebrafish behavior data from a recent paper: 127 | \url{https://colab.research.google.com/drive/1moN5CYNsyxeOSUOmN-QMyqEZwgLSBsjY}. Once you're done, save the notebook in \texttt{.ipynb} format, print a copy in \texttt{.pdf} format, 128 | and submit these files along with the rest of your written assignment. 129 | 130 | 131 | \end{document} 132 | -------------------------------------------------------------------------------- /assignment3/yourname_assignment3.tex: -------------------------------------------------------------------------------- 1 | \documentclass[11pt]{article} 2 | 3 | \input{preamble/preamble.tex} 4 | \input{preamble/preamble_math.tex} 5 | \input{preamble/preamble_acronyms.tex} 6 | 7 | \title{STAT215: Assignment 3} 8 | \author{Your Name} 9 | \date{Due: March 3, 2020} 10 | 11 | \begin{document} 12 | 13 | \maketitle 14 | 15 | \textbf{Problem 1:} \textit{Variational inference.} 16 | 17 | Standard VI minimizes $\KL{q(z)}{p(z \mid x)}$, the Kullback-Leibler divergence from the variational approximation $q(z)$ to the true posterior $p(z \mid x)$. In this problem we will develop some intuition for this optimization problem. For further reference, see Chapter 10 of \textit{Pattern Recognition and Machine Learning} by Bishop. 18 | 19 | \begin{enumerate}[label=(\alph*)] 20 | \item Let $\cQ = \{q(z): q(z) = \prod_{d=1}^D \cN(z_d \mid m_d, v_d^2)\}$ denote the set of Gaussian densities on $z \in \reals^D$ with diagonal covariance matrices. Solve for 21 | \begin{align*} 22 | q^\star &= \argmin_\cQ \KL{q(z)}{\cN(z \mid \mu, \Sigma)}, 23 | \end{align*} 24 | where $\Sigma$ is an arbitrary covariance matrix. 
25 | 26 | \begin{solution} 27 | Your answer here. 28 | \end{solution} 29 | 30 | \item Now solve for $q^\star \in \cQ$ that minimizes the KL in the opposite direction, 31 | \begin{align*} 32 | q^\star &= \argmin_\cQ \KL{\cN(z \mid \mu, \Sigma)}{q(z)}. 33 | \end{align*} 34 | 35 | \begin{solution} 36 | Your answer here. 37 | \end{solution} 38 | 39 | \item Plot the contour lines of your solutions to parts (a) and (b) for the case where 40 | \begin{align*} 41 | \mu = \begin{bmatrix}0 \\ 0 \end{bmatrix}, \qquad 42 | \Sigma = \begin{bmatrix} 1 & 0.9 \\ 0.9 & 1 \end{bmatrix}. 43 | \end{align*} 44 | 45 | \end{enumerate} 46 | 47 | 48 | 49 | \clearpage 50 | 51 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 52 | \textbf{Problem 2:} \textit{Variational autoencoders (VAE's)} 53 | 54 | In class we derived VAE's as generative models~$p(x, z; \theta)$ of observations~$x \in \reals^P$ and latent variables~$z \in \reals^D$, with parameters~$\theta$. We used variational expectation-maximization to learn the parameters~$\theta$ that maximize a lower bound on the marginal likelihood, 55 | \begin{align*} 56 | \log p(x; \theta) \geq \sum_{n=1}^N \bbE_{q(z_n | x_n, \phi)}\left[\log p(x_n, z_n; \theta) - \log q(z_n \mid x_n, \phi) \right] \triangleq \cL(\theta, \phi). 57 | \end{align*} 58 | The difference between VAE's and regular variational expectation-maximization is that we constrained the variational distribution $q(z \mid x, \phi)$ to be a parametric function of the data; for example, we considered, 59 | \begin{align*} 60 | q(z_n \mid x_n, \phi) &= \cN \left(z_n \mid \mu(x_n; \phi), \mathrm{diag}([\sigma_1^2(x_n; \phi), \ldots, \sigma_D^2(x_n; \phi)]) \right), 61 | \end{align*} 62 | where~$\mu: \reals^P \to \reals^D$ and $\sigma_d^2: \reals^P \to \reals_+$ are functions parameterized by~$\phi$ that take in a datapoint~$x_n$ and output means and variances of~$z_n$, respectively. In practice, it is common to implement these functions with neural networks. 
Here we will study VAE's in some special cases. For further reference, see Kingma and Welling (2019), which is linked on the course website. 63 | 64 | \begin{enumerate}[label=(\alph*)] 65 | \item Consider the linear Gaussian factor model, 66 | \begin{align*} 67 | p(x_n, z_n; \theta) &= \cN(z_n \mid 0, I) \, \cN(x_n \mid A z_n, \, V), 68 | \end{align*} 69 | where~$A \in \reals^{P \times D}$, $V \in \reals^{P \times P}$ is a diagonal, positive definite matrix, and~$\theta = (A, V)$. Solve for the true posterior~$p(z_n \mid x_n, \theta)$. 70 | 71 | \begin{solution} 72 | Your answer here. 73 | \end{solution} 74 | 75 | \item Consider the variational family of Gaussian densities with diagonal covariance, as described above, and assume that~$\mu(x; \phi)$ and $\log \sigma_d^2(x; \phi)$ are linear functions of~$x$. Does this family contain the true posterior? Find the member of this variational family that maximizes~$\cL(\theta, \phi)$ for fixed $\theta$. (Hint: use your answer to Problem 1a.) 76 | 77 | \begin{solution} 78 | Your answer here. 79 | \end{solution} 80 | 81 | \item Now consider a simple nonlinear factor model, 82 | \begin{align*} 83 | p(x_n, z_n; \theta) &= \cN(z_n \mid 0, I) \, \prod_{p=1}^P \cN(x_{np} \mid e^{a_p^\trans z_n}, v_p), 84 | \end{align*} 85 | parameterized by~$a_p \in \reals^D$ and~$v_p \in \reals_+$. The posterior is no longer Gaussian, since the mean of~$x_{np}$ is a nonlinear function of the latent variable.\footnote{For this particular model, the expectations in~$\cL(\theta, \phi)$ can still be computed in closed form using the fact that~$\bbE[e^z] = e^{\mu + \frac{1}{2}\sigma^2}$ for $z \sim \cN(\mu, \sigma^2)$.} 86 | 87 | Generate a synthetic dataset by sampling $N=1000$ datapoints from a $D=1$, $P=2$ dimensional model with~$A = [1.2, 1]^\trans$ and~$v_p = 0.1$ for $p=1,2$. Use the reparameterization trick and automatic differentiation to perform stochastic gradient descent on~$-\cL(\theta, \phi)$.
88 | 89 | Make the following plots: 90 | \begin{itemize} 91 | \item A scatter plot of your simulated data (with equal axis limits). 92 | \item A plot of $\cL(\theta, \phi)$ as a function of SGD iteration. 93 | \item A plot of the model parameters~$(A_{11}, A_{21}, v_1, v_2)$ as a function of SGD iteration. 94 | \item The approximate Gaussian posterior with mean $\mu(x; \phi)$ and variance~$\sigma_1^2(x; \phi)$ for $x \in \{(0, 0), (1, 1), (10, 7)\}$ using the learned parameters~$\phi$. 95 | \item The true posterior at those points. (Since $z$ is one dimensional, you can compute the true posterior with numerical integration.) 96 | \end{itemize} 97 | Comment on your results. 98 | 99 | \begin{solution} 100 | Your results here. 101 | \end{solution} 102 | 103 | \end{enumerate} 104 | 105 | \clearpage 106 | 107 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 108 | \textbf{Problem 3:} \textit{Semi-Markov models} 109 | 110 | Consider a Markov model as described in class and in, for example, Chapter 13 of \textit{Pattern Recognition and Machine Learning} by Bishop, 111 | \begin{align*} 112 | p(z_{1:T} \mid \pi, A) &= p(z_1 \mid \pi) \prod_{t=2}^T p(z_t \mid z_{t-1}, A), 113 | \end{align*} 114 | where~$z_t \in \{1, \ldots, K\}$ denotes the ``state,'' and 115 | \begin{align*} 116 | p(z_1 = i) &= \pi_i \\ 117 | p(z_t = j \mid z_{t-1} = i, A) &= A_{ij}. 118 | \end{align*} 119 | We will study the distribution of state durations---the length of time spent in a state before transitioning. Let $d \geq 1$ denote the number of time steps before a transition out of state $z_1$. That is, $z_1 = i, \ldots, z_{d}=i$ for some~$i$, but $z_{d+1} \neq i$. 120 | 121 | \begin{enumerate}[label=(\alph*)] 122 | \item Show that $p(d \mid z_1=i, A) = \mathrm{Geom}(d \mid p_i)$, the probability mass function of the geometric distribution. Solve for the parameter~$p_i$ as a function of the transition matrix~$A$. 123 | 124 | \begin{solution} 125 | Your answer here.
126 | \end{solution} 127 | 128 | \item We can equivalently represent $z_{1:T}$ as a set of states and durations~$\{(\tilde{z}_n, d_n)\}_{n=1}^N$, where $\tilde{z}_n \in \{1, \ldots, K\} \setminus \{\tilde{z}_{n-1}\}$ denotes the index of the $n$-th visited state and $d_n \in \naturals$ denotes the duration spent in that state before transition. There is a one-to-one mapping between states/durations and the original state sequence: 129 | \begin{align*} 130 | (z_1, \ldots, z_T) &= (\underbrace{\tilde{z}_1, \ldots, \tilde{z}_1}_{d_1\,\mathrm{times}}, \underbrace{\tilde{z}_2, \ldots, \tilde{z}_2}_{d_2\,\mathrm{times}}, \ldots \underbrace{\tilde{z}_N, \ldots, \tilde{z}_N}_{d_N\,\mathrm{times}}). 131 | \end{align*} 132 | Show that the probability mass function of the states and durations is of the form 133 | \begin{align*} 134 | p(\{(\tilde{z}_n, d_n)\}_{n=1}^N) &= p(\tilde{z}_1 \mid \pi) \left[ \prod_{n=1}^{N-1} p(d_n \mid \tilde{z}_n, A) \, p(\tilde{z}_{n+1} \mid \tilde{z}_n, A) \right] p(d_N \mid \tilde{z}_N, A), 135 | \end{align*} 136 | and derive each conditional probability mass function. 137 | 138 | \begin{solution} 139 | Your answer here. 140 | \end{solution} 141 | 142 | \item \emph{Semi-Markov} models replace $p(d_n \mid \tilde{z}_n)$ with a more flexible duration distribution. For example, consider the model, 143 | \begin{align*} 144 | p(d_n \mid \tilde{z}_n) &= \mathrm{NB}(d_n \mid r, \theta_{\tilde{z}_n}), 145 | \end{align*} 146 | where $r \in \naturals$ and $\theta_k \in [0, 1]$ for $k=1,\ldots,K$. Recall from Assignment 1 that the negative binomial distribution with integer~$r$ is equivalent to a sum of~$r$ geometric random variables. Use this equivalence to write the semi-Markov model with negative binomial durations as a Markov model on an extended set of states~$s_n \in \{1, \ldots, Kr\}$. Specifically, write the transition matrix for $p(s_n \mid s_{n-1})$ and the mapping from $s_n$ to $z_n$. 147 | 148 | \begin{solution} 149 | Your answer here. 
150 | \end{solution} 151 | 152 | \end{enumerate} 153 | 154 | 155 | \end{document} -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Statistical Models in Biology 2 | 3 | STATS215 · Winter 2020 · Stanford University 4 | 5 | ## Course Description 6 | This course is about probabilistic models in biology and the statistical inference algorithms necessary to fit them to data. We will cover some of the most important tools for modeling biological data, including latent variable models, hidden Markov models, dynamical systems, Poisson processes, and recent extensions like variational autoencoders and recurrent neural networks. We will study algorithms for parameter estimation and inference, and we will apply these tools to a variety of problems across biology, with a particular emphasis on applications in neuroscience. In your homework assignments and final project, you will implement these models and algorithms and apply them to real data. 7 | 8 | ## Prerequisites 9 | You should be comfortable with the basics of probability at the level of STAT116, including random variables, joint densities, conditional distributions, etc. You should also be familiar with linear regression and maximum likelihood estimation, for example from STAT200. We will use basic linear algebra (solving linear systems and using eigendecompositions) and multivariable calculus (gradients, Jacobians, Hessians, etc.). 
10 | 11 | ## Logistics 12 | * Instructor: [Scott Linderman](http://slinderman.web.stanford.edu/) 13 | * Teaching Assistant: [Kevin Han](mailto:kevinwh@stanford.edu) 14 | * Lectures: Tuesday and Thursday, 12-1:20pm 15 | * Location: 540-108 16 | * Office Hours: 17 | * Scott: Tuesday 2-3pm Sequoia Hall Rm 232 18 | * Kevin: Monday 3-5pm Sequoia Hall Library 19 | 20 | ## Grading 21 | * 4 Assignments: 15% each 22 | * Midterm Exam: 10% 23 | * Final Project Proposal: 5% 24 | * Final Project Report: 20% 25 | * In-class Participation: 5% 26 | 27 | *Late policy*: All assignments are due at 11:59pm PT on the due date. You can turn in homework assignments up to a week late with a 50% penalty on your grade. Final project reports will not be accepted after the deadline. 28 | 29 | *Midterm*: The midterm will be given in class. 30 | 31 | ## Assignments 32 | Assignments consist of math problems and coding problems. You will find a LaTeX template for your write-up in the GitHub folder. Submit a pdf of your write-up and an .ipynb and .pdf of the Google Colab notebook with your completed coding assignment. All submissions should be made through Canvas. 33 | 34 | * [Assignment 1](https://github.com/slinderman/stats215/tree/master/assignment1) 35 | * [Assignment 2](https://github.com/slinderman/stats215/tree/master/assignment2) 36 | * [Assignment 3](https://github.com/slinderman/stats215/tree/master/assignment3) 37 | * [Assignment 4](https://github.com/slinderman/stats215/tree/master/assignment4) 38 | 39 | ## Final project 40 | 41 | The final project is an opportunity to apply these models and algorithms to datasets of interest to you. Ideally, your project will involve some theory – extending a model to fit the needs of your particular problem and studying its properties – and some experimentation – fitting the model to biological data. 42 | 43 | *Proposal*: Start thinking about what datasets you'd like to study and what questions you'd like to answer early! 
These will inform your choices about modeling tools. As a forcing function, part of your grade will be based on your proposal, which is due roughly a month before the final report. By this point, you should have a pretty clear idea about the dataset and question, and some initial thoughts about the types of models you will explore and experiments you will run. 44 | 45 | *Report*: The final report will present your theoretical work and experimental results. It should look like the start of a research paper. To that end, you will write it in the NeurIPS paper format, and you will submit a link to a GitHub repository with supporting code. 46 | 47 | ## Readings 48 | The readings are meant to supplement lecture with further details and show examples of how different models are being used in biology. I've listed multiple technical readings and applications – you do **not** have to read all of them. You may find it helpful to see the same concepts presented in different ways, or you may find that you like some author's style better than others. Likewise, I've listed multiple references for many biological applications; I will reference some of these in class, and if you're curious you can dig into the others as well. 49 | 50 | # Anticipated Schedule 51 | 52 | | Lecture | Date | Topic | Technical Readings | Applications | 53 | | :------:|:------:|-------|--------------------|--------------| 54 | | 1 | Jan 7 | Probability Review | Bishop (2006) Ch 2
Murphy (2013) Ch 2| | 55 | | 2 | Jan 9 | Graphical Models, Learning, and Inference | Bishop (2006) Ch 8
Barber (2010) Ch 3 and 4
Wainwright and Jordan (2008) Ch 2| Gene Interaction Networks: [Friedman et al (2000)](http://research.cs.queensu.ca/home/shatkay/490papers/FriedmanBayesianNetworks2000.pdf) | 56 | | 3 | Jan 14 | Bayesian Linear Regression | Bishop (2006) Ch 3 and 4
Hastie et al (2009) Ch 3 and 4
Murphy (2013) Ch 7-9 | GWAS: [Visscher et al (2017)](https://www.sciencedirect.com/science/article/pii/S0002929717302409) · [Hilary Finucane's Notes](https://www.dropbox.com/s/eoq94zkzowwkths/Linear_models_for_GWAS_Part_1.pdf?dl=0) · [Kang et al (2010)](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3092069/) · [Lippert et al (2011)](https://www.nature.com/articles/nmeth.1681) and [Supp](https://static-content.springer.com/esm/art%3A10.1038%2Fnmeth.1681/MediaObjects/41592_2011_BFnmeth1681_MOESM290_ESM.pdf)| 57 | | 4 | Jan 16 | Logistic Regression
**HW1 Out** | Goodfellow et al (2016) Ch 6, 8, 9 | Predicting retinal responses: [Pillow et al (2008)](https://www.ncbi.nlm.nih.gov/pubmed/18650810) · [McIntosh et al (2016)](https://papers.nips.cc/paper/6388-deep-learning-models-of-the-retinal-response-to-natural-scenes.pdf) · [Batty et al (2017)](https://openreview.net/forum?id=HkEI22jeg) | 58 | | 5 | Jan 21 | Generalized Linear Models and Exp. Families | Murphy (2013) Ch 9
[demo notebook](https://colab.research.google.com/drive/1tHc5S11azB-hJEeXZMVBWKN2TJ26B9N_) | Predicting retinal responses: [Pillow et al (2008)](https://www.ncbi.nlm.nih.gov/pubmed/18650810) · [McIntosh et al (2016)](https://papers.nips.cc/paper/6388-deep-learning-models-of-the-retinal-response-to-natural-scenes.pdf) · [Batty et al (2017)](https://openreview.net/forum?id=HkEI22jeg) | 59 | | 6 | Jan 23 | Latent Variable Models: Mixtures, Factors, and EM Part I | Murphy (2013) Ch 11 and 12
Bishop Ch 9 and 12 | Spike sorting: [Pachitariu et al (2016)](https://www.biorxiv.org/content/10.1101/061481v1) · [Liam Paninski's Notes](http://www.stat.columbia.edu/~liam/teaching/neurostat-fall19/em-notes.pdf) | 60 | | 7 | Jan 28 | Latent Variable Models: Mixtures, Factors, and EM Part II | Murphy (2013) Ch 11 and 12
Bishop Ch 9 and 12 | Receptive fields: [Liu et al (2017)](https://www.nature.com/articles/s41467-017-00156-9)
Finding motifs: [Mackevicius et al (2019)](https://elifesciences.org/articles/38471)| 61 | | 8 | Jan 30 | Latent Variable Models: Mixtures, Factors, and EM Part III
**HW1 Due · HW2 Out**| Murphy (2013) Ch 11 and 12
Bishop Ch 9 and 12
[demo notebook](https://colab.research.google.com/drive/1lLRfOUatOlfUMrixeMDDpKVoxajRweLB) | Pose estimation with missing data: [Markowitz et al (2018)](https://www.sciencedirect.com/science/article/pii/S0092867418305129) | 62 | | 9 | Feb 4 | Variational Inference and Nonlinear LVMs: Part I | [Blei et al (2017)](https://arxiv.org/abs/1601.00670) | Network Models: [Gopalan and Blei (2013)](https://www.pnas.org/content/110/36/14534) · [Linderman et al (2016)](https://papers.nips.cc/paper/6185-bayesian-latent-structure-discovery-from-multi-neuron-recordings.pdf) | 63 | | — | Feb 6 | **Midterm Exam** | | | 64 | | 10 | Feb 11 | Midterm Review | | | 65 | | 11 | Feb 13 | Variational Inference and Nonlinear LVMs: Part II | [Kingma and Welling (2019) Ch 2](https://arxiv.org/pdf/1906.02691.pdf)
[demo notebook](https://colab.research.google.com/drive/1MQiTLhGVVgRxGA89WXHe9C1gpFJz6Q-g) | Single cell RNAseq: [Lopez et al (2018)](https://www.nature.com/articles/s41592-018-0229-2) and [blog](https://bair.berkeley.edu/blog/2018/12/05/genes/) · [Grønbech et al (2018)](https://www.biorxiv.org/content/biorxiv/early/2018/05/16/318295.full.pdf) | 66 | | 12 | Feb 18 | Hidden Markov Models
**HW2 Due · HW3 Out** | Bishop (2006) Ch 13
Barber (2010) Ch 23
Murphy (2013) Ch 17 | Calcium deconvolution: [Friederich et al (2017)](https://journals.plos.org/ploscompbiol/article?id=10.1371/journal.pcbi.1005423) · [Jewell et al (2018, 2019)](https://jewellsean.github.io/fast-spike-deconvolution) | 67 | | 13 | Feb 20 | Linear Gaussian Dynamical Systems
**Project Proposal Due** | Barber (2010) Ch 24
Murphy (2013) Ch 18
[demo notebook](https://colab.research.google.com/drive/11If3E63EU-ckZ1ma57zt_NnCWkWg6VLH) | Neural state spaces: [Paninski et al (2010)](http://www.stat.columbia.edu/~liam/research/pubs/jcns-state-space-review.pdf) · [Macke et al (2011)](http://www.gatsby.ucl.ac.uk/~maneesh/papers/macke-etal-2011-nips-preprint.pdf) | 68 | | 14 | Feb 25 | Switching Linear Dynamical Systems | Barber (2010) Ch 25
[Linderman et al (2017)](http://proceedings.mlr.press/v54/linderman17a.html) | Postural dynamics: [Wiltschko et al (2015)](https://www.cell.com/neuron/fulltext/S0896-6273(15)01037-5)
Neural circuit dynamics: [Linderman et al (2019)](https://www.biorxiv.org/content/10.1101/621540v1) · [Taghia et al (2018)](https://www.nature.com/articles/s41467-018-04723-6) | 69 | | 15 | Feb 27 | Gaussian Processes | Rasmussen and Williams (2006) Ch 2
[Hensman et al (2013)](http://www.auai.org/uai2013/prints/papers/244.pdf)
[Titsias and Lawerence (2010)](http://proceedings.mlr.press/v9/titsias10a/titsias10a.pdf)
[Wang et al (2019)](https://arxiv.org/abs/1903.08114) | Odor representation in cortex: [Wu et al (2017)](https://papers.nips.cc/paper/6941-gaussian-process-based-nonlinear-latent-structure-discovery-in-multivariate-spike-train-data) and [2018](http://papers.nips.cc/paper/7783-learning-a-latent-manifold-of-odor-representations-from-neural-responses-in-piriform-cortex) | 70 | | 16 | Mar 3 | Guest Lecture: Matt Johnson
Structured, Sequential VAE's
**HW3 Due · HW4 Out**| [Johnson et al (2016)](https://papers.nips.cc/paper/6379-composing-graphical-models-with-neural-networks-for-structured-representations-and-fast-inference) | Nonlinear embedding of neural activity: [Gao et al (2016)](http://papers.nips.cc/paper/6430-linear-dynamical-neural-population-models-through-nonlinear-embeddings) · [Pandarinath et al (2018)](https://www.nature.com/articles/s41592-018-0109-9) | 71 | | 17 | Mar 5 | Poisson Processes | Kingman (1993) Ch 1 and 2
[Uri Eden's Notes](http://www.stat.columbia.edu/~liam/teaching/neurostat-fall19/uri-eden-point-process-notes.pdf) | Neural firing rates: [Brown et al 2002](https://www.mitpressjournals.org/doi/abs/10.1162/08997660252741149?casa_token=ajv4s9er27AAAAAA%3AFWMF03w7_y5tRQHES9AlMa6LeFJUGJG2ogV5dPZHy2d7t8TtjMtcfQhqYUv1dX_5dcTjCplEi_Y&) · [Truccolo et al (2005)](https://www.ncbi.nlm.nih.gov/pubmed/15356183) · [Cunningham et al (2008a](http://www.gatsby.ucl.ac.uk/~maneesh/papers/cunningham-etal-2008-icml.pdf) and [2008b)](http://www.stat.columbia.edu/~cunningham/pdf/CunninghamNIPS2008.pdf) · [Loaiza-Ganem et al (2019)](https://papers.nips.cc/paper/9491-deep-random-splines-for-point-process-intensity-estimation-of-neural-population-data)| 72 | | 18 | Mar 10 | Continuous Time Markov Chains | [Rao and Teh (2013)](http://jmlr.org/papers/volume14/rao13a/rao13a.pdf) | Complex synapses: [Lahiri and Ganguli (2013)](https://ganguli-gang.stanford.edu/pdf/Synapse.NIPS14.pdf) and [Supp](https://ganguli-gang.stanford.edu/pdf/Synapse.NIPS14.Supp.pdf) | 73 | | 19 | Mar 12 | Hawkes and Cox Processes
**HW4 Due** | [Hawkes (1971)](https://www.jstor.org/stable/2334319?seq=1#metadata_info_tab_contents)
Kingman (1993) Ch 6 | Social contagion: [Linderman and Adams (2014)](http://proceedings.mlr.press/v32/linderman14.pdf) | 74 | | — | Mar 20 | **Final Report Due** | | | 75 | 76 | 77 | # Textbooks 78 | [Barber, D. (2012). _Bayesian reasoning and machine learning_. Cambridge University Press.](http://web4.cs.ucl.ac.uk/staff/D.Barber/textbook/090310.pdf) 79 | 80 | [Bishop, C. M. (2006). _Pattern recognition and machine learning_. Springer.](https://www.microsoft.com/en-us/research/uploads/prod/2006/01/Bishop-Pattern-Recognition-and-Machine-Learning-2006.pdf) 81 | 82 | [Ewens, W. J., & Grant, G. (2005). _Statistical Methods in Bioinformatics_. Springer.](https://www.springer.com/gp/book/9780387400822)
83 | _Available in library_ 84 | 85 | [Goodfellow, I., Bengio, Y., & Courville, A. (2016). _The deep learning book._ MIT Press.](http://www.deeplearningbook.org/) 86 | 87 | [Hastie, T., Tibshirani, R., & Friedman, J. (2009). _The elements of statistical learning: data mining, inference, and prediction_. Springer-Verlag.](https://web.stanford.edu/~hastie/ElemStatLearn/) 88 | 89 | [Kingman, J. F. C. (1993). _Poisson processes_. Clarendon Press.](https://www.amazon.com/Poisson-Processes-Oxford-Studies-Probability/dp/0198536933)
90 | _Available in library_ 91 | 92 | [MacKay, D. J. (2003). _Information theory, inference and learning algorithms_. Cambridge university press.](http://www.inference.org.uk/itprnn/book.pdf) 93 | 94 | [Murphy, K. P. (2012). _Machine learning: a probabilistic perspective_. MIT press.](https://www.cs.ubc.ca/~murphyk/MLbook/)
95 | _Available in library_ 96 | 97 | [Williams, C. K., & Rasmussen, C. E. (2006). _Gaussian processes for machine learning_. MIT press.](http://www.gaussianprocess.org/gpml/) 98 | 99 | [Robert, C., & Casella, G. (2013). _Monte Carlo statistical methods_. Springer.](https://www.springer.com/gp/book/9780387212395) 100 | 101 | [Wainwright, M. J., & Jordan, M. I. (2008). Graphical models, exponential families, and variational inference. _Foundations and Trends® in Machine Learning_, 1(1–2), 1-305.](https://people.eecs.berkeley.edu/~wainwrig/Papers/WaiJor08_FTML.pdf) 102 | 103 | -------------------------------------------------------------------------------- /assignment2/yourname_assignment2.tex: -------------------------------------------------------------------------------- 1 | \documentclass[11pt]{article} 2 | 3 | \input{preamble/preamble.tex} 4 | \input{preamble/preamble_math.tex} 5 | \input{preamble/preamble_acronyms.tex} 6 | 7 | \title{STATS215: Assignment 2} 8 | \author{Your Name} 9 | \date{Due: February 14, 2020} 10 | 11 | \begin{document} 12 | 13 | \maketitle 14 | 15 | \textbf{Problem 1:} \textit{Bernoulli GLMs as latent variable models.} 16 | 17 | Consider a Bernoulli regression model, 18 | \begin{align*} 19 | w &\sim \cN(\mu, \Sigma) \\ 20 | y_n \mid x_n, w &\sim \mathrm{Bern}(f(w^\trans x_n)) \quad \text{for } n = 1,\ldots, N, 21 | \end{align*} 22 | where $w$ and $x_n$ are vectors in $\reals^D$, $y_n \in \{0, 1\}$, and $f: \reals \to [0, 1]$ is the mean function. In class we studied Newton's method for finding the maximum a posteriori (MAP) estimate~$w^\star = \argmax p(w \mid \{x_n, y_n\}_{n=1}^N)$. Now we will consider methods for approximating the full posterior distribution.
23 | 24 | \begin{enumerate}[label=(\alph*)] 25 | \item Rather than using the logistic function, let the mean function be the normal cumulative distribution function (CDF), or ``probit'' function, 26 | \begin{align*} 27 | f(u) &= \Pr(z \leq u) \text{ where } z \sim \cN(0, 1) \\ 28 | &= \int_{-\infty}^u \cN(z; 0, 1) \, \mathrm{d}z. 29 | \end{align*} 30 | This is called the probit regression model. Show that the likelihood~$p(y_n \mid x_n, w)$ is a marginal of a joint distribution, 31 | \begin{align*} 32 | p(y_n, z_n \mid x_n, w) &= \bbI[z_n \geq 0]^{\bbI[y_n = 1]} \, \bbI[z_n < 0]^{\bbI[y_n = 0]} \cN(z_n \mid x_n^\trans w, 1). 33 | \end{align*} 34 | 35 | \begin{solution} 36 | <> 37 | \end{solution} 38 | 39 | \item Derive the conditional distributions~$p(w \mid \{x_n, y_n, z_n\}_{n=1}^N)$ and~$p(z_n \mid x_n, y_n, w)$.\footnote{Observe that $z_n$ is conditionally independent of $\{x_{n'}, y_{n'}, z_{n'}\}_{n' \neq n}$ given $w$.} 40 | 41 | \begin{solution} 42 | <> 43 | \end{solution} 44 | 45 | \item \emph{Gibbs sampling} is a Markov chain Monte Carlo (MCMC) method for approximate posterior inference. It works by repeatedly sampling from the conditional distribution of one variable, holding all others fixed. For the probit regression model, this means iteratively performing these two steps: 46 | \begin{enumerate}[label=\arabic*.] 47 | \item Sample $z_n \sim p(z_n \mid x_n, y_n, w)$ for~$n = 1, \ldots, N$ holding ~$w$ fixed; 48 | \item Sample $w \sim p(w \mid \{x_n, y_n, z_n\}_{n=1}^N)$ holding $\{z_n\}_{n=1}^N$ fixed. 49 | \end{enumerate} 50 | Note the similarity to EM: rather than computing a posterior distribution over~$z_n$, we draw a sample from it; rather than setting~$w$ to maximize the ELBO, we draw a sample from its conditional distribution. It can be shown that this algorithm defines a Markov chain on the space of $(w, \{z_n\}_{n=1}^N)$ whose stationary distribution is the posterior~$p(w, \{z_n\}_{n=1}^N \mid \{x_n, y_n\}_{n=1}^N)$. 
In other words, repeating these steps infinitely many times would yield samples of~$w$ and~$\{z_n\}_{n=1}^N$ drawn from their posterior distribution. 51 | 52 | Implement this Gibbs sampling algorithm and test it on a synthetic dataset with~$D=2$ dimensional covariates and~$N=100$ data points. Scatter plot your samples of~$w$ and, for comparison, plot the true value of~$w$ that generated the data. Do your samples look approximately Gaussian distributed? How does the posterior distribution change when you vary~$N$? 53 | 54 | \begin{solution} 55 | <> 56 | \end{solution} 57 | 58 | \item \textbf{Bonus.} There are also auxiliary variable methods for logistic regression, where~$f(u) = e^u / (1+e^u)$. Specifically, we have that, 59 | \begin{align*} 60 | \frac{e^{y_n \cdot w^\trans x_n}}{1 + e^{w^\trans x_n}} &= 61 | \int_0^\infty \tfrac{1}{2} \exp \left\{ \big(y_n - \tfrac{1}{2}\big) x_n^\trans w -\tfrac{1}{2} z_n (w^\trans x_n)^2 \right\} \mathrm{PG}(z_n; 1, 0) \, \mathrm{d}z_n, 62 | \end{align*} 63 | where~$\mathrm{PG}(z; b, c)$ is the density function of the \emph{P\'{o}lya-gamma} (PG) distribution over~$z \in \reals_+$ with parameters~$b$ and $c$. The PG distribution has a number of nice properties: it is closed under exponential tilting so that, 64 | \begin{align*} 65 | e^{-\tfrac{1}{2} z c^2} \, \mathrm{PG}(z; b, 0) \propto \mathrm{PG}(z; b, c), 66 | \end{align*} 67 | and its expectation is available in closed form, 68 | \begin{align*} 69 | \bbE_{z \sim \mathrm{PG}(b, c)}[z] &= \frac{b}{2c} \tanh \left(\frac{c}{2} \right). 70 | \end{align*} 71 | Use these properties to derive an EM algorithm for finding~$w^\star = \argmax p(\{y_n\} \mid \{x_n\}, w)$. How do the EM updates compare to Newton's method? 
72 | 73 | \end{enumerate} 74 | 75 | \clearpage 76 | 77 | 78 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 79 | \textbf{Problem 2:} \textit{Spike sorting with mixture models} 80 | 81 | As discussed in class, ``spike sorting'' is ultimately a mixture modeling problem. Here we will study the problem in more detail. Let~$\{y_n\}_{n=1}^N$ represent a collection of spikes. Each~$y_n \in \reals^D$ is a vector containing features of the~$n$-th spike waveform. For example, the features may be projections of the spike waveform onto the top~$D$ principal components. We have the following, general model, 82 | \begin{align*} 83 | z_n \mid \pi &\sim \pi \\ 84 | y_n \mid z_n, \theta &\sim p(y_n \mid \theta_{z_n}). 85 | \end{align*} 86 | The label~$z_n \in \{1,\ldots, K\}$ indicates which of the~$K$ neurons generated the~$n$-th spike waveform. The probability vector~$\pi \in \Delta_K$ specifies a prior distribution on spike labels, and the parameters~$\theta = \{\theta_k\}_{k=1}^K$ determine the likelihood of the spike waveforms~$y_n$ for each of the~$K$ neurons. The goal is to infer a posterior distribution~$p(z_n \mid y_n, \pi, \theta)$ over labels for each observed spike, and to learn the parameters~$\pi^\star$ and~$\theta^\star$ that maximize the likelihood of the data. 87 | 88 | \begin{enumerate}[label=(\alph*)] 89 | 90 | \item Start with a Gaussian observation model, 91 | \begin{align*} 92 | y_n \mid z_n, \theta &\sim \cN(y_n \mid \mu_{z_n}, \Sigma_{z_n}), 93 | \end{align*} 94 | where~$\theta_k = (\mu_k, \Sigma_k)$ includes the mean and covariance for the $k$-th neuron. 95 | 96 | Derive an EM algorithm to compute~$\pi^\star, \theta^\star = \argmax p(\{y_n\}_{n=1}^N \mid \pi, \theta)$. Start by deriving the ``responsibilities'' $w_{nk} = p(z_n = k \mid y_n, \pi', \theta')$ for fixed parameters~$\pi'$ and~$\theta'$. 
Then use the responsibilities to compute the expected log joint probability, 97 | \begin{align*} 98 | \cL(\pi, \theta) &= \sum_{n=1}^N \bbE_{p(z_n | y_n, \pi', \theta')} \left[ \log p(y_n, z_n \mid \pi, \theta) \right]. 99 | \end{align*} 100 | Finally, find closed-form expressions for~$\pi^\star$ and~$\theta^\star$ that optimize~$\cL(\pi, \theta)$. 101 | 102 | \begin{solution} 103 | <> 104 | \end{solution} 105 | 106 | \item The Gaussian model can be sensitive to outliers and cause spikes from one neuron to be split into two clusters. One way to side-step this issue is to replace the Gaussian with a heavier-tailed distribution like the multivariate Student's t, which has probability density, 107 | \begin{align*} 108 | p(y_n \mid \theta_{z_n}) &= {\frac {\Gamma \left[(\alpha_0 +D)/2\right]}{\Gamma (\alpha_0 /2)\alpha_0 ^{D/2}\pi ^{D/2}\left|{\Sigma_{z_n}}\right|^{1/2}}} \left[1+{\frac{1}{\alpha_0}} (y_n-\mu_{z_n})^\trans \Sigma_{z_n}^{-1}(y_n - \mu_{z_n})\right]^{-(\alpha_0 +D)/2} \hspace{-3.5em}. 109 | \end{align*} 110 | We will treat~$\alpha_0$ as a fixed hyperparameter. 111 | 112 | Like the negative binomial distribution studied in HW1, the multivariate Student's t can also be represented as an infinite mixture, 113 | \begin{align*} 114 | p(y_n \mid \theta_{z_n}) &= \int p(y_n, \tau_n \mid \theta_{z_n}) \, \dif \tau_n 115 | = \int \cN(y_n ; \mu_{z_n}, \tau_n^{-1} \Sigma_{z_n}) \, \mathrm{Gamma}(\tau_n ; \tfrac{\alpha_0}{2}, \tfrac{1}{2}) \, \dif \tau_n. 116 | \end{align*} 117 | We will derive an EM algorithm to find~$\pi^\star, \theta^\star$ in this model. 118 | 119 | First, show that the posterior takes the form 120 | \begin{align*} 121 | p(\tau_n, z_n \mid y_n, \pi, \theta) &= p(z_n \mid y_n, \pi, \theta) \, p(\tau_n \mid z_n, y_n, \theta)\\ 122 | &= \prod_{k=1}^K \bigg[ w_{nk} \, \mathrm{Gamma}(\tau_n \mid a_{nk}, b_{nk}) \bigg]^{\bbI[z_n = k]}, 123 | \end{align*} 124 | and solve for the parameters~$w_{nk}, a_{nk}, b_{nk}$ in terms of~$y_n$,~$\pi$, and~$\theta$. 
125 | 126 | \begin{solution} 127 | <> 128 | \end{solution} 129 | 130 | \item Now compute the expected log joint probability, 131 | \begin{align*} 132 | \cL(\pi, \theta) &= \sum_{n=1}^N \bbE_{p(\tau_n, z_n | y_n, \pi', \theta')} \left[ \log p(y_n, z_n, \tau_n \mid \pi, \theta) \right], 133 | \end{align*} 134 | using the fact that~$\bbE[X] = a/b$ for~$X \sim \mathrm{Gamma}(a, b)$. You may omit terms that are constant with respect to~$\pi$ and~$\theta$. 135 | 136 | \begin{solution} 137 | <> 138 | \end{solution} 139 | 140 | \item Finally, solve for~$\pi^\star$ and~$\theta^\star$ that maximize the expected log joint probability. How does your answer compare to the solution you found in part (a)? 141 | 142 | \begin{solution} 143 | <> 144 | \end{solution} 145 | 146 | \end{enumerate} 147 | 148 | \clearpage 149 | 150 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 151 | \textbf{Problem 3:} \textit{Poisson matrix factorization} 152 | 153 | Many biological datasets come in the form of matrices of non-negative counts. RNA sequencing data, neural spike trains, and network data (where each entry indicates the number of connections between a pair of nodes) are all good examples. It is common to model these counts as a function of some latent features of the corresponding row and column. Here we consider one such model, which decomposes a count matrix into a superposition of non-negative row and column factors. 154 | 155 | Let~$Y \in \bbN^{M \times N}$ denote an observed~$M \times N$ matrix of non-negative count data. We model this matrix as a function of non-negative row factors~$U \in \reals_+^{M \times K}$ and column factors~$V \in \reals_+^{N \times K}$. Let~$u_m \in \reals_+^K$ and $v_n \in \reals_+^K$ denote the~$m$-th and~$n$-th rows of~$U$ and~$V$, respectively. We assume that each observed count~$y_{mn}$ is conditionally independent of the others given its corresponding row and column factors. 
Moreover, we assume a linear Poisson model, 156 | \begin{align*} 157 | y_{mn} \mid u_m, v_n &\sim \mathrm{Poisson}(u_m^\trans v_n). 158 | \end{align*} 159 | (Since~$u_m$ and~$v_n$ are non-negative, the mean parameter is valid.) Finally, assume gamma priors, 160 | \begin{align*} 161 | u_{mk} &\sim \mathrm{Gamma}(\alpha_0, \beta_0), \\ 162 | v_{nk} &\sim \mathrm{Gamma}(\alpha_0, \beta_0). 163 | \end{align*} 164 | Note that even though the gamma distribution is conjugate to the Poisson, here we have an inner product of two gamma vectors producing one Poisson random variable. The posterior distribution is more complicated. The entries of~$u_m$ are not independent under the posterior due to the ``explaining away'' effect. Nevertheless, we will derive a mean-field variational inference algorithm to approximate the posterior distribution. 165 | 166 | \begin{enumerate}[label=(\alph*)] 167 | \item First we will use an augmentation trick based on the additivity of Poisson random variables; i.e. the fact that 168 | \begin{align*} 169 | y \sim \mathrm{Poisson}\left(\sum_k \lambda_k \right) \iff y = \sum_k y_k \; \text{where} \; y_k \sim \mathrm{Poisson}(\lambda_k) \; \text{independently}, 170 | \end{align*} 171 | for any collection of non-negative rates~$\lambda_1, \ldots, \lambda_K \in \reals_+$. Use this fact to write the likelihood $p(y_{mn} \mid u_m, v_n)$ as a marginal of a joint distribution~$p(y_{mn}, \bar{y}_{mn} \mid u_m, v_n)$ where $\bar{y}_{mn} = (y_{mn1}, \ldots, y_{mnK})$ is a length-$K$ vector of non-negative counts. (Hint: this is similar to Problem 1 in that~$y_{mn}$ is deterministic given~$\bar{y}_{mn}$.) 172 | 173 | \begin{solution} 174 | <> 175 | \end{solution} 176 | 177 | \item Let~$\bar{Y} \in \bbN^{M \times N \times K}$ denote the augmented data matrix with entries~$y_{mnk}$ as above. 
We will use mean field variational inference to approximate the posterior as, 178 | \begin{align*} 179 | p(\bar{Y}, U, V \mid Y) &\approx q(\bar{Y}) \, q(U) \, q(V) = \left[\prod_{m=1}^M \prod_{n=1}^N q(\bar{y}_{mn}) \right] 180 | \left[ \prod_{m=1}^M \prod_{k=1}^K q(u_{mk}) \right] \left[ \prod_{n=1}^N \prod_{k=1}^K q(v_{nk}) \right]. 181 | \end{align*} 182 | We will solve for the optimal posterior approximation via coordinate descent on the KL divergence to the true posterior. Recall that holding all factors except for~$q(\bar{y}_{mn})$ fixed, the KL is minimized when 183 | \begin{align*} 184 | q(\bar{y}_{mn}) \propto \exp \left\{\bbE_{q(\bar{Y}_{\neg mn}) q(U) q(V)} \left[ \log p(Y, \bar{Y}, U, V) \right] \right\}, 185 | \end{align*} 186 | where~$q(\bar{Y}_{\neg mn}) = \prod_{(m',n') \neq (m,n)} q(\bar{y}_{m'n'})$ denotes all variational factors except for the~$(m,n)$-th. 187 | 188 | Show that the optimal~$q(\bar{y}_{mn})$ is a multinomial of the form, 189 | \begin{align*} 190 | q(\bar{y}_{mn}) &= \mathrm{Mult}(\bar{y}_{mn} ; y_{mn}, \pi_{mn}), 191 | \end{align*} 192 | and solve for~$\pi_{mn} \in \Delta_K$. You should write your answer in terms of expectations with respect to the other variational factors. 193 | 194 | \begin{solution} 195 | <> 196 | \end{solution} 197 | 198 | \item Holding all factors but~$q(u_{mk})$ fixed, show that the optimal distribution is 199 | \begin{align*} 200 | q(u_{mk}) 201 | &= \mathrm{Gamma}(u_{mk}; \alpha_{mk}, \beta_{mk}). 202 | \end{align*} 203 | Solve for~$\alpha_{mk}, \beta_{mk}$; write your answer in terms of expectations with respect to~$q(\bar{y}_{mn})$ and~$q(v_{nk})$. 204 | 205 | \begin{solution} 206 | <> 207 | \end{solution} 208 | 209 | \item Use the symmetry of the model to determine the parameters of the optimal gamma distribution for~$q(v_{nk})$, holding~$q(\bar{y}_{mn})$ and~$q(u_{mk})$ fixed, 210 | \begin{align*} 211 | q(v_{nk}) &= \mathrm{Gamma}(v_{nk}; \alpha_{nk}, \beta_{nk}). 
212 | \end{align*} 213 | Solve for~$\alpha_{nk}, \beta_{nk}$; write your answer in terms of expectations with respect to~$q(\bar{y}_{mn})$ and~$q(u_{mk})$. 214 | 215 | \begin{solution} 216 | <> 217 | \end{solution} 218 | 219 | \item Now that the form of all variational factors has been determined, compute the required expectations (in closed form) to write the coordinate descent updates in terms of the other variational parameters. Use the fact that~$\bbE[\log X] = \psi(\alpha) - \log \beta$ for~$X \sim \mathrm{Gamma}(\alpha, \beta)$, where~$\psi$ is the digamma function. 220 | 221 | \begin{solution} 222 | <> 223 | \end{solution} 224 | 225 | \item Suppose that~$Y$ is a sparse matrix with only~$S \ll MN$ non-zero entries. What is the complexity of this mean-field coordinate descent algorithm? 226 | 227 | \begin{solution} 228 | <> 229 | \end{solution} 230 | 231 | \end{enumerate} 232 | 233 | \clearpage 234 | 235 | \textbf{Problem 4:} \textit{Apply Poisson matrix factorization to C. elegans connectomics data} 236 | 237 | Make a copy of this Colab notebook: 238 | 239 | \begin{center} 240 | \url{https://colab.research.google.com/drive/1ZMwcB6vzVaXz4WJiNT514b7zB5s3_SBk} 241 | \end{center} 242 | 243 | Use your solutions from Problem 3 to finish the incomplete code cells. Once you're done, run all the code cells, save the notebook in \texttt{.ipynb} format, print a copy in \texttt{.pdf} format, and submit these files along with the rest of your written assignment. 
244 | 245 | \end{document} 246 | -------------------------------------------------------------------------------- /assets/css/github-markdown.css: -------------------------------------------------------------------------------- 1 | @font-face { 2 | font-family: octicons-link; 3 | src: url(data:font/woff;charset=utf-8;base64,d09GRgABAAAAAAZwABAAAAAACFQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABEU0lHAAAGaAAAAAgAAAAIAAAAAUdTVUIAAAZcAAAACgAAAAoAAQAAT1MvMgAAAyQAAABJAAAAYFYEU3RjbWFwAAADcAAAAEUAAACAAJThvmN2dCAAAATkAAAABAAAAAQAAAAAZnBnbQAAA7gAAACyAAABCUM+8IhnYXNwAAAGTAAAABAAAAAQABoAI2dseWYAAAFsAAABPAAAAZwcEq9taGVhZAAAAsgAAAA0AAAANgh4a91oaGVhAAADCAAAABoAAAAkCA8DRGhtdHgAAAL8AAAADAAAAAwGAACfbG9jYQAAAsAAAAAIAAAACABiATBtYXhwAAACqAAAABgAAAAgAA8ASm5hbWUAAAToAAABQgAAAlXu73sOcG9zdAAABiwAAAAeAAAAME3QpOBwcmVwAAAEbAAAAHYAAAB/aFGpk3jaTY6xa8JAGMW/O62BDi0tJLYQincXEypYIiGJjSgHniQ6umTsUEyLm5BV6NDBP8Tpts6F0v+k/0an2i+itHDw3v2+9+DBKTzsJNnWJNTgHEy4BgG3EMI9DCEDOGEXzDADU5hBKMIgNPZqoD3SilVaXZCER3/I7AtxEJLtzzuZfI+VVkprxTlXShWKb3TBecG11rwoNlmmn1P2WYcJczl32etSpKnziC7lQyWe1smVPy/Lt7Kc+0vWY/gAgIIEqAN9we0pwKXreiMasxvabDQMM4riO+qxM2ogwDGOZTXxwxDiycQIcoYFBLj5K3EIaSctAq2kTYiw+ymhce7vwM9jSqO8JyVd5RH9gyTt2+J/yUmYlIR0s04n6+7Vm1ozezUeLEaUjhaDSuXHwVRgvLJn1tQ7xiuVv/ocTRF42mNgZGBgYGbwZOBiAAFGJBIMAAizAFoAAABiAGIAznjaY2BkYGAA4in8zwXi+W2+MjCzMIDApSwvXzC97Z4Ig8N/BxYGZgcgl52BCSQKAA3jCV8CAABfAAAAAAQAAEB42mNgZGBg4f3vACQZQABIMjKgAmYAKEgBXgAAeNpjYGY6wTiBgZWBg2kmUxoDA4MPhGZMYzBi1AHygVLYQUCaawqDA4PChxhmh/8ODDEsvAwHgMKMIDnGL0x7gJQCAwMAJd4MFwAAAHjaY2BgYGaA4DAGRgYQkAHyGMF8NgYrIM3JIAGVYYDT+AEjAwuDFpBmA9KMDEwMCh9i/v8H8sH0/4dQc1iAmAkALaUKLgAAAHjaTY9LDsIgEIbtgqHUPpDi3gPoBVyRTmTddOmqTXThEXqrob2gQ1FjwpDvfwCBdmdXC5AVKFu3e5MfNFJ29KTQT48Ob9/lqYwOGZxeUelN2U2R6+cArgtCJpauW7UQBqnFkUsjAY/kOU1cP+DAgvxwn1chZDwUbd6CFimGXwzwF6tPbFIcjEl+vvmM/byA48e6tWrKArm4ZJlCbdsrxksL1AwWn/yBSJKpYbq8AXaaTb8AAHja28jAwOC00ZrBeQNDQOWO//sdBBgYGRiYWYAEELEwMTE4uzo5Zzo5b2BxdnFOcALxNjA6b2ByTswC8jYwg0VlNuoCTWAMqNzMzsoK1rEhNqByEyerg5PMJlYuVueETKcd/89uBpnpvIEVomeHLoMsAAe1Id4AAAAAAA
B42oWQT07CQBTGv0JBhagk7HQzKxca2sJCE1hDt4QF+9JOS0nbaaYDCQfwCJ7Au3AHj+LO13FMmm6cl7785vven0kBjHCBhfpYuNa5Ph1c0e2Xu3jEvWG7UdPDLZ4N92nOm+EBXuAbHmIMSRMs+4aUEd4Nd3CHD8NdvOLTsA2GL8M9PODbcL+hD7C1xoaHeLJSEao0FEW14ckxC+TU8TxvsY6X0eLPmRhry2WVioLpkrbp84LLQPGI7c6sOiUzpWIWS5GzlSgUzzLBSikOPFTOXqly7rqx0Z1Q5BAIoZBSFihQYQOOBEdkCOgXTOHA07HAGjGWiIjaPZNW13/+lm6S9FT7rLHFJ6fQbkATOG1j2OFMucKJJsxIVfQORl+9Jyda6Sl1dUYhSCm1dyClfoeDve4qMYdLEbfqHf3O/AdDumsjAAB42mNgYoAAZQYjBmyAGYQZmdhL8zLdDEydARfoAqIAAAABAAMABwAKABMAB///AA8AAQAAAAAAAAAAAAAAAAABAAAAAA==) format('woff'); 4 | } 5 | 6 | .markdown-body { 7 | -ms-text-size-adjust: 100%; 8 | -webkit-text-size-adjust: 100%; 9 | line-height: 1.5; 10 | color: #24292e; 11 | font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; 12 | font-size: 16px; 13 | line-height: 1.5; 14 | word-wrap: break-word; 15 | } 16 | 17 | .markdown-body .pl-c { 18 | color: #6a737d; 19 | } 20 | 21 | .markdown-body .pl-c1, 22 | .markdown-body .pl-s .pl-v { 23 | color: #005cc5; 24 | } 25 | 26 | .markdown-body .pl-e, 27 | .markdown-body .pl-en { 28 | color: #6f42c1; 29 | } 30 | 31 | .markdown-body .pl-smi, 32 | .markdown-body .pl-s .pl-s1 { 33 | color: #24292e; 34 | } 35 | 36 | .markdown-body .pl-ent { 37 | color: #22863a; 38 | } 39 | 40 | .markdown-body .pl-k { 41 | color: #d73a49; 42 | } 43 | 44 | .markdown-body .pl-s, 45 | .markdown-body .pl-pds, 46 | .markdown-body .pl-s .pl-pse .pl-s1, 47 | .markdown-body .pl-sr, 48 | .markdown-body .pl-sr .pl-cce, 49 | .markdown-body .pl-sr .pl-sre, 50 | .markdown-body .pl-sr .pl-sra { 51 | color: #032f62; 52 | } 53 | 54 | .markdown-body .pl-v, 55 | .markdown-body .pl-smw { 56 | color: #e36209; 57 | } 58 | 59 | .markdown-body .pl-bu { 60 | color: #b31d28; 61 | } 62 | 63 | .markdown-body .pl-ii { 64 | color: #fafbfc; 65 | background-color: #b31d28; 66 | } 67 | 68 | .markdown-body .pl-c2 { 69 | color: #fafbfc; 70 | background-color: #d73a49; 71 | } 72 | 
73 | .markdown-body .pl-c2::before { 74 | content: "^M"; 75 | } 76 | 77 | .markdown-body .pl-sr .pl-cce { 78 | font-weight: bold; 79 | color: #22863a; 80 | } 81 | 82 | .markdown-body .pl-ml { 83 | color: #735c0f; 84 | } 85 | 86 | .markdown-body .pl-mh, 87 | .markdown-body .pl-mh .pl-en, 88 | .markdown-body .pl-ms { 89 | font-weight: bold; 90 | color: #005cc5; 91 | } 92 | 93 | .markdown-body .pl-mi { 94 | font-style: italic; 95 | color: #24292e; 96 | } 97 | 98 | .markdown-body .pl-mb { 99 | font-weight: bold; 100 | color: #24292e; 101 | } 102 | 103 | .markdown-body .pl-md { 104 | color: #b31d28; 105 | background-color: #ffeef0; 106 | } 107 | 108 | .markdown-body .pl-mi1 { 109 | color: #22863a; 110 | background-color: #f0fff4; 111 | } 112 | 113 | .markdown-body .pl-mc { 114 | color: #e36209; 115 | background-color: #ffebda; 116 | } 117 | 118 | .markdown-body .pl-mi2 { 119 | color: #f6f8fa; 120 | background-color: #005cc5; 121 | } 122 | 123 | .markdown-body .pl-mdr { 124 | font-weight: bold; 125 | color: #6f42c1; 126 | } 127 | 128 | .markdown-body .pl-ba { 129 | color: #586069; 130 | } 131 | 132 | .markdown-body .pl-sg { 133 | color: #959da5; 134 | } 135 | 136 | .markdown-body .pl-corl { 137 | text-decoration: underline; 138 | color: #032f62; 139 | } 140 | 141 | .markdown-body .octicon { 142 | display: inline-block; 143 | vertical-align: text-top; 144 | fill: currentColor; 145 | } 146 | 147 | .markdown-body a { 148 | background-color: transparent; 149 | -webkit-text-decoration-skip: objects; 150 | } 151 | 152 | .markdown-body a:active, 153 | .markdown-body a:hover { 154 | outline-width: 0; 155 | } 156 | 157 | .markdown-body strong { 158 | font-weight: inherit; 159 | } 160 | 161 | .markdown-body strong { 162 | font-weight: bolder; 163 | } 164 | 165 | .markdown-body h1 { 166 | font-size: 2em; 167 | margin: 0.67em 0; 168 | } 169 | 170 | .markdown-body img { 171 | border-style: none; 172 | } 173 | 174 | .markdown-body svg:not(:root) { 175 | overflow: hidden; 176 | } 177 | 
178 | .markdown-body code, 179 | .markdown-body kbd, 180 | .markdown-body pre { 181 | font-family: monospace, monospace; 182 | font-size: 1em; 183 | } 184 | 185 | .markdown-body hr { 186 | box-sizing: content-box; 187 | height: 0; 188 | overflow: visible; 189 | } 190 | 191 | .markdown-body input { 192 | font: inherit; 193 | margin: 0; 194 | } 195 | 196 | .markdown-body input { 197 | overflow: visible; 198 | } 199 | 200 | .markdown-body [type="checkbox"] { 201 | box-sizing: border-box; 202 | padding: 0; 203 | } 204 | 205 | .markdown-body * { 206 | box-sizing: border-box; 207 | } 208 | 209 | .markdown-body input { 210 | font-family: inherit; 211 | font-size: inherit; 212 | line-height: inherit; 213 | } 214 | 215 | .markdown-body a { 216 | color: #0366d6; 217 | text-decoration: none; 218 | } 219 | 220 | .markdown-body a:hover { 221 | text-decoration: underline; 222 | } 223 | 224 | .markdown-body strong { 225 | font-weight: 600; 226 | } 227 | 228 | .markdown-body hr { 229 | height: 0; 230 | margin: 15px 0; 231 | overflow: hidden; 232 | background: transparent; 233 | border: 0; 234 | border-bottom: 1px solid #dfe2e5; 235 | } 236 | 237 | .markdown-body hr::before { 238 | display: table; 239 | content: ""; 240 | } 241 | 242 | .markdown-body hr::after { 243 | display: table; 244 | clear: both; 245 | content: ""; 246 | } 247 | 248 | .markdown-body table { 249 | border-spacing: 0; 250 | border-collapse: collapse; 251 | } 252 | 253 | .markdown-body td, 254 | .markdown-body th { 255 | padding: 0; 256 | } 257 | 258 | .markdown-body h1, 259 | .markdown-body h2, 260 | .markdown-body h3, 261 | .markdown-body h4, 262 | .markdown-body h5, 263 | .markdown-body h6 { 264 | margin-top: 0; 265 | margin-bottom: 0; 266 | } 267 | 268 | .markdown-body h1 { 269 | font-size: 32px; 270 | font-weight: 600; 271 | } 272 | 273 | .markdown-body h2 { 274 | font-size: 24px; 275 | font-weight: 600; 276 | } 277 | 278 | .markdown-body h3 { 279 | font-size: 20px; 280 | font-weight: 600; 281 | } 282 | 283 
| .markdown-body h4 { 284 | font-size: 16px; 285 | font-weight: 600; 286 | } 287 | 288 | .markdown-body h5 { 289 | font-size: 14px; 290 | font-weight: 600; 291 | } 292 | 293 | .markdown-body h6 { 294 | font-size: 12px; 295 | font-weight: 600; 296 | } 297 | 298 | .markdown-body p { 299 | margin-top: 0; 300 | margin-bottom: 10px; 301 | } 302 | 303 | .markdown-body blockquote { 304 | margin: 0; 305 | } 306 | 307 | .markdown-body ul, 308 | .markdown-body ol { 309 | padding-left: 0; 310 | margin-top: 0; 311 | margin-bottom: 0; 312 | } 313 | 314 | .markdown-body ol ol, 315 | .markdown-body ul ol { 316 | list-style-type: lower-roman; 317 | } 318 | 319 | .markdown-body ul ul ol, 320 | .markdown-body ul ol ol, 321 | .markdown-body ol ul ol, 322 | .markdown-body ol ol ol { 323 | list-style-type: lower-alpha; 324 | } 325 | 326 | .markdown-body dd { 327 | margin-left: 0; 328 | } 329 | 330 | .markdown-body code { 331 | font-family: "SFMono-Regular", Consolas, "Liberation Mono", Menlo, Courier, monospace; 332 | font-size: 12px; 333 | } 334 | 335 | .markdown-body pre { 336 | margin-top: 0; 337 | margin-bottom: 0; 338 | font: 12px "SFMono-Regular", Consolas, "Liberation Mono", Menlo, Courier, monospace; 339 | } 340 | 341 | .markdown-body .octicon { 342 | vertical-align: text-bottom; 343 | } 344 | 345 | .markdown-body .pl-0 { 346 | padding-left: 0 !important; 347 | } 348 | 349 | .markdown-body .pl-1 { 350 | padding-left: 4px !important; 351 | } 352 | 353 | .markdown-body .pl-2 { 354 | padding-left: 8px !important; 355 | } 356 | 357 | .markdown-body .pl-3 { 358 | padding-left: 16px !important; 359 | } 360 | 361 | .markdown-body .pl-4 { 362 | padding-left: 24px !important; 363 | } 364 | 365 | .markdown-body .pl-5 { 366 | padding-left: 32px !important; 367 | } 368 | 369 | .markdown-body .pl-6 { 370 | padding-left: 40px !important; 371 | } 372 | 373 | .markdown-body::before { 374 | display: table; 375 | content: ""; 376 | } 377 | 378 | .markdown-body::after { 379 | display: table; 380 
| clear: both; 381 | content: ""; 382 | } 383 | 384 | .markdown-body>*:first-child { 385 | margin-top: 0 !important; 386 | } 387 | 388 | .markdown-body>*:last-child { 389 | margin-bottom: 0 !important; 390 | } 391 | 392 | .markdown-body a:not([href]) { 393 | color: inherit; 394 | text-decoration: none; 395 | } 396 | 397 | .markdown-body .anchor { 398 | float: left; 399 | padding-right: 4px; 400 | margin-left: -20px; 401 | line-height: 1; 402 | } 403 | 404 | .markdown-body .anchor:focus { 405 | outline: none; 406 | } 407 | 408 | .markdown-body p, 409 | .markdown-body blockquote, 410 | .markdown-body ul, 411 | .markdown-body ol, 412 | .markdown-body dl, 413 | .markdown-body table, 414 | .markdown-body pre { 415 | margin-top: 0; 416 | margin-bottom: 16px; 417 | } 418 | 419 | .markdown-body hr { 420 | height: 0.25em; 421 | padding: 0; 422 | margin: 24px 0; 423 | background-color: #e1e4e8; 424 | border: 0; 425 | } 426 | 427 | .markdown-body blockquote { 428 | padding: 0 1em; 429 | color: #6a737d; 430 | border-left: 0.25em solid #dfe2e5; 431 | } 432 | 433 | .markdown-body blockquote>:first-child { 434 | margin-top: 0; 435 | } 436 | 437 | .markdown-body blockquote>:last-child { 438 | margin-bottom: 0; 439 | } 440 | 441 | .markdown-body kbd { 442 | display: inline-block; 443 | padding: 3px 5px; 444 | font-size: 11px; 445 | line-height: 10px; 446 | color: #444d56; 447 | vertical-align: middle; 448 | background-color: #fafbfc; 449 | border: solid 1px #c6cbd1; 450 | border-bottom-color: #959da5; 451 | border-radius: 3px; 452 | box-shadow: inset 0 -1px 0 #959da5; 453 | } 454 | 455 | .markdown-body h1, 456 | .markdown-body h2, 457 | .markdown-body h3, 458 | .markdown-body h4, 459 | .markdown-body h5, 460 | .markdown-body h6 { 461 | margin-top: 24px; 462 | margin-bottom: 16px; 463 | font-weight: 600; 464 | line-height: 1.25; 465 | } 466 | 467 | .markdown-body h1 .octicon-link, 468 | .markdown-body h2 .octicon-link, 469 | .markdown-body h3 .octicon-link, 470 | .markdown-body h4 
.octicon-link, 471 | .markdown-body h5 .octicon-link, 472 | .markdown-body h6 .octicon-link { 473 | color: #1b1f23; 474 | vertical-align: middle; 475 | visibility: hidden; 476 | } 477 | 478 | .markdown-body h1:hover .anchor, 479 | .markdown-body h2:hover .anchor, 480 | .markdown-body h3:hover .anchor, 481 | .markdown-body h4:hover .anchor, 482 | .markdown-body h5:hover .anchor, 483 | .markdown-body h6:hover .anchor { 484 | text-decoration: none; 485 | } 486 | 487 | .markdown-body h1:hover .anchor .octicon-link, 488 | .markdown-body h2:hover .anchor .octicon-link, 489 | .markdown-body h3:hover .anchor .octicon-link, 490 | .markdown-body h4:hover .anchor .octicon-link, 491 | .markdown-body h5:hover .anchor .octicon-link, 492 | .markdown-body h6:hover .anchor .octicon-link { 493 | visibility: visible; 494 | } 495 | 496 | .markdown-body h1 { 497 | padding-bottom: 0.3em; 498 | font-size: 2em; 499 | border-bottom: 1px solid #eaecef; 500 | } 501 | 502 | .markdown-body h2 { 503 | padding-bottom: 0.3em; 504 | font-size: 1.5em; 505 | border-bottom: 1px solid #eaecef; 506 | } 507 | 508 | .markdown-body h3 { 509 | font-size: 1.25em; 510 | } 511 | 512 | .markdown-body h4 { 513 | font-size: 1em; 514 | } 515 | 516 | .markdown-body h5 { 517 | font-size: 0.875em; 518 | } 519 | 520 | .markdown-body h6 { 521 | font-size: 0.85em; 522 | color: #6a737d; 523 | } 524 | 525 | .markdown-body ul, 526 | .markdown-body ol { 527 | padding-left: 2em; 528 | } 529 | 530 | .markdown-body ul ul, 531 | .markdown-body ul ol, 532 | .markdown-body ol ol, 533 | .markdown-body ol ul { 534 | margin-top: 0; 535 | margin-bottom: 0; 536 | } 537 | 538 | .markdown-body li>p { 539 | margin-top: 16px; 540 | } 541 | 542 | .markdown-body li+li { 543 | margin-top: 0.25em; 544 | } 545 | 546 | .markdown-body dl { 547 | padding: 0; 548 | } 549 | 550 | .markdown-body dl dt { 551 | padding: 0; 552 | margin-top: 16px; 553 | font-size: 1em; 554 | font-style: italic; 555 | font-weight: 600; 556 | } 557 | 558 | 
.markdown-body dl dd { 559 | padding: 0 16px; 560 | margin-bottom: 16px; 561 | } 562 | 563 | .markdown-body table { 564 | display: block; 565 | width: 100%; 566 | overflow: auto; 567 | } 568 | 569 | .markdown-body table th { 570 | font-weight: 600; 571 | } 572 | 573 | .markdown-body table th, 574 | .markdown-body table td { 575 | padding: 6px 13px; 576 | border: 1px solid #dfe2e5; 577 | } 578 | 579 | .markdown-body table tr { 580 | background-color: #fff; 581 | border-top: 1px solid #c6cbd1; 582 | } 583 | 584 | .markdown-body table tr:nth-child(2n) { 585 | background-color: #f6f8fa; 586 | } 587 | 588 | .markdown-body img { 589 | max-width: 100%; 590 | box-sizing: content-box; 591 | background-color: #fff; 592 | } 593 | 594 | .markdown-body code { 595 | padding: 0; 596 | padding-top: 0.2em; 597 | padding-bottom: 0.2em; 598 | margin: 0; 599 | font-size: 85%; 600 | background-color: rgba(27,31,35,0.05); 601 | border-radius: 3px; 602 | } 603 | 604 | .markdown-body code::before, 605 | .markdown-body code::after { 606 | letter-spacing: -0.2em; 607 | content: "\00a0"; 608 | } 609 | 610 | .markdown-body pre { 611 | word-wrap: normal; 612 | } 613 | 614 | .markdown-body pre>code { 615 | padding: 0; 616 | margin: 0; 617 | font-size: 100%; 618 | word-break: normal; 619 | white-space: pre; 620 | background: transparent; 621 | border: 0; 622 | } 623 | 624 | .markdown-body .highlight { 625 | margin-bottom: 16px; 626 | } 627 | 628 | .markdown-body .highlight pre { 629 | margin-bottom: 0; 630 | word-break: normal; 631 | } 632 | 633 | .markdown-body .highlight pre, 634 | .markdown-body pre { 635 | padding: 16px; 636 | overflow: auto; 637 | font-size: 85%; 638 | line-height: 1.45; 639 | background-color: #f6f8fa; 640 | border-radius: 3px; 641 | } 642 | 643 | .markdown-body pre code { 644 | display: inline; 645 | max-width: auto; 646 | padding: 0; 647 | margin: 0; 648 | overflow: visible; 649 | line-height: inherit; 650 | word-wrap: normal; 651 | background-color: transparent; 652 | 
border: 0; 653 | } 654 | 655 | .markdown-body pre code::before, 656 | .markdown-body pre code::after { 657 | content: normal; 658 | } 659 | 660 | .markdown-body .full-commit .btn-outline:not(:disabled):hover { 661 | color: #005cc5; 662 | border-color: #005cc5; 663 | } 664 | 665 | .markdown-body kbd { 666 | display: inline-block; 667 | padding: 3px 5px; 668 | font: 11px "SFMono-Regular", Consolas, "Liberation Mono", Menlo, Courier, monospace; 669 | line-height: 10px; 670 | color: #444d56; 671 | vertical-align: middle; 672 | background-color: #fafbfc; 673 | border: solid 1px #d1d5da; 674 | border-bottom-color: #c6cbd1; 675 | border-radius: 3px; 676 | box-shadow: inset 0 -1px 0 #c6cbd1; 677 | } 678 | 679 | .markdown-body :checked+.radio-label { 680 | position: relative; 681 | z-index: 1; 682 | border-color: #0366d6; 683 | } 684 | 685 | .markdown-body .task-list-item { 686 | list-style-type: none; 687 | } 688 | 689 | .markdown-body .task-list-item+.task-list-item { 690 | margin-top: 3px; 691 | } 692 | 693 | .markdown-body .task-list-item input { 694 | margin: 0 0.2em 0.25em -1.6em; 695 | vertical-align: middle; 696 | } 697 | 698 | .markdown-body hr { 699 | border-bottom-color: #eee; 700 | } 701 | --------------------------------------------------------------------------------