├── tests ├── .gitignore ├── test_utils.py ├── test_dpmm.py ├── test_data.py ├── test_density.py ├── test_plot_samples.py ├── test_prior.py └── test_broadcast.py ├── notebooks ├── .gitignore └── Variance Inference with InvGamma prior.ipynb ├── README.md ├── dpmm ├── __init__.py ├── density.py ├── data.py ├── gmm.py ├── utils.py ├── dpmm.py ├── shear.py └── prior.py ├── .travis.yml ├── .gitignore ├── LICENSE ├── SNe └── gen.py └── setup.py /tests/.gitignore: -------------------------------------------------------------------------------- 1 | plots 2 | -------------------------------------------------------------------------------- /notebooks/.gitignore: -------------------------------------------------------------------------------- 1 | .ipynb_checkpoints 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://travis-ci.org/jmeyers314/DPMM.svg?branch=master)](https://travis-ci.org/jmeyers314/DPMM) 2 | -------------------------------------------------------------------------------- /dpmm/__init__.py: -------------------------------------------------------------------------------- 1 | """ Dirichlet Process Mixture Models 2 | """ 3 | 4 | # DPMM first 5 | from .dpmm import DPMM 6 | from .prior import NormInvWish, GaussianMeanKnownVariance, NormInvChi2, NormInvGamma, InvGamma 7 | from .prior import InvGamma2D 8 | from .data import PseudoMarginalData 9 | from .shear import Linear1DShear, Shear, WeakShear 10 | from .gmm import GaussND, GMM 11 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | def timer(f): 2 | import functools 3 | 4 | @functools.wraps(f) 5 | def f2(*args, **kwargs): 6 | import time 7 | import inspect 8 | t0 = time.time() 9 | result = f(*args, **kwargs) 10 | t1 = 
time.time() 11 | fname = inspect.stack()[1][4][0].split('(')[0].strip() 12 | print 'time for %s = %.2f' % (fname, t1-t0) 13 | return result 14 | return f2 15 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | branches: 2 | only: 3 | - master 4 | language: python 5 | python: 6 | - 2.7 7 | # Setup anaconda 8 | before_install: 9 | - wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh 10 | - chmod +x miniconda.sh 11 | - ./miniconda.sh -b 12 | - export PATH=/home/travis/miniconda2/bin:$PATH 13 | - conda update --yes conda 14 | # The next couple lines fix a crash with multiprocessing on Travis and are not specific to using Miniconda 15 | - sudo rm -rf /dev/shm 16 | - sudo ln -s /run/shm /dev/shm 17 | # Install packages 18 | install: 19 | - conda install --yes python=$TRAVIS_PYTHON_VERSION numpy scipy nose matplotlib 20 | - python setup.py install 21 | script: nosetests 22 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | 26 | # PyInstaller 27 | # Usually these files are written by a python script from a template 28 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
29 | *.manifest 30 | *.spec 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | 36 | # Unit test / coverage reports 37 | htmlcov/ 38 | .tox/ 39 | .coverage 40 | .coverage.* 41 | .cache 42 | nosetests.xml 43 | coverage.xml 44 | *,cover 45 | 46 | # Translations 47 | *.mo 48 | *.pot 49 | 50 | # Django stuff: 51 | *.log 52 | 53 | # Sphinx documentation 54 | docs/_build/ 55 | 56 | # PyBuilder 57 | target/ 58 | -------------------------------------------------------------------------------- /tests/test_dpmm.py: -------------------------------------------------------------------------------- 1 | import dpmm 2 | from test_utils import timer 3 | 4 | # Some common test data: 5 | mu = [-0.5, 0.0, 0.7] # means 6 | V = [0.02, 0.03, 0.1] # variances 7 | p = [0.25, 0.4, 0.35] # proportions 8 | model = dpmm.GMM([dpmm.GaussND(mu0, V0) for mu0, V0 in zip(mu, V)], p) 9 | data = model.sample(size=100) 10 | 11 | @timer 12 | def test_dpmm_GaussianMeanKnownVariance(): 13 | mu_0 = 0.0 14 | sigsqr_0 = 1.0 15 | sigsqr = 0.05 16 | cp = dpmm.GaussianMeanKnownVariance(mu_0, sigsqr_0, sigsqr) 17 | alpha = 1.0 18 | 19 | # Just checking that we can construct a DP and update it. 20 | dp = dpmm.DPMM(cp, alpha, data) 21 | dp.update(100) 22 | 23 | 24 | @timer 25 | def test_dpmm_NormInvChi2(): 26 | mu_0 = 0.3 27 | kappa_0 = 0.1 28 | sigsqr_0 = 1.0 29 | nu_0 = 0.1 30 | cp = dpmm.NormInvChi2(mu_0, kappa_0, sigsqr_0, nu_0) 31 | alpha = 10.0 32 | 33 | # Just checking that we can construct a DP and update it. 
34 | dp = dpmm.DPMM(cp, alpha, data) 35 | dp.update(100) 36 | 37 | if __name__ == '__main__': 38 | test_dpmm_GaussianMeanKnownVariance() 39 | test_dpmm_NormInvChi2() 40 | -------------------------------------------------------------------------------- /dpmm/density.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy.special import gamma 3 | 4 | 5 | def multivariate_t_density(nu, mu, Sig, x): 6 | """Return multivariate t distribution: t_nu(x | mu, Sig), in d-dimensions.""" 7 | detSig = np.linalg.det(Sig) 8 | invSig = np.linalg.inv(Sig) 9 | d = len(mu) 10 | coef = gamma(nu/2.0+d/2.0) * detSig**(-0.5) 11 | coef /= gamma(nu/2.0) * nu**(d/2.0)*np.pi**(d/2.0) 12 | if x.ndim == 1: 13 | einsum = np.dot(x-mu, np.dot(invSig, x-mu)) 14 | else: 15 | einsum = np.einsum("...i,ij,...j", x-mu, invSig, x-mu) # (x-mu).T * invSig * (x-mu) 16 | return coef * (1.0 + einsum/nu)**(-(nu+d)/2.0) 17 | 18 | 19 | def t_density(nu, mu, sigsqr, x): 20 | c = gamma((nu+1.)/2.)/gamma(nu/2.)/np.sqrt(nu*np.pi*sigsqr) 21 | return c*(1.0+1./nu*((x-mu)**2/sigsqr))**(-(1.+nu)/2.0) 22 | 23 | 24 | def scaled_IX_density(nu, sigsqr, x): 25 | return (1.0/gamma(nu/2.0) * 26 | (nu*sigsqr/2.0)**(nu/2.0) * 27 | x**(-nu/2.0-1.0) * 28 | np.exp(-nu*sigsqr/(2.0*x))) 29 | 30 | 31 | def normal_density(mu, var, x): 32 | return np.exp(-0.5*(x-mu)**2/var)/np.sqrt(2*np.pi*var) 33 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, Josh Meyers 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 
9 | 10 | * Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 15 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 16 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 17 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 18 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 19 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 20 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 21 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 22 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 23 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 24 | -------------------------------------------------------------------------------- /tests/test_data.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import dpmm 3 | from test_utils import timer 4 | 5 | 6 | @timer 7 | def test_advanced_indexing(): 8 | nobj = 10 9 | nsample = 9 10 | ndim = 8 11 | 12 | # actual data 13 | d = np.empty((nobj, nsample, ndim), dtype=np.float) 14 | # interim priors 15 | ips = np.empty((nobj, nsample), dtype=np.float) 16 | 17 | psd = dpmm.PseudoMarginalData(d, ips) 18 | 19 | # compare direct access of data to access through __getitem__ 20 | # 2nd raw item is same as 0th item of [2, 6] fancy indexing 21 | np.testing.assert_equal(psd.data[2], psd[[2, 6]].data[0], 22 | "Advanced indexing didn't work for PseudoMarginalData") 23 | # Check that the interim prior value works this way too. 
24 | np.testing.assert_equal(psd.interim_prior[2], psd[[2, 6]].interim_prior[0], 25 | "Advanced indexing didn't work for PseudoMarginalData") 26 | # And now check that 6th raw item is 1st item of [2, 6] fancy indexing. 27 | np.testing.assert_equal(psd.data[6], psd[[2, 6]].data[1], 28 | "Advanced indexing didn't work for PseudoMarginalData") 29 | # and interim prior 30 | np.testing.assert_equal(psd.interim_prior[6], psd[[2, 6]].interim_prior[1], 31 | "Advanced indexing didn't work for PseudoMarginalData") 32 | 33 | 34 | if __name__ == "__main__": 35 | test_advanced_indexing() 36 | -------------------------------------------------------------------------------- /SNe/gen.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from collections import namedtuple 3 | 4 | SN = namedtuple('SN', ['Mag', 'spec', 'label']) 5 | 6 | 7 | class SNFamily(object): 8 | def __init__(self, mean_mag=0.0, std_mag=0.05, mean_spec=0.0, std_spec=1.0, label=None): 9 | self.mean_mag = mean_mag 10 | self.std_mag = std_mag 11 | self.mean_spec = mean_spec 12 | self.std_spec = std_spec 13 | self.label = label 14 | 15 | def sample(self, size=None): 16 | mag = np.random.normal(loc=self.mean_mag, scale=self.std_mag, size=size) 17 | spec = np.random.normal(loc=self.mean_spec, scale=self.std_spec, size=size) 18 | if not hasattr(mag, '__len__'): 19 | return SN(mag, spec, self.label) 20 | else: 21 | return [SN(m, s, self.label) for m, s in zip(mag, spec)] 22 | 23 | 24 | class SNFamilyMixture(object): 25 | def __init__(self, families, proportions): 26 | self.families = families 27 | for i, fam in enumerate(self.families): 28 | fam.label = i 29 | self.proportions = proportions 30 | 31 | def sample(self, size=None): 32 | if size is None: 33 | ncs = np.random.multinomial(1, self.proportions) 34 | c = ncs.index(1) 35 | return self.families[c].sample() 36 | else: 37 | out = [] 38 | ncs = np.random.multinomial(size, self.proportions) 39 | for fam, nc in 
zip(self.families, ncs): 40 | out.extend(fam.sample(size=nc)) 41 | return out 42 | 43 | 44 | def test(size=10000): 45 | fam1 = SNFamily() 46 | fam2 = SNFamily(mean_mag=0.1, mean_spec=1.0) 47 | mixture = SNFamilyMixture([fam1, fam2], [0.3, 0.7]) 48 | return mixture.sample(size=size) 49 | -------------------------------------------------------------------------------- /dpmm/data.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from utils import pick_discrete 3 | 4 | 5 | class PseudoMarginalData(object): 6 | def __init__(self, data, interim_prior): 7 | # Data should have dims [NOBJ, NSAMPLE, NDIM] or [NOBJ, NSAMPLE] if NDIM is 1 8 | # interim_prior should have dims [NOBJ, NSAMPLE] 9 | self.data = data 10 | self.interim_prior = interim_prior 11 | 12 | if self.data.ndim == 2: 13 | self.nobj, self.nsample = self.data.shape 14 | else: 15 | self.nobj, self.nsample, self.ndim = self.data.shape 16 | 17 | if self.interim_prior.shape != (self.nobj, self.nsample): 18 | ds = self.data.shape 19 | ips = self.interim_prior.shape 20 | raise ValueError(("data shape [NOBJ, NSAMPLE, NDIM] = [{}, {}, {}]" + 21 | " inconsistent with interim_prior shape [NOBJ, NSAMPLE] = [{}, {}]") 22 | .format(ds[0], ds[1], ds[2], ips[0], ips[2])) 23 | 24 | def __len__(self): 25 | return self.nobj 26 | 27 | def __getitem__(self, index): 28 | import numbers 29 | cls = type(self) 30 | # *Leave* a shallow axis in the case a single object is requested. 31 | if isinstance(index, numbers.Integral): 32 | return cls(self.data[np.newaxis, index], self.interim_prior[np.newaxis, index]) 33 | else: 34 | return cls(self.data[index], self.interim_prior[index]) 35 | 36 | def random_sample(self): 37 | """Return a [NOBJ, NDIM] numpy array sampling over NSAMPLE using inverse interim_prior 38 | weights. 
Needed to compute a posterior object.""" 39 | ps = 1./self.interim_prior 40 | ps /= np.sum(ps, axis=1)[:, np.newaxis] 41 | return np.array([self.data[i, pick_discrete(p)] for i, p in enumerate(ps)]) 42 | 43 | 44 | class NullManip(object): 45 | def init(self, D): 46 | pass 47 | 48 | def __call__(self, D): 49 | return D 50 | 51 | def unmanip(self, D): 52 | return D 53 | 54 | def update(self, D, phi, c, prior): 55 | pass 56 | -------------------------------------------------------------------------------- /dpmm/gmm.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | class GaussND(object): 5 | def __init__(self, mu, Sig): 6 | self.mu = np.atleast_1d(mu) 7 | self.Sig = np.atleast_2d(Sig) 8 | self.d = len(self.mu) 9 | 10 | def cond(self, x): 11 | fixed = np.nonzero([x_ is not None for x_ in x]) 12 | nonfixed = np.nonzero([x_ is None for x_ in x]) 13 | mu1 = self.mu[nonfixed] 14 | mu2 = self.mu[fixed] 15 | Sig11 = self.Sig[nonfixed, nonfixed] 16 | Sig12 = self.Sig[fixed, nonfixed] 17 | Sig22 = self.Sig[fixed, fixed] 18 | 19 | new_mu = mu1 + np.dot(Sig12, np.dot(np.linalg.inv(Sig22), x[fixed[0]] - mu2)) 20 | new_Sig = Sig11 - np.dot(Sig12, np.dot(np.linalg.inv(Sig22), Sig12.T)) 21 | return GaussND(new_mu, new_Sig) 22 | 23 | def sample(self, size=None): 24 | if self.d == 1: 25 | return np.random.normal(self.mu, scale=np.sqrt(self.Sig), size=size) 26 | else: 27 | return np.random.multivariate_normal(self.mu, self.Sig, size=size) 28 | 29 | 30 | class GMM(object): 31 | def __init__(self, components, proportions): 32 | self.components = components 33 | self.proportions = proportions 34 | self.d = self.components[0].d 35 | 36 | def cond(self, x): 37 | components = [c.cond(x) for c in self.components] 38 | return GMM(components, self.proportions) 39 | 40 | def sample(self, size=None): 41 | if size is None: 42 | nums = np.random.multinomial(1, self.proportions) 43 | c = nums.index(1) # which class got picked 44 | 
return self.components[c].sample() 45 | else: 46 | n = np.prod(size) 47 | if self.d == 1: 48 | out = np.empty((n,), dtype=float) 49 | nums = np.random.multinomial(n, self.proportions) 50 | i = 0 51 | for component, num in zip(self.components, nums): 52 | out[i:i+num] = component.sample(size=num) 53 | i += num 54 | out = out.reshape(size) 55 | else: 56 | out = np.empty((n, self.d), dtype=float) 57 | nums = np.random.multinomial(n, self.proportions) 58 | i = 0 59 | for component, num in zip(self.components, nums): 60 | out[i:i+num] = component.sample(size=num) 61 | i += num 62 | if isinstance(size, int): 63 | out = out.reshape((size, self.d)) 64 | else: 65 | out = out.reshape(size+(self.d,)) 66 | return out 67 | -------------------------------------------------------------------------------- /dpmm/utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy.special import gamma 3 | import bisect 4 | 5 | 6 | def vTmv(vec, mat=None, vec2=None): 7 | """Multiply a vector transpose times a matrix times a vector. 8 | 9 | @param vec The first vector (will be transposed). 10 | @param mat The matrix in the middle. Identity by default. 11 | @param vec2 The second vector (will not be transposed.) By default, the same as the vec. 12 | @returns Product. Could be a scalar or a matrix depending on whether vec is a row or column 13 | vector. 
14 | """ 15 | if len(vec.shape) == 1: 16 | vec = np.reshape(vec, [vec.shape[0], 1]) 17 | if mat is None: 18 | mat = np.eye(len(vec)) 19 | if vec2 is None: 20 | vec2 = vec 21 | return np.dot(vec.T, np.dot(mat, vec2)) 22 | 23 | 24 | def gammad(d, nu_over_2): 25 | """D-dimensional gamma function.""" 26 | nu = 2.0 * nu_over_2 27 | return np.pi**(d*(d-1.)/4)*np.multiply.reduce([gamma(0.5*(nu+1-i)) for i in range(d)]) 28 | 29 | 30 | def random_wish(dof, S, size=None): 31 | dim = S.shape[0] 32 | if size is None: 33 | x = np.random.multivariate_normal(np.zeros(dim), S, size=dof) 34 | return np.dot(x.T, x) 35 | else: 36 | if isinstance(size, int): 37 | size = (size,) 38 | out = np.empty(size+(dim, dim), dtype=np.float64) 39 | for ind in np.ndindex(size): 40 | x = np.random.multivariate_normal(np.zeros(dim), S, size=dof) 41 | out[ind] = np.dot(x.T, x) 42 | return out 43 | 44 | 45 | def random_invwish(dof, invS, size=None): 46 | return np.linalg.inv(random_wish(dof, invS, size=size)) 47 | 48 | 49 | def pick_discrete(p): 50 | """Pick a discrete integer between 0 and len(p) - 1 with probability given by (normalized) p 51 | array. Note that p array will be normalized here.""" 52 | c = np.cumsum(p) 53 | c /= c[-1] # Normalize 54 | u = np.random.uniform() 55 | return bisect.bisect(c, u) 56 | 57 | 58 | # Modified code from http://stackoverflow.com/questions/9081553/python-scatter-plot-size-and-style-of-the-marker/24567352#24567352 59 | 60 | def ellipses(x, y, s, q, pa, c='b', ax=None, vmin=None, vmax=None, **kwargs): 61 | """Scatter plot of ellipses. 62 | 63 | (x, y) duh. 64 | s size. 65 | q minor-to-major axes ratio b/a 66 | pa position angle in deg, CCW from +y. 67 | """ 68 | from matplotlib.patches import Ellipse 69 | from matplotlib.collections import PatchCollection 70 | import matplotlib.pyplot as plt 71 | 72 | if ax is None: 73 | ax = plt.gca() 74 | 75 | if isinstance(c, basestring): 76 | color = c # ie. 
use colors.colorConverter.to_rgba_array(c) 77 | else: 78 | color = None # use cmap, norm after collection is created 79 | kwargs.update(color=color) 80 | 81 | w, h = s*np.sqrt(q), s/np.sqrt(q) 82 | 83 | if np.isscalar(x): 84 | patches = [Ellipse((x, y), w, h, pa), ] 85 | else: 86 | patches = [Ellipse((x_, y_), w_, h_, pa_) for x_, y_, w_, h_, pa_ in zip(x, y, w, h, pa)] 87 | collection = PatchCollection(patches, **kwargs) 88 | 89 | if color is None: 90 | collection.set_array(np.asarray(c)) 91 | if vmin is not None or vmax is not None: 92 | collection.set_clim(vmin, vmax) 93 | 94 | ax.add_collection(collection) 95 | ax.autoscale_view() 96 | return collection 97 | 98 | 99 | def plot_ellipse(mu, Sig, ax=None, **kwargs): 100 | import matplotlib.pyplot as plt 101 | if ax is None: 102 | ax = plt.gca() 103 | val, vec = np.linalg.eigh(Sig) 104 | # 5.991 gives 95% ellipses 105 | s = np.sqrt(np.sqrt(5.991*val[0]*val[1])) 106 | q = np.sqrt(val[0]/val[1]) 107 | pa = np.arctan2(vec[0, 1], vec[0, 0])*180/np.pi 108 | ellipses(mu[0], mu[1], s, q, pa, ax=ax, **kwargs) 109 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """A setuptools based setup module. 2 | 3 | See: 4 | https://packaging.python.org/en/latest/distributing.html 5 | https://github.com/pypa/sampleproject 6 | """ 7 | 8 | # Always prefer setuptools over distutils 9 | from setuptools import setup, find_packages 10 | from os import path 11 | 12 | here = path.abspath(path.dirname(__file__)) 13 | 14 | # Get the long description from the relevant file 15 | # with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f: 16 | # long_description = f.read() 17 | 18 | setup( 19 | name='DPMM', 20 | 21 | # Versions should comply with PEP440. 
For a discussion on single-sourcing 22 | # the version across setup.py and the project code, see 23 | # https://packaging.python.org/en/latest/single_source_version.html 24 | version='0.1.0.dev1', 25 | 26 | description='dpmm.py', 27 | # long_description=long_description, 28 | 29 | # The project's main homepage. 30 | url='https://github.com/jmeyers314/DPMM', 31 | 32 | # Author details 33 | author='Joshua E. Meyers', 34 | author_email='jmeyers314@gmail.com', 35 | 36 | # Choose your license 37 | license='BSD', 38 | 39 | # See https://pypi.python.org/pypi?%3Aaction=list_classifiers 40 | classifiers=[ 41 | # How mature is this project? Common values are 42 | # 3 - Alpha 43 | # 4 - Beta 44 | # 5 - Production/Stable 45 | 'Development Status :: 2 - Pre-Alpha', 46 | 47 | # Indicate who your project is intended for 48 | 'Intended Audience :: Science/Research', 49 | 'Topic :: Scientific/Engineering :: Mathematics', 50 | 51 | # Pick your license as you wish (should match "license" above) 52 | 'License :: OSI Approved :: BSD License', 53 | 54 | # Specify the Python versions you support here. In particular, ensure 55 | # that you indicate whether you support Python 2, Python 3 or both. 56 | 'Programming Language :: Python :: 2.7' 57 | ], 58 | 59 | # What does your project relate to? 60 | keywords='fit bayesian statistics', 61 | 62 | # You can just specify the packages manually here if your project is 63 | # simple. Or you can use find_packages(). 64 | packages=find_packages(exclude=['contrib', 'docs', 'tests*']), 65 | 66 | # List run-time dependencies here. These will be installed by pip when 67 | # your project is installed. For an analysis of "install_requires" vs pip's 68 | # requirements files see: 69 | # https://packaging.python.org/en/latest/requirements.html 70 | install_requires=['numpy', 'scipy'], 71 | 72 | # List additional groups of dependencies here (e.g. development 73 | # dependencies). 
You can install these using the following syntax, 74 | # for example: 75 | # $ pip install -e .[dev,test] 76 | # extras_require={ 77 | # 'dev': ['check-manifest'], 78 | # 'test': ['coverage'], 79 | # }, 80 | 81 | # If there are data files included in your packages that need to be 82 | # installed, specify them here. If using Python 2.6 or less, then these 83 | # have to be included in MANIFEST.in as well. 84 | # package_data={ 85 | # 'sample': ['package_data.dat'], 86 | # }, 87 | 88 | # Although 'package_data' is the preferred approach, in some case you may 89 | # need to place data files outside of your packages. See: 90 | # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa 91 | # In this case, 'data_file' will be installed into '/my_data' 92 | # data_files=[('my_data', ['data/data_file'])], 93 | 94 | # To provide executable scripts, use entry points in preference to the 95 | # "scripts" keyword. Entry points provide cross-platform support and allow 96 | # pip to create the appropriate form of executable for the target platform. 
97 | # entry_points={ 98 | # 'console_scripts': [ 99 | # 'sample=sample:main', 100 | # ], 101 | # }, 102 | ) 103 | -------------------------------------------------------------------------------- /tests/test_density.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | import numpy as np 3 | from scipy.integrate import quad, dblquad 4 | 5 | from dpmm.density import t_density, multivariate_t_density, scaled_IX_density 6 | from test_utils import timer 7 | 8 | 9 | @timer 10 | def test_scaled_IX_density(): 11 | nu = 3 12 | sigsqr = 1.0 13 | 14 | # test that probability integrates to 1.0 15 | r = quad(lambda x: scaled_IX_density(nu, sigsqr, x), 0.0, np.inf) 16 | np.testing.assert_almost_equal(r[0], 1.0, 10, "scaled_IX_density does not integrate to 1.0") 17 | 18 | # test mean 19 | mean = nu*sigsqr/(nu-2) 20 | r = quad(lambda x: scaled_IX_density(nu, sigsqr, x)*x, 0.0, np.inf) 21 | np.testing.assert_almost_equal(r[0], mean, 10, "scaled_IX_density has wrong mean") 22 | 23 | # test variance 24 | var = 2.0*nu**2*sigsqr/(nu-2.0)**2/(nu-4.0) 25 | with warnings.catch_warnings(): 26 | warnings.simplefilter('ignore') 27 | r = quad(lambda x: scaled_IX_density(nu, sigsqr, x)*(x-mean)**2, 0.0, np.inf) 28 | np.testing.assert_almost_equal(r[0], var, 8, "scaled_IX_density has wrong variance") 29 | 30 | # test vectorizability 31 | x = np.arange(24.0).reshape(4, 3, 2)+1 32 | prs = scaled_IX_density(nu, sigsqr, x) 33 | for (i, j, k), pr in np.ndenumerate(prs): 34 | np.testing.assert_equal( 35 | pr, scaled_IX_density(nu, sigsqr, x[i, j, k]), 36 | "scaled_IX_density does not vectorize correctly!") 37 | 38 | 39 | @timer 40 | def test_t_density(): 41 | nu = 3 42 | mu = 2.2 43 | sigsqr = 1.51 44 | 45 | # test that probability integrates to 1.0 46 | r = quad(lambda x: t_density(nu, mu, sigsqr, x), -np.inf, np.inf) 47 | np.testing.assert_almost_equal(r[0], 1.0, 10, "t_density does not integrate to 1.0") 48 | 49 | # test mean 50 | r = 
quad(lambda x: t_density(nu, mu, sigsqr, x)*x, -np.inf, np.inf) 51 | np.testing.assert_almost_equal(r[0], mu, 10, "t_density has wrong mean") 52 | 53 | # test variance 54 | r = quad(lambda x: t_density(nu, mu, sigsqr, x)*(x-mu)**2, -np.inf, np.inf) 55 | np.testing.assert_almost_equal(r[0], nu*sigsqr/(nu-2), 10, "t_density has wrong variance") 56 | 57 | # test vectorizability 58 | x = np.arange(24.0).reshape(4, 3, 2)+1 59 | prs = t_density(nu, mu, sigsqr, x) 60 | for (i, j, k), pr in np.ndenumerate(prs): 61 | np.testing.assert_equal( 62 | pr, t_density(nu, mu, sigsqr, x[i, j, k]), 63 | "t_density does not vectorize correctly!") 64 | 65 | 66 | @timer 67 | def test_multivariate_t_density(full=False): 68 | nu = 3 69 | mu = np.r_[1., 2.] 70 | Sig = np.eye(2)+0.1 71 | 72 | # test that integrates to 1.0 73 | r = dblquad(lambda x, y: multivariate_t_density(nu, mu, Sig, np.r_[x, y]), 74 | -np.inf, np.inf, lambda x: -np.inf, lambda x: np.inf) 75 | np.testing.assert_almost_equal( 76 | r[0], 1.0, 5, "multivariate_t_density does not integrate to 1.0") 77 | 78 | if full: 79 | # test mean 80 | with warnings.catch_warnings(): 81 | warnings.simplefilter('ignore') 82 | xbar = dblquad(lambda x, y: multivariate_t_density(nu, mu, Sig, np.r_[x, y])*x, 83 | -np.inf, np.inf, lambda x: -np.inf, lambda x: np.inf)[0] 84 | ybar = dblquad(lambda x, y: multivariate_t_density(nu, mu, Sig, np.r_[x, y])*y, 85 | -np.inf, np.inf, lambda x: -np.inf, lambda x: np.inf)[0] 86 | np.testing.assert_almost_equal( 87 | xbar, mu[0], 5, "multivariate_t_density has wrong mean") 88 | np.testing.assert_almost_equal( 89 | ybar, mu[1], 5, "multivariate_t_density has wrong mean") 90 | # test covariance 91 | with warnings.catch_warnings(): 92 | warnings.simplefilter('ignore') 93 | Ixx = dblquad(lambda x, y: multivariate_t_density(nu, mu, Sig, np.r_[x, y])*(x-xbar)*(x-xbar), 94 | -np.inf, np.inf, lambda x: -np.inf, lambda x: np.inf)[0] 95 | Iyy = dblquad(lambda x, y: multivariate_t_density(nu, mu, Sig, np.r_[x, 
y])*(y-ybar)*(y-ybar), 96 | -np.inf, np.inf, lambda x: -np.inf, lambda x: np.inf)[0] 97 | Ixy = dblquad(lambda x, y: multivariate_t_density(nu, mu, Sig, np.r_[x, y])*(x-xbar)*(y-ybar), 98 | -np.inf, np.inf, lambda x: -np.inf, lambda x: np.inf)[0] 99 | cov = np.array([[Ixx, Ixy], [Ixy, Iyy]]) 100 | print cov 101 | print "----------" 102 | print nu/(nu-2.)*Sig 103 | np.testing.assert_almost_equal( 104 | cov, nu/(nu-2.)*Sig, 2, "multivariate_t_density has wrong covariance") 105 | 106 | 107 | # test that we can evaluate multiple probabilities in parallel 108 | xy1 = np.r_[0.0, 0.1] 109 | xy2 = np.r_[0.2, 0.3] 110 | pr1 = [multivariate_t_density(nu, mu, Sig, xy1), 111 | multivariate_t_density(nu, mu, Sig, xy2)] 112 | xys = np.vstack([xy1, xy2]) 113 | pr2 = multivariate_t_density(nu, mu, Sig, xys) 114 | np.testing.assert_array_almost_equal(pr1, pr2, 15, "multivariate_t_density does not vectorize correctly") 115 | 116 | # And a harder, higher dimensional case... 117 | xys = np.arange(24.0).reshape(4, 3, 2) 118 | prs = multivariate_t_density(nu, mu, Sig, xys) 119 | assert prs.shape == (4, 3) 120 | for (i, j), pr in np.ndenumerate(prs): 121 | np.testing.assert_array_almost_equal( 122 | pr, multivariate_t_density(nu, mu, Sig, xys[i, j]), 15, 123 | "multivariate_t_density does not vectorize correctly") 124 | 125 | 126 | if __name__ == "__main__": 127 | from argparse import ArgumentParser 128 | parser = ArgumentParser() 129 | parser.add_argument('--full', action='store_true', help="Run full test suite (slow).") 130 | args = parser.parse_args() 131 | 132 | test_scaled_IX_density() 133 | test_t_density() 134 | test_multivariate_t_density(args.full) 135 | -------------------------------------------------------------------------------- /dpmm/dpmm.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from utils import pick_discrete 3 | from data import PseudoMarginalData, NullManip 4 | 5 | 6 | class DPMM(object): 7 | 
"""Dirichlet Process Mixture Model. Using algorithm 2 from Neal (2000). 8 | 9 | @param prior The prior object for whatever model is being inferred. 10 | @param alpha DP concentration parameter. 11 | @param D Data. 12 | @param manip A data manipulator. Used for unshearing, for example. 13 | @param phi Optional initial state for each cluster. 14 | @param label Optional initial cluster labels for each data point. 15 | """ 16 | def __init__(self, prior, alpha, D, manip=None, phi=None, label=None): 17 | self.prior = prior 18 | self.alpha = alpha 19 | self._D = D # data 20 | if manip is None: 21 | manip = NullManip() 22 | self.manip = manip 23 | 24 | self._initD() 25 | self.manip.init(self.D) 26 | 27 | self.n = len(self.D) 28 | 29 | # Initialize r_i array 30 | # This is Neal (2000) equation (3.4) without the b factor. 31 | self.p = self.alpha * self.prior.pred(self.mD)[:, np.newaxis] 32 | 33 | if phi is None: 34 | self.init_phi() 35 | else: 36 | self.phi = phi 37 | self.label = label 38 | self.nphi = [np.sum(label == i) for i in xrange(label.max())] 39 | 40 | def init_phi(self): 41 | self.label = np.zeros((self.n), dtype=int) 42 | self.phi = [] 43 | self.nphi = [] 44 | for i in xrange(self.n): 45 | self.update_c_i(i) 46 | self.update_phi() 47 | 48 | @property 49 | def mD(self): 50 | if self.manip_needs_update: 51 | self._mD = self.manip(self.D) 52 | self.manip_needs_update = False 53 | return self._mD 54 | 55 | def _initD(self): 56 | """Initialize latent data vector.""" 57 | if isinstance(self._D, PseudoMarginalData): 58 | self.D = np.mean(self._D.data, axis=1) 59 | else: 60 | self.D = self._D 61 | self.manip_needs_update = True 62 | 63 | def draw_new_label(self, i): 64 | # This is essentially Neal (2000) equation (3.6) 65 | # Note that the p probabilities are unnormalized here, but pick_discrete will rescale them 66 | # so that the total probability is 1.0. This normalization also captures the factors of 67 | # b/(n-1+alpha) in Neal (2000). 
68 | # -1 is sentinel for "make a new cluster" 69 | picked = pick_discrete(self.p[i]*np.append([1], self.nphi)) - 1 70 | return picked 71 | 72 | def del_c_i(self, i): 73 | """De-associate the ith data point from its cluster.""" 74 | label = self.label[i] 75 | # We're about to assign this point to a new cluster, so decrement current cluster count. 76 | self.nphi[label] -= 1 77 | # If we just deleted the last cluster member, then delete the cluster from self.phi 78 | if self.nphi[label] == 0: 79 | del self.phi[label] 80 | del self.nphi[label] 81 | # Need to decrement label numbers for labels greater than the one deleted... 82 | self.label[self.label >= label] -= 1 83 | # And remove the corresponding probability column 84 | self.p = np.delete(self.p, label+1, axis=1) 85 | 86 | def update_c_i(self, i): 87 | # for deduplication 88 | # Neal (2000) equation 3.6. See draw_new_label above. 89 | label = self.draw_new_label(i) 90 | # If we selected to create a new cluster, then draw parameters for that cluster. 91 | if label == -1: 92 | new_phi = self.prior.post(self.mD[i]).sample() 93 | self.phi.append(new_phi) 94 | self.nphi.append(1) 95 | self.label[i] = len(self.phi)-1 96 | # Also need to add probabilities for this new phi for gals between i+1 and n. 97 | self.p = np.append(self.p, np.zeros((self.n, 1), dtype=float), axis=1) 98 | self.p[i+1:, -1] = self.prior.like1(self.mD[i+1:], new_phi) 99 | else: # Otherwise just increment the count for the cloned cluster. 100 | self.label[i] = label 101 | self.nphi[label] += 1 102 | 103 | def update_c(self): 104 | # This is the first bullet for Neal (2000) algorithm 2, updating the labels for each data 105 | # point and potentially deleting clusters that are no longer populated or creating new 106 | # clusters with probability proportional to self.alpha. 
107 | for i in xrange(self.n): 108 | self.del_c_i(i) 109 | self.update_c_i(i) 110 | 111 | def update_phi(self): 112 | # This is the second bullet for Neal (2000) algorithm 2, updating the parameters phi of each 113 | # cluster conditional on that cluster's currently associated data members. 114 | tot = 0 115 | for i in xrange(len(self.phi)): 116 | index = self.label == i 117 | tot += sum(index) 118 | data = self.mD[index] # nonzero needed? 119 | new_phi = self.prior.post(data).sample() 120 | self.phi[i] = new_phi 121 | self.p[:, 1:] = self.prior.like1(self.mD[:, np.newaxis], np.array(self.phi)) 122 | 123 | def update_latent_data(self): 124 | # Update the latent "true" data in the case that the data is represented by a 125 | # Pseudo-marginal samples or (TBD) means and Gaussian errors. 126 | if isinstance(self._D, PseudoMarginalData): 127 | for i, ph in enumerate(self.phi): 128 | index = np.nonzero(self.label == i)[0] 129 | data = self._D[index] # a PseudoMarginalData instance 130 | # calculate weights for selecting a representative sample 131 | ps = self.prior.like1(self.manip(data.data), ph) / data.interim_prior 132 | ps /= np.sum(ps, axis=1)[:, np.newaxis] # think this line can go. 133 | for j, p in enumerate(ps): 134 | self.D[index[j]] = data.data[j, pick_discrete(p)] 135 | # Need to update the r_i probabilities too since self.D changed. 136 | # self.r_i = self.alpha * self.prior.pred(self.mD) 137 | self.p[:, 0] = self.alpha * self.prior.pred(self.mD) 138 | self.manip_needs_update = True 139 | else: 140 | pass # If data is already a numpy array, there's nothing to update. 141 | 142 | def update(self, n=1): 143 | # Neal (2000) algorithm 2. 144 | for j in xrange(n): 145 | self.update_c() 146 | self.update_latent_data() 147 | self.update_phi() 148 | # Give manip.update() the *unmanipulated* data. 
def unshear(D, g):
    """Apply the inverse shear transform e_int = (e_obs - g) / (1 - g* e_obs).

    D holds the real and imaginary parts of the observed ellipticity in its
    last axis; g is a length-2 array with the real and imaginary parts of the
    reduced shear.  Returns an array shaped like D.
    """
    e1, e2 = D[..., 0], D[..., 1]  # real and imag parts of the data.
    g1, g2 = g[0], g[1]  # real and imag parts of the shear.
    # Numerator (e - g) and denominator (1 - conj(g)*e) as complex pairs.
    num_re, num_im = e1 - g1, e2 - g2
    den_re, den_im = (1.0 - g1*e1 - g2*e2), g2*e1 - g1*e2
    # (a + bi)/(c + di) = ((ac + bd) + i(bc - ad)) / (c^2 + d^2)
    norm = den_re**2 + den_im**2
    result = np.empty_like(D)
    result[..., 0] = (num_re*den_re + num_im*den_im)/norm
    result[..., 1] = (num_im*den_re - num_re*den_im)/norm
    return result


def draw_g_1d_weak_shear(D, phi, label):
    """Gibbs-draw the 1D shear g given per-cluster Gaussian variances.

    D here is the *unmanipulated* data; phi[k] is the variance of cluster k
    (can we make this more generic?).

    In the weak shear limit \Prod Pr(g | e_int_i, sigma_e_i) is a product of
    Gaussians, so g can be Gibbs-updated exactly.  Away from that limit this is
    still a reasonable proposal generator.
    """
    # Accumulate in the canonical (precision, precision*mean) representation.
    precision = 0.0
    precision_mean = 0.0
    for k, cluster_var in enumerate(phi):
        members = np.nonzero(label == k)
        precision += len(members[0])/cluster_var
        precision_mean += np.sum(D[members]/cluster_var)
    post_var = 1./precision
    post_mean = precision_mean*post_var
    return np.random.normal(loc=post_mean, scale=np.sqrt(post_var))


def draw_g_2d_weak_shear(D, phi, label):
    """Gibbs-draw the 2D shear g given per-cluster Gaussian variances.

    D here is the *unmanipulated* data; phi[k] is the variance of cluster k.
    This is a bit weird since the shear is 2D: the implied covariance is
    var*np.eye(2) ("tied" in scikit-learn lingo).

    In the weak shear limit \Prod Pr(g | e_int_i, sigma_e_i) is a product of
    Gaussians, so g can be Gibbs-updated exactly; otherwise this still makes a
    reasonable proposal.
    """
    # Accumulate in the canonical (precision, precision*mean) representation.
    precision = 0.0
    precision_mean = 0.0
    for k, cluster_var in enumerate(phi):
        members = np.nonzero(label == k)
        precision += len(members[0])/cluster_var
        precision_mean += np.sum(D[members]/cluster_var, axis=0)
    post_var = 1./precision
    post_mean = precision_mean*post_var
    return np.random.multivariate_normal(mean=post_mean, cov=post_var*np.eye(2))


class Linear1DShear(object):
    """Data manipulator for 1-dimensional shear in the weak shear limit.
    I.e., manipulates data given shear via e_obs = e_int + g.
    """
    def __init__(self, g):
        self.g = g

    def init(self, D):
        """A quick and dirty estimate of g is just the average over D."""
        self.g = np.mean(D)

    def __call__(self, D):
        """Return the manipulated data, i.e., the current estimate for the unsheared ellipticity."""
        return D - self.g

    def unmanip(self, D):
        """Reverse transformation from __call__."""
        return D + self.g

    def update(self, D, phi, label, prior):
        """Gibbs-update the shear estimate from the *unmanipulated* data D,
        assuming each phi is the variance of a Gaussian cluster."""
        self.g = draw_g_1d_weak_shear(D, phi, label)
class WeakShear(object):
    """Data manipulator for 2-dimensional shear in the weak shear limit.
    I.e., manipulates data given shear via e_obs = e_int + g.
    """
    def __init__(self, g):
        # g is a length-2 array: real and imag parts of the complex reduced shear.
        self.g = g

    def init(self, D):
        """A quick and dirty estimate of g is just the average over D."""
        self.g = np.mean(D, axis=0)

    def __call__(self, D):
        """Return the manipulated data, i.e., the current estimate for the unsheared ellipticity."""
        return D - self.g

    def unmanip(self, D):
        """Reverse transformation from __call__."""
        return D + self.g

    def update(self, D, phi, label, prior):
        """Gibbs-update the shear from the *unmanipulated* data D, assuming
        each phi is the variance of a Gaussian cluster."""
        self.g = draw_g_2d_weak_shear(D, phi, label)


class Shear(object):
    """Data manipulator for 2-dimensional shear, *not* in the weak shear limit.
    I.e., manipulates data given shear via e_obs = (e_int + g) / (1 + g* e_int).
    """
    def __init__(self, g):
        # g is a length-2 array: real and imag parts of the complex reduced shear.
        self.g = g
        self.Nproposals = 0
        self.Nacceptances = 0

    def init(self, D):
        """A quick and dirty estimate of g is just the average over D."""
        self.g = np.mean(D, axis=0)

    def __call__(self, D):
        """Return the manipulated data, i.e., the current estimate for the unsheared ellipticity."""
        return unshear(D, self.g)

    def unmanip(self, D):
        """Reverse transformation from __call__."""
        return unshear(D, -self.g)

    def update(self, D, phi, label, prior):
        """Metropolis-Hastings update of the shear from the *unmanipulated*
        data D, assuming each phi is the variance of a Gaussian cluster.

        Pr(g | D, phi, label) is complicated, so a small isotropic Gaussian
        step is proposed and accepted/rejected.  (A weak-shear-limit Gibbs
        proposal was tried, but yields essentially no acceptances when ngal is
        large -- something more clever is still needed.)
        """
        # prop_g = draw_g_2d_weak_shear(D, phi, label)
        candidate = np.random.multivariate_normal(mean=self.g, cov=np.eye(2)*0.003**2)

        e_int_cur = unshear(D, self.g)
        e_int_cand = unshear(D, candidate)
        lnlike_cur = 0.0
        lnlike_cand = 0.0
        for k, cluster_var in enumerate(phi):
            members = label == k
            lnlike_cur += prior.lnlikelihood(e_int_cur[members], cluster_var)
            lnlike_cand += prior.lnlikelihood(e_int_cand[members], cluster_var)
        # Accept outright on improvement; otherwise accept with probability
        # exp(delta lnlike).  The uniform variate is drawn only in the second
        # case, which preserves the original RNG stream exactly.
        accept = lnlike_cand > lnlike_cur
        if not accept:
            accept = np.random.uniform() < np.exp(lnlike_cand - lnlike_cur)
        if accept:
            self.g = candidate
            self.Nacceptances += 1
        self.Nproposals += 1
31 | 32 | 33 | @skip 34 | @timer 35 | def test_InvGamma(): 36 | alpha = 1.4 37 | beta = 1.3 38 | mu = 1.2 39 | model = dpmm.InvGamma(alpha, beta, mu) 40 | 41 | samples = model.sample(size=1000) 42 | xlim = np.percentile(samples, [0.0, 95.0]) 43 | 44 | f = plt.figure(figsize=(5, 3)) 45 | ax = f.add_subplot(111) 46 | ax.hist(samples, bins=30, range=xlim, normed=True, alpha=0.5, color='k') 47 | ax.set_xlim(xlim) 48 | x = np.linspace(xlim[0], xlim[1], 100) 49 | y = model(x) 50 | ax.plot(x, y, c='k', lw=3) 51 | ax.set_xlabel("$\sigma^2$") 52 | ax.set_ylabel("Pr($\sigma^2$)") 53 | f.tight_layout() 54 | ax.set_title("InvGamma") 55 | f.savefig("plots/InvGamma_samples.png") 56 | 57 | 58 | @skip 59 | @timer 60 | def test_NormInvChi2(): 61 | mu_0 = 1.5 62 | kappa_0 = 2.3 63 | sigsqr_0 = 0.24 64 | nu_0 = 2 65 | model = dpmm.NormInvChi2(mu_0, kappa_0, sigsqr_0, nu_0) 66 | 67 | samples = model.sample(size=1000) 68 | mu_samples = np.array([s[0] for s in samples]) 69 | var_samples = np.array([s[1] for s in samples]) 70 | 71 | xlim = np.percentile(mu_samples, [2.5, 97.5]) 72 | f = plt.figure(figsize=(5, 3)) 73 | ax = f.add_subplot(111) 74 | ax.hist(mu_samples, bins=30, range=xlim, normed=True, alpha=0.5, color='k') 75 | ax.set_xlim(xlim) 76 | x = np.linspace(xlim[0], xlim[1], 100) 77 | y = model.marginal_mu(x) 78 | ax.plot(x, y, c='k', lw=3) 79 | ax.set_xlabel("$\mu$") 80 | ax.set_ylabel("Pr($\mu$)") 81 | f.tight_layout() 82 | ax.set_title("NormInvChi2") 83 | f.savefig("plots/NormInvChi2_mu_samples.png") 84 | 85 | xlim = np.percentile(var_samples, [0.0, 95.0]) 86 | f = plt.figure(figsize=(5, 3)) 87 | ax = f.add_subplot(111) 88 | ax.hist(var_samples, bins=30, range=xlim, normed=True, alpha=0.5, color='k') 89 | ax.set_xlim(xlim) 90 | x = np.linspace(xlim[0], xlim[1], 100) 91 | y = model.marginal_var(x) 92 | ax.plot(x, y, c='k', lw=3) 93 | ax.set_xlabel("$\sigma^2$") 94 | ax.set_ylabel("Pr($\sigma^2$)") 95 | f.tight_layout() 96 | ax.set_title("NormInvChi2") 97 | 
f.savefig("plots/NormInvChi2_var_samples.png") 98 | 99 | @skip 100 | @timer 101 | def test_NormInvGamma(): 102 | mu_0 = 1.5 103 | V_0 = 1.2 104 | a_0 = 1.24 105 | b_0 = 1.1 106 | model = dpmm.NormInvGamma(mu_0, V_0, a_0, b_0) 107 | 108 | samples = model.sample(size=1000) 109 | mu_samples = np.array([s[0] for s in samples]) 110 | var_samples = np.array([s[1] for s in samples]) 111 | 112 | xlim = np.percentile(mu_samples, [2.5, 97.5]) 113 | f = plt.figure(figsize=(5, 3)) 114 | ax = f.add_subplot(111) 115 | ax.hist(mu_samples, bins=30, range=xlim, normed=True, alpha=0.5, color='k') 116 | ax.set_xlim(xlim) 117 | x = np.linspace(xlim[0], xlim[1], 100) 118 | y = model.marginal_mu(x) 119 | ax.plot(x, y, c='k', lw=3) 120 | ax.set_xlabel("$\mu$") 121 | ax.set_ylabel("Pr($\mu$)") 122 | f.tight_layout() 123 | ax.set_title("NormInvGamma") 124 | f.savefig("plots/NormInvGamma_mu_samples.png") 125 | 126 | xlim = np.percentile(var_samples, [0.0, 95.0]) 127 | f = plt.figure(figsize=(5, 3)) 128 | ax = f.add_subplot(111) 129 | ax.hist(var_samples, bins=30, range=xlim, normed=True, alpha=0.5, color='k') 130 | ax.set_xlim(xlim) 131 | x = np.linspace(xlim[0], xlim[1], 100) 132 | y = model.marginal_var(x) 133 | ax.plot(x, y, c='k', lw=3) 134 | ax.set_xlabel("$\sigma^2$") 135 | ax.set_ylabel("Pr($\sigma^2$)") 136 | f.tight_layout() 137 | ax.set_title("NormInvGamma") 138 | f.savefig("plots/NormInvGamma_var_samples.png") 139 | 140 | 141 | @skip 142 | @timer 143 | def test_NormInvWish(): 144 | mu_0 = np.r_[0.3, -0.2] 145 | d = len(mu_0) 146 | Lam_0 = np.linalg.inv(np.array([[2, 1.1], [1.1, 1.2]])) 147 | kappa_0 = 2.1 148 | nu_0 = 8 149 | 150 | model = dpmm.NormInvWish(mu_0, kappa_0, Lam_0, nu_0) 151 | 152 | # First check some numerics 153 | Nsample = 5000 154 | samples = model.sample(size=Nsample) 155 | mu_samples = [s[0] for s in samples] 156 | cov_samples = [s[1] for s in samples] 157 | 158 | mean = np.mean(mu_samples, axis=0) 159 | std = np.std(mu_samples, axis=0)/np.sqrt(Nsample) 160 | 
print "NormInvWish mu_0 = {}".format(mu_0) 161 | print "NormInvWish E(mu) = {} +/- {}".format(mean, std) 162 | 163 | mean_cov = np.mean(cov_samples, axis=0) 164 | std_cov = np.std(cov_samples, axis=0)/np.sqrt(Nsample) 165 | print "NormInvWish (Lam_0)^(-1)/(nu_0-d-1) = \n{}".format(np.linalg.inv(Lam_0)/(nu_0-d-1)) 166 | print "NormInvWish E(Sig) = \n{}\n +/-\n{}".format(mean_cov, std_cov) 167 | 168 | # Now try some plots with different values of kappa_0 and nu_0 169 | f = plt.figure(figsize=(7, 7)) 170 | for i, (kappa_0, nu_0) in enumerate(zip([0.4, 0.4, 6.5, 6.5], 171 | [10, 4, 10, 4])): 172 | model = dpmm.NormInvWish(mu_0, kappa_0, Lam_0, nu_0) 173 | samples = model.sample(size=25) 174 | ax = f.add_subplot(2, 2, i+1) 175 | for sample in samples: 176 | mu, Sig = sample 177 | plot_ellipse(mu, Sig, ax=ax, facecolor='none', edgecolor='k', alpha=0.2) 178 | plot_ellipse(mu_0, np.linalg.inv(Lam_0)/(nu_0-d-1), ax=ax, facecolor='none', edgecolor='r') 179 | ax.set_xlim(-3, 3) 180 | ax.set_ylim(-3, 3) 181 | ax.axvline(mu_0[0], c='r', alpha=0.1) 182 | ax.axhline(mu_0[1], c='r', alpha=0.1) 183 | ax.set_title(r"$\kappa_0$={}, $\nu_0$={}".format(kappa_0, nu_0)) 184 | print np.mean([s[1] for s in samples], axis=0) 185 | f.savefig("plots/NormInvWish_samples.png") 186 | 187 | 188 | @skip 189 | @timer 190 | def test_random_wish(): 191 | dof = 3 192 | S = np.array([[1.0, 0.25], [0.25, 0.5]]) 193 | Nsamples = 5000 194 | samples = random_wish(dof, S, size=Nsamples) 195 | mean = np.mean(samples, axis=0) 196 | std = np.std(samples, axis=0)/np.sqrt(Nsamples) 197 | 198 | print "E(wish) = \n{}".format(dof * S) 199 | print " = \n{}\n +/-\n{}".format(mean, std) 200 | 201 | 202 | @skip 203 | @timer 204 | def test_random_invwish(): 205 | dof = 6 206 | d = 2 207 | S = np.array([[1.0, 0.25], [0.25, 0.5]]) 208 | invS = np.linalg.inv(S) 209 | Nsamples = 5000 210 | samples = random_invwish(dof, invS, size=Nsamples) 211 | mean = np.mean(samples, axis=0) 212 | std = np.std(samples, 
axis=0)/np.sqrt(Nsamples) 213 | 214 | print "E(invwish) = \n{}".format(S/(dof-d-1)) 215 | print " = \n{}\n +/-\n{}".format(mean, std) 216 | 217 | 218 | @skip 219 | @timer 220 | def test_ellipse_plotter(): 221 | f = plt.figure(figsize=(7, 7)) 222 | for i, Sig in enumerate([np.array([[1.0, 0.0], [0.0, 0.25]]), 223 | np.array([[0.25, 0.0], [0.0, 1.0]]), 224 | np.array([[1.0, 0.8], [0.8, 1.0]]), 225 | np.array([[1.0, -0.8], [-0.8, 1.0]])]): 226 | ax = f.add_subplot(2, 2, i+1) 227 | ax.set_xlim(-1, 1) 228 | ax.set_ylim(-1, 1) 229 | plot_ellipse([0., 0.], Sig) 230 | ax.set_title("$\Sigma$={}".format(Sig)) 231 | f.tight_layout() 232 | f.savefig("plots/ellipse.png") 233 | 234 | if __name__ == "__main__": 235 | test_GaussianMeanKnownVariance() 236 | test_InvGamma() 237 | test_NormInvChi2() 238 | test_NormInvGamma() 239 | test_NormInvWish() 240 | test_random_wish() 241 | test_random_invwish() 242 | test_ellipse_plotter() 243 | -------------------------------------------------------------------------------- /dpmm/prior.py: -------------------------------------------------------------------------------- 1 | # Equation numbers refer to Kevin Murphy's "Conjugate Bayesian analysis of the Gaussian 2 | # distribution" note unless otherwise specified. 3 | 4 | from operator import mul 5 | import numpy as np 6 | from scipy.special import gamma 7 | from utils import gammad, random_invwish 8 | from density import multivariate_t_density, t_density, normal_density, scaled_IX_density 9 | 10 | 11 | class Prior(object): 12 | """ 13 | In general, `Prior` object models represent the proabilistic graphical model: 14 | psi -> theta -> [x] 15 | where psi are hyperparameters of a prior, 16 | theta are parameters of the model being constrained, 17 | and [x] are data. 
class Prior(object):
    """Base class for prior models over parameters of a generative distribution.

    A `Prior` represents the probabilistic graphical model

        psi -> theta -> [x]

    where psi are hyperparameters of a prior, theta are parameters of the model
    being constrained, and [x] are data.

    For example, for an InvGamma model the data (x) are scalar samples, the
    model infers the variance (theta) of a generative Gaussian with known mean,
    and alpha, beta (psi) are hyperparameters of the prior on that variance.

    Distributions defined for each `Prior` object:

    Likelihood:  Pr([x] | theta).  Note [x] is conditionally independent of
                 psi given theta.
    Prior:       Pr(theta | psi).
    Posterior:   Pr(theta | x, psi).  By Bayes theorem, equal to
                 Pr(x | theta) Pr(theta | psi) / Pr(x | psi).
    Predictive (or evidence):
                 Pr(x | psi) = \int Pr(x | theta) Pr(theta | psi) d(theta).
    """
    def __init__(self, post=None, *args, **kwargs):
        # Conjugate by default: the posterior takes the same functional form
        # as the prior, so reuse this class unless told otherwise.
        self._post = type(self) if post is None else post

    def sample(self, size=None):
        """Return one or more samples of the model parameters from prior distribution."""
        raise NotImplementedError

    def like1(self, x, *args, **kwargs):
        """Return likelihood for a single data element, Pr(x | theta); this is
        conditionally independent of the hyperparameters psi.  Vectorizes over
        multiple data elements."""
        raise NotImplementedError

    def likelihood(self, D, *args, **kwargs):
        """Returns Pr(D | theta).  Does not broadcast over theta!"""
        # Overriding this per-model is quite likely to be faster than this
        # generic product of per-point likelihoods.
        return np.prod(self.like1(D, *args, **kwargs))

    def lnlikelihood(self, D, *args, **kwargs):
        """Returns ln(Pr(D | theta)).  Does not broadcast over theta!"""
        return np.log(self.likelihood(D, *args, **kwargs))

    def __call__(self, *args):
        """Returns Pr(theta | psi), i.e. the prior probability."""
        raise NotImplementedError

    def _post_params(self, D):
        """Returns updated hyperparameters psi' for the prior->posterior
        transition; suitable for passing to the constructor."""
        raise NotImplementedError

    def post(self, D):
        """Returns a new Prior object with hyperparameters updated by the data
        D, i.e. the posterior."""
        return self._post(*self._post_params(D))

    def pred(self, x):
        """Prior predictive.  Pr(x | psi).  Integrates out theta."""
        raise NotImplementedError


class GaussianMeanKnownVariance(Prior):
    """Model univariate Gaussian with known variance and unknown mean.

    Model parameters
    ----------------
    mu : mean

    Prior parameters
    ----------------
    mu_0 : prior mean
    sigsqr_0 : prior variance

    Fixed parameters
    ----------------
    sigsqr : Known variance.  Treated as a prior parameter so that __init__()
             composes with _post_params(), post(), etc., though it never
             actually gets updated.
    """
    def __init__(self, mu_0, sigsqr_0, sigsqr):
        self.mu_0 = mu_0
        self.sigsqr_0 = sigsqr_0
        self.sigsqr = sigsqr
        # Precomputed Gaussian normalizations for like1 and __call__.
        self._norm1 = np.sqrt(2*np.pi*self.sigsqr)
        self._norm2 = np.sqrt(2*np.pi*self.sigsqr_0)
        super(GaussianMeanKnownVariance, self).__init__()

    def sample(self, size=None):
        """Return a sample `mu` or samples [mu1, mu2, ...] from distribution."""
        if size is None:
            return np.random.normal(self.mu_0, np.sqrt(self.sigsqr_0))
        return np.random.normal(self.mu_0, np.sqrt(self.sigsqr_0), size=size)

    def like1(self, x, mu):
        """Returns likelihood Pr(x | mu), for a single data point."""
        return np.exp(-0.5*(x-mu)**2/self.sigsqr) / self._norm1

    def __call__(self, mu):
        """Returns Pr(mu), i.e., the prior."""
        return np.exp(-0.5*(mu-self.mu_0)**2/self.sigsqr_0) / self._norm2

    def _post_params(self, D):
        """Recall D is [NOBS]."""
        try:
            n = len(D)
        except TypeError:
            n = 1
        Dbar = np.mean(D)
        sigsqr_n = 1./(n/self.sigsqr + 1./self.sigsqr_0)
        mu_n = sigsqr_n * (self.mu_0/self.sigsqr_0 + n*Dbar/self.sigsqr)
        return mu_n, sigsqr_n, self.sigsqr

    def pred(self, x):
        """Prior predictive.  Pr(x)"""
        sigsqr = self.sigsqr + self.sigsqr_0
        return np.exp(-0.5*(x-self.mu_0)**2/sigsqr) / np.sqrt(2*np.pi*sigsqr)

    # FIXME: an evidence() (fully marginalized likelihood) implementation is
    # still pending a correct derivation.
110 | """ 111 | return np.exp(-0.5*(x-mu)**2/self.sigsqr) / self._norm1 112 | 113 | def __call__(self, mu): 114 | """Returns Pr(mu), i.e., the prior.""" 115 | return np.exp(-0.5*(mu-self.mu_0)**2/self.sigsqr_0) / self._norm2 116 | 117 | def _post_params(self, D): 118 | """Recall D is [NOBS].""" 119 | try: 120 | n = len(D) 121 | except TypeError: 122 | n = 1 123 | Dbar = np.mean(D) 124 | sigsqr_n = 1./(n/self.sigsqr + 1./self.sigsqr_0) 125 | mu_n = sigsqr_n * (self.mu_0/self.sigsqr_0 + n*Dbar/self.sigsqr) 126 | return mu_n, sigsqr_n, self.sigsqr 127 | 128 | def pred(self, x): 129 | """Prior predictive. Pr(x)""" 130 | sigsqr = self.sigsqr + self.sigsqr_0 131 | return np.exp(-0.5*(x-self.mu_0)**2/sigsqr) / np.sqrt(2*np.pi*sigsqr) 132 | 133 | # FIXME! 134 | # def evidence(self, D): 135 | # """Fully marginalized likelihood Pr(D)""" 136 | # try: 137 | # n = len(D) 138 | # except: 139 | # n = 1 140 | # # import ipdb; ipdb.set_trace() 141 | # D = np.array(D) 142 | # Dbar = np.sum(D) 143 | # num = np.sqrt(self.sigsqr) 144 | # den = (2*np.pi*self.sigsqr)**(n/2.0)*np.sqrt(n*self.sigsqr_0+self.sigsqr) 145 | # exponent = -np.sum(D**2)/(2.0*self.sigsqr) - self.mu_0/(2.0*self.sigsqr_0) 146 | # expnum = self.sigsqr_0*n**2*Dbar**2/self.sigsqr + self.sigsqr*self.mu_0**2/self.sigsqr_0 147 | # expnum += 2.0*n*Dbar*self.mu_0 148 | # expden = 2.0*(n*self.sigsqr_0+self.sigsqr) 149 | # return num/den*np.exp(exponent+expnum/expden) 150 | 151 | 152 | class InvGamma(Prior): 153 | """Inverse Gamma distribution. 
Note this parameterization matches Murphy's, not wikipedia's.""" 154 | def __init__(self, alpha, beta, mu): 155 | self.alpha = alpha 156 | self.beta = beta 157 | self.mu = mu 158 | super(InvGamma, self).__init__() 159 | 160 | def sample(self, size=None): 161 | return 1./np.random.gamma(self.alpha, scale=self.beta, size=size) 162 | 163 | def like1(self, x, var): 164 | """Returns likelihood Pr(x | var), for a single data point.""" 165 | return np.exp(-0.5*(x-self.mu)**2/var) / np.sqrt(2*np.pi*var) 166 | 167 | def __call__(self, var): 168 | """Returns Pr(var), i.e., the prior density.""" 169 | al, be = self.alpha, self.beta 170 | return be**(-al)/gamma(al) * var**(-1.-al) * np.exp(-1./(be*var)) 171 | 172 | def _post_params(self, D): 173 | try: 174 | n = len(D) 175 | except TypeError: 176 | n = 1 177 | al_n = self.alpha + n/2.0 178 | be_n = 1./(1./self.beta + 0.5*np.sum((np.array(D)-self.mu)**2)) 179 | return al_n, be_n, self.mu 180 | 181 | def pred(self, x): 182 | """Prior predictive. Pr(x)""" 183 | # Careful. Use 1/beta/alpha to match Murphy, not wikipedia! 
184 | return t_density(2*self.alpha, self.mu, 1./self.beta/self.alpha, x) 185 | 186 | def evidence(self, D): 187 | """Fully marginalized likelihood Pr(D)""" 188 | raise NotImplementedError 189 | 190 | 191 | class InvGamma2D(Prior): 192 | """Inverse Gamma distribution, but for modeling 2D covariance matrices proportional to the 193 | identity matrix.""" 194 | def __init__(self, alpha, beta, mu): 195 | self.alpha = alpha 196 | self.beta = beta 197 | self.mu = np.array(mu) 198 | assert len(mu) == 2 199 | super(InvGamma2D, self).__init__() 200 | 201 | def sample(self, size=None): 202 | return 1./np.random.gamma(self.alpha, scale=self.beta, size=size) 203 | 204 | def like1(self, x, var): 205 | """Returns likelihood Pr(x | var), for a single data point.""" 206 | assert isinstance(x, np.ndarray) 207 | assert x.shape[-1] == 2 208 | return np.exp(-0.5*np.sum((x-self.mu)**2, axis=-1)/var) / (2*np.pi*var) 209 | 210 | def lnlikelihood(self, D, var): 211 | """Returns the log likelihood for data D""" 212 | return -0.5*np.sum((D-self.mu)**2)/var - D.shape[0]*np.log(2*np.pi*var) 213 | 214 | def __call__(self, var): 215 | """Returns Pr(var), i.e., the prior density.""" 216 | al, be = self.alpha, self.beta 217 | return be**(-al)/gamma(al) * var**(-1.-al) * np.exp(-1./(be*var)) 218 | 219 | def _post_params(self, D): 220 | try: 221 | n = len(D) 222 | except TypeError: 223 | n = 1 224 | al_n = self.alpha + n # it's + n/2.0 in InvGamma, but in 2D it's + n. 225 | be_n = 1./(1./self.beta + 0.5*np.sum((np.array(D)-self.mu)**2)) # Same formula for beta. 226 | return al_n, be_n, self.mu 227 | 228 | def pred(self, x): 229 | """Prior predictive. Pr(x)""" 230 | assert isinstance(x, np.ndarray) 231 | assert x.shape[-1] == 2 232 | # Generalized from InvGamma. Tested numerically. 
class NormInvChi2(Prior):
    """Normal-Inverse-Chi-Square model for univariate Gaussian with params for mean and variance.

    Model parameters
    ----------------
    mu : mean
    var : variance

    Prior parameters
    ----------------
    mu_0 : prior mean
    kappa_0 : belief in mu_0
    sigsqr_0 : prior variance
    nu_0 : belief in sigsqr_0
    """
    def __init__(self, mu_0, kappa_0, sigsqr_0, nu_0):
        self.mu_0 = float(mu_0)
        self.kappa_0 = float(kappa_0)
        self.sigsqr_0 = float(sigsqr_0)
        self.nu_0 = float(nu_0)
        super(NormInvChi2, self).__init__()

    model_dtype = np.dtype([('mu', float), ('var', float)])

    def sample(self, size=None):
        """Draw (mu, var) sample(s) from the prior."""
        if size is None:
            var = 1./np.random.chisquare(df=self.nu_0)*self.nu_0*self.sigsqr_0
            ret = np.zeros(1, dtype=self.model_dtype)
            ret['mu'] = np.random.normal(self.mu_0, np.sqrt(var/self.kappa_0))
            ret['var'] = var
            return ret[0]
        else:
            var = 1./np.random.chisquare(df=self.nu_0, size=size)*self.nu_0*self.sigsqr_0
            ret = np.zeros(size, dtype=self.model_dtype)
            # BUGFIX: previously drew N(mu_0, 1/kappa_0)*sqrt(var), which
            # wrongly scales the *location* mu_0 by sqrt(var).  The correct
            # conditional, matching the size=None branch, is
            # mu | var ~ N(mu_0, var/kappa_0).
            ret['mu'] = (self.mu_0 +
                         np.random.normal(0.0, np.sqrt(1./self.kappa_0), size=size) *
                         np.sqrt(var))
            ret['var'] = var
            return ret

    def like1(self, *args):
        """Returns likelihood Pr(x | mu, var), for a single data point.

        Accepts either (x, mu, var) or (x, theta) with theta a model_dtype record.
        """
        if len(args) == 3:
            x, mu, var = args
        elif len(args) == 2:
            x, theta = args
            mu = theta['mu']
            var = theta['var']
        return np.exp(-0.5*(x-mu)**2/var) / np.sqrt(2*np.pi*var)

    def __call__(self, *args):
        """Returns Pr(mu, var), i.e., the prior density.

        Accepts either (mu, var) or a single model_dtype record.
        """
        if len(args) == 2:
            mu, var = args
        elif len(args) == 1:
            mu = args[0]['mu']
            var = args[0]['var']
        return (normal_density(self.mu_0, var/self.kappa_0, mu) *
                scaled_IX_density(self.nu_0, self.sigsqr_0, var))

    def _post_params(self, D):
        """Return updated hyperparameters (mu_n, kappa_n, sigsqr_n, nu_n) given data D."""
        try:
            n = len(D)
        except TypeError:
            n = 1
        Dbar = np.mean(D)
        kappa_n = self.kappa_0 + n
        mu_n = (self.kappa_0*self.mu_0 + n*Dbar)/kappa_n
        nu_n = self.nu_0 + n
        sigsqr_n = ((self.nu_0*self.sigsqr_0 + np.sum((D-Dbar)**2) +
                     n*self.kappa_0/(self.kappa_0+n)*(self.mu_0-Dbar)**2)/nu_n)
        return mu_n, kappa_n, sigsqr_n, nu_n

    def pred(self, x):
        """Prior predictive.  Pr(x)"""
        return t_density(self.nu_0, self.mu_0, (1.+self.kappa_0)*self.sigsqr_0/self.kappa_0, x)

    def evidence(self, D):
        """Fully marginalized likelihood Pr(D)"""
        mu_n, kappa_n, sigsqr_n, nu_n = self._post_params(D)
        try:
            n = len(D)
        except TypeError:  # narrowed from bare except, consistent with _post_params
            n = 1
        return (gamma(nu_n/2.0)/gamma(self.nu_0/2.0) * np.sqrt(self.kappa_0/kappa_n) *
                (self.nu_0*self.sigsqr_0)**(self.nu_0/2.0) /
                (nu_n*sigsqr_n)**(nu_n/2.0) /
                np.pi**(n/2.0))

    def marginal_var(self, var):
        """Return Pr(var)"""
        return scaled_IX_density(self.nu_0, self.sigsqr_0, var)

    def marginal_mu(self, mu):
        """Return Pr(mu)"""
        return t_density(self.nu_0, self.mu_0, self.sigsqr_0/self.kappa_0, mu)
class NormInvGamma(Prior):
    """Normal-Inverse-Gamma prior for univariate Gaussian with params for mean and variance.

    Model parameters
    ----------------
    mu : mean
    var : variance

    Prior parameters
    ----------------
    m_0 : prior mean
    V_0 : scales the conditional variance of mu (i.e. mu | var ~ N(m_0, V_0*var))
    a_0, b_0 : gamma parameters (note these are a/b-like, not alpha/beta-like)
    """
    def __init__(self, m_0, V_0, a_0, b_0):
        self.m_0 = float(m_0)
        self.V_0 = float(V_0)
        self.a_0 = float(a_0)
        self.b_0 = float(b_0)
        super(NormInvGamma, self).__init__()

    model_dtype = np.dtype([('mu', float), ('var', float)])

    def sample(self, size=None):
        """Draw (mu, var) sample(s) from the prior."""
        if size is None:
            var = 1./np.random.gamma(self.a_0, scale=1./self.b_0)
            ret = np.zeros(1, dtype=self.model_dtype)
            ret['mu'] = np.random.normal(self.m_0, np.sqrt(self.V_0*var))
            ret['var'] = var
            return ret[0]
        else:
            var = 1./np.random.gamma(self.a_0, scale=1./self.b_0, size=size)
            ret = np.zeros(size, dtype=self.model_dtype)
            # BUGFIX: previously drew N(m_0, V_0)*sqrt(var), which wrongly
            # scales the *location* m_0 by sqrt(var).  The correct conditional,
            # matching the size=None branch, is mu | var ~ N(m_0, V_0*var).
            ret['mu'] = (self.m_0 +
                         np.random.normal(0.0, np.sqrt(self.V_0), size=size)*np.sqrt(var))
            ret['var'] = var
            return ret

    def like1(self, *args):
        """Returns likelihood Pr(x | mu, var), for a single data point.

        Accepts either (x, mu, var) or (x, theta) with theta a model_dtype record.
        """
        if len(args) == 3:
            x, mu, var = args
        elif len(args) == 2:
            x, theta = args
            mu = theta['mu']
            var = theta['var']
        return np.exp(-0.5*(x-mu)**2/var) / np.sqrt(2*np.pi*var)

    def __call__(self, *args):
        """Returns Pr(mu, var), i.e., the prior density.

        Accepts either (mu, var) or a single model_dtype record.
        """
        if len(args) == 1:
            mu = args[0]['mu']
            var = args[0]['var']
        elif len(args) == 2:
            mu, var = args
        normal = np.exp(-0.5*(self.m_0-mu)**2/(var*self.V_0))/np.sqrt(2*np.pi*var*self.V_0)
        ig = self.b_0**self.a_0/gamma(self.a_0)*var**(-(self.a_0+1))*np.exp(-self.b_0/var)
        return normal*ig

    def _post_params(self, D):
        """Return updated hyperparameters (m_n, V_n, a_n, b_n) given data D."""
        try:
            n = len(D)
        except TypeError:
            n = 1
        Dbar = np.mean(D)
        invV_0 = 1./self.V_0
        V_n = 1./(invV_0 + n)
        m_n = V_n*(invV_0*self.m_0 + n*Dbar)
        a_n = self.a_0 + n/2.0
        # The commented line below is from Murphy.  It doesn't pass the unit tests so I derived
        # my own formula which does.
        # b_n = self.b_0 + 0.5*(self.m_0**2*invV_0 + np.sum(Dbar**2) - m_n**2/V_n)
        b_n = self.b_0 + 0.5*(np.sum((D-Dbar)**2)+n/(1.0+n*self.V_0)*(self.m_0-Dbar)**2)
        return m_n, V_n, a_n, b_n

    def pred(self, x):
        """Prior predictive.  Pr(x)"""
        return t_density(2.0*self.a_0, self.m_0, self.b_0*(1.0+self.V_0)/self.a_0, x)

    def evidence(self, D):
        """Fully marginalized likelihood Pr(D)"""
        m_n, V_n, a_n, b_n = self._post_params(D)
        try:
            n = len(D)
        except TypeError:  # narrowed from bare except, consistent with _post_params
            n = 1
        return (np.sqrt(np.abs(V_n/self.V_0)) * (self.b_0**self.a_0)/(b_n**a_n) *
                gamma(a_n)/gamma(self.a_0) / (np.pi**(n/2.0)*2.0**(n/2.0)))

    def marginal_var(self, var):
        """Return Pr(var)"""
        # Don't have an independent source for this, so convert params to NIX and use that result.
        nu_0 = 2*self.a_0
        sigsqr_0 = 2*self.b_0/nu_0
        return scaled_IX_density(nu_0, sigsqr_0, var)

    def marginal_mu(self, mu):
        """Return Pr(mu)"""
        # Don't have an independent source for this, so convert params to NIX and use that result.
        mu_0 = self.m_0
        kappa_0 = 1./self.V_0
        nu_0 = 2*self.a_0
        sigsqr_0 = 2*self.b_0/nu_0
        return t_density(nu_0, mu_0, sigsqr_0/kappa_0, mu)
443 | 444 | Model parameters 445 | ---------------- 446 | mu : multivariate mean 447 | Sig : covariance matrix 448 | 449 | Prior parameters 450 | ---------------- 451 | mu_0 452 | kappa_0 453 | Lam_0 454 | nu_0 455 | """ 456 | def __init__(self, mu_0, kappa_0, Lam_0, nu_0): 457 | self.mu_0 = np.array(mu_0, dtype=float) 458 | self.kappa_0 = float(kappa_0) 459 | self.Lam_0 = np.array(Lam_0, dtype=float) 460 | self.nu_0 = int(nu_0) 461 | self.d = len(mu_0) 462 | self.model_dtype = np.dtype([('mu', float, self.d), ('Sig', float, (self.d, self.d))]) 463 | super(NormInvWish, self).__init__() 464 | 465 | def _S(self, D): 466 | """Scatter matrix. D is [NOBS, NDIM]. Returns [NDIM, NDIM] array.""" 467 | # Eq (244) 468 | Dbar = np.mean(D, axis=0) 469 | return np.dot((D-Dbar).T, (D-Dbar)) 470 | 471 | def sample(self, size=None): 472 | """Return a sample {mu, Sig} or list of samples [{mu_1, Sig_1}, ...] from 473 | distribution. 474 | """ 475 | Sig = random_invwish(dof=self.nu_0, invS=self.Lam_0, size=size) 476 | if size is None: 477 | ret = np.zeros(1, dtype=self.model_dtype) 478 | ret['Sig'] = Sig 479 | ret['mu'] = np.random.multivariate_normal(self.mu_0, Sig/self.kappa_0) 480 | return ret[0] 481 | else: 482 | ret = np.zeros(size, dtype=self.model_dtype) 483 | ret['Sig'] = Sig 484 | for r in ret.ravel(): 485 | r['mu'] = np.random.multivariate_normal(self.mu_0, r['Sig']/self.kappa_0) 486 | return ret 487 | 488 | def like1(self, *args): 489 | """Returns likelihood Pr(x | mu, Sig), for a single data point.""" 490 | if len(args) == 2: 491 | x, theta = args 492 | mu = theta['mu'] 493 | Sig = theta['Sig'] 494 | elif len(args) == 3: 495 | x, mu, Sig = args 496 | assert x.shape[-1] == self.d 497 | assert mu.shape[-1] == self.d 498 | assert Sig.shape[-1] == Sig.shape[-2] == self.d 499 | norm = np.sqrt((2*np.pi)**self.d * np.linalg.det(Sig)) 500 | # Tricky to make this broadcastable... 
501 | einsum = np.einsum("...i,...ij,...j", x-mu, np.linalg.inv(Sig), x-mu) 502 | return np.exp(-0.5*einsum)/norm 503 | 504 | def __call__(self, *args): 505 | """Returns Pr(mu, Sig), i.e., the prior.""" 506 | if len(args) == 1: 507 | mu = args[0]['mu'] 508 | Sig = args[0]['Sig'] 509 | elif len(args) == 2: 510 | mu, Sig = args 511 | nu_0, d = self.nu_0, self.d 512 | # Eq (249) 513 | Z = (2.0**(nu_0*d/2.0) * gammad(d, nu_0/2.0) * 514 | (2.0*np.pi/self.kappa_0)**(d/2.0) / np.linalg.det(self.Lam_0)**(nu_0/2.0)) 515 | detSig = np.linalg.det(Sig) 516 | invSig = np.linalg.inv(Sig) 517 | einsum = np.einsum("...i,...ij,...j", mu-self.mu_0, invSig, mu-self.mu_0) 518 | # Eq (248) 519 | return 1./Z * detSig**(-((nu_0+d)/2.0+1.0)) * np.exp( 520 | -0.5*np.trace(np.einsum("...ij,...jk->...ik", self.Lam_0, invSig), axis1=-2, axis2=-1) - 521 | self.kappa_0/2.0*einsum) 522 | # return 1./Z * detSig**(-((nu_0+d)/2.0+1.0)) * np.exp( 523 | # -0.5*np.trace(np.dot(self.Lam_0, invSig)) - 524 | # self.kappa_0/2.0*einsum) 525 | 526 | def _post_params(self, D): 527 | """Recall D is [NOBS, NDIM].""" 528 | shape = D.shape 529 | if len(shape) == 2: 530 | n = shape[0] 531 | Dbar = np.mean(D, axis=0) 532 | elif len(shape) == 1: 533 | n = 1 534 | Dbar = np.mean(D) 535 | # Eq (252) 536 | kappa_n = self.kappa_0 + n 537 | # Eq (253) 538 | nu_n = self.nu_0 + n 539 | # Eq (251) (note typo in original, mu+0 -> mu_0) 540 | mu_n = (self.kappa_0 * self.mu_0 + n * Dbar) / kappa_n 541 | # Eq (254) 542 | x = (Dbar-self.mu_0)[:, np.newaxis] 543 | Lam_n = (self.Lam_0 + 544 | self._S(D) + 545 | self.kappa_0*n/kappa_n*np.dot(x, x.T)) 546 | return mu_n, kappa_n, Lam_n, nu_n 547 | 548 | def pred(self, x): 549 | """Prior predictive. 
Pr(x)""" 550 | return multivariate_t_density(self.nu_0-self.d+1, self.mu_0, 551 | self.Lam_0*(self.kappa_0+1)/(self.kappa_0 - self.d + 1), x) 552 | 553 | def evidence(self, D): 554 | """Return Pr(D) = \int Pr(D | theta) Pr(theta)""" 555 | shape = D.shape 556 | if len(shape) == 2: 557 | n, d = shape 558 | elif len(shape) == 1: 559 | n, d = 1, shape[0] 560 | assert d == self.d 561 | # Eq (266) 562 | mu_n, kappa_n, Lam_n, nu_n = self._post_params(D) 563 | detLam0 = np.linalg.det(self.Lam_0) 564 | detLamn = np.linalg.det(Lam_n) 565 | num = gammad(d, nu_n/2.0) * detLam0**(self.nu_0/2.0) 566 | den = np.pi**(n*d/2.0) * gammad(d, self.nu_0/2.0) * detLamn**(nu_n/2.0) 567 | return num/den * (self.kappa_0/kappa_n)**(d/2.0) 568 | -------------------------------------------------------------------------------- /tests/test_prior.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | import numpy as np 3 | from scipy.integrate import quad, dblquad, tplquad 4 | 5 | import dpmm 6 | from test_utils import timer 7 | 8 | 9 | @timer 10 | def test_GaussianMeanKnownVariance(): 11 | mu_0 = 0.15 12 | sigsqr_0 = 1.2 13 | sigsqr = 0.15 14 | model = dpmm.GaussianMeanKnownVariance(mu_0, sigsqr_0, sigsqr) 15 | 16 | D = np.r_[1.0, 2.2, 1.1, -1.13] 17 | mus = np.r_[1.1, 2.0, 0.1] 18 | 19 | # Check prior density 20 | r = quad(model, -np.inf, np.inf) 21 | np.testing.assert_almost_equal(r[0], 1.0, 10, 22 | "GaussianMeanKnownVariance prior density does not integrate to 1.0") 23 | 24 | # Check prior predictive density 25 | r = quad(model.pred, -np.inf, np.inf) 26 | np.testing.assert_almost_equal( 27 | r[0], 1.0, 10, 28 | "GaussianMeanKnownVariance prior predictive density does not integrate to 1.0") 29 | 30 | # Check posterior density 31 | r = quad(model.post(D), -np.inf, np.inf) 32 | np.testing.assert_almost_equal( 33 | r[0], 1.0, 10, 34 | "GaussianMeanKnownVariance posterior density does not integrate to 1.0") 35 | 36 | # Check posterior 
predictive density 37 | r = quad(model.post(D).pred, -np.inf, np.inf) 38 | np.testing.assert_almost_equal( 39 | r[0], 1.0, 10, 40 | "GaussianMeanKnownVariance posterior predictive density does not integrate to 1.0") 41 | 42 | # Check that the likelihood integrates to 1. 43 | r = quad(lambda x: model.like1(x, mu=1.1), -np.inf, np.inf) 44 | np.testing.assert_almost_equal(r[0], 1.0, 10, 45 | "GaussianMeanKnownVariance likelihood does not integrate to 1.0") 46 | 47 | # # Check that evidence (of single data point) integrates to 1. 48 | # r = quad(lambda x: model.evidence(x), -np.inf, np.inf) 49 | # np.testing.assert_almost_equal(r[0], 1.0, 10, 50 | # "GaussianMeanKnownVariance evidence does not integrate to 1.0") 51 | 52 | # # Check evidence for two data points. 53 | # r = dblquad(lambda x, y: model.evidence([x, y]), 54 | # -np.inf, np.inf, 55 | # lambda x: -np.inf, lambda x: np.inf) 56 | # np.testing.assert_almost_equal(r[0], 1.0, 5, 57 | # "GaussianMeanKnownVariance evidence does not integrate to 1.0") 58 | 59 | # # Check that posterior = prior * likelihood / evidence 60 | # post = model.post(D) 61 | # post1 = [model(mu)*model.likelihood(mu, D=D) / model.evidence(D) for mu in mus] 62 | # post2 = [post(mu) for mu in mus] 63 | # np.testing.assert_array_almost_equal( 64 | # post1, post2, 10, 65 | # "GaussianMeanKnownVariance posterior != prior * likelihood / evidence") 66 | 67 | # Check that posterior is proportional to prior * likelihood 68 | # Add some more data points 69 | posts = [model.post(D)(mu) for mu in mus] 70 | posts2 = [model(mu)*model.likelihood(D, mu) for mu in mus] 71 | 72 | np.testing.assert_array_almost_equal( 73 | posts/posts[0], posts2/posts2[0], 5, 74 | "GaussianMeanKnownVariance posterior not proportional to prior * likelihood.") 75 | 76 | # Check that integrating out theta yields the prior predictive. 
77 | xs = [0.1, 0.2, 0.3, 0.4] 78 | preds1 = np.array([quad(lambda theta: model(theta) * model.like1(x, theta), -np.inf, np.inf)[0] for x in xs]) 79 | preds2 = np.array([model.pred(x) for x in xs]) 80 | 81 | np.testing.assert_array_almost_equal( 82 | preds1/preds1[0], preds2/preds2[0], 5, 83 | "Prior predictive not proportional to integral of likelihood * prior") 84 | 85 | @timer 86 | def test_InvGamma(): 87 | alpha = 1.1 88 | beta = 1.2 89 | mu = 0.1 90 | ig = dpmm.InvGamma(alpha, beta, mu) 91 | ig.sample() 92 | 93 | # Check prior density 94 | r = quad(ig, 0.0, np.inf) 95 | np.testing.assert_almost_equal(r[0], 1.0, 5, "InvGamma prior density does not integrate to 1.0") 96 | 97 | # Check prior predictive density 98 | r = quad(ig.pred, -np.inf, np.inf) 99 | np.testing.assert_almost_equal(r[0], 1.0, 10, 100 | "InvGamma prior predictive density does not integrate to 1.0") 101 | 102 | # Check posterior density 103 | D = [1.0, 2.0, 3.0] 104 | r = quad(ig.post(D), 0.0, np.inf) 105 | np.testing.assert_almost_equal(r[0], 1.0, 7, 106 | "InvGamma posterior density does not integrate to 1.0") 107 | 108 | # Check posterior predictive density 109 | r = quad(ig.post(D).pred, -np.inf, np.inf) 110 | np.testing.assert_almost_equal( 111 | r[0], 1.0, 10, "InvGamma posterior predictive density does not integrate to 1.0") 112 | 113 | # Check that the likelihood integrates to 1. 
114 | r = quad(lambda x: ig.like1(x, var=2.1), -np.inf, np.inf) 115 | np.testing.assert_almost_equal(r[0], 1.0, 10, 116 | "InvGamma likelihood does not integrate to 1.0") 117 | 118 | # Check that posterior is proportional to prior * likelihood 119 | # Add some more data points 120 | D = np.array([1.0, 2.0, 3.0, 2.2, 2.3, 1.2]) 121 | vars_ = [0.7, 1.1, 1.2, 1.5] 122 | posts = [ig.post(D)(var) for var in vars_] 123 | posts2 = [ig(var)*ig.likelihood(D, var) for var in vars_] 124 | 125 | np.testing.assert_array_almost_equal( 126 | posts/posts[0], posts2/posts2[0], 5, 127 | "InvGamma posterior not proportional to prior * likelihood.") 128 | 129 | # Check mean and variance 130 | mean = 1./beta/(alpha-1.0) 131 | np.testing.assert_almost_equal(quad(lambda x: ig(x)*x, 0.0, np.inf)[0], mean, 10, 132 | "InvGamma has wrong mean.") 133 | var = beta**(-2)/(alpha-1)**2/(alpha-2) 134 | with warnings.catch_warnings(): 135 | warnings.simplefilter('ignore') 136 | np.testing.assert_almost_equal(quad(lambda x: ig(x)*(x-mean)**2, 0.0, np.inf)[0], var, 5, 137 | "InvGamma has wrong variance.") 138 | 139 | # Check that integrating out theta yields the prior predictive. 
140 | xs = [0.1, 0.2, 0.3, 0.4] 141 | preds1 = np.array([quad(lambda theta: ig(theta) * ig.like1(x, theta), 0, np.inf)[0] for x in xs]) 142 | preds2 = np.array([ig.pred(x) for x in xs]) 143 | 144 | np.testing.assert_array_almost_equal( 145 | preds1/preds1[0], preds2/preds2[0], 5, 146 | "Prior predictive not proportional to integral of likelihood * prior") 147 | 148 | 149 | @timer 150 | def test_InvGamma2D(full=False): 151 | alpha = 1.1 152 | beta = 1.2 153 | mu = np.r_[0.1, 0.2] 154 | ig2d = dpmm.InvGamma2D(alpha, beta, mu) 155 | ig2d.sample() 156 | 157 | # Check prior density 158 | r = quad(ig2d, 0.0, np.inf) 159 | np.testing.assert_almost_equal( 160 | r[0], 1.0, 5, "InvGamma2D prior density does not integrate to 1.0") 161 | 162 | if __name__ == '__main__' and full: 163 | # Check prior predictive density 164 | r = dblquad(lambda x, y: ig2d.pred(np.r_[x, y]), 165 | -np.inf, np.inf, 166 | lambda x: -np.inf, lambda x: np.inf) 167 | np.testing.assert_almost_equal( 168 | r[0], 1.0, 5, "InvGamma2D prior predictive density does not integrate to 1.0") 169 | 170 | # Check posterior density 171 | D = np.array([[0.1, 0.2], [0.2, 0.3]]) 172 | r = quad(ig2d.post(D), 0.0, np.inf) 173 | np.testing.assert_almost_equal(r[0], 1.0, 7, 174 | "InvGamma2D posterior density does not integrate to 1.0") 175 | 176 | # Check posterior predictive density 177 | r = dblquad(lambda x, y: ig2d.post(D).pred(np.r_[x, y]), 178 | -np.inf, np.inf, 179 | lambda x: -np.inf, lambda x: np.inf) 180 | np.testing.assert_almost_equal( 181 | r[0], 1.0, 5, "InvGamma2D posterior predictive density does not integrate to 1.0") 182 | 183 | # Check that the likelihood integrates to 1. 
184 | r = dblquad(lambda x, y: ig2d.like1(np.r_[x, y], var=2.1), 185 | -np.inf, np.inf, 186 | lambda x: -np.inf, lambda x: np.inf) 187 | np.testing.assert_almost_equal(r[0], 1.0, 10, 188 | "InvGamma2D likelihood does not integrate to 1.0") 189 | 190 | # Check that posterior is proportional to prior * likelihood 191 | vars_ = [0.7, 1.1, 1.2, 1.5] 192 | posts = np.array([ig2d.post(D)(var) for var in vars_]) 193 | posts2 = np.array([ig2d(var)*ig2d.likelihood(D, var) for var in vars_]) 194 | 195 | np.testing.assert_array_almost_equal( 196 | posts/posts[0], posts2/posts2[0], 5, 197 | "InvGamma2D posterior not proportional to prior * likelihood.") 198 | 199 | # Check mean and variance 200 | mean = 1./beta/(alpha-1.0) 201 | np.testing.assert_almost_equal(quad(lambda x: ig2d(x)*x, 0.0, np.inf)[0], mean, 10, 202 | "InvGamma2D has wrong mean.") 203 | var = beta**(-2)/(alpha-1)**2/(alpha-2) 204 | with warnings.catch_warnings(): 205 | warnings.simplefilter('ignore') 206 | np.testing.assert_almost_equal(quad(lambda x: ig2d(x)*(x-mean)**2, 0.0, np.inf)[0], var, 5, 207 | "InvGamma2D has wrong variance.") 208 | 209 | # Check that integrating out theta yields the prior predictive. 
210 | xs = [np.r_[0.1, 0.2], np.r_[0.2, 0.3], np.r_[0.1, 0.3]] 211 | preds1 = np.array([quad(lambda theta: ig2d(theta) * ig2d.like1(x, theta), 0, np.inf)[0] for x in xs]) 212 | preds2 = np.array([ig2d.pred(x) for x in xs]) 213 | 214 | np.testing.assert_array_almost_equal( 215 | preds1/preds1[0], preds2/preds2[0], 5, 216 | "Prior predictive not proportional to integral of likelihood * prior") 217 | 218 | 219 | @timer 220 | def test_NormInvChi2(): 221 | mu_0 = -0.1 222 | sigsqr_0 = 1.1 223 | kappa_0 = 2 224 | nu_0 = 3 225 | 226 | nix = dpmm.NormInvChi2(mu_0, kappa_0, sigsqr_0, nu_0) 227 | 228 | D = np.r_[1.0, 2.0, 3.0] 229 | mus = np.r_[1.1, 1.2, 1.3] 230 | vars_ = np.r_[1.2, 3.2, 2.3] 231 | 232 | # Check prior density 233 | with warnings.catch_warnings(): 234 | warnings.simplefilter('ignore') 235 | r = dblquad(nix, 0.0, np.inf, lambda x: -np.inf, lambda x: np.inf) 236 | np.testing.assert_almost_equal(r[0], 1.0, 5, 237 | "NormInvChi2 prior density does not integrate to 1.0") 238 | 239 | # Check prior predictive density 240 | r = quad(nix.pred, -np.inf, np.inf) 241 | np.testing.assert_almost_equal(r[0], 1.0, 10, 242 | "NormInvChi2 prior predictive density does not integrate to 1.0") 243 | 244 | # Check posterior density 245 | r = dblquad(nix.post(D), 0.0, np.inf, lambda x: -np.inf, lambda x: np.inf) 246 | np.testing.assert_almost_equal(r[0], 1.0, 7, 247 | "NormInvChi2 posterior density does not integrate to 1.0") 248 | 249 | # Check posterior predictive density 250 | r = quad(nix.post(D).pred, -np.inf, np.inf) 251 | np.testing.assert_almost_equal( 252 | r[0], 1.0, 10, 253 | "NormInvChi2 posterior predictive density does not integrate to 1.0") 254 | 255 | # Check that the likelihood integrates to 1. 256 | r = quad(lambda x: nix.like1(x, 1.1, 2.1), -np.inf, np.inf) 257 | np.testing.assert_almost_equal(r[0], 1.0, 10, 258 | "NormInvChi2 likelihood does not integrate to 1.0") 259 | 260 | # Check that evidence (of single data point) integrates to 1. 
261 | r = quad(lambda x: nix.evidence(x), -np.inf, np.inf) 262 | np.testing.assert_almost_equal(r[0], 1.0, 10, 263 | "NormInvChi2 evidence does not integrate to 1.0") 264 | # Check evidence for two data points. 265 | r = dblquad(lambda x, y: nix.evidence([x, y]), 266 | -np.inf, np.inf, 267 | lambda x: -np.inf, lambda x: np.inf) 268 | np.testing.assert_almost_equal(r[0], 1.0, 5, 269 | "NormInvChi2 evidence does not integrate to 1.0") 270 | 271 | # Check that posterior = prior * likelihood / evidence 272 | post = nix.post(D) 273 | post1 = [nix(mu, var)*nix.likelihood(D, mu, var) / nix.evidence(D) 274 | for mu, var in zip(mus, vars_)] 275 | post2 = [post(mu, var) for mu, var in zip(mus, vars_)] 276 | np.testing.assert_array_almost_equal(post1, post2, 10, 277 | "NormInvChi2 posterior != prior * likelihood / evidence") 278 | 279 | # Test that marginal variance probability method matches integrated result. 280 | Pr_var1 = [nix.marginal_var(var) for var in vars_] 281 | Pr_var2 = [quad(lambda mu: nix(mu, var), -np.inf, np.inf)[0] for var in vars_] 282 | np.testing.assert_array_almost_equal( 283 | Pr_var1, Pr_var2, 10, 284 | "Pr(var) method calculation does not match integrated result.") 285 | 286 | # Test that marginal mean probability method matches integrated result. 287 | Pr_mu1 = [nix.marginal_mu(mu) for mu in mus] 288 | Pr_mu2 = [quad(lambda var: nix(mu, var), 0.0, np.inf)[0] for mu in mus] 289 | np.testing.assert_array_almost_equal( 290 | Pr_mu1, Pr_mu2, 10, 291 | "Pr(mu) method calculation does not match integrated result.") 292 | 293 | # Check that integrating out theta yields the prior predictive. 
294 | xs = [0.1, 0.2, 0.3, 0.4] 295 | preds1 = np.array([dblquad(lambda mu, var: nix(mu, var) * nix.like1(x, mu, var), 296 | 0, np.inf, 297 | lambda var: -np.inf, lambda var: np.inf)[0] 298 | for x in xs]) 299 | preds2 = np.array([nix.pred(x) for x in xs]) 300 | 301 | np.testing.assert_array_almost_equal( 302 | preds1/preds1[0], preds2/preds2[0], 5, 303 | "Prior predictive not proportional to integral of likelihood * prior") 304 | 305 | @timer 306 | def test_NormInvGamma(): 307 | m_0 = -0.1 308 | V_0 = 1.1 309 | a_0 = 2.0 310 | b_0 = 3.0 311 | 312 | nig = dpmm.NormInvGamma(m_0, V_0, a_0, b_0) 313 | 314 | D = np.r_[1.0, 2.0, 3.0] 315 | mus = np.r_[1.1, 1.2, 1.3] 316 | vars_ = np.r_[1.2, 3.2, 2.3] 317 | 318 | # Check prior density 319 | with warnings.catch_warnings(): 320 | warnings.simplefilter('ignore') 321 | r = dblquad(nig, 0.0, np.inf, lambda x: -np.inf, lambda x: np.inf) 322 | np.testing.assert_almost_equal(r[0], 1.0, 5, 323 | "NormInvGamma prior density does not integrate to 1.0") 324 | 325 | # Check prior predictive density 326 | r = quad(nig.pred, -np.inf, np.inf) 327 | np.testing.assert_almost_equal( 328 | r[0], 1.0, 10, 329 | "NormInvGamma prior predictive density does not integrate to 1.0") 330 | 331 | # Check posterior density 332 | r = dblquad(nig.post(D), 0.0, np.inf, lambda x: -np.inf, lambda x: np.inf) 333 | np.testing.assert_almost_equal(r[0], 1.0, 7, 334 | "NormInvGamma posterior density does not integrate to 1.0") 335 | 336 | # Check posterior predictive density 337 | r = quad(nig.post(D).pred, -np.inf, np.inf) 338 | np.testing.assert_almost_equal( 339 | r[0], 1.0, 10, 340 | "NormInvGamma posterior predictive density does not integrate to 1.0") 341 | 342 | # Check that the likelihood integrates to 1. 343 | r = quad(lambda x: nig.like1(x, 1.1, 2.1), -np.inf, np.inf) 344 | np.testing.assert_almost_equal(r[0], 1.0, 10, 345 | "NormInvGamma likelihood does not integrate to 1.0") 346 | 347 | # Check that evidence (of single data point) integrates to 1. 
348 | r = quad(lambda x: nig.evidence(x), -np.inf, np.inf) 349 | np.testing.assert_almost_equal(r[0], 1.0, 10, 350 | "NormInvGamma evidence does not integrate to 1.0") 351 | # Check evidence for two data points. 352 | r = dblquad(lambda x, y: nig.evidence([x, y]), 353 | -np.inf, np.inf, 354 | lambda x: -np.inf, lambda x: np.inf) 355 | np.testing.assert_almost_equal(r[0], 1.0, 5, 356 | "NormInvGamma evidence does not integrate to 1.0") 357 | 358 | # Check that posterior = prior * likelihood / evidence 359 | post = nig.post(D) 360 | post1 = [nig(mu, var)*nig.likelihood(D, mu, var) / nig.evidence(D) 361 | for mu, var in zip(mus, vars_)] 362 | post2 = [post(mu, var) for mu, var in zip(mus, vars_)] 363 | np.testing.assert_array_almost_equal(post1, post2, 10, 364 | "NormInvGamma posterior != prior * likelihood / evidence") 365 | 366 | # Test that marginal variance probability method matches integrated result. 367 | Pr_var1 = [nig.marginal_var(var) for var in vars_] 368 | Pr_var2 = [quad(lambda mu: nig(mu, var), -np.inf, np.inf)[0] for var in vars_] 369 | np.testing.assert_array_almost_equal( 370 | Pr_var1, Pr_var2, 10, 371 | "Pr(var) method calculation does not match integrated result.") 372 | 373 | # Test that marginal mean probability method matches integrated result. 374 | Pr_mu1 = [nig.marginal_mu(mu) for mu in mus] 375 | Pr_mu2 = [quad(lambda var: nig(mu, var), 0.0, np.inf)[0] for mu in mus] 376 | np.testing.assert_array_almost_equal( 377 | Pr_mu1, Pr_mu2, 10, 378 | "Pr(mu) method calculation does not match integrated result.") 379 | 380 | # Check that integrating out theta yields the prior predictive. 
381 | xs = [0.1, 0.2, 0.3, 0.4] 382 | preds1 = np.array([dblquad(lambda mu, var: nig(mu, var) * nig.like1(x, mu, var), 383 | 0, np.inf, 384 | lambda var: -np.inf, lambda var: np.inf)[0] 385 | for x in xs]) 386 | preds2 = np.array([nig.pred(x) for x in xs]) 387 | 388 | np.testing.assert_array_almost_equal( 389 | preds1/preds1[0], preds2/preds2[0], 5, 390 | "Prior predictive not proportional to integral of likelihood * prior") 391 | 392 | @timer 393 | def test_NormInvChi2_eq_NormInvGamma(): 394 | mu_0 = 0.1 395 | sigsqr_0 = 1.1 396 | kappa_0 = 2 397 | nu_0 = 3 398 | 399 | m_0 = mu_0 400 | V_0 = 1./kappa_0 401 | a_0 = nu_0/2.0 402 | b_0 = nu_0*sigsqr_0/2.0 403 | 404 | model1 = dpmm.NormInvChi2(mu_0, kappa_0, sigsqr_0, nu_0) 405 | model2 = dpmm.NormInvGamma(m_0, V_0, a_0, b_0) 406 | 407 | mus = np.linspace(-2.2, 2.2, 5) 408 | vars_ = np.linspace(1.0, 4.0, 5) 409 | xs = np.arange(-1.1, 1.1, 5) 410 | 411 | for x in xs: 412 | np.testing.assert_equal( 413 | model1.pred(x), model2.pred(x), 414 | "NormInvChi2 and NormInvGamma prior predictive densities don't agree at x = ".format(x)) 415 | np.testing.assert_equal( 416 | model1.post(x).pred(x), model2.post(x).pred(x), 417 | "NormInvChi2 and NormInvGamma posterior " + 418 | "predictive densities don't agree at x = {}".format(x)) 419 | 420 | for mu, var in zip(mus, vars_): 421 | np.testing.assert_almost_equal( 422 | model1(mu, var), model2(mu, var), 10, 423 | "NormInvChi2 and NormInvGamma prior densities " + 424 | "don't agree at mu, var = {}, {}".format(mu, var)) 425 | 426 | post1 = model1.post(xs) 427 | post2 = model2.post(xs) 428 | for mu, var in zip(mus, vars_): 429 | np.testing.assert_almost_equal( 430 | post1(mu, var), post2(mu, var), 10, 431 | "NormInvChi2 and NormInvGamma posterior densities " + 432 | "don't agree at mu, var = {}, {}".format(mu, var)) 433 | 434 | for mu, var, x in zip(mus, vars_, xs): 435 | np.testing.assert_almost_equal( 436 | model1.like1(x, mu, var), model2.like1(x, mu, var), 10, 437 | "NormInvChi2 
and NormInvGamma likelihoods don't " + 438 | "agree at mu, var, x = {}, {}, {}".format(mu, var, x)) 439 | 440 | np.testing.assert_almost_equal( 441 | model1.evidence(xs), model2.evidence(xs), 10, 442 | "NormInvChi2 and NormInvGamma evidences don't agree") 443 | 444 | 445 | @timer 446 | def test_NormInvWish(full=False): 447 | mu_0 = np.r_[0.2, 0.1] 448 | kappa_0 = 2.0 449 | Lam_0 = np.eye(2)+0.1 450 | nu_0 = 3 451 | 452 | # Create a Normal-Inverse-Wishart prior. 453 | niw = dpmm.NormInvWish(mu_0, kappa_0, Lam_0, nu_0) 454 | 455 | # Check that we can draw samples from NormInvWish. 456 | niw.sample() 457 | niw.sample(size=10) 458 | 459 | # Check that we can evaluate a likelihood given data. 460 | theta = np.zeros(1, dtype=niw.model_dtype) 461 | theta['mu'] = np.r_[1.0, 1.0] 462 | theta['Sig'] = np.eye(2)+0.12 463 | D = np.array([[0.1, 0.2], [0.2, 0.3], [0.1, 0.2], [0.4, 0.3]]) 464 | niw.likelihood(D, theta) 465 | 466 | # Evaluate prior 467 | niw(theta) 468 | 469 | if __name__ == "__main__" and full: 470 | # Check prior predictive density 471 | with warnings.catch_warnings(): 472 | warnings.simplefilter('ignore') 473 | r = dblquad(lambda x, y: niw.pred(np.r_[x, y]), -np.inf, np.inf, 474 | lambda x: -np.inf, lambda x: np.inf) 475 | np.testing.assert_almost_equal(r[0], 1.0, 5, 476 | "NormInvWish prior predictive density does not integrate to 1.0") 477 | 478 | # Check posterior predictive density 479 | r = dblquad(lambda x, y: niw.post(D).pred(np.r_[x, y]), -np.inf, np.inf, 480 | lambda x: -np.inf, lambda x: np.inf) 481 | np.testing.assert_almost_equal( 482 | r[0], 1.0, 5, "NormInvWish posterior predictive density does not integrate to 1.0") 483 | 484 | # Check that the likelihood of a single point in 2 dimensions integrates to 1. 
485 | r = dblquad(lambda x, y: niw.like1(np.r_[x, y], np.r_[1.2, 1.1], np.eye(2)+0.12), 486 | -np.inf, np.inf, lambda x: -np.inf, lambda x: np.inf) 487 | np.testing.assert_almost_equal(r[0], 1.0, 10, 488 | "NormInvWish likelihood does not integrate to 1.0") 489 | 490 | if __name__ == "__main__" and full: 491 | # Check that likelihood of a single point in 3 dimensions integrates to 1. 492 | niw3 = dpmm.NormInvWish(np.r_[1, 1, 1], 2.0, np.eye(3), 3) 493 | r = tplquad(lambda x, y, z: niw3.like1(np.r_[x, y, z], np.r_[0.1, 0.2, 0.3], np.eye(3)+0.1), 494 | -np.inf, np.inf, 495 | lambda x: -np.inf, lambda x: np.inf, 496 | lambda x, y: -np.inf, lambda x, y: np.inf) 497 | np.testing.assert_almost_equal(r[0], 1.0, 8, 498 | "NormInvWish likelihood does not integrate to 1.0") 499 | 500 | # Check that posterior is proportional to prior * likelihood 501 | D = np.array([[0.1, 0.2], [0.2, 0.3], [0.1, 0.2], [0.4, 0.3]]) 502 | mus = [np.r_[2.1, 1.1], np.r_[0.9, 1.2], np.r_[0.9, 1.1]] 503 | Sigs = [np.eye(2)*1.5, np.eye(2)*0.7, np.array([[1.1, -0.1], [-0.1, 1.2]])] 504 | posts = [niw.post(D)(mu, Sig) for mu, Sig in zip(mus, Sigs)] 505 | posts2 = [niw(mu, Sig)*niw.likelihood(D, mu, Sig) for mu, Sig, in zip(mus, Sigs)] 506 | 507 | np.testing.assert_array_almost_equal( 508 | posts/posts[0], posts2/posts2[0], 5, 509 | "NormInvWish posterior not proportional to prior * likelihood.") 510 | 511 | # Check that posterior = prior * likelihood / evidence 512 | mus = [np.r_[1.1, 1.1], np.r_[1.1, 1.2], np.r_[0.7, 1.3]] 513 | Sigs = [np.eye(2)*0.2, np.eye(2)*0.1, np.array([[2.1, -0.1], [-0.1, 2.2]])] 514 | post = niw.post(D) 515 | post1 = [niw(mu, Sig) * niw.likelihood(D, mu, Sig) / niw.evidence(D) 516 | for mu, Sig in zip(mus, Sigs)] 517 | post2 = [post(mu, Sig) for mu, Sig in zip(mus, Sigs)] 518 | np.testing.assert_array_almost_equal(post1, post2, 10, 519 | "NormInvWish posterior != prior * likelihood / evidence") 520 | 521 | # Would like to check that pred(x) == int prior(theta) * like1(x, 
theta) d(theta), but I don't 522 | # know how to integrate over all covariance matrices. Plus, integrating over a 2D covariance 523 | # matrix plus a 2D mean is a 5 dimensional integral, which sounds nasty to do. 524 | 525 | 526 | if __name__ == "__main__": 527 | from argparse import ArgumentParser 528 | parser = ArgumentParser() 529 | parser.add_argument('--full', action='store_true', help="Run full test suite (slow).") 530 | args = parser.parse_args() 531 | 532 | test_GaussianMeanKnownVariance() 533 | test_InvGamma() 534 | test_InvGamma2D() 535 | test_NormInvChi2() 536 | test_NormInvGamma() 537 | test_NormInvChi2_eq_NormInvGamma() 538 | test_NormInvWish(args.full) 539 | -------------------------------------------------------------------------------- /tests/test_broadcast.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import dpmm 3 | from test_utils import timer 4 | 5 | @timer 6 | def test_GaussianMeanKnownVariance(): 7 | """Test broadcasting rules for GaussianMeanKnownVariance prior.""" 8 | 9 | # Test sample() method: 10 | prior = dpmm.GaussianMeanKnownVariance(1.0, 1.0, 1.0) 11 | arr = prior.sample() 12 | assert isinstance(arr, float) 13 | 14 | arr = prior.sample(size=1) 15 | assert isinstance(arr, np.ndarray) 16 | assert arr.shape == (1,) 17 | assert arr.dtype == float 18 | 19 | arr = prior.sample(size=(1,)) 20 | assert isinstance(arr, np.ndarray) 21 | assert arr.shape == (1,) 22 | assert arr.dtype == float 23 | 24 | arr = prior.sample(size=10) 25 | assert isinstance(arr, np.ndarray) 26 | assert arr.shape == (10,) 27 | assert arr.dtype == float 28 | 29 | arr = prior.sample(size=(10, 20)) 30 | assert isinstance(arr, np.ndarray) 31 | assert arr.shape == (10, 20) 32 | assert arr.dtype == float 33 | 34 | # Test like1() method: 35 | prior = dpmm.GaussianMeanKnownVariance(1.0, 1.0, 1.0) 36 | x = 1.0 37 | mu = 1.0 38 | arr = prior.like1(x, mu) 39 | assert isinstance(arr, float) 40 | 41 | x = 
np.array([1.0]) 42 | arr = prior.like1(x, mu) 43 | assert isinstance(arr, np.ndarray) 44 | assert arr.shape == (1,) 45 | assert arr.dtype == float 46 | assert arr[0] == prior.like1(x[0], mu) 47 | 48 | x = np.array([1.0, 2.0]) 49 | arr = prior.like1(x, mu) 50 | assert isinstance(arr, np.ndarray) 51 | assert arr.shape == (2,) 52 | assert arr.dtype == float 53 | for i, r in np.ndenumerate(arr): 54 | assert r == prior.like1(x[i], mu) 55 | 56 | x = np.array([[1.0, 2.0], [3.0, 4.0]]) 57 | arr = prior.like1(x, mu) 58 | assert isinstance(arr, np.ndarray) 59 | assert arr.shape == (2, 2) 60 | assert arr.dtype == float 61 | for (i, j), r in np.ndenumerate(arr): 62 | assert r == prior.like1(x[i, j], mu) 63 | 64 | x = np.array([1.0, 2.0]) 65 | mu = np.array([2.0, 3.0]) 66 | arr = prior.like1(x, mu) 67 | assert isinstance(arr, np.ndarray) 68 | assert arr.shape == (2,) 69 | assert arr.dtype == float 70 | for i, r in np.ndenumerate(arr): 71 | assert r == prior.like1(x[i], mu[i]) 72 | 73 | x = np.array([1.0, 2.0]) 74 | mu = np.array([1.0, 2.0, 3.0]) 75 | arr = prior.like1(x[:, np.newaxis], mu) 76 | assert isinstance(arr, np.ndarray) 77 | assert arr.shape == (2, 3) 78 | assert arr.dtype == float 79 | for (i, j), r in np.ndenumerate(arr): 80 | assert r == prior.like1(x[i], mu[j]) 81 | arr = prior.like1(x, mu[:, np.newaxis]) 82 | assert isinstance(arr, np.ndarray) 83 | assert arr.shape == (3, 2) 84 | assert arr.dtype == float 85 | for (i, j), r in np.ndenumerate(arr): 86 | assert r == prior.like1(x[j], mu[i]) 87 | 88 | x = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]) 89 | mu = np.array([10.0, 11.0, 12.0, 13.0]) 90 | arr = prior.like1(x[:, :, np.newaxis], mu) 91 | assert isinstance(arr, np.ndarray) 92 | assert arr.shape == (2, 3, 4) 93 | assert arr.dtype == float 94 | for (i, j, k), r in np.ndenumerate(arr): 95 | assert r == prior.like1(x[i, j], mu[k]) 96 | arr = prior.like1(x, mu[:, np.newaxis, np.newaxis]) 97 | assert isinstance(arr, np.ndarray) 98 | assert arr.shape == (4, 2, 3) 99 
| assert arr.dtype == float 100 | for (i, j, k), r in np.ndenumerate(arr): 101 | assert r == prior.like1(x[j, k], mu[i]) 102 | 103 | # Test __call__() method: 104 | prior = dpmm.GaussianMeanKnownVariance(1.0, 1.0, 1.0) 105 | mu = 1.0 106 | arr = prior(mu) 107 | assert isinstance(arr, float) 108 | 109 | mu = np.array([1.0]) 110 | arr = prior(mu) 111 | assert isinstance(arr, np.ndarray) 112 | assert arr.shape == (1,) 113 | assert arr.dtype == float 114 | assert arr[0] == prior(mu[0]) 115 | 116 | mu = np.array([1.0, 2.0]) 117 | arr = prior(mu) 118 | assert isinstance(arr, np.ndarray) 119 | assert arr.shape == (2,) 120 | assert arr.dtype == float 121 | for i, r in np.ndenumerate(arr): 122 | assert r == prior(mu[i]) 123 | 124 | mu = np.array([[1.0, 2.0], [3.0, 4.0]]) 125 | arr = prior(mu) 126 | assert isinstance(arr, np.ndarray) 127 | assert arr.shape == (2, 2) 128 | assert arr.dtype == float 129 | for (i, j), r in np.ndenumerate(arr): 130 | assert r == prior(mu[i, j]) 131 | 132 | # Should _post_params method do any broadcasting? 
133 | 134 | # Test pred method(): 135 | prior = dpmm.InvGamma(1.0, 1.0, 0.0) 136 | x = 1.0 137 | arr = prior.pred(x) 138 | assert isinstance(arr, float) 139 | 140 | x = np.array([1.0]) 141 | arr = prior.pred(x) 142 | assert isinstance(arr, np.ndarray) 143 | assert arr.shape == (1,) 144 | assert arr.dtype == float 145 | assert arr[0] == prior.pred(x[0]) 146 | 147 | x = np.array([1.0, 2.0]) 148 | arr = prior.pred(x) 149 | assert isinstance(arr, np.ndarray) 150 | assert arr.shape == (2,) 151 | assert arr.dtype == float 152 | for i, r in np.ndenumerate(arr): 153 | assert r == prior.pred(x[i]) 154 | 155 | x = np.arange(6.0).reshape(3, 2)+1 156 | arr = prior.pred(x) 157 | assert isinstance(arr, np.ndarray) 158 | assert arr.shape == (3, 2) 159 | assert arr.dtype == float 160 | for (i, j), r in np.ndenumerate(arr): 161 | assert r == prior.pred(x[i, j]) 162 | 163 | 164 | @timer 165 | def test_InvGamma(): 166 | """Test broadcasting rules for InvGamma prior.""" 167 | 168 | # Test sample() method: 169 | prior = dpmm.InvGamma(1.0, 1.0, 0.0) 170 | arr = prior.sample() 171 | assert isinstance(arr, float) 172 | 173 | arr = prior.sample(size=1) 174 | assert isinstance(arr, np.ndarray) 175 | assert arr.shape == (1,) 176 | assert arr.dtype == float 177 | 178 | arr = prior.sample(size=(1,)) 179 | assert isinstance(arr, np.ndarray) 180 | assert arr.shape == (1,) 181 | assert arr.dtype == float 182 | 183 | arr = prior.sample(size=10) 184 | assert isinstance(arr, np.ndarray) 185 | assert arr.shape == (10,) 186 | assert arr.dtype == float 187 | 188 | arr = prior.sample(size=(10, 20)) 189 | assert isinstance(arr, np.ndarray) 190 | assert arr.shape == (10, 20) 191 | assert arr.dtype == float 192 | 193 | # Test like1() method: 194 | prior = dpmm.InvGamma(1.0, 1.0, 0.0) 195 | x = 1.0 196 | var = 1.0 197 | arr = prior.like1(x, var) 198 | assert isinstance(arr, float) 199 | 200 | x = np.array([1.0]) 201 | arr = prior.like1(x, var) 202 | assert isinstance(arr, np.ndarray) 203 | assert arr.shape 
== (1,) 204 | assert arr.dtype == float 205 | assert arr[0] == prior.like1(x[0], var) 206 | 207 | x = np.array([1.0, 2.0]) 208 | arr = prior.like1(x, var) 209 | assert isinstance(arr, np.ndarray) 210 | assert arr.shape == (2,) 211 | assert arr.dtype == float 212 | for i, r in np.ndenumerate(arr): 213 | assert r == prior.like1(x[i], var) 214 | 215 | x = np.array([[1.0, 2.0], [3.0, 4.0]]) 216 | arr = prior.like1(x, var) 217 | assert isinstance(arr, np.ndarray) 218 | assert arr.shape == (2, 2) 219 | assert arr.dtype == float 220 | for (i, j), r in np.ndenumerate(arr): 221 | assert r == prior.like1(x[i, j], var) 222 | 223 | x = np.array([1.0, 2.0]) 224 | var = np.array([2.0, 3.0]) 225 | arr = prior.like1(x, var) 226 | assert isinstance(arr, np.ndarray) 227 | assert arr.shape == (2,) 228 | assert arr.dtype == float 229 | for i, r in np.ndenumerate(arr): 230 | assert r == prior.like1(x[i], var[i]) 231 | 232 | 233 | x = np.array([1.0, 2.0]) 234 | var = np.array([1.0, 2.0, 3.0]) 235 | arr = prior.like1(x[:, np.newaxis], var) 236 | assert isinstance(arr, np.ndarray) 237 | assert arr.shape == (2, 3) 238 | assert arr.dtype == float 239 | for (i, j), r in np.ndenumerate(arr): 240 | assert r == prior.like1(x[i], var[j]) 241 | arr = prior.like1(x, var[:, np.newaxis]) 242 | assert isinstance(arr, np.ndarray) 243 | assert arr.shape == (3, 2) 244 | assert arr.dtype == float 245 | for (i, j), r in np.ndenumerate(arr): 246 | assert r == prior.like1(x[j], var[i]) 247 | 248 | x = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]) 249 | var = np.array([10.0, 11.0, 12.0, 13.0]) 250 | arr = prior.like1(x[:, :, np.newaxis], var) 251 | assert isinstance(arr, np.ndarray) 252 | assert arr.shape == (2, 3, 4) 253 | assert arr.dtype == float 254 | for (i, j, k), r in np.ndenumerate(arr): 255 | assert r == prior.like1(x[i, j], var[k]) 256 | arr = prior.like1(x, var[:, np.newaxis, np.newaxis]) 257 | assert isinstance(arr, np.ndarray) 258 | assert arr.shape == (4, 2, 3) 259 | assert arr.dtype == float 
260 | for (i, j, k), r in np.ndenumerate(arr): 261 | assert r == prior.like1(x[j, k], var[i]) 262 | 263 | # Test __call__() method: 264 | prior = dpmm.InvGamma(1.0, 1.0, 0.0) 265 | var = 1.0 266 | arr = prior(var) 267 | assert isinstance(arr, float) 268 | 269 | var = np.array([1.0]) 270 | arr = prior(var) 271 | assert isinstance(arr, np.ndarray) 272 | assert arr.shape == (1,) 273 | assert arr.dtype == float 274 | assert arr[0] == prior(var[0]) 275 | 276 | var = np.array([1.0, 2.0]) 277 | arr = prior(var) 278 | assert isinstance(arr, np.ndarray) 279 | assert arr.shape == (2,) 280 | assert arr.dtype == float 281 | for i, r in np.ndenumerate(arr): 282 | assert r == prior(var[i]) 283 | 284 | var = np.array([[1.0, 2.0], [3.0, 4.0]]) 285 | arr = prior(var) 286 | assert isinstance(arr, np.ndarray) 287 | assert arr.shape == (2, 2) 288 | assert arr.dtype == float 289 | for (i, j), r in np.ndenumerate(arr): 290 | assert r == prior(var[i, j]) 291 | 292 | # Should _post_params method do any broadcasting? 
293 | 294 | # Test pred method(): 295 | prior = dpmm.InvGamma(1.0, 1.0, 0.0) 296 | x = 1.0 297 | arr = prior.pred(x) 298 | assert isinstance(arr, float) 299 | 300 | x = np.array([1.0]) 301 | arr = prior.pred(x) 302 | assert isinstance(arr, np.ndarray) 303 | assert arr.shape == (1,) 304 | assert arr.dtype == float 305 | assert arr[0] == prior.pred(x[0]) 306 | 307 | x = np.array([1.0, 2.0]) 308 | arr = prior.pred(x) 309 | assert isinstance(arr, np.ndarray) 310 | assert arr.shape == (2,) 311 | assert arr.dtype == float 312 | for i, r in np.ndenumerate(arr): 313 | assert r == prior.pred(x[i]) 314 | 315 | x = np.arange(6.0).reshape(3, 2)+1 316 | arr = prior.pred(x) 317 | assert isinstance(arr, np.ndarray) 318 | assert arr.shape == (3, 2) 319 | assert arr.dtype == float 320 | for (i, j), r in np.ndenumerate(arr): 321 | assert r == prior.pred(x[i, j]) 322 | 323 | 324 | @timer 325 | def test_InvGamma2D(): 326 | """Test broadcasting rules for InvGamma2D prior.""" 327 | 328 | # Test sample() method: 329 | prior = dpmm.InvGamma2D(1.0, 1.0, np.array([0.0, 0.0])) 330 | arr = prior.sample() 331 | assert isinstance(arr, float) 332 | 333 | arr = prior.sample(size=1) 334 | assert isinstance(arr, np.ndarray) 335 | assert arr.shape == (1,) 336 | assert arr.dtype == float 337 | 338 | arr = prior.sample(size=(1,)) 339 | assert isinstance(arr, np.ndarray) 340 | assert arr.shape == (1,) 341 | assert arr.dtype == float 342 | 343 | arr = prior.sample(size=10) 344 | assert isinstance(arr, np.ndarray) 345 | assert arr.shape == (10,) 346 | assert arr.dtype == float 347 | 348 | arr = prior.sample(size=(10, 20)) 349 | assert isinstance(arr, np.ndarray) 350 | assert arr.shape == (10, 20) 351 | assert arr.dtype == float 352 | 353 | # Test like1() method: 354 | prior = dpmm.InvGamma2D(1.0, 1.0, np.array([0.0, 0.0])) 355 | x = np.array([1.0, 2.0]) # Data is 2D, so trailing axis should always be len 2. 
356 | var = 1.0 357 | arr = prior.like1(x, var) 358 | assert isinstance(arr, float) 359 | 360 | x = np.array([1.0]) # If trailing axis is not 2, then should get an AssertionError 361 | var = 1.0 362 | np.testing.assert_raises(AssertionError, prior.like1, x, var) 363 | 364 | x = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]) 365 | var = 1.0 366 | arr = prior.like1(x, var) 367 | assert isinstance(arr, np.ndarray) 368 | assert arr.shape == (3,) 369 | assert arr.dtype == float 370 | for i, r in np.ndenumerate(arr): 371 | assert r == prior.like1(x[i], var) 372 | 373 | x = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]) 374 | var = np.array([2.0, 3.0]) 375 | arr = prior.like1(x, var[:, np.newaxis]) 376 | assert isinstance(arr, np.ndarray) 377 | assert arr.shape == (2, 3) 378 | assert arr.dtype == float 379 | for (i, j), r in np.ndenumerate(arr): 380 | assert r == prior.like1(x[j], var[i]) 381 | 382 | x = np.arange(24, dtype=float).reshape(3, 4, 2) 383 | var = np.array([2.0, 3.0]) 384 | arr = prior.like1(x[:,:,np.newaxis,:], var) 385 | assert isinstance(arr, np.ndarray) 386 | assert arr.shape == (3, 4, 2) 387 | assert arr.dtype == float 388 | for (i, j, k), r in np.ndenumerate(arr): 389 | assert r == prior.like1(x[i, j], var[k]) 390 | 391 | x = np.arange(24, dtype=float).reshape(3, 4, 2) 392 | var = np.arange(12, dtype=float).reshape(3, 4) + 1 # add 1 so we don't divide by zero 393 | arr = prior.like1(x, var) 394 | assert isinstance(arr, np.ndarray) 395 | assert arr.shape == (3, 4) 396 | assert arr.dtype == float 397 | for (i, j), r in np.ndenumerate(arr): 398 | assert r == prior.like1(x[i, j], var[i, j]) 399 | 400 | # Test __call__() method: 401 | prior = dpmm.InvGamma2D(1.0, 1.0, np.array([0.0, 0.0])) 402 | var = 1.0 403 | arr = prior(var) 404 | assert isinstance(arr, float) 405 | 406 | var = np.array([1.0]) 407 | arr = prior(var) 408 | assert isinstance(arr, np.ndarray) 409 | assert arr.shape == (1,) 410 | assert arr.dtype == float 411 | assert arr[0] == prior(var[0]) 
412 | 413 | var = np.array([1.0, 2.0]) 414 | arr = prior(var) 415 | assert isinstance(arr, np.ndarray) 416 | assert arr.shape == (2,) 417 | assert arr.dtype == float 418 | for i, r in np.ndenumerate(arr): 419 | assert r == prior(var[i]) 420 | 421 | var = np.array([[1.0, 2.0], [3.0, 4.0]]) 422 | arr = prior(var) 423 | assert isinstance(arr, np.ndarray) 424 | assert arr.shape == (2, 2) 425 | assert arr.dtype == float 426 | for (i, j), r in np.ndenumerate(arr): 427 | assert r == prior(var[i, j]) 428 | 429 | # Should _post_params method do any broadcasting? 430 | 431 | # Test pred method(): 432 | prior = dpmm.InvGamma2D(1.0, 1.0, np.array([0.0, 0.0])) 433 | x = 1.0 434 | np.testing.assert_raises(AssertionError, prior.pred, x) 435 | 436 | x = np.array([1.0, 2.0]) 437 | arr = prior.pred(x) 438 | assert isinstance(arr, float) 439 | 440 | x = np.arange(24, dtype=float).reshape(3, 4, 2) 441 | arr = prior.pred(x) 442 | assert isinstance(arr, np.ndarray) 443 | assert arr.shape == (3, 4) 444 | assert arr.dtype == float 445 | for (i, j), r in np.ndenumerate(arr): 446 | assert r == prior.pred(x[i, j]) 447 | 448 | 449 | @timer 450 | def test_NormInvChi2(): 451 | """Test broadcasting rules for NormInvChi2 prior.""" 452 | 453 | # Test sample() method: 454 | prior = dpmm.NormInvChi2(1.0, 1.0, 1.0, 1.0) 455 | arr = prior.sample() 456 | assert isinstance(arr, np.void) 457 | assert arr.dtype == prior.model_dtype 458 | 459 | arr = prior.sample(size=1) 460 | assert isinstance(arr, np.ndarray) 461 | assert arr.shape == (1,) 462 | assert arr.dtype == prior.model_dtype 463 | 464 | arr = prior.sample(size=(1,)) 465 | assert isinstance(arr, np.ndarray) 466 | assert arr.shape == (1,) 467 | assert arr.dtype == prior.model_dtype 468 | 469 | arr = prior.sample(size=10) 470 | assert isinstance(arr, np.ndarray) 471 | assert arr.shape == (10,) 472 | assert arr.dtype == prior.model_dtype 473 | 474 | arr = prior.sample(size=(10, 20)) 475 | assert isinstance(arr, np.ndarray) 476 | assert arr.shape == 
(10, 20) 477 | assert arr.dtype == prior.model_dtype 478 | 479 | # Test like1() method: 480 | prior = dpmm.NormInvChi2(1.0, 1.0, 1.0, 1.0) 481 | x = 1.0 482 | mu = 1.0 483 | var = 1.0 484 | arr = prior.like1(x, mu, var) 485 | assert isinstance(arr, float) 486 | 487 | x = np.array([1.0]) 488 | mu = 1.0 489 | var = 1.0 490 | arr = prior.like1(x, mu, var) 491 | assert isinstance(arr, np.ndarray) 492 | assert arr.shape == (1,) 493 | assert arr.dtype == float 494 | assert arr[0] == prior.like1(x[0], mu, var) 495 | 496 | x = np.array([1.0, 2.0, 3.0, 4.0]) 497 | mu = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]) 498 | var = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]) 499 | arr = prior.like1(x[:, np.newaxis, np.newaxis], mu, var) 500 | assert isinstance(arr, np.ndarray) 501 | assert arr.shape == (4, 2, 3) 502 | assert arr.dtype == float 503 | for (i, j, k), r in np.ndenumerate(arr): 504 | assert r == prior.like1(x[i], mu[j, k], var[j, k]) 505 | 506 | theta = np.zeros((2, 3), dtype=prior.model_dtype) 507 | theta['mu'] = mu 508 | theta['var'] = var 509 | arr = prior.like1(x[:, np.newaxis, np.newaxis], theta) 510 | assert isinstance(arr, np.ndarray) 511 | assert arr.shape == (4, 2, 3) 512 | assert arr.dtype == float 513 | for (i, j, k), r in np.ndenumerate(arr): 514 | assert r == prior.like1(x[i], theta[j, k]) 515 | 516 | arr = prior.like1(x, mu[:, :, np.newaxis], var[:, :, np.newaxis]) 517 | assert isinstance(arr, np.ndarray) 518 | assert arr.shape == (2, 3, 4) 519 | assert arr.dtype == float 520 | for (i, j, k), r in np.ndenumerate(arr): 521 | assert r == prior.like1(x[k], mu[i, j], var[i, j]) 522 | 523 | arr = prior.like1(x, theta[:, :, np.newaxis]) 524 | assert isinstance(arr, np.ndarray) 525 | assert arr.shape == (2, 3, 4) 526 | assert arr.dtype == float 527 | for (i, j, k), r in np.ndenumerate(arr): 528 | assert r == prior.like1(x[k], theta[i, j]) 529 | 530 | # Test __call__() method: 531 | prior = dpmm.NormInvChi2(1.0, 1.0, 1.0, 1.0) 532 | mu = 1.0 533 | var = 1.0 534 
| arr = prior(mu, var) 535 | assert isinstance(arr, float) 536 | 537 | mu = np.array([1.0]) 538 | arr = prior(mu, var) 539 | assert isinstance(arr, np.ndarray) 540 | assert arr.shape == (1,) 541 | assert arr.dtype == float 542 | assert arr[0] == prior(mu[0], var) 543 | 544 | mu = np.array([1.0, 2.0]) 545 | var = np.array([10.0, 11.0, 12.0]) 546 | arr = prior(mu[:, np.newaxis], var) 547 | assert isinstance(arr, np.ndarray) 548 | assert arr.shape == (2, 3) 549 | assert arr.dtype == float 550 | for (i, j), r in np.ndenumerate(arr): 551 | assert r == prior(mu[i], var[j]) 552 | 553 | theta = np.zeros((2, 3), dtype=prior.model_dtype) 554 | theta['mu'] = mu[:, np.newaxis] 555 | theta['var'] = var 556 | arr = prior(theta) 557 | assert isinstance(arr, np.ndarray) 558 | assert arr.shape == (2, 3) 559 | assert arr.dtype == float 560 | for (i, j), r in np.ndenumerate(arr): 561 | assert r == prior(theta[i][j]) 562 | 563 | # Should _post_params method do any broadcasting? 564 | 565 | # Test pred method(): 566 | prior = dpmm.NormInvChi2(1.0, 1.0, 1.0, 1.0) 567 | x = 1.0 568 | arr = prior.pred(x) 569 | assert isinstance(arr, float) 570 | 571 | x = np.array([1.0]) 572 | arr = prior.pred(x) 573 | assert isinstance(arr, np.ndarray) 574 | assert arr.shape == (1,) 575 | assert arr.dtype == float 576 | assert arr[0] == prior.pred(x[0]) 577 | 578 | x = np.array([1.0, 2.0]) 579 | arr = prior.pred(x) 580 | assert isinstance(arr, np.ndarray) 581 | assert arr.shape == (2,) 582 | assert arr.dtype == float 583 | for i, r in np.ndenumerate(arr): 584 | assert r == prior.pred(x[i]) 585 | 586 | x = np.array([[1.0, 2.0], [3.0, 4.0]]) 587 | arr = prior.pred(x) 588 | assert isinstance(arr, np.ndarray) 589 | assert arr.shape == (2, 2) 590 | assert arr.dtype == float 591 | for (i, j), r in np.ndenumerate(arr): 592 | assert r == prior.pred(x[i, j]) 593 | 594 | 595 | @timer 596 | def test_NormInvGamma(): 597 | """Test broadcasting rules for NormInvGamma prior.""" 598 | 599 | # Test sample() method: 600 | 
prior = dpmm.NormInvGamma(1.0, 1.0, 1.0, 1.0) 601 | arr = prior.sample() 602 | assert isinstance(arr, np.void) 603 | assert arr.dtype == prior.model_dtype 604 | 605 | arr = prior.sample(size=1) 606 | assert isinstance(arr, np.ndarray) 607 | assert arr.shape == (1,) 608 | assert arr.dtype == prior.model_dtype 609 | 610 | arr = prior.sample(size=(1,)) 611 | assert isinstance(arr, np.ndarray) 612 | assert arr.shape == (1,) 613 | assert arr.dtype == prior.model_dtype 614 | 615 | arr = prior.sample(size=10) 616 | assert isinstance(arr, np.ndarray) 617 | assert arr.shape == (10,) 618 | assert arr.dtype == prior.model_dtype 619 | 620 | arr = prior.sample(size=(10, 20)) 621 | assert isinstance(arr, np.ndarray) 622 | assert arr.shape == (10, 20) 623 | assert arr.dtype == prior.model_dtype 624 | 625 | # Test like1() method: 626 | prior = dpmm.NormInvGamma(1.0, 1.0, 1.0, 1.0) 627 | x = 1.0 628 | mu = 1.0 629 | var = 1.0 630 | arr = prior.like1(x, mu, var) 631 | assert isinstance(arr, float) 632 | 633 | x = np.array([1.0]) 634 | mu = 1.0 635 | var = 1.0 636 | arr = prior.like1(x, mu, var) 637 | assert isinstance(arr, np.ndarray) 638 | assert arr.shape == (1,) 639 | assert arr.dtype == float 640 | assert arr[0] == prior.like1(x[0], mu, var) 641 | 642 | x = np.array([1.0, 2.0, 3.0, 4.0]) 643 | mu = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]) 644 | var = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]) 645 | arr = prior.like1(x[:, np.newaxis, np.newaxis], mu, var) 646 | assert isinstance(arr, np.ndarray) 647 | assert arr.shape == (4, 2, 3) 648 | assert arr.dtype == float 649 | for (i, j, k), r in np.ndenumerate(arr): 650 | assert r == prior.like1(x[i], mu[j, k], var[j, k]) 651 | 652 | theta = np.zeros((2, 3), dtype=prior.model_dtype) 653 | theta['mu'] = mu 654 | theta['var'] = var 655 | arr = prior.like1(x[:, np.newaxis, np.newaxis], theta) 656 | assert isinstance(arr, np.ndarray) 657 | assert arr.shape == (4, 2, 3) 658 | assert arr.dtype == float 659 | for (i, j, k), r in 
np.ndenumerate(arr): 660 | assert r == prior.like1(x[i], theta[j, k]) 661 | 662 | arr = prior.like1(x, mu[:, :, np.newaxis], var[:, :, np.newaxis]) 663 | assert isinstance(arr, np.ndarray) 664 | assert arr.shape == (2, 3, 4) 665 | assert arr.dtype == float 666 | for (i, j, k), r in np.ndenumerate(arr): 667 | assert r == prior.like1(x[k], mu[i, j], var[i, j]) 668 | 669 | arr = prior.like1(x, theta[:, :, np.newaxis]) 670 | assert isinstance(arr, np.ndarray) 671 | assert arr.shape == (2, 3, 4) 672 | assert arr.dtype == float 673 | for (i, j, k), r in np.ndenumerate(arr): 674 | assert r == prior.like1(x[k], theta[i, j]) 675 | 676 | # Test __call__() method: 677 | prior = dpmm.NormInvGamma(1.0, 1.0, 1.0, 1.0) 678 | mu = 1.0 679 | var = 1.0 680 | arr = prior(mu, var) 681 | assert isinstance(arr, float) 682 | 683 | mu = np.array([1.0]) 684 | arr = prior(mu, var) 685 | assert isinstance(arr, np.ndarray) 686 | assert arr.shape == (1,) 687 | assert arr.dtype == float 688 | assert arr[0] == prior(mu[0], var) 689 | 690 | mu = np.array([1.0, 2.0]) 691 | var = np.array([10.0, 11.0, 12.0]) 692 | arr = prior(mu[:, np.newaxis], var) 693 | assert isinstance(arr, np.ndarray) 694 | assert arr.shape == (2, 3) 695 | assert arr.dtype == float 696 | for (i, j), r in np.ndenumerate(arr): 697 | assert r == prior(mu[i], var[j]) 698 | 699 | theta = np.zeros((2, 3), dtype=prior.model_dtype) 700 | theta['mu'] = mu[:, np.newaxis] 701 | theta['var'] = var 702 | arr = prior(theta) 703 | assert isinstance(arr, np.ndarray) 704 | assert arr.shape == (2, 3) 705 | assert arr.dtype == float 706 | for (i, j), r in np.ndenumerate(arr): 707 | assert r == prior(theta[i, j]) 708 | 709 | # Should _post_params method do any broadcasting? 
710 | 711 | # Test pred method(): 712 | prior = dpmm.NormInvGamma(1.0, 1.0, 1.0, 1.0) 713 | x = 1.0 714 | arr = prior.pred(x) 715 | assert isinstance(arr, float) 716 | 717 | x = np.array([1.0]) 718 | arr = prior.pred(x) 719 | assert isinstance(arr, np.ndarray) 720 | assert arr.shape == (1,) 721 | assert arr.dtype == float 722 | assert arr[0] == prior.pred(x[0]) 723 | 724 | x = np.array([1.0, 2.0]) 725 | arr = prior.pred(x) 726 | assert isinstance(arr, np.ndarray) 727 | assert arr.shape == (2,) 728 | assert arr.dtype == float 729 | for i, r in np.ndenumerate(arr): 730 | assert r == prior.pred(x[i]) 731 | 732 | x = np.array([[1.0, 2.0], [3.0, 4.0]]) 733 | arr = prior.pred(x) 734 | assert isinstance(arr, np.ndarray) 735 | assert arr.shape == (2, 2) 736 | assert arr.dtype == float 737 | for (i, j), r in np.ndenumerate(arr): 738 | assert r == prior.pred(x[i, j]) 739 | 740 | 741 | @timer 742 | def test_NormInvWish(): 743 | """Test broadcasting rules for NormInvWish prior.""" 744 | 745 | # Test sample() method: 746 | mu_0 = np.arange(3.0) 747 | kappa_0 = 3.0 748 | Lam_0 = np.eye(3) + 0.01*np.arange(9).reshape(3,3) 749 | Lam_0 += Lam_0.T # To make symmetric 750 | nu_0 = 3 751 | prior = dpmm.NormInvWish(mu_0, kappa_0, Lam_0, nu_0) 752 | arr = prior.sample() 753 | assert isinstance(arr, np.void) 754 | assert arr.dtype == prior.model_dtype 755 | 756 | arr = prior.sample(size=1) 757 | assert isinstance(arr, np.ndarray) 758 | assert arr.shape == (1,) 759 | assert arr.dtype == prior.model_dtype 760 | 761 | arr = prior.sample(size=(1,)) 762 | assert isinstance(arr, np.ndarray) 763 | assert arr.shape == (1,) 764 | assert arr.dtype == prior.model_dtype 765 | 766 | arr = prior.sample(size=10) 767 | assert isinstance(arr, np.ndarray) 768 | assert arr.shape == (10,) 769 | assert arr.dtype == prior.model_dtype 770 | 771 | arr = prior.sample(size=(10, 20)) 772 | assert isinstance(arr, np.ndarray) 773 | assert arr.shape == (10, 20) 774 | assert arr.dtype == prior.model_dtype 775 | 776 | 
# Test like1() method: 777 | prior = dpmm.NormInvWish(mu_0, kappa_0, Lam_0, nu_0) 778 | x = np.arange(3.0) 779 | mu = np.arange(3.0)+1.0 780 | Sig = np.eye(3) + 0.03*np.arange(9).reshape(3, 3) 781 | Sig += Sig.T 782 | arr = prior.like1(x, mu, Sig) 783 | assert isinstance(arr, float) 784 | 785 | # If trailing axis of x is not dim 3 (for these prior parameters), should get and AssertionError 786 | xbad = np.arange(2.0) 787 | np.testing.assert_raises(AssertionError, prior.like1, xbad, mu, Sig) 788 | 789 | # And similar checks for mu and Sig 790 | mubad = np.arange(4.0) 791 | np.testing.assert_raises(AssertionError, prior.like1, x, mubad, Sig) 792 | 793 | Sigbad = np.eye(2) 794 | np.testing.assert_raises(AssertionError, prior.like1, x, mu, Sigbad) 795 | 796 | # Try some non-trival broadcasts 797 | mu = np.arange(6.0).reshape(2, 3) 798 | arr = prior.like1(x, mu, Sig) 799 | assert isinstance(arr, np.ndarray) 800 | assert arr.shape == (2,) 801 | for i, r in np.ndenumerate(arr): 802 | assert r == prior.like1(x, mu[i], Sig) 803 | 804 | theta = np.zeros((2,), dtype=prior.model_dtype) 805 | theta['mu'] = mu 806 | theta['Sig'] = Sig 807 | arr = prior.like1(x, theta) 808 | for i, r in np.ndenumerate(arr): 809 | assert r == prior.like1(x, theta[i]) 810 | 811 | mu = np.empty((3, 4, 3), dtype=float) 812 | Sig = np.empty((3, 4, 3, 3), dtype=float) 813 | for i in range(3): 814 | for j in range(4): 815 | mu[i, j] = np.arange(3.0) 816 | Sig[i, j] = np.eye(3)+0.1*i+0.2*j 817 | arr = prior.like1(x, mu, Sig) 818 | for (i, j), r in np.ndenumerate(arr): 819 | assert r == prior.like1(x, mu[i, j], Sig[i, j]) 820 | 821 | theta = np.empty((3, 4), dtype=prior.model_dtype) 822 | theta['mu'] = mu 823 | theta['Sig'] = Sig 824 | arr = prior.like1(x, theta) 825 | for (i, j), r in np.ndenumerate(arr): 826 | assert r == prior.like1(x, theta[i, j]) 827 | 828 | mu = np.arange(6.0).reshape(2, 3) 829 | arr = prior.like1(x, mu[:, np.newaxis, np.newaxis, :], Sig) 830 | for (i, j, k), r in 
np.ndenumerate(arr): 831 | assert r == prior.like1(x, mu[i], Sig[j, k]) 832 | 833 | theta = np.empty((2, 3, 4), dtype=prior.model_dtype) 834 | theta['mu'] = (np.arange(6.0).reshape(2, 3))[:, np.newaxis, np.newaxis, :] 835 | theta['Sig'] = Sig 836 | arr = prior.like1(x, theta) 837 | for (i, j, k), r in np.ndenumerate(arr): 838 | assert r == prior.like1(x, theta[i, j, k]) 839 | 840 | # Test __call__() method: 841 | prior = dpmm.NormInvWish(mu_0, kappa_0, Lam_0, nu_0) 842 | mu = np.arange(3.0) 843 | Sig = np.eye(3) 844 | arr = prior(mu, Sig) 845 | assert isinstance(arr, float) 846 | 847 | theta = np.zeros(1, dtype=prior.model_dtype) 848 | theta['mu'] = mu 849 | theta['Sig'] = Sig 850 | arr = prior(theta[0]) 851 | assert isinstance(arr, float) 852 | assert arr == prior(mu, Sig) 853 | 854 | mu = np.arange(6.0).reshape(2, 3) 855 | arr = prior(mu, Sig) 856 | assert isinstance(arr, np.ndarray) 857 | assert arr.shape == (2,) 858 | assert arr.dtype == float 859 | for i, r in np.ndenumerate(arr): 860 | assert r == prior(mu[i], Sig) 861 | 862 | theta = np.zeros(2, dtype=prior.model_dtype) 863 | theta['mu'] = mu 864 | theta['Sig'] = Sig 865 | arr = prior(theta) 866 | assert isinstance(arr, np.ndarray) 867 | assert arr.shape == (2,) 868 | assert arr.dtype == float 869 | for i, r in np.ndenumerate(arr): 870 | assert r == prior(theta[i]) 871 | 872 | mu = np.empty((3, 4, 3), dtype=float) 873 | Sig = np.empty((3, 4, 3, 3), dtype=float) 874 | for i in range(3): 875 | for j in range(4): 876 | mu[i, j] = np.arange(3.0) 877 | Sig[i, j] = np.eye(3)+0.1*i+0.2*j 878 | arr = prior(mu, Sig) 879 | for (i, j), r in np.ndenumerate(arr): 880 | assert r == prior(mu[i, j], Sig[i, j]) 881 | 882 | theta = np.zeros((3, 4), dtype=prior.model_dtype) 883 | theta['mu'] = mu 884 | theta['Sig'] = Sig 885 | arr = prior(theta) 886 | for (i, j), r in np.ndenumerate(arr): 887 | assert r == prior(theta[i, j]) 888 | 889 | mu = np.arange(6.0).reshape(2, 3) 890 | arr = prior(mu[:, np.newaxis, np.newaxis, :], Sig) 
891 | for (i, j, k), r in np.ndenumerate(arr): 892 | assert r == prior(mu[i], Sig[j, k]) 893 | 894 | theta = np.zeros((2, 3, 4), dtype=prior.model_dtype) 895 | theta['mu'] = mu[:, np.newaxis, np.newaxis, :] 896 | theta['Sig'] = Sig 897 | arr = prior(theta) 898 | for (i, j, k), r in np.ndenumerate(arr): 899 | assert r == prior(theta[i, j, k]) 900 | 901 | # Should _post_params method do any broadcasting? 902 | 903 | # Test pred method(): 904 | prior = dpmm.NormInvWish(mu_0, kappa_0, Lam_0, nu_0) 905 | x = np.arange(3.0)+1 906 | arr = prior.pred(x) 907 | assert isinstance(arr, float) 908 | 909 | x = np.arange(6.0).reshape(2, 3) 910 | arr = prior.pred(x) 911 | assert isinstance(arr, np.ndarray) 912 | assert arr.shape == (2,) 913 | assert arr.dtype == float 914 | for i, r in np.ndenumerate(arr): 915 | assert r == prior.pred(x[i]) 916 | 917 | x = np.arange(24.0).reshape(2, 4, 3) 918 | arr = prior.pred(x) 919 | assert isinstance(arr, np.ndarray) 920 | assert arr.shape == (2, 4) 921 | assert arr.dtype == float 922 | for (i, j), r in np.ndenumerate(arr): 923 | np.testing.assert_almost_equal(r, prior.pred(x[i, j])) 924 | 925 | if __name__ == '__main__': 926 | test_GaussianMeanKnownVariance() 927 | test_InvGamma() 928 | test_InvGamma2D() 929 | test_NormInvChi2() 930 | test_NormInvGamma() 931 | test_NormInvWish() 932 | -------------------------------------------------------------------------------- /notebooks/Variance Inference with InvGamma prior.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# Load packages\n", 12 | "import numpy as np\n", 13 | "import matplotlib.pyplot as plt\n", 14 | "import dpmm\n", 15 | "%matplotlib inline" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 2, 21 | "metadata": { 22 | "collapsed": true 23 | }, 24 | "outputs": [], 
    "            c = list(nums).index(1) # which class got picked\n",
false 87 | }, 88 | "outputs": [ 89 | { 90 | "name": "stderr", 91 | "output_type": "stream", 92 | "text": [ 93 | "/Users/josh/miniconda2/lib/python2.7/site-packages/matplotlib/figure.py:397: UserWarning: matplotlib is currently using a non-GUI backend, so cannot show the figure\n", 94 | " \"matplotlib is currently using a non-GUI backend, \"\n" 95 | ] 96 | }, 97 | { 98 | "data": { 99 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXEAAAEACAYAAABF+UbAAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAF31JREFUeJzt3XmUHWWZx/Hf09lJuhMCIUQiIBBARhFQAcUjFwEJDCoq\nbqiMy0FRFDzgwhkX2vE4iHJE52j0qDiAiMpBZBlECEuDgIYlixDWIQZGSCAJSbo7K0k/88d7Q5q2\nl6p7q27deu/3c849fdNdt+6TSufXbz/11lvm7gIAlFNb0QUAAGpHiANAiRHiAFBihDgAlBghDgAl\nRogDQImNTrOxmS2VtFZSn6QX3f3QPIoCACSTKsQVwrvi7qvzKAYAkE7adorV8BoAQE7SBrJLmmtm\n95nZaXkUBABILm075Qh3X2Zm0xTC/BF3vyuPwgAAI0sV4u6+rPpxhZn9QdKhkl4KcTNjIRYAqIG7\nWy2vS9xOMbMdzGxS9flESW+X9NAghTT947zzziu8BuqkzjLXWYYay1RnPdKMxKdL+kN1tD1a0q/d\n/ea63h0AUJfEIe7uf5d0UI61AABSasnpgpVKpegSEqHObFFndspQo1SeOuth9fZjXrYzM89yfwDQ\nCsxMnveJTQBA8yHEAaDECHEAKDFCHABKjBAHgBIjxAGgxAhxACgxQhwASowQR0u4/XZp1aqiqwCy\nR4ijJXz2s9KVVxZdBZA9QhzRW7FCevRR6S5uX4IIEeKI3t13S7NmSX/+c9GVANkjxBG9efOkj3xE\n2rBBevbZoqsBskWII3rPPivtvrv0yldKy5YVXQ2QLUIc0VuxQpo2LTxWrCi6GiBbhDiiR4gjZoQ4\nokeII2aEOKJHiCNmhDiitm6dtHWrNHGitPPO0vPPF10RkC1CHFHbNgo3YySOOBHiiNq2EJcIccSJ\nEEfUCHHEjhBH1FauJMQRN0IcUVuzRtpxx/B88mSpt1fq6yu2JiBLhDii1tMjtbeH56NGSRMmhBkr\nQCwIcUStu1vq6Nj+546O8DkgFoQ4otbdvX0kLhHiiA8hjqj19Lx8JN7eHj4HxIIQR9RopyB2hDii\n1v/EphRCnJE4YkKII2oDR+Lt7YzEERdCHFHjxCZiR4gjagNPbNJOQWwIcUSNdgpilyrEzazNzOab\n2XV5FQRkZcsWadMmaYcdtn+OkThik3Ykfpakh/MoBMjatpkpZts/R08csUkc4mY2U9IJkn6RXzlA\ndga2UiTaKYhPmpH4RZK+JMlzqgXI1MA54hLtFMRndJKNzOxfJT3n7gvNrCLJhtq2s7PzpeeVSkWV\nSqW+CoEaDZxeKDESR3Po6upSV1dXJvsy95EH1mb2n5I+ImmLpAmS2iVd7e6nDtjOk+wPaIRbbpHO\nP1+69dbtn3v4Yenkk8NHoFmYmdx9yMHxcBK1U9z93919d3ffS9IHJd02MMCBZrNuXbjLfX8TJ7Ke\nOOLCPHFEa/36l08vlMKf168vph4gD4l64v25+x2S7sihFiBTjMTRChiJI1qDjcTHj5c2buQ+m4gH\nIY5oDTYSb2sL99ncsKGYmoCsEeKI1mAjcYm+OO
JCiCNag43EpRDi9MURC0Ic0RpqJD5xIiNxxIMQ\nR7TWrx96JE6IIxaEOKK1bt3QI3HaKYgFIY5ocWITrYAQR7SGOrHJSBwxIcQRLUbiaAWEOKLFFEO0\nAkIc0WKKIVoBIY5oMRJHKyDEES1G4mgFhDii1NcXViucMOGfv8ZIHDEhxBGlDRvCsrNtg3yHMxJH\nTAhxRGmofrjESBxxIcQRpaH64RIjccSFEEeUhlr8SuJiH8SFEEeUhlr8SqKdgrgQ4ogS7RS0CkIc\nUeLEJloFIY4oMRJHqyDEESVG4mgVhDiixEgcrYIQR5SGG4mPGRM+bt7cuHqAvBDiiNJwI3GJ0Tji\nQYgjSsONxCX64ogHIY4ojTQS56pNxIIQR5RGGolzs2TEghBHlBiJo1UQ4ojScAtgSZzYRDwIcURp\nuAWwJE5sIh6EOKLESBytghBHlBiJo1WMTrqhmY2TdKeksdXXXeXu38yrMKAeXOyDVpE4xN19k5kd\n5e7rzWyUpLvN7EZ3vzfH+oCaMBJHq0jVTnH3bWOXcQo/ADzzioAMjNQTZ4ohYpEqxM2szcwWSFou\naa6735dPWUB9OLGJVpF2JN7n7gdLminpMDM7IJ+ygNptW51w22qFg6Gdglgk7on35+7dZna7pNmS\nHu7/tc7OzpeeVyoVVSqVOsoD0hvppKZEOwXF6urqUldXVyb7MvdkbW0z21nSi+6+1swmSLpJ0nfc\n/Y/9tvGk+wPy8uyz0hveED4O5aqrpN/+NnwEimZmcner5bVpRuIzJF1qZm0KbZjf9Q9woFmMNDNF\nop2CeKSZYvigpENyrAXIBO0UtBKu2ER0koQ4s1MQC0Ic0RlpeqHESBzxIMQRHXriaCWEOKJDOwWt\nhBBHdGinoJUQ4ohOknbKhAkhxLmsAWVHiCM6Sdopo0ZJ48ZJGzc2piYgL4Q4opMkxCVaKogDIY7o\nJOmJS4Q44kCIIzpJeuIS0wwRB0Ic0UnaTmGaIWJAiCM6tFPQSghxRId2CloJIY7o0E5BKyHEER3a\nKWglhDiiQzsFrYQQR3Rop6CVEOKIDldsopUQ4ogOPXG0EkIcUXEPfe4JE0beduJEeuIoP0IcUdm8\nOaxQOGbMyNsyEkcMCHFEJWkrRSLEEQdCHFFJOr1Qop2COBDiiErSmSkSI3HEgRBHVGinoNUQ4ogK\n7RS0GkIcUaGdglZDiCMqhDhaDSGOqKTpibN2CmJAiCMqaXrirGKIGBDiiEqadsr48dLGjVJfX741\nAXkixBGVNO2UtrawxsrGjfnWBOSJEEdU0rRTJFoqKD9CHFFJ006RmKGC8iPEEZV165K3U6SwbW9v\nfvUAeSPEEZXeXqm9Pfn2kybRTkG5JQ5xM5tpZreZ2WIze9DMzsyzMKAWvb0hmJNqb5d6evKrB8jb\n6BTbbpF0trsvNLNJkh4ws5vd/dGcagNS6+lJF+KTJtFOQbklHom7+3J3X1h93ivpEUm75VUYUIu0\nI3FCHGVXU0/czPaUdJCkeVkWA9SLEEerSR3i1VbKVZLOqo7IgaZRy4lNeuIoszQ9cZnZaIUA/5W7\nXzvYNp2dnS89r1QqqlQqdZQHpFPLiU1G4mi0rq4udXV1ZbIvc/fkG5tdJmmlu589xNc9zf6ArI0d\nG0bW48Yl2/7CC6Xly8NHoChmJne3Wl6bZorhEZI+LOltZrbAzOab2exa3hTIw+bNknsI8qToiaPs\nErdT3P1uSaNyrAWoy7Z+uKUYz9ATR9lxxSaikbYfLjESR/kR4ogGIY5WRIgjGrWEOLNTUHaEOKLR\n05NujrhETxzlR4gjGrRT0IoIcUSDEEcrIsQRjbQrGErb2ylco4ayIsQRje5uqaMj3WvGjQs3TOZm\nySgrQhzRWLtWmjw5/esmTw6vBcqIEEc06gnx7u7s6wEagRBHNLq7GYmj9RDiiMbatel74lJ4DSGO\nsiLEEQ164m
hFhDiiQU8crYgQRzRop6AVEeKIBic20YoIcUTBvfaROO0UlBkhjihs2CCNGpX83pr9\n0U5BmRHiiEKtJzUl2ikoN0IcUSDE0aoIcUSh1pOaEj1xlBshjiisWVN7iE+ZIq1enW09QKMQ4ojC\nqlXSTjvV9tqpU6UXXsi2HqBRCHFEod4QX71a6uvLtiagEQhxRKGeEB8zRpo4kZObKCdCHFF44YXa\nQ1wKr6WlgjIixBGFVatCW6RWU6eGfQBlQ4gjCvW0U6TwWkIcZUSIIwqEOFoVIY4o0BNHqyLEEQV6\n4mhVhDhKb8sWqacnXHlZK9opKCtCHKW3enW45H7UqNr3sfPOhDjKiRBH6S1fLu26a337mD497Aco\nG0IcpbdsmTRjRn37mDEj7AcoG0IcpUeIo5UlDnEzu9jMnjOzv+VZEJDWsmX1t1OmTJE2bZLWr8+m\nJqBR0ozE/1vScXkVAtQqi5G4WfhBwGgcZZM4xN39LkksnY+mk0WIS7RUUE70xFF6hDha2eisd9jZ\n2fnS80qlokqlkvVbAC9DiKNsurq61NXVlcm+zN2Tb2y2h6Tr3f3AIb7uafYH1KuvT5o0SXr++fCx\nHuefHy4c+u53s6kNSMrM5O5Wy2vTtlOs+gCawvLlUnt7/QEuSXvtJS1ZUv9+gEZKM8XwCkn3SNrX\nzJ42s4/nVxaQzJNPhvDNwt57h/0BZZK4J+7up+RZCFCLJUtC+GZh20jcPUw5BMqA2SkotSefzC7E\np06V2tpYCAvlQoij1JYsya6dIoUfCPTFUSaEOErt8celffbJbn+zZkmPPZbd/oC8EeIorRdflBYv\nlg4cdMJrbQ46SFq4MLv9AXkjxFFajz4qzZwZphhm5eCDpfnzs9sfkDdCHKU1f750yCHZ7vPgg6UF\nC8JFREAZEOIorfvvzz7Ep02TOjqYL47yIMRRWnPnSkcdlf1+jzoq7BsoA0IcpfT3v4d1TrIeiUvS\n8cdLN96Y/X6BPBDiKKXrr5eOOy5cnJO1Y4+V7rxT6u3Nft9A1ghxlI679NOfSh/PafWenXaSjj5a\nuvzyfPYPZIkQR+nccIM0apSU51L1n/+8dNFF0ubN+b0HkAVCHKXS2yuddZb0ve/lu0hVpRIuwb/w\nwvzeA8hCqptCjLgzbgqBHG3eLJ18srTLLtIvfpH/+z31lHT44dJPfiKddFL+74fW1cibQgCFWLYs\nzBppa5PmzGnMe+6xh3TdddJnPiP94AehFw80G0IcTe+aa8KVlG95i3TVVdLYsY177ze+UfrLX6Rf\n/lL6wAdYphbNhxBH0+rtlT71Kenss6Wrr5a++U1pdOa39h7ZnntK8+ZJu+0mve510s03N74GYCiE\nOJrSvHlh9L1pU1hV8M1vLraeCRPCbJVLLpE++UnpzDOlDRuKrQmQCHE0mS1bpM5O6Z3vDHefv/TS\nsJZJszjmGGnRIun556XXv54VD1E8ZqegaTzxhPTRj4bQvuQS6RWvKLqioblLV1whfeELod3z5S+H\nuetALZidglJzl37+c+lNb5JOOUX605+aO8ClMEf9wx+WHngg9MgrlbCeC9BohDgKtWJFmIM9Z05Y\nr+TMM/NZDyUvu+8u3Xpr+Dscemj4DYJfRtFIJfrvgtj88Y9htsf++4cTmQccUHRFtWlrk845J4T5\n978vvfe90sqVRVeFVkGIo+HWr5c+97lwEc1vfiNdcEFj537n5cADpXvvlfbaK/xwYjlbNAIhjoaa\nPz/M6li9OszyOPLIoivK1vjxYb2Vyy+XTj9dOuOM8EMLyAshjobYsiVMGZw9W/r616Vf/1qaMqXo\nqvJz1FHhh9TateHGFfffX3RFiBVTDJG7RYukT3xCmjo1LFy1xx5FV9RYv/tdWNr2tNPCD7Dx44uu\nCM2GKYZoSps2Sd/4RrhA5owzwlS8VgtwKay5smiR9PjjoVd+xx1FV4SYMBJH
Lm6/PZy83GefsJRr\ns8/7bpRrrgnH5YQTwgndHXcsuiI0A0biaBpPPy29//3h1mnf+lYILQJ8u5NOkhYvlsaMCVMr58wJ\n5wuAWhHiyMSaNdLXvhZO4h1wgPTww9J73pPv3XfKavJk6cc/lubODUvrHnywdNNNXCSE2hDiqMv6\n9aEtsO++4cYN8+eHBax22KHoyprfgQeGC4T+4z/CGixvfWtoQwFpEOKoyYoVIaxf9aowfe7OO6WL\nLw6XoSM5M+nd75YeeijMK//0p8M6LDfcIPX1FV0dyoATm0hlwQLpZz8L0+ZOPjlcbr7ffkVXFY8t\nW8Kxvegiqbs73BR628qOiFc9JzYJcYxoxQrpyivDSPuFF8JJy9NPl6ZPL7qyeLlLd98d7u15yy3S\niSdKp54qHX00S97GqGEhbmazJf1AoQ1zsbtfMODrhHgkli4NNwm++upwZ53jjw8X7Bx9dLlWGYzB\nypVhjZnLLguzf97xjnDTjGOO4dxDLBoyxdDM2iT9SNJxkv5F0ofMbP9a3rRoXV1dRZeQSKPq7OsL\nN2S44opwVeFee0mHHRZOUp5zjrR8eQiRY48dPMA5ntkaWOfOO4crPu+7T7rnHuk1r5F++MPwm1Cl\nIp13nnTbbVJPT3E1Nquy1FmPNGOqQyU94e5PufuLkn4r6V35lJWvsvzDZl2ne5hBsu0k5Be/GNb4\nmDo1jOp+/3vpta+Vrr8+BPcll4RR30iXibfq8czLcHXuvXeYyXLrrdIzz0jnnitt3hymd+66qzRr\nlvS+90nf/naYo//gg+GG042ssZmUpc56pLl3+G6S/q/fn/+hEOwowNat0saNYfTV3R0WWuru3v58\n1aoQ2P0f//hHuOHvrFnhsd9+IQQOOUSaNq3ovxHS6ugIC4rNnh3+vGVL+I1qwYLwuPhiacmScMeh\njg5pzz3D6H369BD406eHf/f29vD19vaXP49heeBWkCbEEznxxPCxf2s8yfNaXlPr65cuDfNxi3r/\nJK/v6wuhe/nlYQ2SzZvDY9vzrVvDCLmjI1w80tHx8udTp0ozZoS1OmbMCI+ZM8PXEafRo6VXvzo8\nTjll++f7+sJvVkuXSs89t/2xeHE4ad3Ts/3R3b39Y19fCPKxY8MVpv2fr1kjXXtteE+z8Ghre/nH\noZ73/9y2R14efzy0oYZT9AVpu+1W3+sTn9g0s8Mldbr77Oqfz5Xk/U9umhlnNQGgBrnPTjGzUZIe\nk3S0pGWS7pX0IXd/pJY3BgDUL3E7xd23mtnnJN2s7VMMCXAAKFCmF/sAABqrrss2zOy7ZvaImS00\ns9+b2aAXB5vZbDN71MweN7Ov1POeNdZ5spk9ZGZbzeyQYbZbamaLzGyBmd3byBqr75+0zqKP545m\ndrOZPWZmN5nZoKdLizieSY6Nmf2XmT1R/b49qBF1DVLDsHWa2ZFmtsbM5lcfXyuozovN7Dkz+9sw\n2xR6PEeqsYmO5Uwzu83MFpvZg2Z25hDbpTue7l7zQ9Ixktqqz78j6fxBtmmT9L+S9pA0RtJCSfvX\n87411LmfpFmSbpN0yDDbLZG0YyNrS1tnkxzPCyR9ufr8K5K+0wzHM8mxkXS8pBuqzw+T9NcC/p2T\n1HmkpOuK+D4cUMdbJB0k6W9DfL0ZjudINTbLsdxV0kHV55MUzjHW/f1Z10jc3W9x921rrf1V0sxB\nNiv8IiF3f8zdn5A00tlfU4ErOyass/DjWX2/S6vPL5V00hDbNfp4Jjk275J0mSS5+zxJk82s0avA\nJP03LHw1dne/S9LqYTYp/HgmqFFqjmO53N0XVp/3SnpE4fqb/lIfzyz/g31C0o2DfH6wi4TqnBmZ\nG5c018zuM7PTii5mCM1wPHdx9+ek8I0paZchtmv08UxybAZu88wg2+Qt6b/hm6q/Ut9gZgc0prTU\nmuF4JtFUx9LM9lT47WHegC+lPp4jzk4x
s7mS+v8kMIX/nF919+ur23xV0ovufsVI+8tLkjoTOMLd\nl5nZNIXweaT6U77Z6szdMHUO1k8c6ux47sczYg9I2t3d15vZ8ZKukbRvwTWVVVMdSzObJOkqSWdV\nR+R1GTHE3f3YEQr6mKQTJL1tiE2ekdT/VgEzq5/L1Eh1JtzHsurHFWb2B4VfezMNnQzqLPx4Vk8i\nTXf358xsV0nPD7GP3I/nAEmOzTOSXjnCNnkbsc7+/7nd/UYzm2NmU939hQbVmFQzHM9hNdOxNLPR\nCgH+K3e/dpBNUh/PemenzJb0JUnvdPdNQ2x2n6R9zGwPMxsr6YOSrqvnfes0aG/MzHao/oSUmU2U\n9HZJDzWysIElDfH5Zjie10n6WPX5v0n6p2/Ggo5nkmNznaRTq3UdLmnNttZQA41YZ/8+qJkdqjAd\nuKgANw39/dgMx1MapsYmO5a/lPSwu/9wiK+nP551nm19QtJTkuZXH3Oqn58h6X/6bTdb4UzsE5LO\nLeCs8EkKfaYNCleb3jiwTkmvUpglsEDSg81aZ5Mcz6mSbqnWcLOkKc1yPAc7NpI+LelT/bb5kcLs\nkEUaZrZSkXVKOkPhh94CSfdIOqygOq+Q9KykTZKelvTxZjueI9XYRMfyCElb+/2/mF/9PqjreHKx\nDwCUGPdoAYASI8QBoMQIcQAoMUIcAEqMEAeAEiPEAaDECHEAKDFCHABK7P8BvGYG6KXBgGwAAAAA\nSUVORK5CYII=\n", 100 | "text/plain": [ 101 | "" 102 | ] 103 | }, 104 | "metadata": {}, 105 | "output_type": "display_data" 106 | } 107 | ], 108 | "source": [ 109 | "# Plot the generative distribution\n", 110 | "f = plt.figure(figsize=(6,4))\n", 111 | "ax = f.add_subplot(111)\n", 112 | "model.plot(axis=ax)\n", 113 | "f.show()" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": 5, 119 | "metadata": { 120 | "collapsed": false 121 | }, 122 | "outputs": [ 123 | { 124 | "data": { 125 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXEAAAEACAYAAABF+UbAAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAGP1JREFUeJzt3XuQHWWZx/HfM5lJMgnJhHBJQmK4CQmscolVwIolBwQJ\nrCLeQSwXUVYs5OIKQolK4p0tqxYWpErdsKUSXBVBcBEkGA4R0Yi5EUIkQIgs2STEQCaTnLkl8+4f\n7xkyGebSfU736dPvfD9Vp6ZnpqfPk57Jb955+u33mHNOAIB8asi6AABA5QhxAMgxQhwAcowQB4Ac\nI8QBIMcIcQDIscY4O5vZBkmtknokdTvnTkqjKABANLFCXD68C86519IoBgAQT9x2ilXwNQCAlMQN\nZCdpkZk9aWaXplEQACC6uO2UU51zm8zsIPkwX+ucezyNwgAAw4sV4s65TeW3W83sXkknSXo9xM2M\nhVgAoALOOavk6yK3U8xsnJntV94eL+ndkp4eoJC6f9x4442Z15CHOjduHPrzy5btrbP/vsN97caN\ne/fp+3agj/V9vt73+273/dhDD/n3b7vNby9c6PTFL/rtiy66UQsX+o8tW+b0jW/4jy9btvdre/9N\nvcdbuHDf5+n9/EA1D1X3cOe279cNdD7jfm9C/9kMrc5qxBmJT5F0b3m03ShpoXPu4aqeHQBQlcgh\n7px7UdIJKdYCAIhpRE4XLBQKWZcQCXUm67jjClmXEEkezmceapTyU2c1CPE6Rp3JOv74QtYlRJKH\n85mHGqX81FmNERniABAKQhwAcowQB4AcI8QBIMcIcQDIMUIcAHKMEAeAHCPEASDHCHGMCCtXSq2t\nWVcBJI8Qx4hw663SkiVZVwEkjxBH8HbtkjZulJ5+w8LJQP4R4gjeyy9LhxwirVmTdSVA8ghxBG/T\nJumMM6TOTum117KuBkgWIY7g7dwpHXywf2zfnnU1QLIIcQSvVJJaWqSJE6UdO7KuBkgWIY7g9YZ4\nSwshjvAQ4ggeIY6QEeIIXqkkTZrkH21tWVcDJIsQR9A6O6WeHmnsWD8S565NhIYQR9BaW6Xx4yUz\n2ikIEyGOoLW1SePG+e2WFtopCA8hjqDt2LFviDMSR2gIcQStrU1qbvbbhDhCRIgjaKWSv6gp+d54\n74VOIBSEOILW3i6NHu23R43y252d2dYEJIkQR9Da26UxY/a+39zsPwaEghBH0PqOxCVCHOEhxBG0\n/iPxsWMJcYSFEEfQBhqJl0rZ1QMkjRBH0Do63hjiXNhESAhxBK29fe8UQ4mROMJDiCNo7e1SU9Pe\n98eN86NzIBSEOILGhU2EjhBH0Do6mCeOsMUKcTNrMLPlZnZ/WgUBSdmzR+ru3redMnYs7RSEJe5I\n/CpJz6RRCJC0vuum9Bo3jgubCEvkEDezGZLOlfSf6ZUDJKdU2ruCYS964ghNnJH4v0u6VpJLqRYg\nUQOFeHMz7RSEpTHKTmb2T5K2OOdWmllBkg2277x5817fLhQKKhQK1VUIVGiwEKedgqwVi0UVi8VE\njhUpxCWdKuk8MztXUrOkCWb2Y+fcJ/rv2DfEgSz1v1tT8jNVurqyqQfo1X+AO3/+/IqPFamd4pz7\nknNupnPuCEkXSFo8UIAD9aSzc9/phZJ/n3YKQsI8cQRroBAfPZqROMIStZ3yOufcY5IeS6EWIFH9\nb/SR/OwUQhwhYSSOYA00Em9q8jcA8TqbCAUhjmANNBJvaPBBznK0CAUhjmANNDtF8h/j4iZCQYgj\nWAONxCUf4ty1iVAQ4gjWQD1xiUWwEBZCHMEaLMRppyAkhDiCNVg7hUWwEBJCHMHq7OTCJsJHiCNY\ng43Ex4xhJI5wEOII1lAhzkgcoSDEESymGGIkIMQRrMF64ozEERJCHMEabIohPXGEhBBHsDo73/hC\nyRIjcYSFEEeQenr8krNNTW/8HCNxhIQQR5C6u30/vGGAn/AxY
1jFEOEgxBGkwWamSMxOQVgIcQSp\nu3vwEKedgpAQ4ghSd/fAFzUlbrtHWAhxBGmw6YUSI3GEhRBHkLq6Bh+JM8UQISHEEaThQpyROEJB\niCNIXV2Dt1N4ZR+EhBBHkAa7W1NiiiHCQogjSMNNMWQkjlAQ4gjSUCPxxkb/dvfu2tUDpIUQR5CG\n6olLUnOzH60DeUeII0hDzU6R/OcIcYSAEEeQooR4V1ft6gHSQogjSMOFeHMzIY4wEOII0nA9cdop\nCAUhjiBFubDJSBwhIMQRpKEWwJIYiSMchDiCNNRStBJTDBEOQhxBGupmH4l2CsIROcTNbIyZLTWz\nFWa22sxuTLMwoBrME8dIETnEnXOdkk53zp0o6QRJ55jZSalVBlSB2SkYKWK1U5xzpfLmGEmNklzi\nFQEJiDIS5xXvEYJYIW5mDWa2QtJmSYucc0+mUxZQnSg3+zASRwjijsR7yu2UGZJONrNj0ykLqFzv\nBcve1QoHQjsFoRjix3xwzrkdZvaopLmSnun7uXnz5r2+XSgUVCgUqigPiK+93b/ww1BYOwVZKhaL\nKhaLiRwrcoib2YGSup1zrWbWLOksSd/pv1/fEAey0N4+9EVNiXYKstV/gDt//vyKjxVnJD5N0o/M\nrEG+DfMz59xvKn5mICWlktTUNPQ+jMQRisgh7pxbLWlOirUAiYjaTuGVfRAC7thEcKK2UxiJIwSE\nOILT3h6tnUJPHCEgxBGcUmn4kTg9cYSCEEdwoozEaacgFIQ4ghOlJ86FTYSCEEdwosxOGTPGh7hj\n9R/kHCGO4EQJ8VGjpIYGFsFC/hHiCE6Unrjk9yHEkXeEOIITpScu+dE6IY68I8QRnCi33Ut+lUNC\nHHlHiCM4UXrikh+tE+LIO0IcwYka4o2NUkdH+vUAaSLEEZxSKVqIjx5NiCP/CHEEJ+pInNkpCAEh\njuAQ4hhJCHEEJ2o7pamJdgryjxBHcKKOxOmJIwSEOILDzT4YSQhxBCfOSJwQR94R4giKc9Hv2Gxq\n8oEP5BkhjqB0d/sVChsjvAQ4I3GEgBBHUDo7pXHjou3LFEOEgBBHUDo6/EuvRcEUQ4SAEEdQOjuj\nhzjtFISAEEdQaKdgpCHEEZQ47RRu9kEICHEEhZ44RhpCHEGJ0xOnnYIQEOIISpyeOBc2EQJCHEGJ\n204hxJF3hDiC0tkpjR0bbd+mJqmrS+rpSbcmIE2EOILS0RG9nWLmWypdXenWBKSJEEdQ4rRTJL9k\nLYtgIc8IcQQlzuwUyYc40wyRZ4Q4ghKnnSL5/nmplF49QNoIcQSlvV3ab7/o+zc3005BvkUOcTOb\nYWaLzWyNma02syvTLAyoRHu7NH589P2bm6Vdu9KrB0hbhKXzX7db0r8651aa2X6SlpnZw865v6ZU\nGxBbe7tvp0QdXTMSR95FHok75zY751aWt3dKWitpelqFAZXo6Ig3Eh87lhBHvlXUEzezwySdIGlp\nksUA1aqkncKFTeRZ7BAvt1LulnRVeUQO1I1KLmzSE0eexemJy8wa5QP8J865+wbaZ968ea9vFwoF\nFQqFKsoD4ok7xTBO/xxISrFYVLFYTORYsUJc0h2SnnHO3TLYDn1DHKi1Uil+T5x2Cmqt/wB3/vz5\nFR8rzhTDUyVdJOkMM1thZsvNbG7FzwwkrLtbcs6vhxIVPXHkXeSRuHPuD5JGpVgLUJVSybdHzKJ/\nDT1x5B13bCIY7e3x1k2RmGKI/CPEEYxSKfpa4r3oiSPvCHEEo1SKPxJndgryjhBHMHbtih/i9MSR\nd4Q4glFJT5y1U5B3hDiC0d5OTxwjDyGOYFTTTnEunZqAtBHiCMauXfHu1pT8jUENDf5l3YA8IsQR\njLa2+CEu+QWzdrKUG3KKEEcwKhmJSz7EmaGCvCLEEYydO+OtYNiLkTjyjBBHMKoJ8ba25OsBaoEQ\nRzB27qysnTJ+PO0U5Bchj
mDs3BnvVX16TZhAOwX5RYgjGJW2U8aPJ8SRX4Q4grFrFxc2MfIQ4giC\nc/7iZKUhTk8ceUWIIwjt7dKoUfFemq0XI3HkGSGOILS2VnZRU6InjnwjxBGEakJ8wgTmiSO/CHEE\nYceO6kbiLEeLvCLEEYTt2ysP8YkT/S8BII8IcQRh2zZp0qTKvnbiRN+OAfKIEEcQtm2TWloq+9qW\nFj8S7+lJtiagFghxBKGaEG9s5AWTkV+EOILw6quVh7jkWzHMUEEeEeIIQjUjcYmLm8gvQhxBqDbE\nW1oYiSOfCHEEIYkQZySOPCLEEYRXX618iqFETxz5RYgjCNu2+b52pRiJI68IceTe7t1+FD1hQuXH\noCeOvCLEkXutrT6ER42q/BiTJnHXJvKJEEfuvfKKNHVqdceYPFl67bVk6gFqiRBH7r3yijRtWnXH\nOOggf3EUyBtCHLm3ZUv1IX7ggf7iKJA3kUPczBaY2RYzeyrNgoC4kminTJjgL5B2dCRTE1ArcUbi\n/yXp7LQKASqVxEjcTNp/f2nr1mRqAmolcog75x6XxKUf1J0keuKSv7hJSwV5Q08cuZfESFzyIc5I\nHHnTmPQB582b9/p2oVBQoVBI+imAffSOxKtdD/yAA6S//z2ZmoChFItFFYvFRI6VaogDaevp8SPx\n6dOldeuqO9aBB/pfCEDa+g9w58+fX/Gx4rZTrPwA6sLmzf7V6it9keS+pk6VXn65+uMAtRRniuFd\nkp6QdLSZvWRmn0yvLCCaF16QDj00mWMdcgghjvyJ3E5xzn0szUKASqxfn1yI947EnfNTDoE8YHYK\ncu2FF6TDDkvmWBMnSg0NTDNEvhDiyLX166WZM5M73owZ/phAXhDiyLV166TDD0/ueDNnSs8+m9zx\ngLQR4sit7m5pzRrpmGOSO+asWdLKlckdD0gbIY7cev553/5IYnphr9mzpeXLkzsekDZCHLm1erU0\nZ06yx5w1S1qxwt9EBOQBIY7cWrUq+RDff38/S+WFF5I9LpAWQhy59fvfS6efnvxxTz9dWrQo+eMC\naSDEkUsbN0rbtyc/Epekc86RHnww+eMCaSDEkUtLlkiFgr85J2lnneWPXyolf2wgaYQ4csc56e67\npY9+NJ3jH3CA9K53Sb/5TTrHB5JEiCN3HnhAGjVKevvb03uOK66QFi70c9GBekaII1d27ZKuukq6\n+up0F6kqFPwc9AceSO85gCQQ4siN7m7p8sv97JE0R+GS/wXxpS9Jv/2t9NBD6T4XUA1CHLmwaZP0\nuc/5cL399to857Rp0jXXSNdfL918s+/FA/WGEEfd+9WvpBNP9I8f/lAaPbp2z33kkdKvfy3dcYd0\n2WV+WiNQTxJ/jU0gKTt3StdeK/3xj9I990hjx0qNGfzEvulN0tKl0pVXShdcIN15p/SWt9S+DmAg\njMRRl5Yv9yPvri6/qmDaPfDhNDdL8+dL8+ZJn/qU9OUvS+3t2dYESIQ46szu3T4oL75Y+va3pVtu\n8WuZ1ItTTvFrtmzbJr3tbX4RLiBLhDjqxvr10vnnS088IT38sPShD2Vd0cAmT/YXV2+4QbrwQunW\nW6U9e7KuCiMVIY7MOecvWJ53nvT+9/spfVOnZl3V0Mykiy7yUxAfe8zPK3/xxayrwkhEiCNTW7dK\nl1ziR7b33OP7zWmsh5KW6dOln//c/wVx0knSz37GVETUVo7+uyA0v/uddPzxfhrf0qXS0UdnXVFl\nGhqkL3zB/3t+8APpgx+UXn0166owUhDiqLlSyfeTr79e+ulP/UyPWs79Tstxx/nb9I84QjrzTJaz\nRW0Q4qip5cv9rI7t26VHHpFOOy3ripI1dqz03e/6i52XXeaXCejoyLoqhIwQR03s3u2nDM6dK33l\nK9L3vie1tGRdVXpOPdVPRWxtlT72Mekvf8m6IoSKEEfqVq2STj5ZWrxYevJJH2ojwaRJ/u7
Oz3xG\nOvdc30JiVI6kEeJITWen9NWv+v7w5Zf7ud+HHpp1VbV39tn+F9m6df5C7mOPZV0RQkKIIxV/+IN/\n/ctVq/zjkkvSXf+73k2bJv3iF9JNN/n55ddeK732WtZVIQSEOBL10kvSRz4iff7z0te/7lcgPOSQ\nrKuqH+efL61ZIzU1SbNn+/nxu3dnXRXyjBBHIlpb/VTBOXOkY4/1LYMPfGBkj74H09Iifetb0qJF\n/rVCTzxRKha5SQiVIcRRlVLJtwje8Q7/wg3Ll/sFrJqbs66s/h13nL9B6Gtf89cO3vlO34YC4iDE\nUZGtW6Xvf186/HA/fe7ee6UFC6SZM7OuLF/M/Hoxjz7q55Vfd51fh+WRR6SenqyrQx4Q4ohlxQrp\ns5+VZs2SXnlFWrLEX7B785uzrizfRo3yFzyLRenSS/0NQ7Nn+/n0O3ZkXR3qGSGOYW3d6sNkzhw/\napw6VVq71t+0M2tW1tWFpbHRh/mDD/qXhHv0Uf/Xzcc/7qdosuQt+osV4mY218z+ambrzOy6tIpC\n9jZs8O2RQkE66ijp8cd973v9eunGG6UpU7KuMGxm/jrD3XdLzz/vb5a64QY/0+fTn/ZL4JZKWVeJ\nehA5xM2sQdJtks6W9A+SLjSz2WkVlqZisZh1CZHUqs6eHum556S77vJ/yh9xhA+N1av96nybN/uF\nqs46a+BlYvNyPletKmZdQiT9z+eBB0pXXOHvdn3iCf/6ngsW+F+khYL/pbp4sdTWll2N9SovdVYj\nzkj8JEnPOef+5pzrlvTfkt6XTlnpyss3Nuk6nfMzSJYs8SFwzTXS6af7V6o580zpl7+U3vpW/+ru\nmzdLN98svfe9flGnWtaZlqeeKmZdQiRDnc8jj5SuvtqvYb5xo18JsqvLT++cOtX/1fThD0vf/Kaf\no796tX/B6VrWWE/yUmc14rx2+HRJ/9vn/Zflgx0Z2LPHr8PR1uYvfLW2+re929u2+cDu+3j5ZT/1\n76ij/GPWLB8Cc+ZIBx2U9b8IcU2c6BcUmzvXv797t/+LasUK/1iwwLe/XnzR73vYYX70PmWKD/wp\nU/z3fcIE//kJE/bdDmF54JEgTohH8p73+Ld9b1yIsl3J11T69Rs2+AtGWT1/lK/v6fGhe+edfg2S\nri7/6N3es8ePkCdO9DePTJy47/bkyf5W7+OP92+nTZNmzAh75cCRrrFROuYY/+i7yFhPj//LasMG\nacuWvY81a/xF67a2vY8dO/a+7enxQT56tL/DtO/29u3Sfff55zTzj4aGfd8Ott33Y72PtKxb59tQ\nQ8n6hrTp06v7enMRbxMzs1MkzXPOzS2/f70k55y7qc8+3HMGABVwzlX06yROiI+S9Kykd0naJOnP\nki50zq2t5IkBANWL3E5xzu0xs89Jelj+gugCAhwAshV5JA4AqD9V3bFpZv9mZmvNbKWZ/dLMJg6y\nX6Y3CZnZh8zsaTPbY2Zzhthvg5mtMrMVZvbnWtZYfv6odWZ9Pvc3s4fN7Fkz+62ZDXi5NIvzGeXc\nmNl/mNlz5Z/bE2pR1wA1DFmnmZ1mZtvNbHn58eWM6lxgZlvM7Kkh9sn0fA5XYx2dyxlmttjM1pjZ\najO7cpD94p1P51zFD0lnSmoob39H0rcH2KdB0vOSDpXUJGmlpNnVPG8Fdc6SdJSkxZLmDLHfekn7\n17K2uHXWyfm8SdIXy9vXSfpOPZzPKOdG0jmSHihvnyzpTxl8n6PUeZqk+7P4OexXxzsknSDpqUE+\nXw/nc7ga6+VcTpV0Qnl7P/lrjFX/fFY1EnfOPeKc611r7U+SZgywW+Y3CTnnnnXOPSdpuKu/pgzX\nk4lYZ+bns/x8Pypv/0jS+YPsV+vzGeXcvE/SjyXJObdUUouZ1XoRgajfw8xXY3fOPS5pqNcgyvx8\nRqhRqo9zudk5t7K8vVPSWvn7b/qKfT6T/A92iaQHB/j
4QDcJVTkzMjVO0iIze9LMLs26mEHUw/k8\n2Dm3RfI/mJIOHmS/Wp/PKOem/z4bB9gnbVG/h/9Y/pP6ATM7tjalxVYP5zOKujqXZnaY/F8PS/t9\nKvb5HHZ2ipktktT3N4HJ/+e8wTn36/I+N0jqds7dNdzx0hKlzghOdc5tMrOD5MNnbfm3fL3Vmboh\n6hyonzjY1fHUz2fAlkma6Zwrmdk5kn4l6eiMa8qrujqXZrafpLslXVUekVdl2BB3zp01TEEXSzpX\n0hmD7LJRUt+XCphR/liihqsz4jE2ld9uNbN75f/sTTR0Eqgz8/NZvog0xTm3xcymSnplkGOkfj77\niXJuNkp60zD7pG3YOvv+53bOPWhmt5vZZOfcqzWqMap6OJ9DqqdzaWaN8gH+E+fcfQPsEvt8Vjs7\nZa6kayWd55zrHGS3JyW92cwONbPRki6QdH81z1ulAXtjZjau/BtSZjZe0rslPV3LwvqXNMjH6+F8\n3i/p4vL2P0t6ww9jRuczyrm5X9InynWdIml7b2uohoats28f1MxOkp8OnFWAmwb/eayH8ykNUWOd\nncs7JD3jnLtlkM/HP59VXm19TtLfJC0vP24vf3yapP/ps99c+Suxz0m6PoOrwufL95na5e82fbB/\nnZIOl58lsELS6nqts07O52RJj5RreFjSpHo5nwOdG0mfkfQvffa5TX52yCoNMVspyzolXS7/S2+F\npCcknZxRnXdJ+j9JnZJekvTJejufw9VYR+fyVEl7+vy/WF7+OajqfHKzDwDkGC/PBgA5RogDQI4R\n4gCQY4Q4AOQYIQ4AOUaIA0COEeIAkGOEOADk2P8DgT6Izx39xYQAAAAASUVORK5CYII=\n", 126 | "text/plain": [ 127 | "" 128 | ] 129 | }, 130 | "metadata": {}, 131 | "output_type": "display_data" 132 | } 133 | ], 134 | "source": [ 135 | "# Draw some samples with which to do inference.\n", 136 | "data = model.sample(size=100)\n", 137 | "# Plot with samples\n", 138 | "f = plt.figure(figsize=(6, 4))\n", 139 | "ax = f.add_subplot(111)\n", 140 | "model.plot(axis=ax)\n", 141 | "for x in data:\n", 142 | " ax.axvline(x, alpha=0.1)\n", 143 | "f.show()" 144 | ] 145 | }, 146 | { 147 | "cell_type": "code", 148 | "execution_count": 39, 149 | "metadata": { 150 | "collapsed": false 151 | }, 152 | "outputs": [], 153 | "source": [ 154 | "# Create the Dirichlet Process Mixture Model.\n", 155 | "# Use the Inverse Gamma prior for a Gaussian with known mean and unknown variance.\n", 156 | "mu = 0.0 # Known mean\n", 157 | "alpha = 1.0 # How well we know beta\n", 158 | "beta = 100.0 # 1/typical-variance\n", 159 | "dp_alpha = 0.1 # Dirichlet Process clustering parameter. 
Set lower to infer fewer components.\n", 160 | "cp = dpmm.InvGamma(alpha, beta, mu)\n", 161 | "dp = dpmm.DPMM(cp, dp_alpha, data)" 162 | ] 163 | }, 164 | { 165 | "cell_type": "code", 166 | "execution_count": 40, 167 | "metadata": { 168 | "collapsed": false 169 | }, 170 | "outputs": [], 171 | "source": [ 172 | "# Burn in\n", 173 | "dp.update(100)" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": 41, 179 | "metadata": { 180 | "collapsed": true 181 | }, 182 | "outputs": [], 183 | "source": [ 184 | "phis = []\n", 185 | "nphis = []\n", 186 | "# Sample\n", 187 | "for i in xrange(50):\n", 188 | " dp.update(10)\n", 189 | " phis.append(list(dp.phi)) # Need list() to get a copy\n", 190 | " nphis.append(list(dp.nphi))" 191 | ] 192 | }, 193 | { 194 | "cell_type": "code", 195 | "execution_count": 42, 196 | "metadata": { 197 | "collapsed": true 198 | }, 199 | "outputs": [], 200 | "source": [ 201 | "def plot_sample(phi, nphi, axis=None, **kwargs):\n", 202 | " x = np.arange(-1, 1, 0.01)\n", 203 | " y = np.zeros_like(x)\n", 204 | " for ph, n in zip(phi, nphi):\n", 205 | " y += n*np.exp(-0.5*x**2/ph)/np.sqrt(2*np.pi*ph)/sum(nphi)\n", 206 | " if axis is None:\n", 207 | " axis = plt.gca()\n", 208 | " axis.plot(x, y, **kwargs)" 209 | ] 210 | }, 211 | { 212 | "cell_type": "code", 213 | "execution_count": 43, 214 | "metadata": { 215 | "collapsed": false 216 | }, 217 | "outputs": [ 218 | { 219 | "data": { 220 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXEAAAEACAYAAABF+UbAAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XmQXel53/fve/Zz7rlrb+jGDgwwwyE5HIoySYmSBdmW\nLZuWXbajVORFtkpRYjtxnFJVlIpSJc1UUqmkYpeUKmepkmXLcjmOLCUqW5EsiY4IWgs5Q86QgxnO\nDIDB2ujl9t23c89+8kc3wJnhDIkG0LjdPc+nqgsX3eee87yNxg8v3vOe91VFUSCEEOJg0mZdgBBC\niAcnIS6EEAeYhLgQQhxgEuJCCHGASYgLIcQBJiEuhBAHmLGbg5VSN4EBkANJURSf3IuihBBC3J9d\nhTjb4X2hKIreXhQjhBBid3Y7nKIe4D1CCCH2yG4DuQA+p5T6slLqJ/aiICGEEPdvt8MpnymKYkMp\ntcB2mL9RFMUf7EVhQgghvr1dhXhRFBs7v7aUUr8OfBK4F+JKKVmIRQghHkBRFOpB3nffwylKKU8p\n5e+8LgF/GnjtPQr5wH6srT38e99+jp/92Z+9r3M+zHXf7/rvd86XXrq/Gt7r92//3Luv817nfb9z\nvPTSO8/30ksFv/3b27/e/VhbK/gX/6LgH/2jbxz7kz/5s/c+//b3vf2cd39/99e71337Nd+v1rd/\n3D3+/b7H3+pr3+rPdTefe5Q/O+8+7id/8mf35Ofvg/rxMHbTE18Cfn2nt20A/6Ioit99qKsLIYR4\nKPcd4kVR3ACe3cNahBBC7JJMF9zHLly4MOsSDpXv+q4Lsy7h0JDv5f4hIb6PSYg/Wt/93RdmXcKh\nId/L/UNCXAghDjAJcSGEOMAkxIUQ4gCTEBdCiANMQlwIIQ4wCXEhhDjAJMSFEOIAkxAXQogDTEJc\nfCB86UvQk/2oxCEkIS4OvdEI/tbfgp/7uVlXIsSjJyEuDr1f/mV49ln4tV+DbnfW1QjxaEmIi0Mt\niuAXfgF+5mfgs5+FX/zFWVckxKMlIS4OtVdegY98BJ5+Gv7u34V/9s/gIdfgF2JfkRAXh9rqKnzX\nd22/Pn0aLAs6ndnWJMSjJCEuDrXNTTh37hu/P38e1tZmV48Qj5qEuDjUNja2g/uuc+fgzp3Z1SPE\noyYhLg6tMITxGI4f/8bnzp+XEBeHi4S4OLTW12FxEXT9G5+T4RRx2EiIi0NrbQ2OHHnn586d2/68\nzFARh4WEuDi01tZgefmdn2s0wDS3x8qFOAwkxMWhdefON4c4wNGj8Prrj78eIfaChLg4tN5rOAXg\n2DEJcXF4SIiLQykMt9dJWVj45q+trEiIi8NDQlwcShsbUKu9c2bKXQsLcOvW469JiL0gIS4Opc3N\n7ZuY76XRkGmG4vCQEBeH0sYG1Ovv/bV6XR74EYeHhLg4lL5VT7xSgclke9xciINOQlwcShsb7x/i\nSm1PM2y1Hm9NQuwFCXFxKH2rnjhsh/jW1uOrR4i9IiEuDqVvNSYOEuLi8JAQF4eS9MTFB4WEuDh0\nsmx7vLtWe/9jjh2TEBeHg4S4OHR6ve0AN833P0Z64uKwkBAXh06z+d5rprydhLg4LHYV4kopTSn1\nslLq3+xVQUI8rK2t91698O0kxMVhsdue+N8HZOkgsa/dT4ivrGzvep9lj6cmIfbKfYe4UuoY8OeA\nf7x35Qjx8La2vv1wimVtP7k5GDyemoTYK7vpif8c8F8BsrGV2NdarW8O8WZznclk8o7PLS5Cu/0Y\nCxNiDxj3c5BS6rNAsyiKrymlLgDq/Y597rnn7r2+cOECFy5ceLgKhdiluz3xu/todrsdOp02/X6X\ns2ef4u6PvYS4mJWLFy9y8eLFR3Ku+wpx4DPAX1BK/TnABcpKqV8uiuJH333g20NciFm4uxnE1hYk\nSUSzuc6pU0/Q7bZYX78DnAK2Hwbq92daqviAencH9/nnn3/gc
93XcEpRFD9dFMWJoijOAP8R8Hvv\nFeBC7AfdLszPb7+eTNpUKlWKImdh4Qij0YAsS4Htx/IlxMVBJ/PExaGS59s3K+8+ch9FYyzLYTwe\nMR4P8bwS0+kI2D5GbmyKg27XIV4UxReKovgLe1GMEA9rOIRyGQwD8jwnSULyfHseYRzHKKUIw2+E\neK83y2qFeHj3OyYuxIHQ7X5j9cIomlAUOWmaMh4P0HUTXTfu9cRlOEUcBhLi4mD7jd+AtyowsKAa\n032rSl07Cb/7ZcIXM1R7TOc3/4iVxgL9Zz/GdDohyzKSJKHRMGU4RRx4MiYuDpXu2KThJwCEcUCU\nhni2Q8lxWVk5zmjUpygUk8lEZqeIQ0FCXBwqvZFFo5yQ5RlRFpHnCZ5lM5pOmExGuG6JLEsYjUbU\nattj6Hk+66qFeHAS4uJQ6U5M6qWYJE3Jsow8TwHFXLmK55UoCkjTmDAMMQwolaQ3Lg42CXFxqHRH\nJo1yQhjHZFlKQUHdL6NrOqWST61WJwyHTKdTiqKgWpWnNsXBJiEuDpXuznBKlMbESYih29imde/r\nlUqNooDpdEqaptTrEuLiYJMQF4dKb2LQ8BOiJCFMJvh2DU37xo95uVzZmWY4JUliajUJcXGwSYiL\nQ6U3tqn7MaNgQl7k+O47t7w3DAPPqzIej4njKbXa9rriQhxUEuLiUOmODKqlkGk8RaHhmN43HVMu\nzxMEAVEUypi4OPAkxMWhkWSKaWzgmiGTKMK2bAzDIklT1jpbdLsdiqKgVKqTJAlBMJHhFHHgyROb\n4tDojS1qfkySJYRxiO8cJc0S1jpbREnMuLVJmibYdgPDMAiCEdVqwfXr77s8vhD7noS4ODR6o+2b\nmkmaEKcpjukyDDqMphPCJMGcBiRJTJqWcV2X4TCgUklpt81Zly7EA5PhFHFodMcWdT8hiEPyLMc2\nPfqTJv3JiDAOGQ77dDptxuM21WqVIJhQLqdyY1McaBLi4tDYXjclJogCCgV5njGYtEjSlOX6HJVK\njV6vQ6+3hud5RNEEz4tkTFwcaBLi4tDoTUyqXsI4mGAbJsNwQJxOqbg+o3CKYRhYls1gsIVpmqRp\njGFMmUwgSWZdvRAPRkJcHBq9sUm1NCVMQ2zTYhT0iNOYQuVUXB9N09F1nel0yGQyoSggikbU67LD\njzi4JMTFodEfm9RKEUE4JcsKxuEQQzcwNAPHsrBtB8uyiKKIwWCArmtMp2PZ4UccaBLi4tDoT0xK\nTkCcxRRkBNEI07Cpej5FUVCp1HAcF6Wg0+mgadvTDBuNQkJcHFgS4uLQ6E8MPHNEnuUEcUSaRZi6\nQ5wm9EZD1tdXsW0bz6vQ7XZJ04Q4nlKvZ7IcrTiwZJ64ODT6gYVlDRlOM4aTMYZWggI2ei0Aik6L\nLEtRSieKIqJoSp7bOyEufxXEwSQ9cXFo9McGpjYgzVKCOMQ0HIbTLnkBi7V5FhaW6HbbFEVKmqak\naUwQjKjXUxlOEQeWhLg4FPICBhMTTY1J0oQkzVEFxOmI+UqdosgpipxGY57BoIXneUwmAUkSUioF\nMpwiDiwJcXEojEMDz46I02AnxDMG0x6eVUcpKDsenleiXK4RRWMA4jggTVNcdyghLg4sCXFxKAwD\nY3t6YTRlEgVoQBiP8O0qaZYyiSOSJCEMJ5TLC/T7faIoIkkiHGcswyniwJIQF4fCIDColgJG0wlx\nElNoBYZhkxUZWZbRHw/o93tomo7nVen1emiaRhiOMc2BhLg4sCTExaEwDAx8e0QYBSR5yjRKsHSD\nJIvQNI2Pn3mKU6fOMp2OUUqhaRpZlhJFEzRtQK9XzLoJQjwQCXFxKAynJmVnwDRLKPKMURhgW2U0\nXef80ZO4tsPy8lFOnDhLmiZYlkWWZYRhSJ4P6PczCslxcQBJiItDYRAYWFaHJI5Is5QsTbA1i3pp\nEduwWG03abW2OH789M4+m
x5hOKEoIsJwgGFkjMezboUQuychLg6F4cTENntMooA0y9ANHTRFyWnw\n1uYquqYxGvVpNteo15fxfZ8gGJMkOePxgGo1pNuddSuE2D0JcXEoDAMDQ+sSxhFREuEaFp5dIsti\njs0tstJY4MyZ8+i6gWF4VCoVIKMoUiaTCeXySDaHEAeShLg4FPoTA6W6hGlCkqXU/SqmYWMaNhXP\nv3fc0tIKmqbhOA6m6RJFU8JwQqnUk564OJAkxMWhMJxAUXTJ4pi0KPBdB1O3KbtzAERJTJZluK5L\npTKHYRiUyw3iOGI8DnDdjoS4OJBk1R9xKHTHkGV94iLDUArbcNC1GoZu0Ox32Op3cW9cYXFxBd+f\no1Qq4boOaVqQJAGG0ZHhFHEgSYiLQ2EwCcjzMVE8ZbG6gGkYlIw6m/1NrqzfwjIsBpvrdDpt0vQZ\n5ufnsawSul4QhhGa1paeuDiQ7ns4RSllK6VeUEp9VSn1qlLqZ/eyMCHuV5YromxEko9JswzbNKiV\ny+QUtAarNPwqpxdXOHXqHHGc0GpdY2lpCdf1UEonDCOKYotOJ591U4TYtfsO8aIoIuD7i6L4OPAs\n8GeVUp/cs8qEuE/DqYGlNojjCKXAsRwqrk931ASlsVyfZ75SY2FhiSNHlun1miRJQr0+j+N4RNGU\noujTakWzbooQu7arG5tFUQQ7L222h2LkGTcxc/2xietuEiUhKCg5PpahM5r2afiLVDwfTdOwbZul\npRUcp0yz2cR1PTyvRprGaNqEVkvGU8TBs6sQV0ppSqmvApvA54qi+PLelCXE/etPTAxtnSANMTSd\naqnMOAoxDZuy2yDNUm4019jcXMd1ParVRYIgQNdNXNcjzzXyfESr1Z51U4TYtd32xPOd4ZRjwKeU\nUk/vTVlC3L/uSMPUm+RJimt5uKZDFMeUbJ8sz7h08yqTcMr6+ipXr75OqVTHNE2yLMd1q0BOmo7p\n9yXExcHzQLNTiqIYKqU+D/wg8Prbv/bcc8/de33hwgUuXLjwEOUJ8e21Rxm66pHkCWWnRFbkGLqO\na5dZbd1huT7P0blFkg8/y2uvvUwQ9KhWq2TZGr5fxTAs4jggDDdI01m3RnwQXLx4kYsXLz6Sc913\niCul5oGkKIqBUsoFfgD4H9993NtDXIjHodkLKeiRU+B7PhRQclyiKAClmCtX6Y6HGNOAEyfO8qUv\nvY7rzgMaum5SKpUYj7tY1gaj0axbIz4I3t3Bff755x/4XLsZTlkGPq+U+hrwAvA7RVH81gNfWYhH\npNkbohihoWGbNppu4JgW42hA2W0wCgOyLCNJYrIsxXXLDAYDfL9GmkaUy3WSJMay2vR62aybI8Su\n3HdPvCiKV4Hv2MNahHggrVGPQk3QDQPbMPEMi2kUoasySRozmIwxDJ180CNJUkzTJgz72LaPpulU\nKj5ZlmKabdrtECjNuklC3Dd5YlMceP3xgIIJjumApjBNkyRLyYqMoshYaSxg6DralbfoDAckr2RE\n46skvQWcO1tk03XUeIBKtmh96Svw49836yYJcd8kxMWB1x1t4poxnl1BVya6YRAl2w/ulL05RtNN\nhtMxhqZT9yt4jkGa58RpgmXYpKaJoXR0xqx3ZK64OFhkFUNxoKVpShC20PQcx7KxTQMy6E9GVL05\nonhClCacmD/CXLnK1qCLpdt4tk0YhxiGiaZpWKaDUgPao/6smyTErkiIiwNtEoZEeR9NgVEYmJrJ\nNJnimg66bqBpimNzi7i2Q82vcKQ2T5InNHyf/mSAKsAyLBzLhjyiO5Zt78XBIiEuDrTWYAIMsHSw\nbBvDMBiHAb5bwjRtXKuCoev3ji97HqZhUvV9LMMkSSNsw8TVHTQjozfeoJAdk8UBIiEuDrQrG2Ng\ngK3b6IaBrhtMk4ilWgNd6dim947jdU3HNhyqnodnukyTENsy0HQNQ8sYBR3iOJ5NY4R4ABL
i4kB7\na20ETNANE9swyLIEQ9dZrDdQSmHo5je9xzIcUIrluSPEaUQO2KaNaWgE0YAgmDz2dgjxoCTExYH2\nVrOPwfbu9qZmEEQBS5UFDE3H1K33fI+hmyRpykp9mSzP0HOFYzk4lkZSjOh0Wo+5FUI8OAlxcWAV\nRcGtzS0MlaEpE8Mwt2eiLBwnSuL3DXGlFLZpYpomNb/BNEvRNHAtgAnt9tZjbYcQD0NCXBxYk8mE\nrf4ITQddUyg0LN1krlLfWQRreyglz3OmcUScJvduWjqWRZzGrNSPEqURutJxTBMIuX17fYatEmJ3\n5GEfcWD1eh16kwmmCaZukeUp8+UGuq5j6gZKKYJoyK3WBnGaAIqS7ZBmFWzDIE5jFmvLtIYmhm6g\noQEFV67c4EMfmnXrhLg/EuLiwOp224yCAEPfnnWiiu2blUmaYBsmw6BHa9AiimOqfpkkTemM+jT7\nd0gyDV0zcCyHkuNhGlOUAii4dWuVNE2Rvx7iIJDhFHFgdbttJlGIpWkYmo5t2cxV5ojTmDCJ2Rqs\n49lVnjx2iuX6PCcWjnB+5RRJEnF9cxOlNBSwVG2Apm1PMwS63R6TyXDWzRPivkiIiwNrbe02UZph\nWxqmYeGYNmW3RJqlrHW2KDllFqrHUNtdbABc22Zl/hSj6ZR+MCRJE5ZrDSzDRmkGhp7T7weMRp0Z\ntkyI+ychLg6s9fVVklShyHANG9/z0dDpT/rkec5CZfk932cZDqeXlhiMeozCEXPVOpah45s2upYz\nHo/o9TYfc2uEeDAS4uJACsOQdnuTjALLAsvxqfs1gmjCYDLi5NLKez7oc9dCtYrv+nQGLVzLwTYc\nPNvG0BVJEtLpbDzG1gjx4CTExYHU63UZDoeAwjMNHMuk7tdY725Sdn2qnv8t36+UYqm+SJRFxHFE\nyXJxvRKmVjCd5jSbNx9LO4R4WBLi4kBqtTYYDidAhmdZWIaNbZgMgiFH51feMQ7+fnynRN2rsdZr\nUavU8AwLdIjjhHZ7nTzP974hQjwkCXFxIG1tbTIeT4GEsl/G0A0mUUDJLlFxy/d9nsXaMv3JGMew\ncRwL33JJ04TJpL/T0xdif5MQFwfSxsYdRqMQAw3fcjGVRRCFLM8t3Vcv/K6yW8GxLNIswbVdXNsm\nTXOiaEy73dzDFgjxaEiIiwOp2VxnOo2wdAPLtNF0DU0pql51V+dRSrFcnyfKEjSlKHsOkDKdTtja\nWtub4oV4hCTExYGTpilbW5tEUYxjb4+HUxRU3Aq2+d6LXn0rdb9KyXRJkhTftQGTKApZX5cQF/uf\nhLg4cHq9FpPJhDhO8WwXpYNlWlRKPob+zY/K5zubIqdZ9p679riWRcWvYmhgKR2wmE5DNjYkxMX+\nJ4tDiANna2ud6TQgywqqdYckjai4dVzLfcdxSRozDse0hj0MTacAsjwjiGyyLLt3nKEb+HYJx3Ew\nTROwCMOQzc21ndC//zF2IR43CXFx4LTbawwGA/LcxDM10rRgsXYE09h+uKcoCgbBmCAGy6iyVJu7\n994s3w7v1nBInJYAC0PXt9cW9ypoWhvTdEmSMcNhb2eGyu7G2YV4nGQ4RRw4m5u3ieMpaWrguQWe\n7eC7JQzdoCgK+pMBeZ5TdmpAQW88pD3s0x0NmIQhluHQ8H1G0zFhEqKUwtQNlhsLJGlMqeSRZQVB\nMGV1dXXWzRXiW5IQFwfO5uYqeR6TpgaOntEo19E1HUPTGQRDDF3Hsx1G0x5xGuFYNlXPx3c9dE1j\nEg6YRBFlp8w0CgiTGEPXWarNoTQT2zbIc5PxeMjamoyLi/1NQlwcKEmSsLW1RlHoZJmO5xQcqS+S\n5TlhEqFQGJrBIBjj2WV8p4Zr2ZiGgWWYlByXitdAU4pxOMazPYbBGAoouy4l28OyNIpCIwwD7ty5\nM+smC/EtSYiLA2Vrq0kQDNE0DdCplQzmynPkRU6cxhi
azjQOmStXMY3332OzWirhWg5BFFCyHYIk\npCgK5ssLuK5OlmkURc6NGzcebwOF2CUJcXGgbG6uEYYhaZoBJr5r4rklojjE0i3CNKJaqqBr+rc9\nl2u7OJbDNI6xdYvhdELNb+D7NlmmoWkGa2trO7v8CLE/SYiLA2Vt7RZpmpCmCjCpeh5FnuNYDlEa\nUfHuL8Dvci0PyzBRSpFnOWXHx/c94jjDMExarRb9fm/vGiTEQ5IQFwfKtWuXUQomkxDHKVErlUiy\nhKKAku1hvsfDPt9O2fXIixzLtDAMg1rNI8ss0jQnDEM2NmRcXOxfEuLiwCiKgjt3bqLrBlGUUS7X\ncQxje39N08a13W9/kveglKLs+igFlunieTrgkqYxWZZx8+Zbj7YhQjxCEuLiwJhMJjSbm5imTZ5D\nqVTCdV2U0ig533oTiG/H0A3KbgkFNBou4DOdTrEsi2vXJMTF/iUhLg6MtbVVwjAgSTKKwsLzCsqO\nQ9Ur72oc/P3USmVQBbVaFagznU4xTZO1tZvvueaKEPuBhLg4MK5fv4Kua8TxmDwvUSpBvVSi7D5c\nL/wuTdOouFVKJQ+lPIpCwzAMOp0Wo5FsECH2p/sOcaXUMaXU7ymlvq6UelUp9V/sZWFCvNuNG1cx\nDIfJZEyWVahWXRZqtfdcufBBebZPuVylXLbIc5MoiphOJ7Ra64/sGkI8SrvpiafATxZF8WHgu4D/\nTCn11N6UJcQ75XnOrVvXcBybKArIsjpzc5WdHeofXYibhsn8/BKOU2AYHpPJhKLIWV+XcXGxP913\niBdFsVkUxdd2Xo+BN4Cje1WYEG83GAzo99vkeUaeF4Shz/JyA8s0d7Ud27ejawaVSgXf94Aq/X4f\nwzC4devqI7uGEI/SA42JK6VOAc8CLzzKYoR4PxsbG0wmE5IkwTRN0rTMykoVz7Yf6XUM3cCyLMrl\nCkrNE8cxShlsbt4miqJHei0hHoVdh7hSygd+Dfj7Oz1yIfbcrVu3iOOYIAiw7RLgUa26OKb5SK+j\nKQ3P8ymXXfK8hq7r5HnKYNCVJzfFvrSrwUSllMF2gP/zoij+9Xsd89xzz917feHCBS5cuPAQ5QkB\nWZZx9epVlCqIohDDqFIqOfi+j20++n6E63o0Gj43bnjYtk0cx8CYVmuDpaUjj/x64oPn4sWLXLx4\n8ZGca7d3hP4J8HpRFP/L+x3w9hAX4lEIgoC1tTXyHOI4wrZPUalYWJaFaTz6zals26FeLzGZmLiu\nS5ZlTCZ9ms1NPvxh2a5NPLx3d3Cff/75Bz7XbqYYfgb4a8CfUEp9VSn1slLqBx/4ykLcp263S7PZ\nJMsydF1H1xtUKsbOuPWjD1THcanXHSYTG9/3KYqMMExYX5dxcbH/3Hc3piiKPwQe/rE4IXap1WrR\nbDYxTRPDMElTn0pFp1KpQ/vR77xj2zbVqkGS+Ph+meEwoihi1tfXCIIJ4DzyawrxoOSJTbGvbe/k\ns8VoNMK2bRzHJo416nWPUqm0J9fUNI1Syce2LQyjtPPkps76+m0Gg+6eXFOIByUhLva16XTKjRs3\niKIIXTcxTY/p1GZhwcNxvD27ru/XqVY1isJH0xSO49NuN+n1umRZtmfXFWK3JMTFvjYcDrl58yaa\npmHbFrZtkiQ+lYrCdR9s6dn7USo18H2dJPHQNPD9CqPRkK2tTeJYxsXF/iEhLvatoijY2tpibW0N\n3/dJkhjbtplMyhw54qPre3eLplQqU68bxLGDruu4rk+axty5c5MwDPbsukLsloS42LfiOKbdbtNq\ntahWq2RZgWW5TCYWKyvVPb22bXvUahZR5GKaFqCwLJs7d24zGHT29NpC7Majn2QrxCMSBBNu3bpF\nkiQ0Gg16vTv4/jJxDCdP1vfuwr/7uxg3fBrTNpu9lJLdJesllKKQ1le/TN8+TaJex/xLf2nvahDi\nPklPXOxbnc4WrVa
LoihwHIeiKNC0Bo4DCwvze3ptpRSLdYtR4GPqBnmWUrZdepMJnWGPqcwXF/uE\nhLjYl7IsY2trk7W1NWzbpigKTNMiy8qUyzm+X97zGk4uePQmPrZlEKQxpZ0NlbvjHu3RaM+vL8T9\nkBAX+9JoNGIyGdHv9/E8j+l0imFYhKFDve5iWdae13B2yaI3VtTKVZIkpOx45AUMgiGb3a5s2Sb2\nBQlxsS+1Wi2GwwFBEOB52/PBTVMRBGXm5/d2ZspdZ5cthqGGb5dQ6KAUuqboTwaMp1PGY1nEU8ye\nhLjYd/I8Z2Njg+l0QpZlVCqVnZ64y3Bos7zc2JM1U97Nd2zqXkGW+li2RZzGlGyHSTBkGsd0OjJL\nRcyehLjYdwaDAVEU0W63AahWq4xGIyqVCv0+HD26hzNT3sYyTRZqOklWQcsKKMA2LaIsJYgims0m\neZ4/llqEeD8S4mLf2dzcBKDXa2HsLDVbFAVzc4v0ejknTzYeSx26pnOkbjMKKmimwTSN8GyHNEsY\n7QynyJCKmDUJcbGvRFFEr9cjSRLG4ymGYZCmKYZh4PsVej2d48f3ZuGr93JywaMfOJQsj8F4QtX3\nyQvFZreLYRg0m83HVosQ70VCXOwrw+GQOI7p9/vkeYxlWTvzwzVct8JkYrOysndrprzb6SM2vcCh\n4TdI8gQKha4UoygiTVMGg8HOzj9CzIaEuNg38jyn2WzieR6tVos4jqhUKgyHQ1zXJQi2d9xx3b2f\nXnjX2SVFf2ziez6GMlA5KBRJmt77B2c4HD62eoR4NwlxsW9MJhPCMCSOY3q9HqCoVqv0+/2dm5sG\nS0uP56bmXWePFPTHLr7lonRFTo5pasRxTHdnSKXdbssNTjEzEuJi3+h2u2iaRqfTIc9zsixF0zTy\nPKdWq9Hvqz1f+Ordzq3kdMY1XMvGMSxGYUDJqRBEEePxmKIoCIKA6VRWNhSzISEu9oU4jhgOh5im\nyWAwYDqdomnavacia7Uag4HNyor/WOtaqOVkWYUcG991GAYTXNMhzTImkwmDwQDTNOn1ZM64mA0J\ncbEvjEYDlFIEQUAcx4zHY1y3RBzH6LpOrVaj2zU4cWLv10x5O6VgqW4zDl0WyjXCJEFXiiTPUUrR\n7XbRdZ0gmMgNTjETEuJi5vI8ZzTqUSqVaLVahGFIGIaUSj7dbhfXdfF9n17P5OTJxzcz5a7lhs1w\n6uK7ProGoGNqGv1+nziOd8JbMRwOHnttQkiIi5kbj8fkeU4cx4xGI9I0JcsyXNdmPB4zPz+PYRh0\nOiVOn35GPLJgAAAgAElEQVR8M1PuOj5v0B+7lL0SCo0oC3Ech8Fg+38PW1tbeJ5Hv9+VG5zisZMQ\nFzPX6XRwXY9er4eu6wyHQ4qiQNdN4jim0Whgmg7NZpkzZ/Z+zZR3O7cMW0OXkuNScl2COMJ3HJIk\nufdw0vbQfUEQTB57feKDTUJczFQcxwRBgG27tNttHMeh0+ngOA5RtD07ZX5+nk5Ho16vsId7I7+v\ncysJneH2oluNUoXxZIKp1M4w0Ahd1xkMurhuSW5wisdOQlzM1GDQw/M8wnDKcDgkCAImkwmWZRFF\nIaVSiXK5zMaGzunTj3d64V1nl0Nawzlc26buV8hJ0A2Doihot9s7Dydt4boeQTAmSZKZ1Ck+mCTE\nxczkeU6/36VarbK1dYdKpUK73UYphW3bDIc95ubmdsbDPc6ccWZS58nFKVv9ORQG9UoVpSBOEizL\notfr7UwxbJOmCY6zPTYuxOMiIS5mZjweo5RC13X6/RalUolOp3NvrZTpNGBpaWlnoSmfs2fNmdTp\nmDlzVYfOyMGzHXzHZ5okmKZJnudsbm5SLpdZXb1Jrdag35ddf8TjIyEuZqbValGrzdHpdAjDENM0\n6XQ6957SVEoxPz+P4zisrbk88YQxs1pPzBf0JzV0pWhU5hgGAZq2/dfnzp07VCoN2
u0mprn9D81k\nIntwisdDQlzMRBiGTKdTyuUKt27dYm5uiRs3buw8bp8RRRG+X6ZSqaCU4vbtKmfOzK7eU4spndEC\nmqbRKNdIswwApRT9fh/XdZlMxgwGPRqNeXq91uyKFR8oEuJiJrrdFtVqlSgKabfblMt11tfXcRyH\nPM+ZTqcsLR3ZmS/us7npc+LE7Oo9sZCy2ath2yZVt4yl6xRFQZIkBEFAu92m0Vjg1q3rVKt1omhK\nGIazK1h8YEiIi8cuyzIGgz6Li4usr9/Z2b2nuDeskmUZWZYxN7eEpmkMhxVWVkwewwb37+vUUshG\nbw5LWRi6yWKtRpIkTKdTXNflxo3LrKwcpdVqEkUR5XKNTmdrdgWLDwwJcfHYdTodPK+Epmmsrt7g\n6NGjNJvrxHHMdDpF13V836dSqaHrOltbFc6fn2GCAycWpqy2GniOi6ZrHKnXyfOcPM/RNI1ut3Pv\nJu3Gxh3q9UWGwwHZzrCLEHtFQlw8VkVR0Gq1aDQW6HQ6RFHI3Nwcd+5coVwu0+/3sSyLRqOB4zh4\nnsft2xZPPTWbmSl3rTRCWsMyrlmGPKdaKuF53r2lc4NgzNbWFktLK9y+fR1d1/G80r3NnoXYKxLi\n4rEaDAZomkapVOLGjRtUqzWm0ymdTvMdwxPz8/Pc3RTi8mWXj31stnUbWsHZIwnNwQKapuO7LuXy\n9o3Xra0tXNfnxo3LzM0tEoZT+v02c3OLtNttmW4o9tTs5myJD6RWq8Xi4iKt1oh2u83i4hlu375F\nUcDa2hr1ep1yuYx35QpJuslCdoxLf3SGn/7MF+B3o+2T9Gyo77yee3y1P3NqytXNOsfqPoY+pnz7\nNgQBb1y7hlmUWbt2BXILv91k/bVNnnrmNHl4g8GlS9R+5EceX6HiA0V64uKxCYKAKIqo1Wqsr6/u\njH1XuHz5MpXKHDdu3ODUqVMAVFwX2zQxjAq3tmo8fXz2864/enLIlbUGNb9KnmXUfR/bsmh4Hqu9\nFkmSsd5rs1it0R8PGU+nLNZqNPv9WZcuDrH7DnGl1C8qpZpKqUt7WZA4vLa2NllYWGA8HtPrdZif\nn6ff77G1tUW/30LTNCqVCvV6HV3TqPsV3rjj8/SJCMuc/ZDER06NeONOnYpXxjBNfNelAM4dO8bN\nzTuUbItr66tUvTJKQbPfoey6pFnGaDT7f4TE4bSbnvg/Bf7MXhUiDrfpdMp0OtnZK7PPZDLhyJEj\nXL/+JgDXr3+ds2fPAttbsaVAw6/w+mqDT5zdH7vJf/j4iKvrDQoMGr4PSlF2HOYrFVAandGY9f4W\nwzCg6tUYBCOGQcBSvU6z2Zx1+eKQuu8QL4riD4DeHtYiDrFWq0mjMc94PKbb7VIul8nznGvX3iSK\nIqbTCU888QS2baOUouo4lByPr99e4DvO7o8dczw74/hCzmrbY37nSdJGuQxKsdKYZ6PfYjwNubax\nyly5xigImIQhnmURhiFBIJspi0dPxsTFnouiiPF4SKnkE8cx3W6X+fkj3Lx5k9XV24RhiG1vTyes\n78y/PjY/T14UXLq5xMfP7I8QB/jIiSE3mw0qrkvN80iShLLjcHxuheFkjKZpvHrrLQB812MYBIym\nUxYWFtja2pxx9eIweuSzU5577rl7ry9cuMCFCxce9SXEAdNsNqnV6kynAboeoOs6tm3xm7/5+Z21\nwm+ytHSCer1OmqZUKhX8LGMcmVxeq/HMqf0xnALw0ZMj3rgzRw6cXFxktd3GMQyOzs/zwpUcU+ls\n9TtcXn+LT55bYWuwzlKthmnbBEGT6XQKzGBnC7GvXLx4kYsXLz6Sc+1piAuRJMnOAzzLmKZJs3mN\ner3OF7/4Aq1Wi0rlCG+88RU+9ak/SVEUpGnKqVOnyLtdbm4t8uTRMa69f/at/PiZIf/qD86jKUXN\n86iXSvTGY3ynyqnFZa5trHL++Clevv4qz5xoY
Nk6kzDECUMqlRrNZhPLOjXrZogZe3cH9/nnn3/g\nc+12OEXtfAhxX9bX13Fdl6LImU63d+0ZDAZcvvwac3NzXLnyBsePH6deb+wsduVy4sQJsqLghctH\n+FPP7q/VAD9+ps9q2yNJy4RpytmVFYqiIE4Tnj52hnE0Jc0yNODSrbdQSrHa6VAqlTBNk+FwuNMb\nF+LR2M0Uw/8T+CPgvFLqtlLqx/auLHEYhGFIr9fDdV18v8L161fIsozV1VXiOCEMQyaT4c6slO0f\nxZMnTwJgGgZ/+MYyP/Ds/nps3TIKPnF2wItXjzONY04tLOBYFrquUSmXWKzV2Rx00NHY7LcJplN6\noxGdTufeuH+zuT7rZohDZDezU/5qURQrRVHYRVGcKIrin+5lYeLgW1tbwzAMyuUyt25dYzDoURQF\nN2/eJM9TsixDKZ3Tp09jGAamaXL+/Hl6vR6GVuPrq2W+9+n9t/Hwp873+fyrR7ENA5TizJEjRElM\n2fVZrNaIw4icgkk4ZbPXw9B1Ll26RFGA7/sMhz2ZNy4eGZmdIvbEeLy9IFS9Xmc4HLK6ehOldDY3\nNwmCAF03aTabLCwscfz4ccJwwuLiIrVajTiOeemt43zi7GBfjYff9ekne3zuawvUfJ/eeMyZI0fw\nHJckS/noySeJshgoUChuNptEaUqaprz11huYponjeKyv35I1VcQjISEuHrmiKLh16xqGYaCUYm1t\njTAMSdOIW7fuhlfOaDTimWc+ThRFGIbF+fPn6ff7VCoV/t0ri3zfh/dfLxzg1GJAliu2+gtMwpD5\nSoW5UoU4TZmv1ViZX2Kju4WpmUzjmGa/T1EUbG1t0u/38bwSnU6bblc2VBYPT0JcPHLNZpO1tdsc\nPXqU0WjEW2+9hePYNJvbU+y2x4U3mJub4+zZD+1sxVZlaWmJ4XBIozHP//uVJS58dH+Nh9+lFPzZ\nT2zxWy8dpVIq0R2POX3kGIauoSudjxw/S5xHFGTohsGNzU3SNEWpgldffRXP8/H9Cq+88oqsNy4e\nmoS4eKRGoxEvvvgiZ8+eZzqd8uUvf5mVlRW63S43b16hVqsRRRH9fo/v/d7vxTCMnWmFTxIEAZZl\n8cUvutRLCU8fH8+6Oe/rR7//Dv/k352gXvKJkoSy47Hg12gNenzs5DnmvQY3W5vM+T4KeOONN3Ac\njyzL+OpXv8Tx46fJsoyXXnpJhlXEQ5EQF49EURT0+32+8pWvsLy8TJalvPzyyxw7dox2u80bb7zC\n4uIKuq5z9epVTp9+gmeeeYbNzTVWVlawLIc0TfF9n1/6JZ0f/4Hbs27St/Q9T3fJcsXXri9i6jpR\nlrA8t4Br2XQmQz7z4U9DUXB1Y4NTO//DeOWVL3PkyBHCMOTSpa9w7tw5ut0ur7/+OnEcz7pJ4oCS\nEBcPLcsyut02t2/fxjAMptMpa2urVCoVwjDkxRdfpFTycV2X69evU61W+fjHP02r1cL3y9RqNQzD\n2Nkx3uHf/lv4axfWZt2sb0kp+PE/dZtf/vxZdF3HtRxMw2C+Wmcabm9s8ZGTZ4jjmBubm5w9e5Z+\nv88XvvAFFhePEMcx165do1Kp0G63WVu7zXi8f//nIfYvCXHxUMIwZGtri/F4TL/fp9PpYBjGvSGC\nL3zhC5imycLCEuvrq3iex9mzZ7FtG8Mw8LwS5XKZWm2eOI75lV9x+Oxnoe4nM27Zt/ejf+IO/88X\njxCEHppSzJVr2KZJySlR5AVLtXmW63WmUcRoNGJhYYlWq8WlSy/dW+jr7oJgQRDQ6/XodjsyTi52\nRUJcPJCiKBgMBvT7ffI8586d29y8eZPl5WUmkwn9fo8XXngBpRTLy8sMBn00TadWq1EqlZibWyCK\nIur1OU6cOEGeZ/R6in/4D01++qdn3br7c6Qe8de+b41/8OsfIUpiPNvh+PwSpqmTptl2qNs2R2o1\nWq0Wnuexu
LjI6uptbt58i06nQ7VaxbIsLl++xGg0IstSWq0WYRjOunnigJAQF7uWpttBk2UZaZpy\n+fJlVlev8eSTT9Lr9XjzzTdpNjfI85xyuUwYhkRRRLVaJ4oiPvrRj7K5uc7Ro0c5ffocmqYRhgE/\n//MeP/zD8OEPz7qF9++5v3qFX/2j01xZd0jSlGqpzNnl42iaQtc1Ti0sMApDjhw5Qq/Xw7Zt6vU6\ng0GP9fV1Xn31VY4ePYrrlnjppZfY2trEtm0GgwHD4UBueopvS0Jc7Mp0OqHdblMqlej3+1y6dIk7\nd+5w6tRZbt++zUsvbQ8VeJ5PkiQ4jkNRFJTLFba2NvjYxz7GcDjk1KnTnDlzBsdxyPOcr30t5Nd+\nzeMh1gGaiflKzH/7w1f5H371GYbT7ZuT8+Uap5dOkRewXK9TK5fpdrt4noumaRRFQZLEzM3NMRqN\n+NznPke1WqdSqXD58td58803MU2TPM9ptVokyf4fWhKzIyEu7kue53S7XcIwoFqt8tZbb/Hqq68y\nnU6p1+u89dabXL58mdOnT9NoNLh27Q0WFhYwDIPxeEyzuc7580/RaDQ4evQoy8vHqVQqAFy+POX5\n521+6Zc05udn3NAH8Pf+/A2qns1z//IEeV6gaRqNco0nlo9hWRbfcXb75men02YymTA3t3jvxubT\nTz+N67q8+OIf3JtDv7GxwYsvvkgYTimXy3Q6HSaTyaybKfYp2e1efFvb87r7uK6LaZq8+OKLdLtd\ndF0nCAJu/tZv0R0UfPKJRYxXX+X/u3SJhjVP9LU+3cGAkm3zhL3ESrND47WII/U6abaJupVy840a\nf/Pnj/I3PjPlzxd/AL8x69bunq7D//53vs4P/fdnef5f2vzMj6xhmzZlt0JDm2ej3ebZ8ZhXWh2C\n1nX6I42V+Qa9G3d4+do1vvvJJwm2Otz8zctkT303tl1B13VeeeUruO45zpw5w2g0IgxD8ryO9L3E\n28lPg3hfRVEwHA7vPQo/Go14+eUvMBgMyLKMO3fusLGxQZHnfPjoaXRd5/Nf/zquaTJNEoIw5NTi\nIstzc9RLFeb8GkvVKvOVCprS+TcvLPFD/91H+I9/4A4/+v378+nM++W7Gf/XT73FV29q/M2ff5b2\n0KLkuCxWqxyZm2O+WmW+WufUwgL1SoX1TodyqcSdVosXrl7l1MIyxxcWGI+3N47udDpAzuuvv85L\nL72E53mYpkmn0yKKolk3V+wj0hMX7ylNU9rtHrquU61WefPNN7ly5QpJYjEcjun1evR6ParVKueO\nH+f65piv3XyTcRhilUrM+xWOL1rYpgl5TsWb40hjjqW6y9euN/ipX/oQax2H//nH/pC/fiHgtVtz\ns27yQ1uq5vyTv/cG/9u/fYL/8H/6BP/JD97mp/7KNZbranv1xq0MXaU0fIe65xNEmziGwZXVVfpD\njc98+Cgnj56j3b5MlmVkWUKpVGd1dZVer8cnPvEJyuXavf8VlctllJLl/T/oJMTFN5lMJnS7I554\nYntPzIsXL9Lv99E0jcGgjeNM6XQ6zM3Nsby8zNa1a7x87RZJ2ufU4iIfOnaMXmBTdnL6kwlnl5aw\nnVN87mtn+fu/sMLNLY+f+NO3+Lt/7hKbPQ3LtGfd5Eem7rv87R98nR94dsKv/P4Jzv2n389f/FST\nn/gzlzm3bLPaGqDrYBgWp5ZOsNHtcvnOHd5cu4VrT5lfeYbz589z9epVxuOEbrfAdbdviH7+859n\nbu5DfO/3Psl0OqXdblOv15G/xh9s8qcv7tl+8rKHZYHvV7hx4xqvv/46eZ6TpinT6ZTJZMza2hbL\ny8vMz8+zubnJV157jXGg+GNPnuLT586x0e8TpxmbvYDW8GP86xd/gC+8dow//pEO/+VfuM4PfbLJ\nWkeRZCEVbwFIZ930R0bXdEq2S83b4ud+fIhtwS9+7jh//R9+N4ae8pc+/QS
O+bvUSgH9ScS55WVO\nLC3xGy+8zmu3b9O49ALf8z0f5/jx41y6dJNms8n8/PaDUKVSiTfffI0kWdvplZdpt9sEQRkozbrp\nYkYkxAUA02lAqzXEtn3CsM8LL3wFTesB22uDZ1nG1tYWrdaQ7/zOY8zNzfH6669z/fp1Fj2P7zjz\nNM+ervLSjVvc3Jzji5eP8fXb38mnnzzB3/gTm/yDH7tBxUtZmYvIsozBJGK54dMZ6hymEAcoOS6a\nihhNA47OG/zXf+UaP/WXr/FvXlzic189xi/8zn9O1f/3/PEnr3H26C2eWHb57Hd+D1fWvsbLvR6/\n93u/xzPPPMOJE2dot6+wurrKwsICAGmqMRqN+P3f/31OnjzJU089Rbs9pd2e3lu+QHywyJ/4B1yW\nZfT7fYIgp163ePPNrxPHdxgMMlw3JAgCptMpGxsbmKbJyspJiiLlhRdeIM9znn32WY43m/zhGwb/\n8vcHXLr5g5xYKPEnn0n4v/+bLscXtgBY79j3rtcZjfDsCrZ5eO+rl12fJG0xDGIqnodS8Mee6PMX\nP9Xkp/6Kw++8NM+vf9HmX/3qpzm5+DJnVlb5Dz61zCfPPkW7fZVXX30V113k6aeP0+/3WVtbo9Pp\n4DjL9Hoxvu9z69Ytms0m9fpTHD++SLvdplwuUypJr/yDREL8g+g3tufxBVHEMAgwdJ32jQm3wtts\n9BQVd0prfYz10ZM7AR9Qq9Wo1Wq88UaLotiev5xli/z2b6e8/KU6hnaUH/6eeX7mR1qcWtzENU9y\nfOGdPew4TWgPh5QchzRzgcM7y0LTNOp+hShp0x+Pqb4tWA2t4LN/LOEvf/eIq2s3+MrV0/zzf3+O\nv/N/rHJi4Sqf+VDI2SMJa7eucGP1deZrNZbynN54zPXea2RLHp5t0/B9Kq7LVvhl4osW51dWGBsG\noa6Thg0MXYe5CH7oh2b4nRB7TUL8AyjNMvqTCWEcE8YxN5tNbrVT5v2cdr/P7eYW7SEUNws8z+PE\niRNEUUSr1aLXy7h6VfHGGyUmk4Tv//5z/K9/u8W5ZZOaf4cszzm7vExrYHI3pPM8ZxhMCJOII/US\njmUxDGb7PXgcNE1jvlJhMJmw1e8zjevv+PpcpYKmFJ6zyqc/NIeu2fzj3/n/2zv32LiuO79/zn2/\n5s3XcChK1FsyYzt246yzcXaD3W7sYIHsYo2mBYq+gG232P7dLdAC7Z/Nf0Vb9I8Wi8UW26RYFNjG\ndhI0++p2N4lj2YoVyXqRokhRJIcczuvO675mbv+YIS3ZlixLsiTS9wMc3DOcQ94fzz33e8/5nd85\n9wR/fr7Jd/4qx9xUjadmysQ00TSVfCpF1O9TaWyjKgqNVgvbNEnpRVrdiJ8tLTGZzVLM5ai4NVKG\nTRxLJPEr+5tExD9DxHE83DWvWiWMIjYbDaquiyrL1FyXqzdvsFHzccwBHU/wuSMv4Hkei4trLC4K\nzp8fUKk4fPGLs/zu747zta/NMD5eIPfmm1y44VGQZY4Ui0jS0E3ihyE938cLQyDFWCqLoX22lpAL\nIcg6wxdHVJoeWw2PjpfG0DQAcqkUqqLw1tU6x0sa//Rlg9/5usvVNZ0/+r9TnFnI89qZCidm2pyc\ndsmn+pycmaLebrPlurS7XRa9Bu0wz8lSie1mk1q7jcQEYdSnXBtQCAK00fkS9h+JiH9G8H1/9yXF\nbq1GtdVCUxTcbpdzS0vcqHTJ2SBJOuOpFKrQeOONS1y/rnHzZpoTJ0xeffUgR45M8+yzGlNTU5RK\nJaIoYq1axTbGKOY0Op5H2O+z1eygKhGmpjFhWQwG5uOugseKrqrkUw5ZW6ba6lNtNdHVEF1V0RSF\n2bEiXX8dtzvg84cnyKd6GJrL77zcY3Ejw2tv6fzFuQxtv8/xmTXmD/Z59tAk1ZZLo9dnaX2dhbU1\nTpRKPHPoENutGo1uC0PVaS0vUygUyOV
yuw/YhP1DIuL7nDAMKZfL1Go1fN/HdV2iTodWu82ZxUVW\nt7cxNY2MlcXQ+yyuWvzVhT7LFZXZ2RIvvpjhq18dZ2amgCzLVCp55ucPIUkSS0tLeJ7HlGky6Mr0\nggBVljE1jULKZjyzv6JOHgaaqpKxhpO8OaczHK0EAR2/z3jGYH0Qcm55malsliOTM+i6haKUOTwp\n6PxawP87H3F26SDf/UnA62dqHC8qnCwZ5DMS/SDgwsoK762uMls4zKkDh9mod7HLZVqtFltbW0xN\nTZHNZpNFQvuIRMT3KVEUsbm5SXn0kt5er4fv+2xvb/PWO+9wo1IhZRgUUimuVyQurcLqdhbHMHnh\nhOCbL2k8/VunEUIghEDXdWZnZwlDlZWVFcIwpFgsDl+tVqlQrpvkU+9PVMrS/gsdfNioioI6Cgn0\nAp2pnIEfymQti5VKhWvlJY6XdE5MT7PZaHBze5tTBxR+8VRIL5T5yeUUb15x+OMfD0ibAw5P9nnq\ngEPKjPn5ygIXVq/x4vEDPPX88wRBQK/Xo91uY1kWpVKJbDb7mGsg4WGQiPg+IwxDtra2uHnz5u4i\nnWazyebmJmfPnmV9fZ1Uq8UgzPHj5YjViowiT3BqRuPrf6tNLhVxdNKm3lFptVrMzc1x9OhRLMui\n0WjQ63WYnZ1lfHwcWZaHJ02G6A8FSZLQVY1cOk0uncYxB0CZrWaTbCpFKZ/n3HKXRnsJLwp56SmL\n54/6XF3rsLpl8bOVAn/013XyVo0DE2nmxiPeXVri3Le/zalTpzh9+jS5XG53Q7NUKoWqzlIsZpKe\n+R4mEfHHwesfsVVfVR+Gg+0cP/jzu1HV6X3zJTY2NiiXy4RhyGAwoFqtsrKywpUrV6hUtnFdwY0b\nEkuLAxTg5OwYf/+rMvmUS79vcupAjkGcJWX3yacm+IKmEa+v01hYwBeCsVSKI5slps6sPNTqSPho\nLN1kujBNz/PYcl0qrRaO6fDs3Odwez0W1tdRVZVDEw4HxmvMTVfoI3N1eZo3FzucW26SM3WOhAO6\n3XNcvHiR2dlZTpw4wczMDFEUUa026XYtDh48SC6Xe//BfDdef/32dnm3NpqEN37qJCK+hxkMBrS6\nXa6ulVHesQiCgDAM2dzcZGlpiWvXrrGyUuXGjZDV1ZDBIOLgwRyv/kKB0wcN/DDAsSxmxg6iy1NM\nFwZsuzpHp1W2mzq1Toe0ZXFofBzbHE5Mrm7fw02e8FAxDYODhkG/3+fyaoTb28YLAuampugFAQoh\nU4UC1WaTtVoNS64xN63jBROcW25zebHO2297ZDIa09MNLl5c4NChEocPHyaVOkQmk8V13V03y9jY\nGIZhPO5/O+EeSUR8D+IFAbVWi3K9TtV12WzIqGtpbt68ybVr13j33WWWl9tUKiFRZDAzo/KrvzrD\nc88dxLIMovfew9BUjk0XmR0fJwZuVgdE/T5pyyJra+hqnmPTUjLMfoKQZZlcymK6oBNGEc1Oh2a3\ni2kGtL0qtmnyzKFDHJ+e5q3FLepui6993uTXP/cCN25s8fbba1y7VuXSpR62vUWpdJnZ2TGef36O\nubk5pqamqNVq2LbN+Pg4xWIRx3GSpfxPOMnV2SNE/f4wNrjRYLNep9puDxfgNJucW6lz5c0zLCzU\nqFZDJEmhWHR46aUMc3MpJicnkCQJSZLQNI3jMzNkHAdNlumGIWnT5ESpxFOzOtuuQ2ksYL2qI8T+\nXVG511EVZbRHeQZNUcja45TrddaqVaLBgCNTB+jmPPygzJrvMzmZ4ZVXUnS7XdbWKiwsuFy/3uLq\n1RV+9KMVpqff4dSpCZ599jDT09Nks1kWFhYoFApMT08ThmPEsZM81J9AEhF/gvF8n42qS7m+xVq1\nStvzaHW7dHyfqxstzlxxubbRotELUVWZ2dkUTz89RrFoYRgGuq6jqipBEDA5OUmxWGRubo702bMY\nqoq
uqhTSaWzDYL2qo2t+cpPuQWRJJmVZpCyLw1NTNDodrtwMifp9MnaW8smTLC8vUy6XASgWC+Tz\nDvPzHSoVn3K5y82bLm+8cZ3vfW+ZmZkc8/MzvPDCDBMTea5cuUIYOqytDXvn480m/f7e3/99v5CI\n+BPAYDAgiEI6nsd2s4vbq93iKhHoSovtVpe3FwLOLtVY2ezQp0taCxjPSJwoSkzOF3f+GgCGYTA9\nPc3Jkyc5duwYpVIJx3Hodrv4165h6zqOaSaivc+QZZlCOk2pINP2uoylVY5++ct85StfoVarsbS0\nxIULF1hZWSEIAmTZ48ABwaFDeYIgpFYLuHGjzp/92RY/+MF5crksR4+WmJ+fJQy3WFtbQz13Dj9w\nODnrMJFO4wUFcs4ARZZRZDlpU4+YRMQfIf1+nyiKiDyPqN8n7PcJwpC253G9PGBxo8rKVp+s1WOz\n3uHd5YifXu6ysrVN03fRaJFNhRwt9hnPSChCgCQRRgqapjE9Pc3p06c5duwYx48fZ3JyElVV8X2f\nTskzkAwAAA0OSURBVKdDvV7HcRwymUyycm+foyoKOSdNPiXT8n2CICCVSvGlL32Jl156Cdd1WVlZ\n4ac/vcrW1hUWFxcpl8tYVp8TJ3SOHJHp9SIqlTJXr65z5sxZ/uAP8szMjPH0uMnJmS5C1FkzTdzu\nOtstnaxtY5smtq7vCrra66EoCoqiJOL+KZGI+MNkbY3YG+6XHe0Idr9PGA6HtkIIZEkirFRoCUGj\n3abaahFEEe9e91ne7HBmscNapUbTa6HQxFQ6ZOyIYqGPo6vomoYsyViGwWQmw+GpKaZyR/jCb/8W\npVKJVCqFYRgMBgN6vR71eh0hBLZtk8vlhjdSIuCfGVRFIZ/PE0URnU6HSqWCpmlYlsX8/DzZ7Gly\nueFbgtbX1zl//jxXrlxheXmZRqOBpgkmJyO63T6+v4HrrvGX5wO+/7MckKKYz3K4UOALJ1SeO2aR\ns3RMXSdv2+RSKaxWCyEEURQhSRKqqu6KuizLu8eE+ycR8fsgjmOiKHq/Zz3KV945gzCbyJKELEmo\nioIUxwx8n2a3S9V1KdfrbC2vcclNcf6mx/V1j9VqlQEuGi662sXWPcYmYkxNBXQyjknaMMg4DhOZ\nDKcPHODkzAxT+TyFVIpmJ83cM88QxzGe51GtVomiCMMwyGazyeZHn3Vefx0FyADpOKYXBLR9n0a/\nT7vtMDEFBxWFUr/PfDZLa36eyuwsy1tbvLe6ysrGBqthlxCftuMTmH06XhkvWKXrq5y9bvCTBZsB\nGbJmhrnJNEemDE7N6hy5eJHJTIaJTIaC46CYJkJVCQYDosEwIiqOY7Y//wqGoSQCfx8kIv4RDAaD\n0YtqPzoNBoPdRqYoCqqqomkahqYTxzFbjQbbrstGtcZCucuV1YDrlYj1Wo9yvYcf1ZFpoqtdNNVj\nLBWgq2AbGpLQyaeyZGybnGVhaAVOzeY4NDlJqVBgMpMh4zgYo42T+oMBtdZwYU+/30fXdRzHQdf1\nZPia8CGEEFi6jqXr9Pt9Wl1Bq9cm6vfRFAVD05jM55nI5ThSLPKlU6fYbjZ5b7VDp7fG9c1NNhoN\n1rY7+KGL2+nQ6HoosUsnXKMbwOK6zuUbNv/rTRsokDVSTOcNDk0pHJnSOD6jc6CQZjKXI59OU7Bt\nDMNC14cvlA6CYLdzFMcxsix/KO0IfCLyn1DEhRAvA/8BkIDfj+P4W5+KVZ8iOyJ8qyDfeoyi4X4f\nOw1kx3cshNhtOFEU0Wg0aDab1Ot11ta2uXhxk/f+8iLVRputVkit0aHpe8DQl60oPVQ1xNYCxo0I\n3RxGjli6Ts7KoaoZigUTS81xeHrYwGdyOeJ4nFOzBpauo8oyQpLww5CO79PodFBkmRibTCaDqqqJ\ncCfcM7IsYxs6Y2mJwWCAF4b4YUirO9zsXVNVbMMgY1loqspYeha32
2W71eK9Gy38YJOtep1rZZee\nX6fV61Fvt2l6HkHoEoRVfH+JXiRzc1vjWlnj+wMbSCPjkHMsJrIWxZzGxPwyTz89xokTk7s7Lmaz\nWWzb3hXqnRHwzorknftWkqTb7tcP5nfCa/cr9yziQggJ+M/ArwDrwBkhxHfjOL78aRl3LwwGg7um\nDwo1cJvQ7eR3NnqSZZkwDGm1WriuS6PRpFxusbi4zfXr26yublOp1KnXW7hui1arC7SADhIeqhSi\nSBGKHDDuxGi6iibL6LKMpulk7Dx5SSKVz5NPpZjM5Sik08giz8kZG+IJjk7L6KrKjy5d4mhxDEXu\n0fV9BnGMqijoikLKNNFGk0VhpCcuk3vgx5fP8aWTzzxuM55IJEna7aHDcF1CEEUEYUgnimj1FDJ2\nn5RlkbFtLiy7/OaLL9ILAi7d8JBEhU3Xpea6VBoNtppNmp0OjVFyPY8w9PGjDn60jucP8EKJ5Q2F\nxZsawfkf8Z3vGICFqjqk0w6ZTJqJCYdSaYJSqcDc3ASHDmUYG0uTTqdJpVK7I86de3yHOI6BoT7E\ncfwhQf9g+uB3e6kz9El64i8AC3EcrwAIIf4n8A3gvkU8jmPiON6t6LsddzZz+qCrY2TLbcdb8zvn\nCIIAz/PodnvUam02NzusrtbY2mpSLjfY2qrv9q7b7Tbtdosg6DF8O40PhKPUR5JiFEWgKAPSaVDV\n4YSN5EVYpooh66SsHFnbJmfbZByHQio1HDo6DhnPIzM7S95xcAwDXVWpuCbFXMBGXSPGJxoM+OuL\nFzk+/Ty2YaAmQ8cH5ieXf56I+D2yE12yI+phpJG1u4T9PmEU8ebVC3ztuUPIksRENs9UzuapOCaI\nIrwwHAp4q4Xb61FttXZTrd0eumDabRqdDt1eDz/u0xUecdwmiiJ8v0+3K6jXBUtLAPIoqYAG6EiS\nhWUZ2HaKTCZLNpsml8tQKGSYns4zNpbhwIEcxaJFJjN0L5qmuSvY8L427HDr550O3a1p53eFELtC\nf6vo7xxvTY/kWn2CsiVg9ZbPNxkK+22Uy+XbhPeDQrzTI96ZDAzDiMEAfL9PEPRpt31836fbDen1\nPNptf/giA3cowJ1OiOf5tFodul2PXq+H57Xpdn2CoIfnden1uvT7Pp63I8DRKMXsCPEwz+g4uOX4\n/gXRdQlVlVFVBUmSUBQFTbMwTRPHcTBNk3w+TzqdxnEcpPUGB3I6k6kUzqjH4ujD2XpT01BGvWi9\nWkUulZAlCWkUseKFJjlHI4wsivnh229MTcMxLQwtWTmZ8HgRQry/da6uY2o6k7kccRwTRipZp0e0\nM+odDBhLp+lPThJG0W6P3o8iekFA1/No93q0ul2qnQ7VZpMbdpHBoEGn08F1XZrNJq1Wa3R/eyM3\nSnvk8uwTRQO63QHtdszm5gB2X0InGHp7d9wnOx0flaHcSYCGLBtomo6q6liWja5b2LaBYVhYlo1p\nGpimiWVpmKaBbdvYto7jmNi2huOYOI6CYejouoJp6iO9UFAUeTekcscFe6cRgBCCqampB7o2D31i\ns1gsfnyhR4qCLEujyRBpJMjDuOqdSUnTNDEMA8uySKfT2LZNKpUinU7vplQqxdjYGI7jYBjGbck0\nTTRNo/Ln73Ew5+9eNMFwmCqNnsq7wzQhIJ2+zUq3q6Op7GvfXcL+QwiBIg/df7qq3rHcbmdulHbz\now7d8ulfYWwsvGXE3N3Nt9vtkWuzQbvdptls4rouruvSarVGWyT3bvu9MAx3/ef9/oAg8IjjAVH0\nvnu114NeD1z3EVbYRzA/H398obsgbh1O3LWgEL8A/Ls4jl8eff5XQHzr5KYQ4sGsSUhISPiMEsfx\nfflfPomIy8AVhhObG8BbwN+L4/jS/Zw4ISEhIeHBuWd3ShzHfSHEvwB+yPshhomAJyQkJDxG7rkn\nnpCQkJDw5PFAs2hCiFeFEBeEE
H0hxHN3KfeyEOKyEOKqEOL3HuSc+xkhRE4I8UMhxBUhxP8RQmTu\nUG5ZCHFOCPEzIcRbj9rOJ5l7aWtCiP8ohFgQQrwrhHj2Udu4l/i4+hRC/JIQoiGEODtK/+Zx2LkX\nEEL8vhBiUwjx87uU+eRtcyc28n4ScAI4BvwF8NwdykjAInCQYZzPu8DJBznvfk3At4B/Ocr/HvDv\n71BuCcg9bnuftHQvbQ14BfjeKP9F4M3HbfeTmu6xPn8JeO1x27oXEvBl4Fng53f4/r7a5gP1xOM4\nvhLH8QLvB2l+FLuLhOI4DoGdRUIJH+YbwB+O8n8I/MYdyu0Ewybczr20tW8A/x0gjuOfAhkhxOSj\nNXPPcK/37t5Z3vgYieP4b4D6XYrcV9t8FELwUYuESo/gvHuRiTiONwHiOC4DE3coFwN/KoQ4I4T4\n7Udm3ZPPvbS1D5ZZ+4gyCUPu9d59cTT8/54Q4vSjMW1fcl9t82OjU4QQfwrc+jQQDEXkX8dx/Pon\nNPIzz13q86N8iXeadf7FOI43hBDjDMX80ugpn5DwqHkHmI3juCuEeAX438Dxx2zTZ4qPFfE4jv/2\nA55jDZi95fPM6GefSe5Wn6NJj8k4jjeFEFPA1h3+xsboWBFC/AnDYW8i4vfW1taAAx9TJmHIx9Zn\nHMftW/I/EEL8FyFEPo7j2iOycT9xX23zYbpT7uQXOwMcFUIcFEJowN8FXnuI591PvAb8o1H+HwLf\n/WABIYQlhHBGeRv4NeDCozLwCede2tprwD+A3VXIjR0XVsKH+Nj6vNVnK4R4gWHYciLgd0ZwZ628\nr7b5QHunCCF+A/hPwBjwhhDi3TiOXxFCFIH/Fsfxr8fJIqFPwreAPxZC/BNgBfg7ALfWJ0NXzJ+M\ntjhQgP8Rx/EPH5fBTxJ3amtCiH82/Dr+r3Ecf18I8XUhxCLQAf7x47T5SeZe6hN4VQjxzxnuLNcD\nvvn4LH6yEUJ8G/hloCCEuAH8W4bbMj5Q20wW+yQkJCTsYZIwtYSEhIQ9TCLiCQkJCXuYRMQTEhIS\n9jCJiCckJCTsYRIRT0hISNjDJCKekJCQsIdJRDwhISFhD5OIeEJCQsIe5v8DVV0GPE2ZUwsAAAAA\nSUVORK5CYII=\n", 221 | "text/plain": [ 222 | "" 223 | ] 224 | }, 225 | "metadata": {}, 226 | "output_type": "display_data" 227 | } 228 | ], 229 | "source": [ 230 | "# Plot with samples\n", 231 | "f = plt.figure(figsize=(6, 4))\n", 232 | "ax = f.add_subplot(111)\n", 233 | "model.plot(axis=ax)\n", 234 | "for x in data:\n", 235 | " ax.axvline(x, alpha=0.1)\n", 236 | "for phi, nphi in zip(phis, nphis):\n", 237 | " plot_sample(phi, nphi, axis=ax, alpha=0.1, c='k')\n", 238 | "ax.set_xlim(-1,1)\n", 239 | "ax.hist(data, 20, alpha=0.3, color='r', normed=True)\n", 240 | "f.show()" 241 | ] 242 | }, 243 | { 244 | "cell_type": "code", 245 | "execution_count": 52, 246 | "metadata": { 247 | "collapsed": false 248 | }, 249 | "outputs": [], 250 | "source": [ 251 | "# Try a large-ish data set.\n", 252 | "# For WL, we can concatenate the e1 and e2 samples 
together after deshearing. Then we have 20000 e samples per\n", 253 | "# GREAT3 field.\n", 254 | "data = model.sample(size=20000)\n", 255 | "mu = 0.0\n", 256 | "alpha = 1.0\n", 257 | "beta = 100.0\n", 258 | "dp_alpha = 0.1\n", 259 | "cp = dpmm.InvGamma(alpha, beta, mu)\n", 260 | "dp = dpmm.DPMM(cp, dp_alpha, data)" 261 | ] 262 | }, 263 | { 264 | "cell_type": "code", 265 | "execution_count": 53, 266 | "metadata": { 267 | "collapsed": false 268 | }, 269 | "outputs": [ 270 | { 271 | "name": "stdout", 272 | "output_type": "stream", 273 | "text": [ 274 | " " 275 | ] 276 | } 277 | ], 278 | "source": [ 279 | "prun dp.update(100) # about 35 sec" 280 | ] 281 | }, 282 | { 283 | "cell_type": "code", 284 | "execution_count": 54, 285 | "metadata": { 286 | "collapsed": true 287 | }, 288 | "outputs": [], 289 | "source": [ 290 | "# a few minutes to generate 500 samples and store every 10th one (i.e., store 50).\n", 291 | "phis = []\n", 292 | "nphis = []\n", 293 | "# Sample\n", 294 | "for i in xrange(50):\n", 295 | " dp.update(10)\n", 296 | " phis.append(list(dp.phi)) # Need list() to get a copy\n", 297 | " nphis.append(list(dp.nphi))" 298 | ] 299 | }, 300 | { 301 | "cell_type": "code", 302 | "execution_count": 55, 303 | "metadata": { 304 | "collapsed": false 305 | }, 306 | "outputs": [ 307 | { 308 | "data": { 309 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXEAAAEACAYAAABF+UbAAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmcVNWd9/HP6X1fWJtFVnEBVFBEBQ2tRgMqokYNib58\nNI6ZZDLGoHk0GRMh8yROzDLRjOOWmMQ8CS5xxNAGBAXbVsOOsosge8vW0Au9d1ed+aO62KShuuve\nunWrv+/Xq14WVbfO+aVSfLl16txzjLUWERHxpySvCxARkc5TiIuI+JhCXETExxTiIiI+phAXEfEx\nhbiIiI+ldORgY8w2oBoIAi3W2rFuFCUiIpHpUIgTCu9ia22lG8WIiEjHdHQ4xXTiNSIi4pKOBrIF\n3jLGLDPG3ONGQSIiErmODqeMt9buNsb0JBTmG6y177tRmIiInFqHQtxau7vtv/uNMbOAscDhEDfG\naCEWEZFOsNaazrwu4uEUY0yWMSan7X42cDWw9gSF6ObQbfr06Z7XkEg3vZ96L+P1Fo2OnIn3Bma1\nnW2nAH+x1s6PqncREYlKxCFurd0KjHKxFhER6SBNF4xjxcXFXpeQUPR+OkfvZfww0Y7HHNOYMdbJ\n9kREugJjDNbtHzZFRCT+KMRFRHxMIS4i4mMKcRERH1OIS5dQX19Pa2ur12WIOE4hLgkvGAxSWVnJ\noUOHvC5FxHEKcUl4dXV1rFixgk8//ZRAIOB1OSKOUohLQrPWsm3bNtauXcuqVauoq6vzuiQRR3V0\nKVoRX2lsbGTNH//IuwvWMbBnOmNqaznvvvu8LkvEMQpxSWgHDhzgvfUbmb9qL0X56Vxy9qeMDARI\nTk72ujQRR2g4RRLahg0beHP5fmA7e6p3s3D1Jpqbm70uS8QxCnFJaMuWrWJbxQ5y0puAg8xbsY29\ne/d6XZaIYxTikrCstbzyyrtAFQO65ZObHmB/bTlbtmz1ujQRxyjEJWEFAgE2bVoNNHPFeecxpE9v\n4BDz5i3zujQRx+iHTUlYtbW11NfvISM5i+vHjSM/O5tV27axcOE/vC5NxDE6E5eEVV5eDjRTVFjA\nxWecweSLLwZg48bVUe9rKBIvFOKSsJYuDe3jfdmZg8jNymLU4MFACocO7aOlpcXb4kQcohCXhFVS\n8i4A1140FoD0tDS6ZeYBDVRWVnpYmYhzFOKSsBYtWgLAJcOHH37srH5FQFAzVCRhKMQlIQWDQfbt\n2wEkU1RQcPjxq0ePAODNNxd7VJmIsxTikpCampoIBqvIzMwlLS3t8OPXXnghAPPnv+tVaSKOUohL\nQgqNebfSv/+gYx4/o39/ANavXxP7okRcoBCXhLRu3WYArrjikmMez83OBjKoqdGl95IYFOKSkObO\nDf2oef311x3zuDGG3Nw8oJ5gMOhBZSLOUohLQlq8OHRV5oVtY+BH69dvEBDUXHFJCApxSUibN28E\noHv37p977vzzzwPg4MGDMa1JxA0KcUlI1dUVgCEp6fMf8WuvvRyA1as/iXFVIs5TiEtCam6uAbJP\n+Nyll44HYM6c92NYkYg7FOKSeEpKgCZyU1La7h+rqKgIgMWLF8W4MBHnKcQl4QQCAQAKC/JO+Hz4\n4p+tW7fErCYRtyjEJeHU1tUBMGrgoBMf0HZ2Xr2//IRn6iJ+ohCXhLNj734AvnTemSc5KpNmGmNT\nkIiLFOKScP6+fBMAE8eMbveYgpR0QLvei/8pxCXhLPhwPQD9evZs95hu3QoBdNWm+F6HQtwYk2SM\nWWmMme1WQSLR2rBnBwDp6entHnPBoCEA1Dc0xKQmEbd09Ez8PmC9G4WIOOVgVdUpj7lmdGijiK2f\n7XG7HBFXRRzixpj+wDXA79wrRyR6DdQDqSc95uoLQ5fev754QwwqEnFPR87Efw38X0DbhEucayLL\nZJ30iKIePQAoXa0vluJvKZEcZIy5Fthrrf3IGFMMmPaOnTFjx
uH7xcXFFBcXR1ehSIdZCvJOfMl9\nWHhNlY37dseiIJFjlJaWUlpa6khbxtpTn1gbYx4FbgdagUwgF3jNWnvHccfZSNoTcYu1lqSkJMad\ncQEf/HL6SY81119PTlI3DgUOxKg6kRMzxmCtbffk+GQiGk6x1v6btXaAtXYIMBVYeHyAi8SD8Brh\nFw0dEsHRydQFNTtF/E3zxCWh7NxZDsD1F4485bHJZGLRxhDibx0OcWvtu9ba690oRiRab7zxDgBj\nh5/skvuQnLQ0QiOEIv6lM3FJKO+88wEAWVknn50CkJ934lUORfxEIS4JZf36jyM+dkDbNEP9GC9+\nphCXhFJRUR7xseOGngGgDZPF1xTiklAaGmqJ9GP91UvPB6C8XHPFxb8U4pJQGhvrifAaNoYPGwTA\nq6/Oda8gEZcpxCXBNJNOZkRHhrdpW7DgAzcLEnGVQlwSTIC8nFPPTDna5s0bXapFxH0KcUk4PQq6\ndej4gwf3u1SJiPsU4pIwwlMFR/Yd2IFXJVFXV+1OQSIxoBCXhNHYGNr4eFLbhg+RSaO5WRsmi38p\nxCVhLF++CoAp486N+DUZJh1ocqkiEfcpxCVhzJz5NwAKC/Ijfk1edjagzZLFvxTikjA+/HAFEFqb\nOVK9Crq7VY5ITCjEJWHs2rWrw68557TBLlQiEjsKcUkY1dVVnGTnwBO65vyzAWhubnahIhH3KcQl\nYYTWTUnv0GumjAvtev/BB8tcqEjEfQpxSRiBQAuQ2qHX5ObmAPDqq3NcqEjEfQpxSSDNZGWdfJf7\n9qxatdrhWkRiQyEuCSRIfn7HLrkP27Vrp8O1iMSGQlwSSvfufTr1uqqqCocrEYkNhbgklDPPPKsT\nr0qnoaHe8VpEYkEhLgkhGAxddXn55Zd24tUpWj9FfEshLglh3boNAEydek2HX5uWlglonrj4k0Jc\nEsIrr8wGoHv3vA6/Ni+vAAg4XJFIbCjEJSEsWrSi068tLOzpYCUisaUQl4Swc2fH100JO/30YQ5W\nIhJbCnFJCAcP7u30a8eMGetgJSKxpRCXhFBXdwhI6dRr7777ZgD27t3nYEUisaEQl4TQ0NBIR9dN\nCRuwagkAf3nghw5WJBIbCnFJEE0kJ2d26pXhTSTeX7/ZyYJEYkIhLv5XUgK0kpuc0na/czbu2+Nc\nTSIxohCXhJGXkxvV6ysOVTlUiUjsKMQlYfTvXhTFqw2Hahscq0UkVhTikjBGDRoYxatTaQxq/RTx\nH4W4+F59fWgFwpvHnN/pNpJIx2r9FPGhiEPcGJNujFlijPnQGLPGGDPdzcJEIvXWktD+mOMvGNrp\nNrJT04GgQxWJxE7EIW6tbQIut9aOBkYBk4wxutRNPPfGh+sASEtL7nQb+dn5TpUjElMdGk6x1oZX\nzk8ndHmcdbwikQ5as2N31G0Ude/ctm4iXutQiBtjkowxHwJ7gLestcvcKUskcp9VHYy6jXP7R/Oj\nqIh3OnomHmwbTukPXGSMGe5OWSKRO1RfF3UbUy4YAUBDg6YZir90asUga22NMeYdYCKw/ujnZsyY\ncfh+cXExxcXFUZQncmp1jU10dt2UsCsvDJ2PLFz4Htdee7UDVYm0r7S0lNLSUkfaMtZGNqxtjOkB\ntFhrq40xmcA84GfW2jlHHWMjbU/EKcakASnY2S93ug1rLUlTpnDPPQ/w3HO/dK44kQgYY7DWms68\ntiNn4n2AF4wxSYSGYV4+OsBFvNNCusmJqoXwIlhr1qw/xZEi8SXiELfWrgE6fzWFiItys6IL8bDy\n8p2OtCMSK7piUxJCYb4z87xrag440o5IrCjEJSEM69HPgVZSqKvT7BTxF4W4+FpraysAXzjLic2O\nU2ltbXKgHZHYUYiLr61duxaA6y46L+q2MskALYIlPqMQF1+bN+9tAM4a0jPqtrKys4BA1O2IxJJC\nXHztvfeWApCcHP1HuWd+Q
dRtiMSaQlx8bcuWrY61NaRnX8faEokVhbj42sGD+x1ra9xZZwNHfiwV\n8QOFuPhadXUN0Pl1xI92w8WhRbDWrFnrSHsisaAQF19ramoG0hxpa2i/QgDefHOhI+2JxIJCXHzN\n2maSo1zBMCwjIwOARYuWO9KeSCwoxMXnWshKzna0xU8+2exoeyJuUoiL7+UV5Dna3sGD+xxtT8RN\nCnHxvdPyeznYWjI1NdHvFCQSKwpx8a1AIHR15Yj+gx1sNY3m5kYH2xNxl0JcfGvr1tCFPledP9Kx\nNlNTM7BWi2CJfyjExbfefbcMgPHD+zjWZmZmDtDiWHsiblOIi2+VlS0GoF9v537YLCiIfiEtkVhS\niItvrV//MXBkf0wn9Ovn5Pi6iPsU4uJb5eW7HW/zggtGAUd+NBWJdwpx8a1Dh6px+iM8ceJVAOzY\nscPRdkXcohAX32poaMSpdVPCxo4NbfP21ltaP0X8QSEuvhUINGJMhqNtdu8e2hgivNmESLxTiIuP\ntZCVleNoi0lJob8S69dr/RTxB4W4+FpubndX2t2zp9yVdkWcphAXX+vv6CX3YUnU1FS60K6I8xTi\n4kvhLdTOPfdcF1pPo6GhwYV2RZynEBdfKi8PDXdcffVVjrdtTCaBQLPj7Yq4QSEuvlRWFlo35dJL\nhzvbcEkJWaQCTVBS4mzbIi5QiIsvLVwYWjelb99Cx9vOyXN2xouImxTi4kvr1jm/bkpYUZ47M15E\n3KAQF/8pKeGzTR8fvu+0EacNAI78eCoSzxTi4ks19fWA82fhAFcMD20yse/AAVfaF3GSQlx8qb65\nCUh3pe3LRw0CYOHata60L+Ikhbj4UoBGkkhxpe1B/bsB8PaK1a60L+Ikhbj4lCUnLd+VlpOSQsM0\nH25xfr1yEadFHOLGmP7GmIXGmHXGmDXGmO+4WZjIqfQqcHcWyV5dei8+0JHvo63A/dbaj4wxOcAK\nY8x8a+3HLtUmckLhWSPD+w1xsZdkKuvqXGxfxBkRn4lba/dYaz9qu18LbAD6uVWYSHvCs0auOGeE\ni72k0WwbXWxfxBmdGhM3xgwCRgFLnCxGJBJvrVoFwFWj3TsTTyeT0JdPkfjW4RBvG0p5Fbiv7Yxc\nJKbe+WgdAMMGOH/JfVh+XoFrbYs4qUNztIwxKYQC/P9ba/92omNmzJhx+H5xcTHFxcVRlCfyeR9t\n+QyA1FR3phgCDCzow76aLQSDwcO7/Yg4pbS0lNLSUkfaMtbayA825k9AhbX2/naetx1pT6QzeucU\nsa9uL3b2bNf6+JcnXufpBb+nvr6ezMxM1/oRgdAaQNbaTl2C3JEphuOB24ArjDEfGmNWGmMmdqZT\nkWhU1dcCya72Mfmi0GYTK1eudLUfkWhF/H3UWvsBbv/NEYlAs20C0lzt4+KzegGwYME7jB8/3tW+\nRKKhwT7xoVYyyHa1h4L8UPvvvbfM1X5EoqUQF1/Ky8tztf3wOuWffPKpq/2IREshLr4SDAYBGFzQ\nNwa9Gaqq9segH5HOU4iLr4R3ob/gzLNj0FsatbW69F7im0JcfGXJktBFwteMcT/Ek0kjGGxyvR+R\naCjExVfmzZsPwLizi1zvKyc9H116L/FOIS6+UlYWmi0Snj3ipqJuoX8owuPwIvFIIS6+smVLaLaI\nG7vcH++S04cDUF1d7XpfIp2lEBdfqaqqpINL/nTaDReHrtosKyuLSX8inaEQF19pbm7A7as1wyac\n0weA+fMXxKQ/kc5QiIvPtJCTFptlYvPzcwBYtGhpTPoT6QyFuPhG+AfGft36xKS/0Li7Yfv2XTHp\nT6QzFOLiGzU1NQBcNCwWF/qEpVFTox82JX4pxMU3wovo33TJqJj1mZycR2trQ8z6E+kohbj4xrx5\nbwMwYWRshlMACgp6AIGY9SfSUQpx8Y3Fi5cDR35wjIWhQ0NDNy0tLTHrU6QjFOLiG9u37wR
MTC70\nCbv88tDmVfv3azVDiU8KcfGN0A+bsZkjHnbDDcUAzJ07N6b9ikRKIS6+EQg0kJLi7mYQxxs9eiAA\n8+a9E9N+RSKlEBdfsNYCgbYfGmMnPT105r9y5eqY9isSKYW4+EJ4M4hhw0Z70HsKe/Z85kG/Iqem\nEBdfCG8Gcd11k2PbcUkJkEpdXWXbfZH4ohAXX3jppZcBuPXWL8S87/y0XkCwbUhHJL4oxMUX3n8/\nNEd8yBD3d/Q53un9TwegobEx5n2LnIpCXHxhx8ZNQDJJf/97zPv+6riLAViydm3M+xY5FYW4+EJt\n4BCxniMedtP4oQDMLPvAk/5FTkYhLnEvNBZtyU/r7Un/A/t0A+D9DZs96V/kZBTiEvfC0wuHDxjm\nSf9JSUlACjsPVHrSv8jJKMQl7i1atAiAqZeO87CKNOoCtR72L3JiCnGJezNnvgTAl8cP9qyGgvRe\ngNU0Q4k7CnGJe2VloQt9+vSMzd6aJzJy4JkA1NbqbFzii0Jc4t7OnTuBlLaxaW/cNuFSAN58803P\nahA5EYW4xL2mpiqSyPW0hpsvDU0z/NOf/uJpHSLHU4hLXAvPTBnc05uZKWE9CkO7CX3wwVJP6xA5\nnkJc4tr8+fMBuK14gseVAGRSWVnhdREix1CIS1x77rnnAfjG1SM8rgR65w4CWmhubva6FJHDIg5x\nY8zzxpi9xhitji8xs2jRciCJfr27eV0Kk8eMB2DNmjUeVyJyREfOxP8AfMmtQkROpLJyPxC73e1P\nZtqNFwHw5JNPeVyJyBERh7i19n1A1x1LzLS0tACt9Onj7Y+aYWcN6AnAnDnvelyJyBEaE5e4tXp1\naOTuppu+5nElIaF56pns21fudSkih6U43eCMGTMO3y8uLqa4uNjpLqSLePTRXwAwbdpUWL/C42pC\nCjP6Utn4KY1//SsZGRkwOcbbxUlCKC0tpbS01JG2TEfWgjDGDARKrLXntvO81doS4pTc3G7U1tYQ\nDLZg3njD63IA+NYTc3hmwTP89YEHuHnCBIW4OMIYg7XWdOa1HR1OMW03EVe1zppFbW0lqfSImwAH\n+OHU0C4/j748y+NKREI6MsVwJvAP4AxjzA5jzF3ulSVd3dK2rdAmnR/7jZFPJjTVMY2Pdm33uhQR\noANj4tba+Ph1SbqE6TNfA+BXd0/0uJLPK8oZzJ7ajTQ2NpLhdTHS5Wl2isSldz/+GEjm9NN6el3K\n59w/5csA/H7OXI8rEVGISxyqr6+nhUbyUvt7XcoJ3TtlNAD/8frfPa5ERCEuceiRR6YDMG3KTR5X\ncmIZGelAIbuq9mkdFfGcQlzizpNPPgsk8/BXL/W6lHbdedkNADz77LMeVyJdXYfmiZ+yMc0Tlyjt\n3r2bvn370idnJJ/NfNTrctrV0NBI1lduJS+vB9XV+70uR3wulvPERVw1deodALz6g295XMnJZWZm\nkJ82gJqaCiortaSQeEchLnGjtraWsrK3gVzGnXOa1+Wc0u+/800AJk26weNKpCtTiEvcmDw5FIYP\nPviYx5VE5qYvjMSYnixZUkZ5uRbFEm9oTFziwt69eykqKgJ6EAzui6tL7U9mbkpvrrnmIgYMGMb2\n7Z94XY74lMbExX9KSg7fWmfN4owzzgPg9ddnYYx/lueZNGksBQXD2bFjEz/9qT++QUhi0Zm4eKOk\nBIBAIMAl9/0by3ZsYOTISaxZM+eY5/3gwMFqetx5F9DK++9/wPjx47wuSXwmmjNxhbh4o6SE5pYW\nLr3/hyzbvoHslF7UvvY7r6vqtIVL13HlT34AGGbNep0pUyb76huFeEvDKeIrwWCQtZs20+u2f24L\n8J7s+/N/eV1WVK4YO4Lf3PEdwHLjjVO45ZavU19f73VZ0gXoTFxiprm5ma1bt3L77feyfPlbAJze\ncxgfP/dzkpOTPa7OGUvXbeSiHzwMNANZPPjg93n44fv
Iy8vzujSJYxpOkbgVCASora3l979/mX//\n919QVbUZgGQyeXHavdxyefxeWt9ZjY2NTHrkV5R+vKTtkXSKzxzD068/z7BhpyfMP1jiHIW4xBVr\nLU1NTZSWLuKb3/w+27d/CLQAkJHRg0cffYTvDD4t4cNsX0UFdz7+NHNXrwQCAGRkFPH1r3+Dn/50\nGvllZceOm2urty5LIS5xobW1lU2btnL77dNYufIdIDQmnJSUww1jzuWnt0/lzIEDu9wPfo1NTfz5\nrbf5wWvzqKjYdvjxvJS+PHzLjdz35ctJT0tTiHdhCnHxjLWWiooD3H33DykpeRGoaXsmlXHjLuOJ\nJx5j9OjRJM+Z42WZceNAZSW/eu11fjNnAXUt4ffK0DNrIE/+4Sm+/OWrE/4binyeQlxirqWlhfuv\nv4dn3vwbrVS1PZrEiBGjePrpxxl34IDC6CSstew/eJAf//llfregjGbCM1nSGDJkLC+++BQXXjiy\ny31r6aoU4hITwWCQZ599nYceup9Dh45sFNwvtw+/vutWbvzVL0lJadu21UcX63jNWsveAwf412f+\nyN+WLqGVprZnMhk16kreeON5+vXr5WmN4i6FuLiqrGwtt9xyO/v2rQZC//8WFBTx/264lm9Mvoa0\n1NTQgUeP6SrEO8Vay669e7nzP59h4cfr4HCg5/DFkRfz+sP3kJ2dGXpIY+gJQyEujtu58wBf+tJU\nNmx4l/DMknSyuP/Ga3jka18hIz3d2wK7AGstm7dv56s/f4oVuzYS/gc0iTzuuGwiv104k5QUDVkl\nAoW4OKKxsZVrr72HhQtfITyzJDk5i1tvncpvf/sbshcu9LbALsxay/I1a/na48+xueLIUFZGRi8e\neODH/OQn3/SwOomWQlw6zVq4667pvPDCE0B126OpTJhwBbPuvo3CggIvy5N2zH3/A+565o/srdl7\n+LG8vEE88cTT3HnnRA8rk85QiEuHWAs//vFM/uPH99FMxeHHzywaSsn372XYkCEeVicdYa3lhbnz\nuf/lWVRWftb2qKFnzzN58cU/c+WVF3han0RGIS6nZC385Ccv8+ij02hs3H348T75vfmf736LSy44\n38PqxAmBQICfzHyVn/9PCfXBI3PQ+xcMYU7ZAs45Z6Cn9Un7FOLSruk3T+Pnr/2ZRnvkjLt79378\n+tc/4/b8PM1DTlCtra1896nf8+zbC2k9PAc9lYEDx1BS8grnnNPf0/rkWApxOay1Ncj99z/J00//\njNbWI2fc3bO78+Rdt/GVq65UcHcxTU1NfOM//5u/LFpCgIa2R1Po2/dcXnrpz1x22dme1icK8a7l\n+PnXkydTVVXPlCnfpKxsNkd+nIQePQbwzO03c9PlExTcAoQC/Z7Hn+KlpctpaTnU9mgSublDmD79\nMaZNu5GkJH1WYk0h3pW0hfirC9fz3ef/QPmhT4HWticNQ3v254/fvJPxY8YouKV9kycTCAR46KEf\n8cwzz1JXd/CoJwu5/PIb+etfH6d791zPSuxKFOKJrC20P92+lzue+BP/2LwSqDv8dAppXHHOubz4\nvXvpVljoUZHid/a663j11f9h2rR/o7z8UyDY9kwK+fkDuffeh3jkkbtITU3xssyEpRBPQDt3VvDt\nb/+I+SV/pYkDRz2TRJ+8Hjx802S+dcNkkpK0w54478DBg9z5m+d4a+Uqmo46aYBU+vQ5iwcf/AHf\n/vbNpIaXXJCoKMR9LhgM8t57q3nggR+yckUp9pi/NJCbXsDNY8fwX9/+J7KzsjyqUroqay3vLV/O\nd//wEqt27SB4eD0XgFTy8/tz00238YtffI/u3fM9q9PPFOI+EgwG2bBhK9///s8pLZ1Lbe0ewmuT\nhBgKs7px69gLeeyfbidfezNKnLHXXcfChe/wve/NYO3aj2htPXTcEVkMLhzI3VddwX03XkbO1K94\nUqefKMTjVF1dPS+
8MI/f/vZ3bNiwkqamAxwb2JCSksugwu58+6pL+ZebbiAtLc2bYkWiUL57Nw/+\n8SXeXLWWg/UHODKmHpZNjx79mTDhan70o3sZMWLwkWWLJXYhboyZCDwOJAHPW2sfO+75LhfiLS0t\nbHj8Wf577mLeXvMh5RW7aaKW48MaIIVMBvYs4u7LLuY7t1xPdnZ27AsWiYFgMMi6Tz5h+qtv8N6W\nLVRUVACNxx1lgCwKCnoxYsRobr/9q0ydehX5XfAitJiEuDEmCfgEuBL4DFgGTLXWfnzUMQkV4sFg\nkLq6esrKVvHCC6+yZs1KPvtsJzU1lUAtR6b2HSuZTHoWdOPqs0bw4Jev4uxhwzr1A2TpmjUUn3NO\ndP8j5DC9n87p7HtZV1fHX95+h98tXMInn+2kuqmG9v4eQRppaXl0796LIUOGM3HiRL7ylasYNKhP\nwv2gGqsQvxiYbq2d1Pbn7wP26LPxeA9xay2NjU1s3vwZs2e/zdKly9i8eTMHDuyhpqaKxsYGrG0g\n9KE6/uvg0ZJIJYseBfmMPm0Id3xxLNeNHeP4mfWMmTOZ8bWvOdpmV6b30zlOv5fBYJDNW7fy/IL3\nmP/RBrZX7KOqsQ5LK+2HPEAKkEJaWhaZmbkUFvagqKgfZ599Fl/4wmV88Ytj6NWrW9wPU0YT4h0Z\nlOoH7Dzqz7uAsZ3pNCwYDBIIBGloaKKyspbKykPs3LmH6uo6Pv10GxUV5VRU1FBdfYDKympqa6uo\nr2+gubmRhoYmWlubaG1tprU1QGtrK4FAM0cCuJXwIvqRSyaZNDKSsynMy2FYjwF88ZzB3DrhQgYP\nGKA9I0VckpSUxBlDh/LY0KE8doLnrbXU1tby/uo1vPaPtazYtp1dVfs5dKiJRuppbq6iufkg1dXb\n2bZtBYsXz+YPf/j5KXpNbrslETrrTyY1NZWUlDRSUjJIS0sjPT2N9PRc8vJyyMvLJi+vG4WF3ejd\nu5Dc3HwGDOjLaaf1paioBz16FJCTk0VKSnJMp/46/suCN2NZpu2WTBIppJJBWmoKmekZZGdm0DM7\nl34FuQzuU8SoQUVcdPoghgzWDysifmGMITc3l0njxzFp/LiIXtPU1MTajz+hbP1WPtq6l20H97H/\nUDU1DfU0NDXS2NRCc7CFAK1YGmhuDtDcbOn4yV90Rg6sPvVBJ9HR4ZQZ1tqJbX8+4XBKVNWIiHRR\nsRgTTwY2EvphczewFPiqtXZDZzoWEZHoRTyeYK0NGGP+FZjPkSmGCnAREQ85erGPiIjEVlQ/oRpj\nbjbGrDWFkW0IAAADDUlEQVTGBIwx7e7vZYyZaIz52BjziTHmoWj6TGTGmEJjzHxjzEZjzDxjzAkX\nojDGbDPGrDLGfGiMWRrrOuNZJJ81Y8xvjDGbjDEfGWNGxbpGPznV+2mMmWCMqTLGrGy7/dCLOv3A\nGPO8MWavMWb1SY7p+GfTWtvpG3AmMAxYCJzfzjFJwGZgIJAKfAScFU2/iXoDHgMebLv/EPCzdo7b\nAhR6XW+83SL5rAGTgL+33b8IWOx13fF6i/D9nADM9rpWP9yAS4FRwOp2nu/UZzOqM3Fr7UZr7SZC\n8/vaMxbYZK3dbq1tAV4CpkTTbwKbArzQdv8F4IZ2jjNE+S0qQUXyWZsC/AnAWrsEyDfG9I5tmb4R\n6d/drnWNfCdZa98HKk9ySKc+m7EIghNdJNQvBv36US9r7V4Aa+0eoFc7x1ngLWPMMmPMPTGrLv5F\n8lk7/pjyExwjIZH+3b2k7ev/340xw2NTWkLq1GfzlLNTjDFvAUf/a2AIhcjD1tqSE79K2nOS9/NE\nY4nt/eo83lq72xjTk1CYb2j7V14k1lYAA6y19caYScDrwBke19SlnDLErbVXRdlHOTDgqD/3b3us\nSzrZ+9n2o0dva+1eY0wRsK+dNna3/Xe/MWYWoa+9CvHIPmvlwGmnOEZCTvl+Wmtrj
7o/1xjzlDGm\nm7X26E07JTKd+mw6OZzS3rjYMuB0Y8xAY0waMBWY7WC/iWQ2cGfb/f8D/O34A4wxWcaYnLb72cDV\nwNpYFRjnIvmszQbugMNXIVeFh7Dkc075fh49ZmuMGUto2rICvH3hNUJOpFOfzagWDzHG3AD8F9AD\neMMY85G1dpIxpg/wW2vtdVYXCXXEY8ArxpivA9uBWwGOfj8JDcXMalviIAX4i7V2vlcFx5P2PmvG\nmH8OPW2fs9bOMcZcY4zZTGjH6bu8rDmeRfJ+AjcbY75FaAH9BkDb+LTDGDMTKAa6G2N2ANOBNKL8\nbOpiHxERH9M0NRERH1OIi4j4mEJcRMTHFOIiIj6mEBcR8TGFuIiIjynERUR8TCEuIuJj/wvrHS2a\ntW/uBQAAAABJRU5ErkJggg==\n", 310 | "text/plain": [ 311 | "" 312 | ] 313 | }, 314 | "metadata": {}, 315 | "output_type": "display_data" 316 | } 317 | ], 318 | "source": [ 319 | "# Plot with samples\n", 320 | "f = plt.figure(figsize=(6, 4))\n", 321 | "ax = f.add_subplot(111)\n", 322 | "model.plot(axis=ax)\n", 323 | "# for x in data:\n", 324 | "# ax.axvline(x, alpha=0.002)\n", 325 | "for phi, nphi in zip(phis, nphis):\n", 326 | " plot_sample(phi, nphi, axis=ax, alpha=0.1, c='k')\n", 327 | "ax.set_xlim(-1, 1)\n", 328 | "ax.hist(data, 100, alpha=0.3, color='r', normed=True)\n", 329 | "f.show()" 330 | ] 331 | }, 332 | { 333 | "cell_type": "code", 334 | "execution_count": 56, 335 | "metadata": { 336 | "collapsed": false 337 | }, 338 | "outputs": [ 339 | { 340 | "data": { 341 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXcAAAEACAYAAABI5zaHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd4VFX+x/H3mZn0nhASAiFUQ1UQsYAlohRXumUpFnAF\npLmCouIKgqIC7v4UpSiIIrioqIAgSlOjC7rSpYWWBFIISSC9zCQzc35/kEVE6mTIJJPv63nmYZLc\ne+ebQ/LJmXPPPVdprRFCCOFeDK4uQAghhPNJuAshhBuScBdCCDck4S6EEG5Iwl0IIdyQhLsQQrgh\nh8NdKRWklPpcKZWglNqnlLrJmYUJIYRwnKkS+84CvtFaP6CUMgG+TqpJCCFEJSlHLmJSSgUCO7XW\nTZ1fkhBCiMpydFimMXBSKfWhUmqHUmq+UsrHmYUJIYRwnKPhbgKuB+Zora8HSoDnnVaVEEKISnF0\nzD0NSNVab6v4+AvguXM3UkrJwjVCCHGFtNaqssdwqOeutc4EUpVS11R86i5g/wW2lYcTHi+99JLL\na3Cnh7SntGd1fThLZWbLPAn8WynlASQBQ51TkhBCiMpyONy11r8BHZ1YixBCCCeRK1RriLi4OFeX\n4FakPZ1L2rP6cWie+2UfXCl9NY8vhBDuRimFdtUJVSGEENWbhLsQQrghCXchhHBDEu5CCOGGJNyF\nEMINSbgLIYQbknAXQgg3JOEuhBBuSMJdCCHckIS7EEK4IQl3IYRwQxLuQgjhhiTchRDCDUm4CyGE\nG5JwF0IINyThLoQQbkjCXQgh3JCEuxBCuCEJdyGEcEMS7kII4YYk3IUQwg1JuAshhBuScBdCCDck\n4S6EEG5Iwl0IIdyQydUFCFEjrF79x4979XJNHUJcJofDXSl1FMgH7EC51vpGZxUlxHlJwApx2SrT\nc7cDcVrrXGcVI4QQwjkqM+auKrm/EEKIq6Qy4ayBDUqprUqpYc4qSAghROVVZlims9Y6QykVzumQ\nT9Bab3JWYUIIIRzncLhrrTMq/s1WSq0AbgT+FO5Tpkw58zwuLo64uDhHX1IIIdxOfHw88fHxTj+u\n0lpf+U5K+QIGrXWRUsoPWA9M1VqvP2c77cjxhTgvV86WkZk6oooopdBaq8oex9GeewSwQimlK47x\n73ODXQghhOs4FO5a62SgnZNrEUII4SRyhapwX2cPpcgwiqhlZJ66EEK4IQl3IYRwQzIsI4QjZMhH\nVHMS7qJWsdvtlJeXA+Dh4YHBIG9ehXuScBduy2qzUW61nv43J4fy8nLsdjtGoxGlFFarFYPBgIeH\nxx8eRqPR1aULUWkS7sK1zr046GyXOdyhtaa8vByr1Up5efmZh6GgALTGqjWln39OkdlMXnExOQUF\nqJtuIjQ0lKCgIPz9/fHx8cHDwwOtNXa7/U+Bb9IapSp9XYkQVUbCXdQoWmvKrFbKrVbKc3MpLy/H\nZrNhNBqx2WyUlZVRXFxMQUEBuYmJ5BcXn96+YhsPgwGjwQAWC2lpaSQlJZ0Jcy8vLwIDAwkLC8Pf\n3x8/Pz88PT1PHzs3F6PBgIfJhIfRiIfJhKfJJIEvqi0Jd1FjlFgsFJaUYLfbsWuNOSfndIjn5pKX\nl0dpaSlmsxmlFCaTCVNREXa7naxcC4dPQOKJYtKyS8nItWDwTqdevSDq1w+mefNQGjf2JTIyiJyc\nHDIzM7Hb7Wit8fb2xtfXl9D0dAJ9fQnw8cHb0xOPiqGdAB8ffFzdMEKch4S7qFoXG4a5ALPZTGFh\nIeaCAlKys0nJzsZSXo7VZsNgMGCq6EHn55dw+HAeSUmFpKTkkZGUzqkiG3bAx6OMQJ8ifEzF1A+1\nogMDKC31YPduDzZv9qW0VGE0GggL8yMqKpSYmCAaNQoiNjYQm81GYVYWVpsNu9Z4enjgZTIREx5O\nVFgYPtnZBAYG4uXl5fz2EsJBEu6i2iorK6OgoIDS0lKOHz9Ow
u7dFBUVYTB5kJJVzrGsNJKTc0lL\nyyEnp5jSUiuenhpfXzN+fmWE+J0kMtSKwkK5zYpSCl8PD3y9vSHcg+LiYvz9LdSpY8doNAHelJYq\nsrM9OXrUk2++8aa83Iivr5FQLwP163jSpK4vjSItRIcZOZmXx6Hjx2l57bVERkbi6+tLYGAgHh4e\nlfvGZZqlcAIJd1HtWG02CkpKKMnK4sSJExw4cIDc3FxSjuWyYnMBidkWwIa/fzJ+fmV4eZURE2PD\nZCrDZjs9A8bT0xO/Yis+Xl6EBtShTmAgfj4++Hp64uvlhb1VK8xmMyUlJRQWFpKVlUVBQQElJSWU\nlBRhsVjQWmM0emCzmSjOtJB8wpN9RwMpKvcFNLGR3vS7uZzCLVsIDg6mVatWhIeH4+fnR0BAACaT\n/HoJ13Foyd/LPrgs+SvOdZFhGZvNRmFpKYUlJZwqLGR/3brk5ORw9GguK1ceIDExjVCfbEIDCzAa\n7BiCg/Hy8sLHxwcfHx+Cg4OpW7cufn5++Pv745ucTICvL94mE95eXgT7+hLo54eftzfccQdFRUUU\nFhaSm5uLxWKhtLT0zDuF/4X+yZMnyc3NpSQ1leKyMqxWK3abDas2cbLAn/zSCFq0iKFPn+Y0aBBM\neHg4sbGxhIeH4+/vT0BAwJXPpXfCDCJRc7l6yV8hnMZut1NkNlNYUkJ2fj4H0tPJzs8n9XgBK1ce\n5vDhFIKCcmgYmkmArxeNIusRHRKCT5s2BAQE4OPjc+ZfX19fgoKCCAoKwm/bNvy9vU+fBPXwwMNk\nwmQ0YjIa4brrsFqtZ6ZPlpaWUlhYSHFxMYWFhRQWFpKfn38m6Av++19KysootVgoKCoi7dQpvA2Z\nBPrkcvx4DjNmpNCiRQP69GlOZmYmkZGRtG7dmuDgYAIDA/Hz85MLpkSVknAXLqO1pthspqCkhNyi\nIvalppKRm0taZjFf/ZrH4cz9BHpmER2aS7CPF40jG9IqJoamkZH4+/gQdNddBAYGEhgYSEBAAH5+\nfnh7e/9+MdKpUxd+caXObOfj40NgYCAREREAfwh9s9lMUVERBTYbRaWlFBQXk1dcTFFpKYkZGSSk\npJBosZCXl0xaWhYzZhyjRYuG9OnTnKysLKKiomjZsiXBwcFnapTpk6IqSLiLKqe1Pj2tsbSU3KIi\nDqWmcuzUKVIyC/n613wOZubh55FJ/eB8QgJ8aV6vCa0bNKB5w4a0atCAmLp18fX2xtSli+NLCFzk\nzkomkwmTyYS3tzcBAQGEh4dD48bYbDasdjvlVislZjNJmZm0iI7mgK8vCQkJJCYmkpt7jLSkE8yY\nkUTrBqHce0Mgx+/qRHR0NC1atCAoKIjAwEB8fX0r2YpCXJyEu6hS5rIy8ouLyS8u5kB6Oseysjia\nmcc3Wws4kJGHn+kE9YKLCPX3ITaqKa0aNSK2fn1aNmhAs6goAn19f+/5rq/am38ZjUaMRiNeHh74\n+/hQJyiIFg0acCQjgwSLhQSTiQOpqSSQTl5JDkknApm5MoI2O/Lo0S6QlIhAmtStS/NHHjkzXOPt\n7V2l34OoPSTcRZXQWlNQUEBmZiaJmZkcycggOSOHtTsLSUjPx9uQQWRQMWEBPrSKvoYW0dG0io6m\nTUwMjSIi8PP2rnbDGQaDgWB/fzo0a0aLBg1oGxPDvtRUEtLS2J+czMGMDHIKDnMoLYC9KeG0bRhG\nt3b5HN2wgaZNm9KsWTPq1q1LYGCgq78V4YYk3MWVceBG0Xa7nVOnTnH48GF27NrFkeMn2bi7mH2p\nJ/FQWUQGlBIe5EOrhrG0jI6mVUwM1zVqRHR4OD5VdWFQJW6ArZTC38eHdk2bck39+nTIzua3hg3Z\nl5rKvpQUDqSmkpWfRELKCfakRHDtUTvdu2eQnJxMhw4daNKkCaGhoXLCVTiVhLuoVLBdSnl5OVlZ\nWezatYtNm35m5deHOZCRj
4ks6viXEhHsS8v6zWjTqBFtYmJo17QpDcLC8PL0dFoNVcnX25sW0dE0\njojghubN2ZWYyJ5jx9h39CgJaWmcyE9m796T7N6dRJs2DendO53bb7+Na6+9lvDwcJkbL5xGfpLE\nVWM2m0lPT2fLli1s2LCJf//7V8rK0gnzK6N+iC8tG7agbUwMbRs35vpmzYgMCTk9TbE6cGCZhLN5\neXrStF49GoaH0/Gaa9iRlMSuxET2Hj3KgaIiMjIy2Ls3k6SkdLKz88nPz6djx45ERUUhixgIZ5Bw\nF1dFYWEhR48eZdOmTXz55Y98991OjMZsmoZ70DG2Fdc1bky7Ro1o17QpdYOD3XZIwsNkomHdutQP\nC+Oma65hR2IiOz092bt3L1u2bOXYsSQWLCggLS2X3NxcbrvtNmLM5tMXWglRCRLuwqm01uTm5pKQ\nkMB3333H0qX/4eDB/Xh7l9KyZSN6Nm1K3HXXcW2jRoQFBla7k6RXi9FopF5oKPcEB3Njp07s3LmT\nZs2asWrVKg4dSuPbb+NJSTlFbm4ud5lMNK9fnyCZEy8qQcJdOI3NZjszvr5mzXqWLNlEQUEyISEG\nbrzxRvr160dfb2/qhoTU2tAyGAyEh4fTtWtX2rZtS4MGDfjiiy/Ytm03+/ZtZebMXDI7hXPPDR24\nrkkTwoOC3PZdjbi6JNyFU5SVlZGens4vv/zCZ599w6pVvwLZREcH0bXr3QwaNIjOnTvjvWGDq0ut\nFpRS1KtXjyFDhtC4cWM+/vhj4uN/Ii3tAG+tOcXxXDP3dSrmpthYosLC8JATreIKyU+MqLSSkhKS\nkpLYuHEjixevY+fO3zCZSmjZsiH9+/dn0KBBNG/evNb21i/G29ubrl270rBhQ2JiYli+fAWHEo6y\n7OcSjmUW8kiXfLq0a0ejiAi8a+gMIuEaEu6iUvLz89m/fz+rV3/NBx+sJzPzCD4+Rjp1uoGHHnqI\n/v37y0U6l2AwGGjZsiXjx4+nUaNGLPm//2Pr4TR+TTxE6qkiMvPy+csNHWgVE0OAq4sVNYaEu3CI\n3W7nZH4+2zZv5pNPvuCTT+Kx2U4SGRlAt253M2LECG688UZM337r6lKrv4ppl6HAIyEhNB88mPlr\n17Lht70czzvGjM+LSDuZx4A7OtH+5EnCwsLkXZC4JAl3ccWsNhvHT53ix717eX/HYX76aQtQSosW\n0QwePIhHH32U6OhoV5dZI3l6enLHtdcSHR5Oo4j1LPvPzxzJyuKDH0pIPpHP8NhYbr31VurVq4ex\nulwTIKqlSoW7UsoAbAPStNa9nVOSqM4s5eUkHj/Oqi1bmL9mG8k5x/H0hJtvvp5Ro0bRu3dvfHzk\nltGV1aRePV548EFaRkczb803bD2cyg8JB0mdNJuRI4/Tq1cvYmJi8JRxeHEBle25/x3YD8igai1Q\nXFzM3iNH+Hf8T7y/YQ+l1pMEB/vSu/dfePLJJ2nfvr1M23MiPx8fBt5xB83r1ePNlSv5duc+jhzZ\nxZQpmaSlZTBw4IO0bt1alg8W5+VwuCulGgB/AV4FxjutIlEtFRUV8eOPPzLns6/4dsdBoJgmYUH8\nbfyTPP7449StW9fVJbolo9HITS1b8mZ4OM3XrGHx1p2kpKTz5ptLSE5OZ+TIIXTu3Bk/Pz9Xlyqq\nmcp0s94EJgByk1Q3V1RUxLp163jmmdf4dsdvKIrp1KwBb474G88884wEexWoV6cO/xgwgJkzX6Nj\nx7ZAIStXfs1zz00jPj6e4uJiV5coqhmHeu5KqXuBTK31LqVUHHDBU/dTpkw58zwuLo64uDhHXlK4\nSHFxMevWrWP8+FdISTmAn8mD/rfezAsPPEBsgwYoGfOtMt5eXjzYsyexsbG8/vrrrFq1jl27/stT\nQ5/i/4Y9yF3t2uH7wAOuLlNcofj4eOLj451+XKX1lXe8lVKvAQ8BVsAHCACWa60fOWc77cj
xRRW7\nwJK/xcXFfPPNN4wbN4n09CSCg8N4vldXhnfvTsj/5q5fanngSq6u6JbObTMH2uhkXh6zv/6af61c\nS1FZCY1CY3hz+AN0mzRJxuBrOKUUWutKz3V1aFhGa/2C1rqh1roJMAD4/txgFzVbcXExq1evZuzY\niaSnJxISUoe33prO0/36/R7swmXqBAfzjwEDeGPIAAK9/Diac4zR733KunXrKC0tdXV5ohqQee7i\nT0pKSli1ahVjxvyDnJwUwsIief/92fTu3RvDmjUX31l66lXGw2Ri2D33EODry9h3P+J4bgqjRk3g\nnXds3HvvvTIltZar9Lw1rfWPMsfdfZSYzSx/4QWeGPIUOTnHqBcYwpIlC+jTp49Mc6yGjEYjA+Pi\n+HD8SMJ8QzhxIokRIybw9ddfSw++lpPfVnFGqcXCFz/9xMg5SygoO0nD0AhWPD+eHj16yOXu1ZjB\nYKD3TTfx2XNPEhERQ07OMR5/fByrVq3CbDa7ujzhIhLuAjgd7J/88APD5iyhyJpH44j6rHpxAje1\naSPBXgMopejSrh2rVy+jQYNrKCg4zuOPP8WKFSsk4GspCXdBqcXCku++Y8TcJZTpQlpGNWLDlBe4\nrlkzV5cmroBSio4dO7J27Zc0adKaoqIshgwZy+effy4BXwvJCdXa6KyTnuayMj5Yu5YnFy7FjpkO\nDWP5avIz1JcLk66eq3zSuXXr1mzY8BX33ns/Bw7s4rHHxmK1Whk0aBBeXnL77dpCeu61mLmsjPfW\nrGHMwo+xU8qtsdeyftoLEuxuoEmTJnz//Rquv/42rNZ8hg37O4sXL8Zisbi6NFFFJNxrKXNZGe+s\nWMFTH34MWOhxXUfWvPQcocHBri5NOEm9evXYuHElnTp1w2YrZPjwcSxcuFACvpaQYZlayFJWxszP\nv+Clz5YBmgduvoNF40bh68i8aJnXXq2FhISwdu0X/PWvQ/n22y8ZPXo8ZrOFMWNGy3LBbk7CvZax\nWCy8+u+lvLJiJQB/69KDuaMfx9PDw8WViati9erTa4M8NoBH8nP4/OcfePrp5ygrK2P8+HES8G5M\nwr0WsVgsTJr0Em+sWAEoxvXux8whgzGZLvFjIL3zGs/by4uPnx6Df2wbPvxwDhMnvojFYmbixIkS\n8G5KxtxrCYvFwoQJz/HGGzMBE5MffIh/PvbwpYNduA1PDw8WLHiT8eP/AWimTJnKSy9Noby83NWl\niatAfrNrgbKyMsaM+Tvvv/8e4MG/hjzGU33vkeUEaiGj0cgbb0whMNCXKVMmMX3661j37WP6Y0P/\nfE/WS634Kao1CXc3Z7fbef75f1QEuxcLF37A0Dr+ctVpLWYwGJg8+TkCAoJ5+ukn+efqVfj5+jJ5\n4AD5g+9G5H/SjWmtefXVV3nzzX8CnnzyyacMHTpQgl2glGLcuBG8N3w0YGLqZ58ya/kK5P4L7kPC\n3U1prZk1axaTJ08GPFi48GMGDOgrwS7OUEox7N47+dcjTwCK8Ys/YvG6da4uSziJhLubWrRoEePG\njQcMvP32Ah57TG6/Jv5MKcW4+7ry0oN/A2DI3Hl8+eOPLq5KOIOEuxtavnw5jz02DNBMnTqLsWMf\ndXVJohpTSjF5UE+e6fkooLn/X2/y3bZtri5LVJKcUHUzGzdu5L77/grYmDBhJpMmjZZ56uKSDAYD\nMx7vR0GJhfnff0q3l1/jP92706lTJ1eXJhwkPfeaavXq3x8Vfv31V7p2/QtgZcSIKUyf/rSMsYvL\nZjAYmPfkAAbc3B87VuLi7mbfvn2uLks4SHrubmLv3r3ceuudQDkDBz7P3LmTZFqbuGIGg4GPn3uY\nnMkW1u9ZQ8eOt5CQsIeYmJg/vwOUefDVmvz2u4Hk5GQ6duyE1VpKjx6jWLJkmgS7cJjRaOSblx+n\nU5MulJYW0rp5SzIWLnR1WeIKSc+9hsvIzqZt2/aYzYV
07vwIX3/99p+vNBS1h5POrxiNRn785xiu\nH21lT8ZPtB7zFIffm0NYSIhTji+uPune1WC5eXm0Hj2W4uJ82rbtR3z8Qgl24TQmk4ntc56iSfAN\n5JqLaDVqDAWFha4uS1wm6bnXUAWFhbQYOYZccwlNm/Zg+/bPfl8ETGbHCCfxMJnY//7zRD88lazi\nPbQZNZaDC97Fx9vb1aWJS5Ceew1UXFxMy1FjySouINz7Ovbv/woPWY9dXCVenp4cXTQJP9WC1Pwc\n2o4aQ5msJFntqau5loRSSstaFc5VVlZGixatSU4+gh+tyF42FR9vuemxuEznznC5gnd5RSWlhA54\nnnKSaVcvmi3HEqVTcRUopdBaV3oOs/Tca5Dy8nLat7+R5OQjGGnCiU+nSLCLKuPv68OJxdOABuzK\nSKVz51tlLfhqTMK9higvL+eWWzqxf/9vQGOylryKv6+Me4qqFRocQMaHM4BItm7dQrdu3bHZbK4u\nS5yHhHsNYLVa6dq1G9u3bwPqkZ6+ldAgP1eXJWqpyLAADr3zGhBGfPwP3HfffRLw1ZBD4a6U8lJK\n/aqU2qmU2qOUesnZhYnTbDYbffv25ccf44FwDh7cSlRUmKvLErVc85g67N69Awjiq6++YvDgwdjt\n9gvvcPZyGTKbq0o4FO5aawtwp9a6PdAOuEcpdaNTKxPY7XYGDRrEmjVrgGB27drKNdfUd3VZQgDQ\n9uhv/GfaTCCAzz77jOHDh1884EWVcnhYRmtdUvHUi9Pz5WVajBPZ7XZGjBjBsmXLgAB+/PG/XHdd\njKvLEuIPbr22Hqsmvgr4sHDhQsaPHy93c6omHA53pZRBKbUTOAFs0FpvdV5ZtZvWmnHjxvH+++8D\nfqxc+T233x7r6rKEOK9etzRi0eiXAe+z7v4lXM3hK1S11nagvVIqEFiplGqltd5/7nZTpkw58zwu\nLo64uDhHX7LWeOGFF3j77bcBbxYuXE2fPje4uiQhLuqRbtdwsvEynnnmPqZNm0ZAQADPPvusq8uq\nEeLj44mPj3f6cZ1yEZNSahJQrLX+v3M+LxcxXaFp06YxadIkwIOZM7/kmWd6nn9NdjkpJaoZ3bMn\n//jHJ7z++iOAndmz32H06NGnvyjLBV82l17EpJSqo5QKqnjuA3QFDlS2mNru7bffZtKkyYCR55//\n4MLBLkQ1pJTi1VcHMmLEbADGjn2Sjz76yMVV1V6ODsvUAz5SShk4/QfiM631N84rq/ZZsGABTz01\nHoChQ9/itdcGS7CLGkcpxdy5w8nOzmH58n/wtyFD8NqzhwF33OHq0modh8Jda70HuN7JtdRan3zy\nCSNHjkZrG/37v8b774+SYBc1lsFgYNmy5+h+7WG+27+IR//1Fn6envS65RZXl1aryMJhLrZq1Soe\neOBBysosxMU9zcaNMy5vTXYZcxfVnNVq5ea/L2V76hd4YWTNSy9yV4cOp78oY+4X5Kwxdwl3F1q/\nfj29e/fFYimlXbsn2Lr1nd/XZD+XhLmogcrKy2k94n2OnPwWP6MHa6e+xK3XXivhfhES7jXBRQL5\np6Ag7rmnJyUlhTRpMoiEhA/x9PR06FhCVGelZgtNHpvHiaLvCfTy5vtXXqbDhAmuLqvakiV/a7Bf\nDxygV6++lJQUEhHRk717F1482IWowXy8vTi0YAQBhpsosJjpMXkyv/32m6vLcnsS7lVsx8GD3Dt5\nMgUFufj7387Bg5/g4yNL9wr3FuDnQ9JHT+HJtZw0m+nWrQcJCQm/byALizmdhHsV2puYyF9emsIp\nsxlPzxtJSvqKoCB/V5clRJWoE+THkQXPAs3JyjpB9+7dSUxMPP/GEvaVJuFeRQ6npvKXl14is6QY\naM6hQ6sJDw92dVlCVKnoiED2vzMRaEZqairdu3cnNTXV1WW5JQn3KnD0xAnumTKF1IICoBF7Z00k\nJqauq8sSwiVaxtT
h55+/BmJITEyke/fupGdnu7ostyPhfpWlnDjBXyZPJjE7G6jPpteeo3XjOq4u\nSwiXuuWWWL75ZgUQSUJCAr2nTpWAdzIJ96so6fhx7pkyhYQTJ4AIVk+cQOc2F7jZhowxilrmnnva\n8+9/LwPC2ZGSQu+pU0k5ccLVZbkNCferJDExkV5Tp7L/+HGgLh8++RQ9b2ni6rKEqFYGDbqN2bM/\nAMLYkZJCv1de4ZgEvFNIuF8FCQkJ9OrVi/0ZGUAE80c9xaN3tbqyg0hPXtQSo0bdyxuPPgmEsSM1\nlb6vvELS8eOuLqvGk3B3sn379tG3b18SEg5wOtif5PHurWUhMCEuQCnF0/3bVQR8HXZVBPwRmUVT\nKbL8gBPt3LmTAQMGcOjQYSCCeSPHMKJHWwl2Ic517toyq1ejteZfy3cy4aPZwEna1q/P5xMnEtuw\nYa1ai0bWlqlmtm3bxuDBgyuCPZJ58xYyIqpcgl2IK6C15l8rtzPhw3lANq2jovj02Wdp8/e/X/5B\navhdn2RtmWpk69atDBo0qCLY6zFnzgJGjOghwS7EFVJK8XTfDswc8gRQl33Hj/PgjBns2LHD1aXV\nOBLulaC15pdffmHAgAEcPnwEiGLevPcZOfIvEuxCOOj3gB8BRJCQkcGgQYPYvn27q0urUSTcHaS1\nJj4+nsGDB5OUlAxEM3fuu9JjF8IJDAYDT/ftwIyKgD948BCDBg1i27Ztri6txpBwd4DWmu+//56h\nQ4eSnHwUaMicOXN54ol7JdiFcBKDwcAzfa9nxqMjgHocOnSYgQMH8uuvv1JbzuVVhoT7FbLb7Wzc\nuJHHHnuMY8dSOR3sc2QoRoirwGAw8Ey/65k+/S2gHkeOJDJo0CA2b94sAX8JDt0gu7ayrVzJuu3b\nGTFvHmmnTgGNmDv3HZ54QoJdiKvFYDAwYcJ9aG1j4sRnSEo6ysMPP8yiRYu4/fbbUV9/7eoSqyXp\nuV8mm83G2q1bGT53LmmncoBGzJkzixEjJNiFuNoMBgPPPvsgr746E6jP0aNpDBkyhI0bN2Kz2Vxd\nXrUk4X4ZbDYbq1evZvi8eaTn5AIxzB02hCee6InBIE0oRFUwGAw8//wAXn11OhDF0aPpDBs2jHXb\ntknAn4ck0yVYrVZWrFjB6NGjOZ6bBzRizuOPMuLedhLsQlSx/wX8tGmvA1EcO5bByHffZd3WrRLw\n55Ax94uwWCysWLGC8ePHk5FxCmjMnMcf4YmeFwh2WeBLiKvudMD/FdC8+OKLpJw6xhPvvstsu50e\nHTvi6eHA3EXjAAARnElEQVTh6hKrBQn3CygsLGTp0qW8/PLLZGTkAI2Z/bcBFw52IUSVMRqNPP/8\nALSGSZNeJDXnGKPfe49pRUXcf9tt+Pn4XP7BavhyBRci4X4eJ0+eZPbs2cyaNYu8PDPQmLfemsbI\nRiYJdiGqCaPRyMSJAyBhD5OWLiM9N4VnFy/meH4+T/ToQUhAgKtLdKnaF+6X+CudkpLC66+/zscf\nf0xRkQ1ozptvTmbs2H4Y1qypujqFEJdkNBqZ+MDNgGbS0tVkFSQy/bPPyMrJYcL99xMVFubqEl3G\noXBXSjUAFgMRgB1YoLV+25mFVTWtNQcPHmTSpEl88803lJQojMZ2LF36Cvfff6f02IWopk4H/C1E\nhfryt9lfUVB2kPnr15OZm8srjzxCE63/OF25lpwbc7TnbgXGa613KaX8ge1KqfVa6wNOrK3K2Gw2\ndu3axbhx49iyZSsWizfBwbewatXL3HprB5nHLkQ1ZzQaGXJ3O6JCfXlw+koKy/aw4r9bOVVYyIyb\nbqJt27YYjUZXl1mlHOqOaq1PaK13VTwvAhKAC9z5uXorKyvj+++/Z8iQIfz3v1uwWPxo3LgHv/wy\nS4JdiBrEYDDQvUMs8a8NoF5wJ8x2P37Ys4+hQ4eyadMmysvLXV1ilar0zTqUUo2Ae
KBNRdCf/TXX\n36zjIm/BikpL+aq8nEmTJpGSchybLZSOHfuwcuVkoqLq1Zq3b0JUexebwXKe39OUE5n0fPkb9qRt\nwmQqoEmTGKZNm8a9996L73ffOf5aVcBZN+uo1AnViiGZL4C/nxvs/zNlypQzz+Pi4oiLi6vMSzpN\nTmEhi7/7julr1pCVlY/WkfTpM5hFiyYQHBzs6vKEEJXQMDKCH6ffx1//6cOGXf/hyJEUxo0bx6lT\npxgQGEhwNZpJEx8fT3x8vNOP63DPXSllAr4GvtVaz7rANtWy53781CneWbWKed9+S4FFoXVDxowZ\nxsyZo/A5e36s9NyFqB6usOf+PyVmMyO/PszixYtQKpWwMH9G3303I++5h4iQkCt/rSpQHXruHwD7\nLxTs1ZHWmsSMDF775BM+/+UXisoMQCzTp49n/PgBeMiVbUK4FV9vb+bPf5oGDUJ57bW5nDyZyL+W\nLycrN5dn+vencWSk255Xc3QqZGdgMLBHKbUT0MALWuu1zizOmWw2G/tSUpi0aBEb9u6jtNwLT1qz\n8KkeDIwNwLh2rcv/YgshLtMVvKv28vJi6tTHiYkJZ8SIf1JUtoePvvuBk7m5vPTQQ7Rs2NAtpzo7\nFO5a681AjZlXVFZeztaDB5mwaBE7jiRjsfsR7H0tX07sSdx117jlf6wQbqWSQ6Qmk4nHH+9HVFQo\ng+6bTGHZb6zevousoiKmP/ooNzRvjslk+vNr1eAOn9unWklpKd9u28awefPYfigRiz2IxuGd+en1\n+7mzXawEuxC1hMFg4N574/huWn+igjthtgXw8/6DPDFvHht37KDUbHZ1iU7l1smWk5PDpz/+yN/f\ne48jqemUEcmNze4g/tW+tG3ayG3H2oQQ56eUomOLZvz0ej/aNOxMOXXZdzSV0fPns3zzZvKLzjvp\nr0Zyy3C32+0cOXKEd999l+c//pi0nCLKaUyvG7rw7dT+NIyMcHWJQggXalo/iu9euY+ubeOw0oBj\nWTk889FHLFy/nuQTJ9zi/qxuF+6lpaVs2bKFF198kRkzZnCqwIKNZozs1pVPJvQktBrNbxVCuE7d\nkBA+n9iLh+PuxkYTsvIsvLJsGVOWLGH7kSNYyspcXWKluNWqkFlZWWzYsIGZM2dy5MgRSku90LTi\n1cF38nS/Tnh5erq6RCFENRLk7897o7oTHeLLayt+pKAkic+3bGFfWhoT+vfnrtJS6gQF/b5DDTrB\n6hbhbrVaOXz4MIsWLWLx4sXk5eVjNocRFtaeNx9qx6C462vdokFCiMvj4+3NlIfvpEm9QMZ9sJlC\n8wH2HUvh6YULeaxrVx7q0oWmkZE1LkNqfLgXFhaybds2Zs6cyebNmyktVVitTbj11jjmzRtJq8Qj\nMiNGCHFRHiYTQ7t15PqmdRgxJ4ytSXvJyktl1ldfsf/YMZ7q04frmzfHtwbdtanSC4dd9OCXu/yA\nAw2mtSY9PZ3Vq1fz1ltvkZKSgtnsj8HQkmeffYBnn32IkJAQWUJACHFFcgoKePnTzcz6ejtwCG+P\nUhrXrcOzffvSrWNH6oWE/D7T7iqEe3VYfsBlysrKSEhIYP78+SxbtozCwhIslkgaN+7InDkjuPvu\n22UpASGEQ0IDA5k59G5ub12XsfNDOJ67n8T0TJ5dvJhdKSk83q0bLerX//2ip2qqeld3Hrm5ufzy\nyy+8/vrrbN++HbPZA62voW/f7rz11kgaNmwo89eFEJXi6eFBv07Xc13jeoxdEMq32/eSU5jEgnXr\n2JOUxPP33ceNLVoQdOlDuUyNGYzWWpOcnMz8+fMZOXIk27Zto7Q0ED+/m3jrrfF8/PEkYmJiJNiF\nEE6hlKJpVBTLJvThtUFd8DB2oKTMn80HDjFi3jyWfPcdx44dq7Zz4mtEz91sNrN7925mzZrFmjVr\nKCqyYrNF067drcybN5Ibb+wgJ02FEFeFv68vz
z4QR+fWUYyaG8K+9N2kZR9n8tKlbLfZGDt2LK1b\nt8bLy8vVpf5BtU/E7Oxsli9fzvDhw1m+fDn5+XZstpaM6BbH+qe7cPPNHSXYhRBXldFo5Pa2LVn/\nSh8evvNOymlFbqmBTz/9jFGjRrFmzRpOnjzp6jL/oNr23K1WK0eOHGHJkiV88MEH5OTkUVZWh/CA\nWGYNu4X+ndrJRUlCiCoVVacO80fdw51tInjm/RByShPYufM3xo4dy8iRI/nrX/9K48aNq8XJ1mo3\nFVJrTdZNN7F7927eeecdvv/+e8xmAzZbDF26xDGnXwtiG0Zf/lQkmQophHAyrTV7k5IZ9eUhNm2K\nx2hMwddXcffddzNmzBjatm1LnTp1HDoH6KypkFUT7peax17x9cKSEg6kpvJFdjbLli3jxIlMzGY/\nPIjluQeu55l+txDk73/xF73AsYUQwtlyb72V6dMX88Ybn6P1Qby9i6hfvz4DBw6kX79+xMbG4ufn\nd0XHdKtwL1u+nKQTJ/h+1y4WrFvHoZMnKS83Ul4eQbNmNzFvcAvirmtdLd7qCCHEGb16YbVaWbfu\nB0aNmk9Kyk48PLLw8tK0bNmSkSNH0rlzZ5o0aXLZ+eUW4f6/q0y3zZ3LgrVr+TkhAbPFhlmHY6QJ\ng+OaM21wJ6IjZIleIUT1lpyRwcTvU/j88/9gtyfh43MKb29vbrvtNoYNG0aHDh2IvIx7ttb4cM/L\ny2Pfvn0sXryY1Z9+Sm5JKWZrANCEuNbNmPRgazq3biknTYUQNYa5a1d++GET06Z9xc8//wok4+1d\nTEhIMPfddx+DBg2iVatWBAVd+PKnGhvulrIyDl1zDatXr2bRokWkpqZiMRvRxBAb1YwXH2xD75va\nEniF41RCCFFd5BQUsGLzHl5bd5SkpN0olY6XVxmNGjViyJAh9OzZk+bNm+N5ns5rjQt3m81GSnY2\nP+3Zw3vbtvHbb79hNhuw28Op49eEJ3u2YvhfbqBucLBcZSqEqPG01qS1b8+8eSuYN28deXkHMBgy\n8fVVtG/fnieeeILOnTsTHR19+lqdik6w6t275oT7yY8/ZueRI8xbu5b43bspLAerNRCTqSUPP9yF\nZzuE0rx+/Rq3XrIQQlxUr17YbDYOHDjAtGmfsWzZD9jtBzGZiggO9qNLly4MHz6c9u3bE7p5M1CD\nwn3btm0snjqVL//zH04WFGKx+wKxdOnSmSlTHuTmmzvisXbtVatBCCFc5qyZgRaLhZ9//pXJkz9j\n06bNQApeXqXUrRvOgAEDGBgVRYv69fF98MGaEe4tW7Yk6dARLDYPIJoW9Zsx5V/D6dnzrt/nf8pc\ndCGEOzrPRZaFhYV88cU6pk37gqSkbSiViZeXlebh4Yy85x5GzZ9fM8JdKR+0DqOO3zU81acNI3rc\nQJ2HH/7jhhLuQgh3dJEr6DMyMnj33ZXMnr2anJw9KDLw9zJRaLHUjHD38LiNIXfE8tz9N9A4MvL0\niQO5ilQIURtcYnkUu93O4cOHmTbtE5Z+vBY7B4D8mnEnpp9++ic3ZKTL1aVCCHEOg8FAbGwsH3zw\nD4a38uHFJTv4KWGZU4591RP35uxMkGAXQog/OmvEwgO4rU0rvpnSGP+/ujDclVILgZ5Aptb6WqdU\nIoQQ7uYKh5z9fHyc9tKOdqk/BN4BFju0t4yxCyHEVeXQLYy01puAXCfXIoQQwknk/nRCCOGGJNyF\nEMINXfVpLFOWLj3zPK5tW+Latr3aLymEEDVG/J49xO/Z4/TjOnwRk1KqEbBaa33BtFZKab1qlWOV\nCSFELeSshcMcGpZRSi0FfgauUUqlKKWGVrYQIYQQzuPQsIzWepCzCxFCCOE8ckJVCCHckIS7EEK4\nIQl3IYRwQxLuQgjhhiTchRDCDUm4CyGEG5JwF0IINyThLoQQbkjCXQgh3JCEuxBCuCEJdyGEcEMS\n7kII4YYk3
IUQwg1JuAshhBuScBdCCDck4S6EEG5Iwl0IIdyQhLsQQrghCXchhHBDEu5CCOGGJNyF\nEMINSbgLIYQbknAXQgg3JOEuhBBuSMJdCCHckIS7EEK4IQl3IYRwQxLuQgjhhhwOd6VUD6XUAaXU\nIaXUc84sSgghROU4FO5KKQMwG+gOtAYGKqVaOLMw8Ufxe/a4ugS3Iu3pXNKe1Y+jPfcbgcNa62Na\n63LgU6CP88oS55JfHueS9nQuac/qx9Fwrw+knvVxWsXnhBBCVANyQlUIIdyQ0lpf+U5K3QxM0Vr3\nqPj4eUBrrWecs92VH1wIIWo5rbWq7DEcDXcjcBC4C8gAtgADtdYJlS1ICCFE5Zkc2UlrbVNKjQHW\nc3poZ6EEuxBCVB8O9dyFEEJUb5U6oaqUClFKrVdKHVRKrVNKBV1gu4VKqUyl1G5H9q8trqA9z3sB\nmVLqJaVUmlJqR8WjR9VVXz1czsV1Sqm3lVKHlVK7lFLtrmTf2saB9mx/1uePKqV+U0rtVEptqbqq\nq69LtadSKlYp9bNSyqyUGn8l+/6J1trhBzADeLbi+XPA9AtsdyvQDtjtyP615XE57cHpP8hHgBjA\nA9gFtKj42kvAeFd/Hy5svwu2zVnb3AOsqXh+E/Dfy923tj0q054VHycBIa7+PqrL4zLbsw7QAXjl\n7N9lR34+KzsVsg/wUcXzj4C+59tIa70JyHV0/1rkctrjUheQVfosew12ORfX9QEWA2itfwWClFIR\nl7lvbVOZ9oTTP4sy3fp3l2xPrfVJrfV2wHql+56rsg1fV2udWVHUCaBuFe/vbi6nPS51AdmYirfH\n79fCYa7LubjuQtvIhXl/5kh7pp+1jQY2KKW2KqWGXbUqa47K/Ixd8b6XnC2jlNoARJz9KU7/p714\nns0re3bW7c/uXuX2nAu8rLXWSqlpwP8Bf3Oo0NqjNr/Tudo6a60zlFLhnA75hIp38aIKXDLctdZd\nL/S1ipOkEVrrTKVUJJB1ha9f2f1rHCe0ZzrQ8KyPG1R8Dq119lmfXwCsdkLJNckF2+acbaLPs43n\nZexb21SmPdFaZ1T8m62UWsHpoYXaHO6X055O27eywzKrgCEVzx8FvrrItoo/95KuZP/a4HLaYyvQ\nTCkVo5TyBAZU7EfFH4T/6Q/svXqlVksXbJuzrAIegTNXWudVDIVdzr61jcPtqZTyVUr5V3zeD+hG\n7ft5PNeV/oydnZdX/vNZybO/ocBGTl+tuh4Irvh8PeDrs7ZbChwHLEAKMPRi+9fWxxW0Z4+KbQ4D\nz5/1+cXAbk6fSV8JRLj6e3JBG/6pbYARwPCztpnN6ZkHvwHXX6pda/PD0fYEGlf8HO4E9kh7Xl57\ncnrINhXIA3Iq8tL/Qvte7CEXMQkhhBuSaUpCCOGGJNyFEMINSbgLIYQbknAXQgg3JOEuhBBuSMJd\nCCHckIS7EEK4IQl3IYRwQ/8PLnzfv0obBikAAAAASUVORK5CYII=\n", 342 | "text/plain": [ 343 | "" 344 | ] 345 | }, 346 | "metadata": {}, 347 | "output_type": "display_data" 348 | } 349 | ], 350 | "source": [ 351 | "# Zoom in\n", 352 | "f = plt.figure(figsize=(6, 4))\n", 353 | "ax = f.add_subplot(111)\n", 354 | "model.plot(axis=ax)\n", 355 | "# for x in data:\n", 356 | "# ax.axvline(x, alpha=0.002)\n", 357 | "for phi, nphi in zip(phis, nphis):\n", 358 | " plot_sample(phi, nphi, axis=ax, alpha=0.1, c='k')\n", 359 | 
"ax.set_xlim(-0.1, 0.1)\n", 360 | "ax.hist(data, 1000, alpha=0.3, color='r', normed=True)\n", 361 | "f.show()" 362 | ] 363 | }, 364 | { 365 | "cell_type": "code", 366 | "execution_count": 57, 367 | "metadata": { 368 | "collapsed": false 369 | }, 370 | "outputs": [ 371 | { 372 | "data": { 373 | "text/plain": [ 374 | "[[9818, 10182],\n", 375 | " [9929, 10069, 2],\n", 376 | " [9938, 10062],\n", 377 | " [9834, 10166],\n", 378 | " [9880, 10119, 1],\n", 379 | " [9839, 10122, 39],\n", 380 | " [9893, 10087, 20],\n", 381 | " [9839, 10137, 18, 6],\n", 382 | " [9946, 10018, 36],\n", 383 | " [9943, 9975, 82],\n", 384 | " [9842, 9942, 216],\n", 385 | " [9771, 9855, 374],\n", 386 | " [9966, 9805, 229],\n", 387 | " [9739, 10083, 178],\n", 388 | " [9863, 10015, 122],\n", 389 | " [9870, 10006, 106, 18],\n", 390 | " [9954, 9924, 82, 35, 5],\n", 391 | " [9993, 9900, 62, 33, 12],\n", 392 | " [9776, 10006, 53, 18, 63, 84],\n", 393 | " [9865, 10009, 1, 54, 71],\n", 394 | " [9773, 9988, 81, 158],\n", 395 | " [9936, 9882, 29, 153],\n", 396 | " [9842, 10060, 2, 96],\n", 397 | " [9944, 10037, 19],\n", 398 | " [9895, 10093, 12],\n", 399 | " [9838, 10162],\n", 400 | " [9798, 10202],\n", 401 | " [9857, 10137, 6],\n", 402 | " [9857, 10143],\n", 403 | " [10015, 9985],\n", 404 | " [9999, 10001],\n", 405 | " [9832, 10168],\n", 406 | " [9932, 10068],\n", 407 | " [10013, 9985, 2],\n", 408 | " [9911, 10087, 2],\n", 409 | " [9918, 10073, 9],\n", 410 | " [9893, 10107],\n", 411 | " [9989, 10011],\n", 412 | " [9926, 10074],\n", 413 | " [9907, 10093],\n", 414 | " [9968, 10029, 3],\n", 415 | " [9872, 10128],\n", 416 | " [9927, 10070, 3],\n", 417 | " [9905, 10081, 14],\n", 418 | " [9951, 10044, 5],\n", 419 | " [9902, 10098],\n", 420 | " [9870, 10130],\n", 421 | " [10000, 9994, 5, 1],\n", 422 | " [9994, 10005, 1],\n", 423 | " [10000, 9999, 1]]" 424 | ] 425 | }, 426 | "execution_count": 57, 427 | "metadata": {}, 428 | "output_type": "execute_result" 429 | } 430 | ], 431 | "source": [ 432 | "nphis" 433 | ] 434 
| }, 435 | { 436 | "cell_type": "code", 437 | "execution_count": null, 438 | "metadata": { 439 | "collapsed": true 440 | }, 441 | "outputs": [], 442 | "source": [] 443 | } 444 | ], 445 | "metadata": { 446 | "kernelspec": { 447 | "display_name": "Python 2", 448 | "language": "python", 449 | "name": "python2" 450 | }, 451 | "language_info": { 452 | "codemirror_mode": { 453 | "name": "ipython", 454 | "version": 2 455 | }, 456 | "file_extension": ".py", 457 | "mimetype": "text/x-python", 458 | "name": "python", 459 | "nbconvert_exporter": "python", 460 | "pygments_lexer": "ipython2", 461 | "version": "2.7.11" 462 | } 463 | }, 464 | "nbformat": 4, 465 | "nbformat_minor": 0 466 | } 467 | --------------------------------------------------------------------------------