├── .gitignore
├── INSTALL
├── Lib
├── __init__.py
├── mcmc.py
├── mcmc_utilities.py
├── mcmcplots.py
└── regtools.py
├── MANIFEST
├── README
├── Src
├── stochsearch.f
├── testreg.py
├── timeseriesfunc.f
└── wishart.f
├── __init__.py
├── data
├── count.txt
└── yld2.txt
├── dist
├── pymcmc-1.0.tar.gz
└── pymcmc-1.0.win32-py2.6.exe
├── doc
├── .gitignore
├── PyMCMC.tex
├── chris.bib
├── ex_loglinear.pdf
├── example1_section3.1.py
├── example2_section3.2.py
├── example3_section3.3.py
├── example4_section4.py
├── example5_section5.R
├── example5_section5.py
├── flowchart.pdf
├── jss.bst
├── jsslogo.jpg
├── mpdreg.pdf
└── rho.pdf
├── examples
├── ex_AR1.py
├── ex_loglinear.py
├── ex_variable_selection.py
├── loglinear.f
├── matplotlibrc
└── using_pymcmc_efficiently.py
└── setup.py
/.gitignore:
--------------------------------------------------------------------------------
1 | build
2 |
3 |
--------------------------------------------------------------------------------
/INSTALL:
--------------------------------------------------------------------------------
1 | INSTALL instructions
2 | ====================
3 |
4 | Pymcmc should build relatively easily under most linux/
5 | unix platforms. For example, under ubuntu, to prepare,
6 | running the commands:
7 |
8 | sudo apt-get install python-scipy libatlas3gf-sse2
9 | sudo python setup.py install
10 |
11 | should be all you need to do.
12 |
13 | For other platforms, we recommend you install the binary versions,
14 | but if you need to build from source, see the instructions later
15 | in this file.
16 |
17 | Requirements:
18 | =============
19 | *Python*
20 | Pymcmc was built using Python 2.6. You will also need
21 | the following python packages:numpy, scipy, matplotlib.
22 | Each of these should be relatively easy to install under
23 | most platforms. See the web pages for more information.
24 |
25 | *ATLAS*
26 | Some functions require blas/lapack. We recommend installing
27 | atlas for your platform to get the best performance. Most
28 | distributions will have precompiled versions available,
29 | but if you need to build your own, there are numerous web
30 | pages with instructions. See, for example, the scipy wiki
31 | at http://www.scipy.org/Installing_SciPy/Linux
32 |
33 | If you install the windows or Mac binary of pymcmc, you
34 | will not need to install atlas unless you want to create
35 | new functions using fortran or f2py which require ATLAS.
36 | See the windows build instructions further down for information
37 | on building ATLAS on windows.
38 |
39 | *optional packages*
40 | The python package pysparse (http://pysparse.sourceforge.net/)
41 | is used in one of the examples.
42 | If you prefer to analyse the posterior samples in R, you will
43 | need to install R. Rpy2 allows you to call R from within python,
44 | and can be useful.
45 |
46 |
47 | INSTALLING
48 | ==========
49 | Most users should just need to run:
50 | python setup.py install
51 |
52 | If your ATLAS libraries are in places that aren't detected
53 | by python, you may have to modify setup.py. See the comments
54 | in that file for more information.
55 |
56 | WINDOWS USERS
57 | =============
58 | The easiest way to get started using pymcmc is to install
59 | python2.6, numpy, scipy, and matplotlib using windows binaries
60 | (see the websites of each of these for information on this).
61 | Then install the windows binary of pymcmc. If you need to
62 | build pymcmc from source, here are some guidelines:
63 |
64 | *building from source under windows*
65 | Install python, numpy,scipy,matplotlib. For building ATLAS,
66 | look at http://www.scipy.org/Installing_SciPy/Windows, and
67 | follow http://nipy.sourceforge.net/nipy/stable/devel/install/windows_scipy_build.html
68 | and also http://www.mingw.org/wiki/msys/.
69 | I installed MSYS 1.0.11. The DTK provides everything you need (bzip2,
70 | diffutils, gawk,make,binutils)
71 |
72 | Install cygwin, and build atlas from there.
73 | To use gfortran, you might have to change your path, for example:
74 | export PATH=/usr/local/bin:/bin:/c/Program\ Files/gfortran/libexec/gcc/i586-pc-mingw32/4.6.0:/c/Program\ Files/gfortran/bin:/c/python26
75 |
76 | Unpack the pymcmc archive, and modify the setup.py file to make sure
77 | the library_dirs are set correctly for your system, for example
78 | library_dirs = ["/d/tmp/pymcmc_win_install/BUILDS/lib/"].
79 |
80 | Run
81 | python setup.py build
82 |
83 | This may fail, due to the order of some of the libraries, so you may
84 | need to have the python linking first.
85 | So, when it fails, you can manually do
86 |
87 | ## (modify the paths to suit your environment):
88 | p1=build/temp.win32-2.6/Release/build/src.win32-2.6/
89 | p2=build/temp.win32-2.6/Release/Src/
90 | p3=build/lib.win32-2.6/
91 | gfortran.exe -Wall -mno-cygwin -Wall -mno-cygwin -shared \
92 | ${p1}/stochsearchmodule.o \
93 | ${p1}/fortranobject.o ${p2}/stochsearch.o \
94 | ${p1}/stochsearch-f2pywrappers.o \
95 | -L c:/python26/libs -lpython26 \
96 | -L/d/tmp/pymcmc_win_install/BUILDS/lib/ \
97 | -llapack -lcblas -lf77blas -latlas -lg2c -o ${p3}/stochsearch.pyd
98 |
99 | python setup.py build
100 |
101 | gfortran.exe -Wall -mno-cygwin -Wall -mno-cygwin -shared \
102 | ${p1}wishartmodule.o \
103 | ${p1}/fortranobject.o \
104 | ${p2}/wishart.o \
105 | -L c:/python26/libs -lpython26 \
106 | -L/d/tmp/pymcmc_win_install/BUILDS/lib/ \
107 | -llapack -lcblas -lf77blas -latlas -lg2c -o ${p3}/wishart.pyd
108 |
109 | python setup.py build
110 |
111 | then
112 |
113 | python setup.py bdist
114 |
115 | or
116 |
117 | python setup.py bdist --format=wininst
118 |
119 | To run example ex_AR1.py, you will also need the python module pysparse.
120 | To build a windows binary, modify the setup.py to include the library_dirs_list
121 | as for pymcmc. You will also have the same compile problem as before.
122 |
123 | p1=build/temp.win32-2.6/Release/src/
124 | p2=build/lib.win32-2.6/pysparse/
125 | dllwrap.exe -mno-cygwin -mdll -static --entry _DllMain@12 \
126 | --output-lib ${p1}/libitsolvers.a \
127 | --def ${p1}/itsolvers.def \
128 | -s ${p1}/itsolversmodule.o \
129 | ${p1}/pcg.o ${p1}/gmres.o \
130 | ${p1}/minres.o \
131 | ${p1}/qmrs.o \
132 | ${p1}/bicgstab.o \
133 | ${p1}/cgs.o \
134 | -L/c/python26/libs -lpython26 \
135 | -L/d/tmp/pymcmc_win_install/BUILDS/lib/ \
136 | -llapack -lcblas -lf77blas -latlas -lmsvcr90 -o ${p2}/itsolvers.pyd
137 |
138 | dllwrap.exe -mno-cygwin -mdll -static --entry _DllMain@12 \
139 | --output-lib ${p1}libprecon.a \
140 | --def ${p1}/precon.def \
141 | -s ${p1}/preconmodule.o \
142 | -L/c/python26/libs -lpython26 \
143 | -L/d/tmp/pymcmc_win_install/BUILDS/lib/ \
144 | -llapack -lcblas -lf77blas -latlas -lmsvcr90 -o ${p2}/precon.pyd
145 |
146 | dllwrap.exe -mno-cygwin -mdll -static --entry _DllMain@12 \
147 | --output-lib ${p1}/libjdsym.a \
148 | --def ${p1}jdsym.def \
149 | -s ${p1}jdsymmodule.o \
150 | -L/c/python26/libs -lpython26 \
151 | -L/d/tmp/pymcmc_win_install/BUILDS/lib/ \
152 | -llapack -lcblas -lf77blas -latlas -o ${p2}/jdsym.pyd -lg2c
153 |
154 |
155 | python setup.py build --compiler mingw32
156 |
157 | p3=build/temp.win32-2.6/Release/
158 | dllwrap.exe -mno-cygwin -mdll -static --entry _DllMain@12 --output-lib ${p1}/libsuperlu.a --def ${p1}/superlu.def -s ${p1}/superlumodule.o ${p3}/superlu/dcolumn_bmod.o ${p3}/superlu/dcolumn_dfs.o ${p3}/superlu/dcomplex.o ${p3}/superlu/scomplex.o ${p3}/superlu/dcopy_to_ucol.o ${p3}/superlu/dgscon.o ${p3}/superlu/dgsequ.o ${p3}/superlu/dgsrfs.o ${p3}/superlu/dgssv.o ${p3}/superlu/dgssvx.o ${p3}/superlu/dgstrf.o ${p3}/superlu/dgstrs.o ${p3}/superlu/dlacon.o ${p3}/superlu/dlamch.o ${p3}/superlu/dlangs.o ${p3}/superlu/dlaqgs.o ${p3}/superlu/dmemory.o ${p3}/superlu/colamd.o ${p3}/superlu/dpanel_bmod.o ${p3}/superlu/dpanel_dfs.o ${p3}/superlu/dpivotl.o ${p3}/superlu/dpivotgrowth.o ${p3}/superlu/dprunel.o ${p3}/superlu/dreadhb.o ${p3}/superlu/dsnode_bmod.o ${p3}/superlu/dsnode_dfs.o ${p3}/superlu/dsp_blas2.o ${p3}/superlu/dsp_blas3.o ${p3}/superlu/superlu_timer.o ${p3}/superlu/dutil.o ${p3}/superlu/dzsum1.o ${p3}/superlu/get_perm_c.o ${p3}/superlu/icmax1.o ${p3}/superlu/izmax1.o ${p3}/superlu/lsame.o ${p3}/superlu/memory.o ${p3}/superlu/mmd.o ${p3}/superlu/relax_snode.o ${p3}/superlu/sp_coletree.o ${p3}/superlu/sp_ienv.o ${p3}/superlu/sp_preorder.o ${p3}/superlu/util.o ${p3}/superlu/xerbla.o -Lc:/python26/libs -lpython26 -L/d/tmp/pymcmc_win_install/BUILDS/lib/ -llapack -lcblas -lf77blas -latlas -lmsvcr90 -lg2c -o build/lib.win32-2.6/pysparse/superlu.pyd
159 |
160 | python setup.py bdist --format=wininst
161 |
162 |
163 | MAC Users:
164 | =============
165 | The easiest way to get started using pymcmc is to install
166 | python2.6, numpy, scipy, matplotlib using mac binaries
167 | (see the websites of each of these for information on this).
168 | Then install the macintosh binary of pymcmc. This is a simple
169 | zip file, and you should unzip it and move the archive to the
170 | appropriate place. If you unzip it from / it will install into
171 | ./Library/Frameworks/Python.framework/Versions/2.6/lib/python2.6/site-packages.
172 |
173 | *building from source*
174 | If you need to build pymcmc from source, here are some guidelines:
175 |
176 | Get a recent version of python, I used python2.6. It is generally
177 | recommended to not use the mac inbuilt version of python. Then follow
178 | the instructions to get scipy installed, including getting developer
179 | tools. I'm using 10.5.8, so I downloaded 3.1.3 (994.6MB!)
180 |
181 | You also need a fortran compiler, I got gfortran after following
182 | links on building R for mac (http://r.research.att.com/tools/).
183 |
184 | I couldn't get gfortran to link without using the dynamic version of
185 | libgfortran. In the end, I temporarily moved the dynamic libs and
186 | created a link from libgfortran.a to libgfortran_static.a and put libs
187 | = ['libgfortran_static'] in setup.py
188 |
189 | Then,
190 | python setup.py bdist
191 | and distribute.
192 |
193 |
--------------------------------------------------------------------------------
/Lib/__init__.py:
--------------------------------------------------------------------------------
1 | ## Figures out which function we expose
2 | ## eg
3 | ## from stream_function_wrapper import zm_msf
4 |
--------------------------------------------------------------------------------
/Lib/mcmc_utilities.py:
--------------------------------------------------------------------------------
1 | #MCMC utilities for PyMCMC - A Python package for Bayesian estimation
2 | #Copyright (C) 2010 Chris Strickland
3 |
4 | #This program is free software: you can redistribute it and/or modify
5 | #it under the terms of the GNU General Public License as published by
6 | #the Free Software Foundation, either version 3 of the License, or
7 | #(at your option) any later version.
8 |
9 | #This program is distributed in the hope that it will be useful,
10 | #but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | #GNU General Public License for more details.
13 |
14 | #You should have received a copy of the GNU General Public License
15 | #along with this program. If not, see <http://www.gnu.org/licenses/>. # File containing mcmc utilities used by pymcmc.
16 |
17 | import numpy as np
18 | import timeseriesfunc
19 |
def hpd(x, alpha):
    '''
    Highest posterior density interval.

    x     - one dimensional array of posterior draws.
    alpha - tail probability; the returned interval is the shortest
            interval [a, b] whose endpoints are sample order statistics
            spanning approximately 100(1 - alpha)% of the draws.

    Returns [lower, upper].
    '''
    n = len(x)
    # number of candidate endpoints on each side; int() is required
    # because np.ceil returns a float, which is not a valid slice index
    m = max(1, int(np.ceil(alpha * n)))
    x2 = x.copy()
    x2.sort()
    # a[i]..b[i] are candidate intervals; pick the narrowest one
    a = x2[0:m]
    b = x2[(n - m):n]
    i = np.argmin(b - a)
    return [a[i], b[i]]
32 |
33 |
class InefficiencyFactor:
    """
    Estimates the inefficiency factor (integrated autocorrelation time)
    of MCMC output, using a Quadratic Spectral kernel with the
    data-driven bandwidth of Andrews (1991). A value near 1 indicates
    draws that behave like an iid sample; larger values indicate slower
    mixing.

    Changes from the original: a large block of commented-out duplicate
    methods was removed, and the bandwidth is cast to int before being
    used as an array shape (np.zeros rejects float shapes).
    """

    def __init__(self):
        self.IFactor = 0.0
        self.Bandwidth = 0.0
        self.MCSE = 0.0

    def calculate(self, mc):
        """
        Compute inefficiency factors for mc: a 1D array of draws, a 2D
        array (draws x parameters), or a 3D array (draws x rows x cols).
        Returns a scalar, vector or matrix respectively; entries whose
        computation fails are flagged with -9999 rather than aborting
        the whole summary.
        """
        if mc.ndim == 1:
            try:
                return self.compute(mc)
            except:
                return -9999

        elif mc.ndim == 2:
            ifvec = np.zeros(mc.shape[1])
            for i in xrange(mc.shape[1]):
                try:
                    ifvec[i] = self.compute(mc[:, i])
                except:
                    ifvec[i] = -9999
            return ifvec

        else:
            ifmat = np.zeros((mc.shape[1], mc.shape[2]))
            for i in xrange(mc.shape[1]):
                for j in xrange(mc.shape[2]):
                    try:
                        ifmat[i, j] = self.compute(mc[:, i, j])
                    except:
                        ifmat[i, j] = -9999

            return ifmat

    def compute(self, mc):
        """Inefficiency factor of the 1D array of draws mc."""
        self.Bandwidth = np.ceil(self.calc_b(mc)) + 1
        QS = self.QSkernel(self.Bandwidth)
        # int cast: np.ceil returns a float, not a valid array shape
        corr = np.zeros(int(self.Bandwidth))
        timeseriesfunc.acf(mc, corr)  # fills corr in place (Fortran)
        product = QS * corr
        sumproduct = sum(product)
        IF = 1. + 2. * (float(self.Bandwidth) / (float(self.Bandwidth) - 1.)) * sumproduct
        return IF

    def QSkernel(self, B):
        """Quadratic Spectral kernel weights for lags 1..B."""
        ind = map(lambda x: x / B, range(1, int(B) + 1))
        ind = np.array(ind)
        d = 6. * np.pi * ind / 5.
        a = 25. / (12. * np.pi ** 2 * ind ** 2)
        b = np.sin(d) / d
        c = np.cos(d)
        QS = a * (b - c)
        return QS

    def calc_b(self, mc):
        """
        Andrews (1991) automatic bandwidth, based on fitting an AR(1)
        with intercept to the draws by least squares.
        """
        n = mc.shape[0]
        xmat = np.vstack([np.ones(n - 1), mc[0:n - 1]]).transpose()
        yvec = mc[1:n]
        xpx = np.dot(xmat.transpose(), xmat)
        xpy = np.dot(xmat.transpose(), yvec)
        beta = np.linalg.solve(xpx, xpy)
        res = mc[1:n] - np.dot(xmat, beta)
        sigsq = sum(res ** 2) / float(n - 2)
        a = 4. * beta[1] ** 2 * sigsq ** 2 / ((1. - beta[1]) ** 8)
        b = sigsq ** 2 / ((1 - beta[1]) ** 4)
        alpha = a / b
        B = 1.3221 * (alpha * n) ** (1. / 5.)
        return B
162 |
163 |
--------------------------------------------------------------------------------
/Lib/mcmcplots.py:
--------------------------------------------------------------------------------
1 | #Plotting tools for PyMCMC. PyMCMC is a Python package for Bayesian Analysis
2 | #Copyright (C) 2010 Chris Strickland
3 |
4 | #This program is free software: you can redistribute it and/or modify
5 | #it under the terms of the GNU General Public License as published by
6 | #the Free Software Foundation, either version 3 of the License, or
7 | #any later version.
8 |
9 | #This program is distributed in the hope that it will be useful,
10 | #but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | #GNU General Public License for more details.
13 |
14 | #You should have received a copy of the GNU General Public License
15 | #along with this program. If not, see <http://www.gnu.org/licenses/>.
15 | import matplotlib.mlab as mlab
16 |
17 | import matplotlib.pyplot as plt
18 | import scipy.stats as stats
19 | import timeseriesfunc
20 | import numpy as np
21 |
22 |
# Greek letter names recognised for automatic LaTeX labelling of
# parameters in plots: the lower-case letters with their variant forms,
# followed by the capital letters that differ from the Latin alphabet.
# Fix: the original listed 'gamma' twice; the second entry should be
# 'iota' (standard table row: gamma, iota, varpi, phi).
latexsymbols = ['alpha', 'theta', 'tau', \
                'beta', 'vartheta', 'pi', 'upsilon', \
                'gamma', 'iota', 'varpi', 'phi', \
                'delta', 'kappa', 'rho', 'varphi', \
                'epsilon', 'lambda', 'varrho', 'chi', \
                'varepsilon', 'mu', 'sigma', 'psi', \
                'zeta', 'nu', 'varsigma', 'omega', \
                'eta', 'xi', \
                'Gamma', 'Lambda', 'Sigma', 'Psi', \
                'Delta', 'Xi', 'Upsilon', 'Omega', \
                'Theta', 'Pi', 'Phi']
34 |
35 |
def PlotACF(x, maxlag, title):
    """Plot the sample autocorrelation function of x up to maxlag lags."""
    # zero reference line across the full width of the plot
    plt.plot([-1, maxlag], [0, 0], '-', color='k')
    plt.grid(True)
    acfvals = np.zeros(maxlag)
    timeseriesfunc.acf(x, acfvals)  # fills acfvals in place
    # one vertical stem per lag
    for lag, rho in enumerate(acfvals):
        plt.plot([lag, lag], [rho, 0], '-', color='b')
    plt.title(title)
    plt.xlabel("lag")
    # approximate 95% confidence band for a white-noise series
    band = 2 / np.sqrt(len(x))
    for level in (band, -band):
        plt.plot([0, maxlag], [level, level], 'b--')
    plt.ylim(ymax=1.2)
    return None
52 |
def PlotIterates(x, title, maxntick=10):
    """Trace plot of the iterates in x with at most maxntick axis ticks."""
    # walk up the candidate tick spacings until few enough ticks remain
    for spacing in (1, 10, 100, 200, 500, 1000, 2000, 5000, 10000, 100000):
        nticks = len(x) / spacing    # integer division under Python 2
        if nticks <= maxntick:
            break
    plt.plot(x, 'k-')
    plt.xticks(np.arange(nticks) * spacing)
    plt.title(title)
    plt.xlabel("Iteration")
66 |
67 |
def PlotMarginalPost(x, title, plottype="both", maxntick=20):
    '''
    Plot the marginal posterior density of the sample x.
    plottype may be "both", "line" or "histogram"; only the first
    letter is inspected.
    '''
    plt.grid(True)
    wants_hist = plottype.startswith('b') or plottype.startswith('h')
    wants_line = plottype.startswith('b') or plottype.startswith('l')
    if wants_hist:
        n, bins, patches = plt.hist(x, 50, normed=1, facecolor='green', alpha=0.75)
    if wants_line:
        # kernel density estimate evaluated on an even grid over the data
        grid = np.linspace(min(x) * 1.0, max(x) * 1.0, 101)
        gkde = stats.gaussian_kde(x)
        plt.plot(grid, gkde.evaluate(grid), label='kde', color="k")
    tickpos = np.round(np.linspace(min(x) * 1.0, max(x) * 1.0, maxntick), 2)
    plt.xticks(tickpos)
    plt.title(title)
85 |
86 |
--------------------------------------------------------------------------------
/Lib/regtools.py:
--------------------------------------------------------------------------------
1 | # A Bayesian regression module for PyMCMC. PyMCMC is a Python package for
2 | # Bayesian analysis.
3 | # Copyright (C) 2010 Chris Strickland
4 |
5 | # This program is free software: you can redistribute it and/or modify
6 | # it under the terms of the GNU General Public License as published by
7 | # the Free Software Foundation, either version 3 of the License, or
8 | # (at your option) any later version.
9 |
10 | # This program is distributed in the hope that it will be useful,
11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 | # GNU General Public License for more details.
14 |
15 | # You should have received a copy of the GNU General Public License
16 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
17 | # python file for conjugate priors
18 |
19 | import types
20 | import os
21 | from os import sys
22 | import numpy as np
23 | from scipy import special
24 | from stochsearch import*
25 | import matplotlib.pyplot as plt
26 | import scipy.stats.distributions as dstn
27 | import wishart
28 | import pdb
29 |
30 |
31 | class StochasticSearch:
32 | """
33 | StochasticSearch is a class that is called from RegSampler and used
34 | when the user wishes to use the stochastic search to select the
35 | regressors in a regression
36 | """
37 | def __init__(self, yvec, xmat, prior):
38 | self.nobs = yvec.shape[0]
39 | self.kreg = xmat.shape[1]
40 | self.yvec = yvec
41 | self.xmat = xmat
42 | self.ypy = np.dot(yvec.T, yvec)
43 | self.xpy = np.dot(xmat.T, yvec)
44 | self.xpx = np.asfortranarray(np.dot(xmat.T, xmat))
45 | self.xgxg = np.zeros((self.kreg, self.kreg), order = 'F' )
46 | self.work2 = np.zeros((self.kreg, self.kreg), order = 'F' )
47 | self.xgy = np.zeros(self.kreg)
48 | self.gam = np.zeros(self.kreg, dtype = 'i')
49 | self.gam[0] = 1
50 | self.ru = np.zeros(self.kreg)
51 | self.rn = np.zeros(self.kreg)
52 | self.work = np.zeros((self.kreg, 6), order = 'F')
53 | self.ifo = np.array(0, dtype = 'i')
54 | self.ifo2 = np.array(0, dtype = 'i')
55 | if prior[0] == 'g_prior':
56 | self.work[:, 0] = prior[1]
57 | self.g = prior[2]
58 | self.__samplegam = self.__sim_gamma_gprior
59 | elif prior[0] == 'normal_inverted_gamma':
60 | self.nu = prior[1]
61 | self.nuo = self.nu + self.nobs
62 | self.nus = prior[2]
63 | self.R = np.asfortranarray(prior[3])
64 | self.D = np.asfortranarray(prior[4])
65 | self.logdetR = 2.0 * np.sum(np.diag(np.linalg.cholesky(self.R)))
66 | self.vxy = self.xgy
67 | self.vobar = self.xgxg
68 | self.vubar = self.work2
69 | self.nuobar = self.nu + self.nobs
70 | self.__samplegam = self.__sim_gamma_nig
71 | self.__samplegam_cond_beta = self.__sim_gamma_nig_cond_beta
72 | else:
73 | raise NameError("prior incorrectly specified")
74 |
75 | # internal storage for stochastic search
76 | self.store = [[], []]
77 |
78 | def __sim_gamma_gprior(self):
79 | ssreg(self.ypy, self.g, self.xpx, self.xpy,
80 | self.xgxg, self.xgy, self.gam, \
81 | self.ru, self.work, self.work2,
82 | self.ifo, self.ifo2, self.nobs)
83 |
84 | def __sim_gamma_nig(self):
85 | self.initialise_vubar()
86 | ssreg_nig(self.ypy, self.logdetR, self.nus, self.vxy, self.vobar,
87 | self.vubar, self.gam, self.xpx, self.xpy, self.D,
88 | self.R, self.nuobar, self.ru)
89 |
90 |
91 | def __sim_gamma_nig_cond_beta(self,sig, beta):
92 | """samples gamma conditional on beta"""
93 | self.initialise_vubar()
94 | ssregcbetas_nig(beta, sig, self.vxy, self.logdetR, self.vubar,
95 | self.gam, self.D, self.R, self.ru)
96 |
97 | def initialise_vubar(self):
98 | initialise_vubar(self.vubar, self.gam, self.D, self.R)
99 |
100 | def sample_gamma(self, store):
101 | it = store['iteration']
102 | burn = store['length_of_burnin']
103 | # self.gam = gamvec.astype('i')
104 | self.ru = np.random.rand(self.kreg)
105 | self.__samplegam()
106 | if it >= burn:
107 | self.update_store()
108 | return self.gam
109 |
110 | def sample_gamma_cond_beta(self, store,sig, beta):
111 | it = store['iteration']
112 | burn = store['length_of_burnin']
113 | self.ru = np.random.rand(self.kreg)
114 | self.__samplegam_cond_beta(sig, beta)
115 | if it >= burn:
116 | self.update_store()
117 | return self.gam
118 |
119 |
120 | def update_store(self):
121 | """function updates internal storage for gamma"""
122 | gammai = int("".join([str(i) for i in self.gam]))
123 | if gammai in self.store[0]:
124 | index = self.store[0].index(gammai)
125 | self.store[1][index] = self.store[1][index] + 1
126 | else:
127 | self.store[0].append(gammai)
128 | self.store[1].append(1)
129 |
130 | def __extract_position(self,i,ind):
131 | modstr = str(self.store[0][ind[i]])
132 | modnum=len(modstr)*[False]
133 | for j in xrange(len(modstr)):
134 | if modstr[j] =='1':
135 | modnum[j] = True
136 | return modnum
137 |
138 | def extract_regressors(self, model_number):
139 | '''returnrs a design matrix containing just the regressors
140 | correponding to the specified model_number
141 | '''
142 | arrind = np.array(self.store[1])
143 | ind = np.argsort(arrind)[::-1]
144 | modnum = self.__extract_position(model_number, ind)
145 | tmpxmat = np.compress(modnum, self.xmat, axis = 1)
146 | return tmpxmat
147 |
148 | def output(self, destination):
149 | """
150 | produce additional output for StochasticSearch
151 | This is an example of a custom output. The requirement
152 | is it needs to have a destination which is handled by
153 | the generic output function.
154 |
155 | """
156 |
157 | colwidth = 12
158 | sigfigs = 7
159 | arrind = np.array(self.store[1])
160 | ind = np.argsort(arrind)[::-1]
161 | total = sum(arrind)
162 | hline = "-".ljust(5 * colwidth, '-')
163 | print >>destination
164 | print >>destination,\
165 | "Most likely models ordered by decreasing posterior probability"
166 | print >>destination
167 | print >>destination, hline
168 | print >>destination, """\
169 | {0: <{colwidth}s}| \
170 | {1: <{colwidth}s}""".format("probability", "model", colwidth = colwidth)
171 | print >>destination, hline
172 | for i in xrange(min(10, ind.shape[0])):
173 | modnum = self.__extract_position(i, ind)
174 | modstr = []
175 | for j in range(len(modnum)):
176 | if modnum[j]:
177 | modstr.append(str(j))
178 | print >>destination, """\
179 | {1: <{colwidth}.{sigfigs}g}| \
180 | {0: <{colwidth}s}""".format(
181 | ', '.join(modstr),
182 | float(self.store[1][ind[i]])/total,
183 | colwidth = colwidth,
184 | sigfigs = sigfigs)
185 | print >>destination, hline
186 |
187 |
188 | class BayesRegression:
189 | """
190 | BayesRegression is a class for Bayesian regression. By default this class uses
191 | Jeffrey's prior. Arguments:
192 |
193 | yvec - Is a one dimensional numpy array containing the dependent
194 | variable.
195 | xmat - Is a two dimensional numpy array containing the regressors.
196 | kwargs - Optional arguments:
197 | prior - a list containing the name of the prior and the
198 | corresponding hyperparameters. Examples:
199 | prior = ['normal_gamma', nuubar, Subar, betaubar, Vubar]
200 | prior = ['normal_inverted_gamma', nuubar, Subar, betaubar, Vubar]
201 |
202 | prior = ['g_prior', betaubar, g].
203 | If none of these options are chosen or they are
204 | mis-specified then BayesRegression will default to
205 | Jeffreys prior.
206 |
207 | """
    def __init__(self, yvec, xmat, **kwargs):
        # dimensions and data; a 1D xmat is promoted to a single column
        self.nobs = yvec.shape[0]
        self.yvec = yvec
        if xmat.ndim == 1:
            self.xmat = xmat.reshape(self.nobs, 1)
        else:
            self.xmat = xmat
        # cross products X'X and X'y, shared by all priors
        self.xpx = np.dot(self.xmat.T, self.xmat)
        self.xpy = np.dot(self.xmat.T, yvec)
        self.kreg = self.xmat.shape[1]
        # posterior quantities, filled in by the __calculate_* methods
        self.vobar = np.zeros((self.kreg, self.kreg))
        self.betaobar = np.zeros(self.kreg)
        # flags noting xmat / yvec were replaced since the cross
        # products were last computed
        self.updateind_xmat = 0
        self.updateind_yvec = 0
        self.calculated = False
        self.nuobar = 0.0
        self.sobar = 0.0
        self.vbobar = np.zeros(self.kreg)
        # NOTE(review): cholvobar starts as zeros and sample() solves
        # against it -- presumably the scale sampler factors vobar into
        # it first; confirm in __sample_* (not visible in this chunk)
        self.cholvobar = np.zeros((self.kreg, self.kreg))
        if 'prior' not in kwargs:
            # default: Jeffreys prior
            self.res = np.zeros(self.nobs)
            self.__calculate = self.__calculate_jeffreys
            self.__sample_scale = self.__sample_standard_deviation
            self.__log_cand_prob = self.__log_cand_pr_sig_jeff
            self.prior = ['Jeffreys']
            self.__posterior_variance_scale = self.__posterior_sigma_var
            self.__posterior_mean_scale = self.__posterior_sigma_mean
            self.__log_marginal_likelihood = self.__log_marginal_likelihood_jeff
        else: # Normal - gamma prior
            self.prior = kwargs['prior']
            # fall back to Jeffreys when the prior name is not a string
            if type(self.prior[0])!= types.StringType:
                print "Warning: Jefferys prior used as prior was \
incorectly specified"
                self.res = np.zeros(self.nobs)
                self.__calculate = self.__calculate_jeffreys
                self.__sample_scale = self.__sample_standard_deviation
                self.__log_cand_prob = self.__log_cand_pr_sig
                self.__posterior_variance_scale = self.__posterior_sigma_var
                self.__posterior_mean_scale = self.__posterior_sigma_mean
                self.__log_marginal_likelihood = self.__log_marginal_likelihood_jeff
            else:
                ptype = self.prior[0]
                # fall back to Jeffreys on an unrecognised prior name
                if ptype not in ['normal_gamma', 'normal_inverted_gamma',
                                 'g_prior']:
                    print "Warning: Jeffery's prior used as prior was \
incorectly specified"
                    self.res = np.zeros(self.nobs)
                    self.__sample_scale = self.__sample_standard_deviation
                    self.__calculate = self.__calculate_jeffreys
                    self.__log_cand_prob = self.__log_cand_pr_sig
                    self.__posterior_variance_scale = \
                        self.__posterior_sigma_var
                    self.__posterior_mean_scale = \
                        self.__posterior_sigma_mean
                    self.__log_marginal_likelihood = self.__log_marginal_likelihood_jeff
                else:
                    self.vbubar = np.zeros(self.kreg)
                    if ptype =='normal_gamma':
                        # scale parameter is the precision (kappa)
                        self.__calculate = self.__calculate_normal_gamma
                        self.__sample_scale = self.__sample_precision
                        self.__log_cand_prob = self.__log_cand_pr_kappa
                        self.__posterior_variance_scale = \
                            self.__posterior_kappa_var
                        self.__posterior_mean_scale = \
                            self.__posterior_kappa_mean
                        self.__log_marginal_likelihood = \
                            self.__log_marginal_likelihood_nig
                        self.nuubar = self.prior[1]
                        self.subar = self.prior[2]
                        self.betaubar = self.prior[3]
                        self.vubar = np.atleast_2d(self.prior[4])
                        # log determinant of vubar from its Cholesky factor
                        self.lndetvubar = 2.0 * \
                            np.sum(np.log(np.diag(np.linalg.cholesky(self.vubar))))

                    elif ptype =='normal_inverted_gamma':
                        # scale parameter is the standard deviation (sigma)
                        self.__calculate = self.__calculate_normal_gamma
                        self.__sample_scale = \
                            self.__sample_standard_deviation
                        self.__log_cand_prob = self.__log_cand_pr_sig
                        self.__posterior_variance_scale = \
                            self.__posterior_sigma_var
                        self.__posterior_mean_scale = \
                            self.__posterior_sigma_mean
                        self.nuubar = self.prior[1]
                        self.subar = self.prior[2]
                        self.betaubar = self.prior[3]
                        self.vubar = np.atleast_2d(self.prior[4])
                        self.lndetvubar = 2.0 * \
                            np.sum(np.log(np.diag(np.linalg.cholesky(self.vubar))))
                        self.__log_marginal_likelihood = \
                            self.__log_marginal_likelihood_nig

                    else:
                        # g - prior
                        self.betaubar = self.prior[1]
                        self.g = self.prior[2]
                        self.betahat = np.zeros(self.kreg)
                        self.betadiff = np.zeros(self.kreg)
                        self.res = np.zeros(self.nobs)
                        assert(type(self.g) == types.FloatType)
                        self.gratio = self.g/(self.g + 1.)
                        self.__sample_scale = \
                            self.__sample_standard_deviation
                        self.__calculate = self.__calculate_g_prior
                        self.__log_cand_prob = self.__log_canc_pr_sig_gprior
                        self.__posterior_variance_scale = \
                            self.__posterior_sigma_var
                        self.__posterior_mean_scale = \
                            self.__posterior_sigma_mean
                        self.__log_marginal_likelihood = self.__log_marginal_likelihood_gprior
                        # prior precision implied by Zellner's g-prior
                        self.vubar = self.xpx / self.g
                        self.lndetvubar = 2.0 * \
                            np.sum(np.log(np.diag(np.linalg.cholesky(self.vubar))))
322 |
323 | def update_prior(self, prior):
324 | if prior[0] == 'normal_inverted_gamma' or prior[0] == 'normal_gamma':
325 | self.nuubar = self.prior[1]
326 | self.subar = self.prior[2]
327 | self.betaubar = self.prior[3]
328 | self.vubar = self.prior[4]
329 | self.lndetvubar = 2.0 * \
330 | np.sum(np.log(np.diag(np.linalg.cholesky(self.vubar))))
331 |
332 | elif prior[0] == 'g_prior':
333 | self.vubar = self.xpx / self.g
334 | self.lndetvubar = 2.0 * \
335 | np.sum(np.log(np.diag(np.linalg.cholesky(self.vubar))))
336 |
337 |
    def log_posterior_probability(self, scale, beta, **kwargs):
        """
        Return the log posterior probability of (scale, beta), delegating
        to the prior-specific handler selected in __init__. Keyword
        arguments (e.g. kernel_only) are passed through unchanged.
        """
        return self.__log_cand_prob(scale, beta, **kwargs)
340 |
    def __calculate_jeffreys(self):
        """
        Compute the posterior quantities (betaobar, vobar, nuobar, sobar)
        under Jeffreys prior: the OLS estimate with n - k degrees of
        freedom and scale equal to the residual sum of squares.
        """
        self.calculated = True
        # refresh the cross products only when the data were flagged as
        # updated since the last computation
        if self.updateind_xmat == 1 or self.updateind_yvec == 1:
            self.xpy = np.dot(self.xmat.transpose(), self.yvec)
            if self.updateind_xmat == 1:
                self.xpx = np.dot(self.xmat.transpose(), self.xmat)
            self.updateind_xmat = 0
            self.updateind_yvec = 0

        self.betaobar = np.linalg.solve(self.xpx, self.xpy)
        self.vobar = self.xpx

        self.nuobar = self.nobs - self.kreg
        self.res = self.yvec - np.dot(self.xmat, self.betaobar)
        self.sobar = np.dot(self.res, self.res)
356 |
    def __calculate_normal_gamma(self):
        """
        Compute the posterior quantities (betaobar, vobar, nuobar, sobar)
        for the normal-gamma / normal-inverted-gamma priors.
        """
        self.calculated = True
        self.vbubar = np.dot(self.vubar, self.betaubar)
        # refresh the cross products only when the data were updated
        if self.updateind_xmat == 1 or self.updateind_yvec == 1:
            self.xpy = np.dot(self.xmat.transpose(), self.yvec)
            if self.updateind_xmat == 1:
                self.xpx = np.dot(self.xmat.transpose(), self.xmat)
            self.updateind_xmat = 0
            self.updateind_yvec = 0
        self.vobar = self.vubar + self.xpx
        self.vbobar = self.xpy + self.vbubar
        self.betaobar = np.linalg.solve(self.vobar, self.vbobar)

        self.nuobar = self.nuubar + self.nobs
        # posterior scale: subar + y'y + b_u'V_u b_u - b_o'V_o b_o
        self.sobar = self.subar + sum(self.yvec**2)+ \
            np.dot(self.betaubar, self.vbubar)- \
            np.dot(self.betaobar, self.vbobar)
374 |
    def __calculate_g_prior(self):
        """Refresh the posterior quantities under Zellner's g-prior."""
        self.calculated = True
        if self.updateind_xmat == 1 or self.updateind_yvec == 1:
            self.xpy = np.dot(self.xmat.transpose(), self.yvec)
            if self.updateind_xmat == 1:
                self.xpx = np.dot(self.xmat.transpose(), self.xmat)
            self.updateind_xmat = 0
            self.updateind_yvec = 0
        # OLS estimate; the posterior mean shrinks it towards betaubar
        self.betahat = np.linalg.solve(self.xpx, self.xpy)
        self.betaobar = self.gratio * (self.betahat +
                                       self.betaubar/self.g)
        self.vobar = 1./self.gratio * self.xpx
        self.nuobar = self.nobs
        self.betadiff = self.betahat - self.betaubar
        self.res = self.yvec - np.dot(self.xmat, self.betahat)
        self.sobar = np.dot(self.res.T, self.res)+ \
                np.dot(self.betadiff.T, np.dot(self.xpx, self.betadiff))/(
                    self.g + 1.)
393 |
394 |
395 | def sample(self):
396 | self.__calculate()
397 | sig = self.__sample_scale()
398 | beta = self.betaobar + np.linalg.solve(self.cholvobar.T,
399 | np.random.randn(self.kreg))
400 | return sig, beta
401 |
    def __log_cand_pr_sig(self, sigma, beta, **kwargs):
        """
        Calculates the log of the candidate probability given
        scale = sigma, for the normal-inverted-gamma prior.  If
        kernel_only = True is passed the normalising constants are
        omitted.
        """
        loglike = self.loglike(sigma, beta)

        # multivariate-normal kernel for beta | sigma
        dbeta = beta - self.betaubar
        kern = -self.kreg * np.log(sigma) -0.5 / sigma ** 2 *\
                np.dot(dbeta, np.dot(self.vubar, dbeta))

        # inverted-gamma kernel for sigma
        kerns = -(self.nuubar + 1) * np.log(sigma) - self.subar/(2.0 * sigma ** 2)

        if 'kernel_only' in kwargs and kwargs['kernel_only'] == True:
            return loglike + kern + kerns

        else:
            # normalising constants of the two densities
            const = -0.5 * self.kreg * np.log(2 * np.pi) + 0.5 * self.lndetvubar
            consts = np.log(2) - special.gammaln(self.nuubar / 2.) +\
                    self.nuubar / 2. * np.log(self.subar / 2.)
            return loglike + kern + kerns + const + consts
422 |
423 |
424 | def __log_cand_pr_sig_jeff(self, sigma, beta, **kwargs):
425 |
426 | loglike = self.loglike(sigma, beta)
427 | return loglike - np.log(sigma)
428 |
429 | def __log_canc_pr_sig_gprior(self, sigma, beta, **kwargs):
430 | loglike = self.loglike(sigma, beta)
431 |
432 | dbeta = beta - self.betaubar
433 | kern = -self.kreg * np.log(sigma) -0.5 * self.kreg * np.log(self.g) \
434 | -0.5 / (self.g * sigma ** 2) \
435 | * np.dot(dbeta, dot(self.vubar, dbeta))
436 |
437 |
438 | if 'kernel_only' in kwargs and kwargs['kernel_only'] == True:
439 | return kern - np.log(sigma)
440 | else:
441 | const = -0.5 * self.kreg * np.log(2 * np.pi) + 0.5 * self.lndetvubar
442 | return loglike + kern + const - np.log(sigma)
443 |
444 |
445 | def __log_cand_pr_kappa(self, kappa, beta, **kwargs):
446 | loglike = self.loglike(sigma, beta)
447 |
448 | dbeta = beta - betaubar
449 | kern = 0.5 * self.kreg * np.log(kappa) -0.5 * kappa \
450 | * np.dot(dbeta, np.dot(self.vubar, dbeta))
451 |
452 | kerns = (nu + 1) / np.log(kappa) - ns * kappa /2.0
453 |
454 | if 'kernel_only' in kwargs and kwargs['kernel_only'] == True:
455 | return loglike + kern + kerns
456 |
457 | else:
458 | const = -0.5 * self.kreg * log(2 * np.pi) + 0.5 * self.lndetvubar
459 | consts = np.log(2) - special.gammaln(self.nuubar / 2.) +\
460 | self.nuubar / 2. * np.log(self.subar/2.)
461 | return loglike + kern + kerns + const + consts
462 |
463 |
464 | def __sample_standard_deviation(self):
465 | sig = 1.0/np.sqrt(np.random.gamma(self.nuobar/2., 2./self.sobar, 1))
466 | # self.cholvobar = 1.0/sig * np.linalg.cholesky(self.vobar)
467 | self.cholvobar = 1.0/sig * np.linalg.cholesky(self.vobar)
468 | return sig
469 |
470 | def __sample_precision(self):
471 | kappa = np.random.gamma(self.nuobar/2., 2./self.sobar, 1)
472 | self.cholvobar = np.linalg.cholesky(kappa * self.vobar)
473 | return kappa
474 |
475 | def loglike(self, scale, beta):
476 | if self.calculated == False:
477 | self.__calculate()
478 | if self.prior[0] == 'normal_gamma':
479 | sig = 1. / np.sqrt(scale)
480 | else:
481 | sig = scale
482 | diff = self.yvec - np.dot(self.xmat, beta)
483 | sigsq = sig**2
484 | nobs = self.yvec.shape[0]
485 | return -0.5 * nobs * np.log(2.0 * np.pi * sigsq) - \
486 | 0.5/sigsq * np.dot(diff, diff)
487 |
488 | def log_marginal_likelihood(self):
489 | if self.calculated == False:
490 | self.__calculate()
491 | return self.__log_marginal_likelihood()
492 |
493 | def __log_marginal_likelihood_nig(self):
494 | if self.calculated == False:
495 | self.__calculate()
496 |
497 | logdet_vubar = 2.0 * sum(np.log(np.diag(np.linalg.cholesky(self.vubar))))
498 | logdet_vobar = 2.0 * sum(np.log(np.diag(np.linalg.cholesky(self.vobar))))
499 | c1 = -0.5 * self.nobs * np.log(2. * np.pi)
500 | c2 = 0.5 * (logdet_vubar - logdet_vobar)
501 | c3 = special.gammaln(self.nuobar / 2.) - special.gammaln(self.nuubar / 2.)
502 | c4 = 0.5 * (self.nuubar * np.log(self.subar / 2.) - self.nuobar * np.log(self.sobar / 2.))
503 | return c1 + c2 + c3 + c4
504 |
    def __log_marginal_likelihood_jeff(self):
        # not defined under the improper Jeffreys prior
        return np.nan
507 |
    def __log_marginal_likelihood_gprior(self):
        # not implemented for the g-prior
        return np.nan
510 |
511 | def posterior_mean(self):
512 | if self.calculated == False:
513 | self.__calculate()
514 | betamean = self.betaobar
515 | sigmamean = self.__posterior_mean_scale()
516 |
517 | return sigmamean, betamean
518 |
519 | def __posterior_sigma_mean(self):
520 | """Zelner (1971), pp 371"""
521 |
522 | S = np.sqrt(self.sobar/self.nuobar)
523 |
524 | return np.exp(special.gammaln((self.nuobar - 1)/2.)-\
525 | special.gammaln(self.nuobar/2.))*np.sqrt(self.nuobar/2.) * S
526 |
    def __posterior_kappa_mean(self):
        # posterior mean of the precision, kappa ~ Gamma(nuobar/2, 2/sobar)
        # return self.nuubar/self.sobar
        return self.nuobar/self.sobar
530 |
    def __posterior_sigma_var(self):
        """function returns the estimate of the posterior variance for
        sigma, Zelner (1971), pp 373"""

        if self.calculated == False:
            self.__calculate()
        sigmamean = self.posterior_mean()[0]
        # Var(sigma) = E[sigma^2] - E[sigma]^2, E[sigma^2] = sobar/(nuobar - 2)
        var = self.sobar/(self.nuobar - 2) -sigmamean**2
        return var
540 |
541 | def __posterior_kappa_var(self):
542 | if self.calculated == False:
543 | self.__calculate()
544 | s2 = self.sobar/self.nuobar
545 | return 4./(self.nuobar * s2**2)
546 |
547 |
548 | def get_posterior_covmat(self):
549 | '''
550 | return the posterior covariance
551 | matrix for beta
552 | '''
553 | if self.calculated == False:
554 | self.__calculate()
555 | s2 = self.sobar/self.nuobar
556 | Am = np.linalg.inv(self.vobar)
557 | nuobar = self.nuobar
558 | covmat = (nuobar/(nuobar - 2)) * s2 * Am
559 | return covmat
560 |
561 | def bic(self):
562 | '''
563 | Return BIC
564 | '''
565 | if self.calculated == False:
566 | self.__calculate()
567 | sig,beta = self.posterior_mean()
568 | loglike = self.loglike(sig,beta)
569 |
570 | return -2 * loglike + (self.kreg + 1) * np.log(self.nobs)
571 |
572 |
573 | def __thpd(self, nu, bbar, sd):
574 | '''
575 | Get the hpd interval for the t-dist.
576 | '''
577 | ## and plot it
578 | rv = dstn.t(nu, bbar, sd)
579 | xl = rv.ppf(0.025)
580 | xu = rv.ppf(0.975)
581 | return np.array([xl, xu])
582 |
    def __plottdist(self, nu, bbar, sd, title):
        '''
        Plot t distribution.
        nu    - degrees of freedom
        bbar  - location (posterior mean)
        sd    - scale (posterior standard deviation)
        title - subplot title
        '''
        ## and plot it
        rv = dstn.t(nu, bbar, sd)
        xmin = rv.ppf(0.001)
        xmax = rv.ppf(0.999)
        x = np.linspace(xmin, xmax, 100)
        h = plt.plot(x, rv.pdf(x))
        plt.title(title)
        ## add the hpd's
        xl = rv.ppf(0.025)
        xu = rv.ppf(0.975)
        # shade the lower tail up to the 2.5% quantile
        ltx = np.linspace(xmin, xl, 50)
        lty = rv.pdf(ltx)
        plt.fill(np.r_[ltx, ltx[-1]],
                 np.r_[lty, 0], facecolor ="blue", alpha = 0.5)
        # shade the upper tail from the 97.5% quantile
        utx = np.linspace(xu, xmax, 50)
        uty = rv.pdf(utx)
        plt.fill(np.r_[utx, utx[0]],
                 np.r_[uty, 0], facecolor ="blue", alpha = 0.5)
        ## return rv
606 |
607 |
608 |
609 |
    def __plotinvertedgamma(self, nu, s2, title):
        '''
        plots inverted gamma,
        Zellner 1971 for details
        nu    - degrees of freedom
        s2    - scale (sobar/nuobar)
        title - subplot title
        '''
        mode = np.sqrt(s2)*np.sqrt( nu/(nu+1.0) )
        minx = 1E-3
        if minx > 0.01*mode:
            minx = 0.0
            # note this will induce a warning
            # due to divide by zero
        ## centre x on the mode
        x = np.linspace(minx, mode * 2, num = 200)
        # the four factors of the inverted-gamma density
        d1 = 2.0/special.gamma(nu/2.0)
        d2 = ( (nu * s2)/2.0)**(nu/2.0)
        d3 = 1/(x**(nu + 1.0))
        d4 = (nu * s2)/(2 * (x**2))
        y = d1 * d2 * d3 * np.exp(-d4)
        plt.plot(x, y)
        plt.title(title)
631 |
632 | def get_plot_dimensions(self, kwargs):
633 | totalplots = self.kreg + 1
634 | if kwargs.has_key('individual'):
635 | cols = 1
636 | rows = 1
637 | elif kwargs.has_key('rows') and not kwargs.has_key('cols'):
638 | ## work out the cols from the rows
639 | cols = ceil(totalplots/float(kwargs['rows']))
640 | rows = kwargs['rows']
641 | elif kwargs.has_key('cols') and not kwargs.has_key('rows'):
642 | rows = ceil(totalplots/float(kwargs['cols']))
643 | cols = kwargs['cols']
644 | elif not kwargs.has_key('cols') and not kwargs.has_key('rows'):
645 | cols = np.floor(np.sqrt(totalplots))
646 | if cols == 0:
647 | cols = 1
648 | rows = int(np.ceil(totalplots/cols))
649 | else:
650 | rows = kwargs['rows']
651 | cols = kwargs['cols']
652 |
653 | plotdims = {'totalplots':totalplots,
654 | 'cols':int(cols),
655 | 'rows':int(rows),
656 | 'figsperplot':int(rows * cols)}
657 | return plotdims
658 |
659 |
    def plot(self, **kwargs):
        '''
        Basic plotting function for regression objects.
        Plots the marginal posterior of each beta (a t density) and of
        sigma (an inverted gamma).  kwargs are passed to
        get_plot_dimensions; if 'filename' is present each full page of
        subplots is saved to a numbered file instead of being shown.
        '''
        if not self.calculated:
            self.__calculate()
        s2 = self.sobar/self.nuobar
        betasd = np.sqrt(np.diag(self.get_posterior_covmat()))

        plotdims = self.get_plot_dimensions(kwargs)
        plotcounter = 0
        pagecounter = 0
        for i in range(plotdims['totalplots'] - 1):
            # start a new figure whenever the current page is full
            if plotcounter % plotdims['figsperplot'] == 0:
                if plotcounter > 0:
                    pagecounter = pagecounter + 1
                    ## then already plotted something,
                    ## we might want to save it
                    if kwargs.has_key('filename'):
                        (base, suffix) = os.path.splitext(kwargs['filename'])
                        fname = "%s%03d%s" % (base, pagecounter, suffix)
                        plt.savefig(fname)
                plotcounter = 0
                plt.figure()
            plotcounter = plotcounter + 1
            plt.subplot(plotdims['rows'], plotdims['cols'], plotcounter)
            title = r'$\beta_{%d}$' % i
            self.__plottdist(self.nuobar,
                             self.betaobar[i],
                             betasd[i], title)
        ## and the final plot..
        if plotcounter % plotdims['figsperplot'] == 0:
            if plotcounter > 0:
                pagecounter = pagecounter + 1
                ## then already plotted something,
                ## we might want to save it
                if kwargs.has_key('filename'):
                    (base, suffix) = os.path.splitext(kwargs['filename'])
                    fname = "%s%03d%s" % (base, pagecounter, suffix)
                    plt.savefig(fname)
            plotcounter = 0
            plt.figure()
        plotcounter = plotcounter + 1
        plt.subplot(plotdims['rows'], plotdims['cols'], plotcounter)
        title = r'$\sigma$'
        self.__plotinvertedgamma(self.nuobar, s2, title)
        pagecounter = pagecounter + 1
        ## then already plotted something,
        ## we might want to save it
        if kwargs.has_key('filename'):
            (base, suffix) = os.path.splitext(kwargs['filename'])
            fname = "%s%03d%s" % (base, pagecounter, suffix)
            plt.savefig(fname)
        else:
            plt.show()
716 |
717 | def update_yvec(self, yvec):
718 | self.yvec = yvec
719 | self.updateind_yvec = 1
720 | self.calculated = False
721 |
722 | def update_xmat(self, xmat):
723 | if xmat.ndim == 1:
724 | self.xmat = xmat.reshape(xmat.shape[0], 1)
725 | else:
726 | self.xmat = xmat
727 | self.calculated = False
728 | self.updateind_xmat = 1
729 |
730 | def residuals(self):
731 | if self.calculated == False:
732 | self.__calculate()
733 | sigma,beta=self.posterior_mean()
734 | return self.yvec-np.dot(self.xmat,beta)
735 |
736 |
    def print_header(self, destination, colwidth, sigfigs):
        '''
        print a generic header for the output:
        destination - a file-like object (e.g. sys.stdout)
        colwidth    - width of each of the six output columns
        sigfigs     - significant figures (unused here; kept for
                      symmetry with print_summary)
        '''
        print >>destination, ""
        # horizontal rule centred over the six columns
        hline = "{hline: ^{totalwidth}}".format(
            hline ="---------------------------------------------------",
            totalwidth = 6 * colwidth)
        print >>destination, hline
        print >>destination, \
                "{title: ^{totalwidth}}".format(
                    title ="Bayesian Linear Regression Summary",
                    totalwidth = 6 * colwidth)
        print >>destination, \
                "{priorname: ^{totalwidth}}".format(
                    priorname = self.prior[0],
                    totalwidth = 6 * colwidth)
        print >>destination, hline
        # column headings
        print >>destination, """\
{0: >{colwidth}.{colwidth}s}\
{1: >{colwidth}.{colwidth}s}\
{2: >{colwidth}.{colwidth}s} \
{3: >{colwidth}.{colwidth}s}\
{4: >{colwidth}.{colwidth}s}""".format(" ", "mean", "sd", "2.5%", "97.5%",
           colwidth = colwidth,
           sigfigs = sigfigs)
763 |
    def print_summary(self, destination, paramname, meanval, sdval,
                      hpdintervals, hpd05, colwidth, sigfigs):
        '''
        format the output for a single line.
        Arguments are the name of the parameter, its
        mean value, the standard deviation and the hpd (if present).
        Presumably only for a vector.
        '''
        name = paramname
        # trailing comma keeps the cursor on this line so the hpd (or
        # NA) columns below are appended to it
        print >>destination, """\
{name: >{colwidth}.{colwidth}}\
{val1: >0{colwidth}.{sigfigs}g}\
{val2: >0{colwidth}.{sigfigs}g}""".format(
            name = name,
            val1 = meanval,
            val2 = sdval,
            colwidth = colwidth, sigfigs = sigfigs),
        if hpdintervals:
            ## now for the hpd's
            print >>destination, """\
{val1: >0{colwidth}.{sigfigs}g}\
{val5: >0{colwidth}.{sigfigs}g}""".format(
                val1 = hpd05[0],
                val5 = hpd05[1],
                colwidth = colwidth,
                sigfigs = sigfigs)
        else:
            # no interval available for this parameter
            print >>destination, """\
{0: >0{colwidth}.{colwidth}s}\
{0: >0{colwidth}.{colwidth}s}""".format("NA", colwidth = colwidth - 1)
794 |
795 |
    def output(self, **kwargs):
        '''
        Output for the regression summary.
        kwargs - optional arguments:
            filename - write the summary to this file instead of stdout.
        '''
        colwidth = 12
        sigfigs = 4
        if not self.calculated:
            self.__calculate()
        if kwargs.has_key("filename"):
            destination = open(kwargs['filename'], 'w')
        else:
            destination = sys.stdout
        self.print_header(destination, colwidth, sigfigs)
        sigmean, betamean = self.posterior_mean()
        betasd = np.sqrt(np.diag(self.get_posterior_covmat()))
        # one line per coefficient, with its 95% hpd interval
        for i in range(len(betamean)):
            paramname = "beta[%d]" % i
            hpd = self.__thpd(self.nuobar,
                              betamean[i],
                              betasd[i])

            self.print_summary(destination, paramname,
                               betamean[i],
                               betasd[i],
                               True, hpd, colwidth, sigfigs)


        ## and now for sigma
        if self.prior[0] =="normal_gamma":
            scale_name = "kappa"
        else:
            scale_name = "sigma"
        sigsd = np.sqrt(self.__posterior_variance_scale())
        self.print_summary(destination, scale_name,
                           sigmean,
                           sigsd,
                           False, None, colwidth, sigfigs)
        ## and print loglikelihood:
        print >>destination
        print >>destination, \
                "loglikelihood = {loglik: <0{colwidth}.{sigfigs}g}".format(
                    loglik=self.loglike(sigmean,betamean),
                    colwidth=colwidth,
                    sigfigs=sigfigs)
        print >>destination,\
                "log marginal likelihood = {marglik: <0{colwidth}.{sigfigs}g}".format(
                    marglik = self.log_marginal_likelihood(),
                    colwidth = colwidth,
                    sigfigs = sigfigs)

        print >>destination, \
                "BIC = {bic: <0{colwidth}.{sigfigs}g}".format(
                    bic = self.bic(),
                    colwidth = colwidth,
                    sigfigs = sigfigs)
851 |
852 |
853 | class CondBetaRegSampler:
854 | """
855 |
856 | This class samples beta assuming it is generated from a linear
857 | regression model where the scale parameter is known. This class is
858 | initialised with the following arguments:
859 | yvec - a one dimensional numpy array containing the data.
860 | xmat - a two dimensional numpy array containing the regressors.
861 | kwargs - optional arguments:
862 | prior - a list containing the name of the prior and the
863 | corresponding hyperparameters.
864 | Examples:
865 | prior = ['normal', betaubar, Vubar] or
866 | prior = ['g_prior', betaubar, g].
867 | If none of these options are chosen or they are miss-specified
868 | then CondBetaRegSampler will default to Jeffrey's prior.
869 | """
870 |
871 | def __init__(self, yvec, xmat, **kwargs):
872 | self.nobs = yvec.shape[0]
873 | if xmat.ndim == 1:
874 | xmat = xmat.reshape(xmat.shape[0], 1)
875 | self.kreg = xmat.shape[1]
876 | self.yvec = yvec
877 | self.xmat = xmat
878 | self.xpx = np.dot(xmat.T, xmat)
879 | self.xpy = np.dot(xmat.T, yvec)
880 | self.updateind_xmat = 0
881 | self.updateind_yvec = 0
882 | self.betaobar = np.zeros(self.kreg)
883 | self.vobar = np.zeros((self.kreg, self.kreg))
884 | self.vbobar = np.zeros(self.kreg)
885 | self.cholvobar = np.zeros((self.kreg, self.kreg))
886 |
887 | if 'prior' not in kwargs: # default: Jeffrey's prior
888 | self.__calculate = self.__calculate_jeffreys
889 |
890 | else: # Normal - gamma prior
891 | self.prior = kwargs['prior']
892 | if type(self.prior[0])!= types.StringType:
893 | print "Warning: Jeffery's prior used as prior was \
894 | incorectly specified"
895 | self.__calculate = self.__calculate_jeffreys
896 |
897 | else:
898 | ptype = self.prior[0]
899 | if ptype not in['normal', 'g_prior']:
900 | print "Warning: Jeffery's prior used as prior was \
901 | incorectly specified"
902 | self.__calculate = self.__calculate_jeffreys
903 | elif ptype =='normal':
904 | assert(len(self.prior) == 3)
905 | self.betaubar = self.prior[1]
906 | self.vubar = self.prior[2]
907 | self.__calculate = self.__calculate_normal
908 |
909 | else:
910 | # g_prior
911 | assert(len(self.prior) == 3)
912 | self.betaubar = self.prior[1]
913 | self.g = float(self.prior[2])
914 | self.gratio = self.g/(1.+self.g)
915 | self.betahat = np.zeros(self.kreg)
916 | self.__calculate = self.__calculate_g_prior
917 |
918 |
919 | def calculate(self, sigma):
920 | self.__calculate()
921 | def __calculate_jeffreys(self, sigma):
922 | if self.updateind_xmat == 1 or self.updateind_yvec == 1:
923 | self.xpy = np.dot(self.xmat.transpose(), self.yvec)
924 | if self.updateind_xmat == 1:
925 | self.xpx = np.dot(self.xmat.transpose(), self.xmat)
926 | self.updateind_xmat = 0
927 | self.updateind_yvec = 0
928 |
929 | self.betaobar = np.linalg.solve(self.xpx, self.xpy)
930 | self.vobar = self.xpx/sigma**2
931 |
932 | def __calculate_normal(self, sigma):
933 | self.vbubar = np.dot(self.vubar, self.betaubar)
934 | if self.updateind_xmat == 1 or self.updateind_yvec == 1:
935 | self.xpy = np.dot(self.xmat.transpose(), self.yvec)
936 | if self.updateind_xmat == 1:
937 | self.xpx = np.dot(self.xmat.transpose(), self.xmat)
938 | self.updateind_xmat = 0
939 | self.updateind_yvec = 0
940 | self.vbobar = self.xpy + self.vbubar
941 | self.vobar = self.vubar + self.xpx/sigma**2
942 | self.vbobar = self.xpy/sigma**2 + self.vbubar
943 | self.betaobar = np.linalg.solve(self.vobar, self.vbobar)
944 |
945 | def __calculate_g_prior(self, sigma):
946 | if self.updateind_xmat == 1 or self.updateind_yvec == 1:
947 | self.xpy = np.dot(self.xmat.transpose(), self.yvec)
948 | if self.updateind_xmat == 1:
949 | self.xpx = np.dot(self.xmat.transpose(), self.xmat)
950 | self.updateind_xmat = 0
951 | self.updateind_yvec = 0
952 | self.betahat = np.linalg.solve(self.xpx, self.xpy)
953 | self.betaobar = self.gratio * (self.betahat + self.betaubar/self.g)
954 | self.vobar = 1.0/(sigma**2 * self.gratio) * self.xpx
955 |
956 | def sample(self, sigma):
957 | """This function returns a sample of beta"""
958 | self.__calculate(sigma)
959 | self.cholvobar = np.linalg.cholesky(self.vobar)
960 | beta = self.betaobar + np.linalg.solve(self.cholvobar.T,
961 | np.random.randn(self.kreg))
962 | return beta
963 |
964 | def get_marginal_posterior_mean(self):
965 | return self.betaobar
966 |
967 | def get_marginal_posterior_precision(self):
968 | return self.vobar
969 |
970 | def update_yvec(self, yvec):
971 | """
972 | This function updates yvec in CondRegSampler. This is often useful
973 | when the class is being used as a part of the MCMC sampling
974 | scheme.
975 | """
976 | self.yvec = yvec
977 | self.updateind_yvec = 1
978 |
979 | def update_xmat(self, xmat):
980 | """
981 | This function updates xmat in CondRegSampler. This is often useful
982 | when the class is being used as a part of the MCMC sampling
983 | scheme.
984 | """
985 |
986 | if xmat.ndim == 1:
987 | self.xmat = xmat.reshape(xmat.shape[0], 1)
988 | else:
989 | self.xmat = xmat
990 | self.updateind_xmat = 1
991 |
class CondScaleSampler:
    def __init__(self, **kwargs):
        """class is used to sample the scale assuming the model is linear
        kwargs - optional arguments
            prior - is a tuple or list; the first element names the
                prior ('gamma', 'inverted_gamma' or 'wishart') and the
                remaining elements are the hyperparameters that
                describe the prior. If it is not specified the Jeffrey's
                prior is used instead
        """

        # defaults give Jeffreys' prior (nuubar = 0, Subar = 0) sampled
        # as an inverted gamma
        self.nuubar = 0.
        self.Subar = 0.
        self.__sample = self.__sample_inverted_gamma

        if 'prior' in kwargs:
            self.prior = kwargs['prior']
            priorname = self.prior[0]
            if type(self.prior[0])!= types.StringType:
                print "Warning: Jeffery's prior used as prior was \
incorectly specified"

            else:
                if priorname not in ['gamma', 'inverted_gamma', 'wishart']:
                    print """\nWarning: Prior type unknown for \
CondSigSample. Defaulting to Jeffrey's prior\n"""

                elif priorname =='gamma':
                    self.nuubar = self.prior[1]
                    self.Subar = self.prior[2]
                    self.__sample = self.__sample_gamma

                elif priorname == 'inverted_gamma':
                    self.nuubar = self.prior[1]
                    self.Subar = self.prior[2]
                    self.__sample = self.__sample_inverted_gamma

                else:
                    #wishart prior is used
                    self.nuubar = self.prior[1]
                    self.Subar = np.atleast_2d(self.prior[2])
                    self.__sample = self.__sample_wishart2
                    # workspace arrays for the Fortran wishart routines
                    # (Fortran ordering required)
                    self.p = self.Subar.shape[0]
                    self.work_chisq = np.arange(self.p)
                    self.n_randn = (self.p * (self.p - 1)) / 2
                    self.randnvec = np.zeros(self.n_randn)
                    self.randchivec = np.zeros(self.p)
                    self.cmat = np.zeros((self.p, self.p), order = 'F')
                    self.rmat = np.zeros((self.p, self.p), order = 'F')
                    self.umat = np.zeros((self.p, self.p), order = 'F')
                    self.Sobar = np.zeros((self.p, self.p), order = 'F')


    def sample(self, residual):
        # dispatch to the prior-specific sampler chosen in __init__
        return self.__sample(residual)

    def __sample_gamma(self, residual):
        # precision ~ Gamma(nuobar/2, 2/Sobar)
        nuobar = self.nuubar + residual.shape[0]
        Sobar = self.Subar + np.sum(residual**2, axis = 0)
        return np.random.gamma(nuobar/2., 2./Sobar)

    def __sample_inverted_gamma(self, residual):
        # sigma = 1/sqrt(kappa) with kappa ~ Gamma(nuobar/2, 2/Sobar)
        nuobar = self.nuubar + residual.shape[0]
        Sobar = self.Subar + np.sum(residual**2, axis = 0)
        return 1./np.sqrt(np.random.gamma(nuobar/2., 2./Sobar))

    def __sample_wishart(self, residual):
        # wishart draw via the Fortran helpers; superseded by
        # __sample_wishart2 (chosen in __init__) but kept for reference
        residual = np.atleast_2d(residual)
        assert residual.shape[1] == self.p
        self.nuobar = self.nuubar + residual.shape[0]
        self.randnvec = np.random.randn(self.n_randn)
        self.randchivec = np.random.chisquare(self.nuobar - self.work_chisq)
        wishart.calc_sobar(self.Subar, self.Sobar, residual)
        self.cmat = np.asfortranarray(np.linalg.cholesky(np.linalg.inv(self.Sobar)).T)
        wishart.chol_wishart(self.randnvec, self.randchivec, self.umat,
                             self.cmat, self.rmat)

        return np.dot(self.rmat.T, self.rmat)

    def __sample_wishart2(self, residual):
        residual = np.atleast_2d(residual)
        assert residual.shape[1] == self.p
        self.nuobar = self.nuubar + residual.shape[0]
        self.randnvec = np.random.randn(self.n_randn)
        self.randchivec = np.random.chisquare(self.nuobar - self.work_chisq)
        wishart.calc_sobar(self.Subar, self.Sobar, residual)
        info = np.array(0)
        # the Fortran routine factorises Sobar itself; a nonzero info
        # flags a failed factorisation
        wishart.chol_wishart2(self.randnvec, self.randchivec, self.umat,
                              self.cmat, self.Sobar, info)
        assert info == 0
        return self.umat
1081 |
1082 |
1083 |
1084 |
1085 |
1086 |
1087 |
--------------------------------------------------------------------------------
/MANIFEST:
--------------------------------------------------------------------------------
1 | setup.py
2 | INSTALL
3 | Lib/__init__.py
4 | Lib/mcmc.py
5 | Lib/mcmc_utilities.py
6 | Lib/mcmcplots.py
7 | Lib/regtools.py
8 | Src/stochsearch.f
9 | Src/timeseriesfunc.f
10 | Src/wishart.f
11 | data/count.txt
12 | data/yld2.txt
13 | examples/ex_AR1.py
14 | examples/ex_loglinear.py
15 | examples/using_pymcmc_efficiently.py
16 | examples/ex_variable_selection.py
17 | examples/loglinear.f
18 | examples/matplotlibrc
19 |
--------------------------------------------------------------------------------
/README:
--------------------------------------------------------------------------------
1 | Markov chain Monte Carlo (MCMC) estimation provides a solution to the
2 | complex integration problems that are faced in the Bayesian analysis
3 | of statistical problems. The implementation of MCMC algorithms is,
4 | however, code intensive and time consuming. We have developed a
5 | Python package, which is called PyMCMC, that aids in the construction
6 | of MCMC samplers and helps to substantially reduce the likelihood of
7 | coding error, as well as aid in the minimisation of repetitive code.
8 | PyMCMC contains classes for Gibbs, Metropolis Hastings, independent
9 | Metropolis Hastings, random walk Metropolis Hastings, orientational
10 | bias Monte Carlo and slice samplers as well as specific modules for
11 | common models such as a module for Bayesian regression analysis.
12 | PyMCMC is straightforward to optimise, taking advantage of the Python
13 | libraries Numpy and Scipy, as well as being readily extensible with C
14 | or Fortran.
15 |
--------------------------------------------------------------------------------
/Src/stochsearch.f:
--------------------------------------------------------------------------------
1 | c Used in PyMCMC for the stochastic search algorithm.
2 | c Copyright (C) 2010 Chris Strickland
3 | c
4 | c This program is free software: you can redistribute it and/or modify
5 | c it under the terms of the GNU General Public License as published by
6 | c the Free Software Foundation, either version 3 of the License, or
7 | c (at your option) any later version.
8 | c
9 | c This program is distributed in the hope that it will be useful,
10 | c but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | c MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | c GNU General Public License for more details.
13 |
14 | c You should have received a copy of the GNU General Public License
c along with this program. If not, see <http://www.gnu.org/licenses/>.
16 |
17 |
18 | c ssreg is a Fortran77 subroutine that is used by the Python package
19 | c PyMCMC. Specifically ssreg is used to calculate the indicator
20 | c vector that is used in the Bayesian stochastic search variable
21 | c selection problem for the normal linear model. The subroutine
c assumes the model is specified using a g-prior. If y denotes
c the observation vector and x denotes the set of regressors then
24 | c ypy = y'y
c g = g from the g-prior
26 | c xpx = x'x
c xpy = x'y
28 | c xgxg = a (k,k) matrix used for workings
29 | c xgy = a (k,1) vector used for workings
30 | c gam = a (k,1) vector of integers used to store the selection
31 | c dummies
32 | c ru = a (k,1) vector of uniform random numbers
33 | c w = a (k,6) matrix for workings
34 | c w2 = a (k,k) matrix for workings
c ifo = an integer. Equals 0 on return if computations were
36 | c successful
c ifo2 = an integer. Equals 0 on return if computations were
38 | c successful
39 | c n = the number of observations
40 | c k = the number of regressors
41 |
42 | subroutine ssreg(ypy,g,xpx,xpy,xgxg,xgy,gam,ru,w,w2,ifo, ifo2,n,k)
43 | implicit none
44 | integer n,k,gam(k),ifo, ifo2
45 | real*8 xpy(k), g, xpx(k,k), xgy(k),xgxg(k,k),ru(k),ypy
46 | real*8 w(k,6),w2(k,k), pgamnum,pgamdenom
47 | integer qgam, sumgam,i
48 | real*8 probdenom
49 |
50 | cf2py intent(inout) gam
51 | cf2py intent(in) ypy
52 | cf2py intent(in) g
53 | cf2py intent(in) xpx
54 | cf2py intent(in) xgxg
55 | cf2py intent(in) w2
56 | cf2py intent(in) xgq
57 | cf2py intent(in) ru
58 | cf2py intent(in) n
59 | cf2py intent(in) k
60 | cf2py intent(inout) ifo
61 | cf2py intent(inout) ifo2
62 | do i=2,k
63 | qgam=sumgam(gam,k)
64 | if (gam(i).eq.1) then
65 | call precalc(qgam,xpy,xpx,xgy,xgxg,w2,gam,w,k,ifo2)
66 | call probgam(pgamnum,g,ypy,qgam,xgxg,w2,xgy,w,n,k,ifo)
67 | gam(i)=0
68 | qgam=qgam-1
69 | call precalc(qgam,xpy,xpx,xgy,xgxg,w2,gam,w,k,ifo2)
70 | call probgam(pgamdenom,g,ypy,qgam,xgxg,w2,xgy,w,n,k,ifo)
71 | pgamdenom=probdenom(pgamnum,pgamdenom)
72 | if (ru(i)>pgamdenom.or.ifo2.ne.0.or.ifo.ne.0) then
73 | gam(i)=1
74 | qgam=qgam+1
75 | endif
76 | else
77 | call precalc(qgam,xpy,xpx,xgy,xgxg,w2,gam,w,k,ifo2)
78 | call probgam(pgamnum,g,ypy,qgam,xgxg,w2,xgy,w,n,k,ifo)
79 | gam(i)=1
80 | qgam=qgam+1
81 | call precalc(qgam,xpy,xpx,xgy,xgxg,w2,gam,w,k,ifo2)
82 | call probgam(pgamdenom,g,ypy,qgam,xgxg,w2,xgy,w,n,k,ifo)
83 | pgamdenom=probdenom(pgamnum,pgamdenom)
84 | if (ru(i)>pgamdenom.or.ifo2.ne.0.or.ifo.ne.0) then
85 | gam(i)=0
86 | qgam=qgam-1
87 | endif
88 | endif
89 | enddo
90 | end
91 |
c subroutine that does precalculation for probgam
      subroutine precalc(qgam,xpy,xpx,xgy,xgxg,w2,gam,w,k,ifo2)
      implicit none
      integer k, ifo2,gam(k),qgam
      real*8 xpy(k), xpx(k,k),w2(k,k),xgy(k),xgxg(k,k),w(k,6)

c gather the gamma-selected elements of x'y and x'x into xgy and xgxg
      call calcxpy(xpy,gam,xgy,k)
      call calcxpx(xpx,gam,xgxg,k)
c calculate prior mean
      call calcpm(qgam,xgxg,w2,gam,w,k,ifo2)
      end
103 |
104 | c subroutine calculates the prior mean
105 | subroutine calcpm(qgam,xgxg,w2,gam,w,k,ifo2)
106 | implicit none
107 | integer k,i,j,ifo2,qgam
108 | real*8 xgxg(k,k),w2(k,k), gam(k), w(k,6)
109 |
110 | j=0
111 | do i=1,k
112 | if (gam(i).eq.1) then
113 | w(j,5)=w(i,4)
114 | j=j+1
115 | endif
116 | enddo
117 | do j=1,qgam
118 | w(j,1)=w(j,5)
119 | do i=1,qgam
120 | w2(i,j)=xgxg(i,j)
121 | enddo
122 | enddo
123 | call dposv('u',qgam,1,w2,k,w(:,1),k,ifo2)
124 | end
125 |
126 |
127 |
128 |
129 | c subroutine calculates the probability of gamma
      subroutine probgam(pgam,g,ypy,qgam,xgxg,w2,xgy,w,n,k,info)
      implicit none
      integer n, k, qgam,info, i,j
      real*8 pgam, xgxg(k,k),w2(k,k), xgy(k), w(k,6),c1, c2,c3,g, ypy
      real*8 alpha,beta
c BLAS function declaration
      real*8 ddot


c c1 is the g-prior model-dimension penalty
      c1=(-dble(qgam+1)/2.0)*log(g+1.0)
c copy xgy and xgxg into workspace: dposv overwrites its inputs
      do j=1,qgam
          w(j,2)=xgy(j)
          do i=1,qgam
              w2(i,j)=xgxg(i,j)
          enddo
      enddo
      alpha=1.0
      beta=0.0
      call dposv('u',qgam,1,w2,k,w(:,2),k,info)
      if(info.eq.0) then
          c2=g/(g+1)*ddot(qgam,xgy,1,w(:,2),1)

          call dgemv('n',qgam,qgam,alpha,xgxg,k,w(:,1),1,beta,w(:,3),1)
          c3=ddot(qgam,w(:,1),1,w(:,3),1)/(g+1.0)

          pgam=c1+(-(dble(n)/2.0))*log(ypy-c2-c3)
      else
c Cholesky factorisation failed; give this gamma negligible mass
          pgam=-1.0D256
      endif
      end
160 |
161 | c function returns the probability of the denominator
162 | c lnp1 is the unnormalised log probability of the numerator, while
163 | c lnp2 is the unnormalised log probability of the denonator
      real*8 function probdenom(lnp1,lnp2)
      real*8 lnp2,lnp1,maxp,lnsump

c log-sum-exp with the maximum factored out for numerical stability
      maxp=max(lnp1,lnp2)
      lnsump=log(exp(lnp1-maxp)+exp(lnp2-maxp))+maxp
      probdenom=exp(lnp2-lnsump)
      return
      end
172 |
173 |
c function returns the number of ones in the indicator vector gam,
c i.e. the number of regressors currently included in the model
c (computed as the sum of the elements of gam).
      integer function sumgam(gam,k)
      implicit none
      integer k,gam(k),idx

      sumgam=0
      do idx=k,1,-1
         sumgam=sumgam+gam(idx)
      enddo
      return
      end
184 |
c subroutine gathers the elements of x'y that correspond to ones
c in the indicator vector gam, packing them contiguously into the
c leading entries of xgy.
      subroutine calcxpy(xpy,gam,xgy,k)
      implicit none
      integer k,gam(k),i,pos
      real*8 xpy(k), xgy(k)

      pos=0
      do i=1,k
         if(gam(i).eq.1) then
            pos=pos+1
            xgy(pos)=xpy(i)
         endif
      enddo
      end
199 |
c subroutine extracts the submatrix of x'x whose rows and columns
c both correspond to ones in gam, packing it into the top-left
c corner of xgxg.
      subroutine calcxpx(xpx,gam,xgxg,k)
      implicit none
      integer k, gam(k),i,j,row,col

      col=1
      do j=1,k
         if (gam(j).eq.1) then
            row=1
            do i=1,k
               if (gam(i).eq.1) then
                  xgxg(row,col)=xpx(i,j)
                  row=row+1
               endif
            enddo
            col=col+1
         endif
      enddo
      end
220 |
c functions for the normal inverted gamma prior

c subroutine samples the indicator vector gamma, one element at a
c time, under the normal-inverted gamma prior. For each i the
c marginal likelihood of the current model (numerator) is compared
c with that of the model with gam(i) flipped (denominator); the
c flip is accepted with probability probdenom(num,denom),
c otherwise gam(i) and the prior precision vubar are restored.
c The loop starts at i=2, so the first regressor is always kept
c (presumably the intercept - confirm against the caller).
c
c   ypy   - y'y
c   ldr   - log determinant of the prior correlation matrix r
c   vs    - prior scale parameter, passed through to marg
c   vxy   - (k) workspace used by marg
c   vobar - (k,k) workspace used by marg for the posterior precision
c   vubar - (k,k) prior precision, updated in place as gamma changes
c   gam   - (k) indicator vector, updated in place
c   xpx   - x'x
c   xpy   - x'y
c   v     - (k,2) prior scale terms for gam(i)=0 / gam(i)=1
c   r     - (k,k) prior correlation matrix
c   nuo   - posterior degrees-of-freedom parameter used by marg
c   ru    - (k) uniform random draws, one per element of gamma
c   k     - number of regressors
      subroutine ssreg_nig(ypy,ldr,vs,vxy,vobar,vubar,gam,xpx,xpy,v,r,
     + nuo,ru,k)

      implicit none
      integer k,gam(k),gami,ifo,ifo2,nuo,i
      real*8 vobar(k,k),vubar(k,k),xpx(k,k),xpy(k),v(k,2),r(k,k)
      real*8 vxy(k),ldr,ypy,vs
      real*8 pgamnum,pgamdenom,ru(k),probdenom,marg

cf2py intent(in) ypy
cf2py intent(in) ldr
cf2py intent(in) vs
cf2py intent(in) vxy
cf2py intent(in) vobar
cf2py intent(in) vubar
cf2py intent(in) gam
cf2py intent(in) xpx
cf2py intent(in) xpy
cf2py intent(in) v
cf2py intent(in) r
cf2py intent(in) gam
cf2py intent(in) nuo
cf2py intent(in) ru
cf2py intent(in) k

      do i=2,k
         if (gam(i).eq.1) then
c numerator: marginal likelihood with regressor i included
c (gami=-1 tells marg that vubar is already up to date)
            gami=-1
            pgamnum=marg(ypy,ldr,vs,vxy,vobar,vubar,gam,xpx,xpy,v,
     + r,gami,ifo,nuo,k)
c denominator: flip regressor i out; marg refreshes vubar
            gam(i)=0
            gami=i
            pgamdenom=marg(ypy,ldr,vs,vxy,vobar,vubar,gam,xpx,xpy,v,
     + r,gami,ifo2,nuo,k)
            pgamdenom=probdenom(pgamnum,pgamdenom)
c reject the flip (or any failed factorisation) by restoring
c gam(i) and the prior precision
            if (ru(i)>pgamdenom.or.ifo2.ne.0.or.ifo.ne.0) then
               gam(i)=1
               call update_vubar(vubar,gam,v,r,gami,k)
            endif
         else
c numerator: marginal likelihood with regressor i excluded
            gami=-1
            pgamnum=marg(ypy,ldr,vs,vxy,vobar,vubar,gam,xpx,xpy,v,
     + r,gami,ifo,nuo,k)
c denominator: flip regressor i in; marg refreshes vubar
            gam(i)=1
            gami=i
            pgamdenom=marg(ypy,ldr,vs,vxy,vobar,vubar,gam,xpx,xpy,v,
     + r,gami,ifo2,nuo,k)
            pgamdenom=probdenom(pgamnum,pgamdenom)
            if (ru(i)>pgamdenom.or.ifo2.ne.0.or.ifo.ne.0) then
               gam(i)=0
               call update_vubar(vubar,gam,v,r,gami,k)
            endif
         endif
      enddo
      end
279 |
280 |
281 |
282 |
c subroutine initialises the prior precision matrix as
c vubar = D * R * D, where D is diagonal with element i equal to
c v(i,1) when gam(i)=0 and v(i,2) when gam(i)=1.
      subroutine initialise_vubar(vubar,gam,v,r,k)
      implicit none
      integer k,gam(k),i,j
      real*8 vubar(k,k),r(k,k),v(k,2),vj

cf2py intent(inout) vubar
cf2py intent(in) gam
cf2py intent(in) v
cf2py intent(in) r
cf2py intent(in) k

c column scale is hoisted out of the inner loop
      do j=1,k
         vj=v(j,gam(j)+1)
         do i=1,k
            vubar(i,j)=v(i,gam(i)+1)*r(i,j)*vj
         enddo
      enddo
      end
301 |
302 |
303 |
c subroutine updates the prior precision vubar in place after
c element gami of gamma changes, restoring the invariant
c vubar = D * R * D established by initialise_vubar (with
c d(i) = v(i,gam(i)+1)): row and column gami are recomputed from
c the current gam. logdetvu relies on this D*R*D form.
c
c The previous version set row gami to v(gami,.)*r(gami,j) only
c (dropping the v(j,gam(j)+1) factor) and then multiplied the old
c column entries by the new scale without removing the old scale,
c so the D*R*D form was lost and the "restore" calls made by the
c samplers on rejection did not actually restore vubar.
c
c   vubar - (k,k) prior precision, updated in place
c   gam   - (k) current indicator vector
c   v     - (k,2) prior scale terms for gam(i)=0 / gam(i)=1
c   r     - (k,k) prior correlation matrix
c   gami  - index of the element of gam that changed
c   k     - number of regressors
      subroutine update_vubar(vubar,gam,v,r,gami,k)
      implicit none
      integer k,gam(k),gami,j
      real*8 vubar(k,k),r(k,k),v(k,2),vg

      vg=v(gami,gam(gami)+1)
      do j=1,k
c recompute the symmetric pair of entries; for j=gami this writes
c the diagonal element vg*r(gami,gami)*vg twice, harmlessly
         vubar(gami,j)=vg*r(gami,j)*v(j,gam(j)+1)
         vubar(j,gami)=vubar(gami,j)
      enddo
      end
318 |
319 |
c function calculates the log marginal likelihood, up to an
c additive constant, of the model defined by the indicator vector
c gam under the normal-inverted gamma prior.
c
c   ypy   - y'y
c   ldr   - log determinant of the prior correlation matrix r
c   vs    - prior scale parameter
c   vxy   - (k) workspace for the half-solved normal equations
c   vobar - (k,k) workspace; holds vubar + x'x and then its upper
c           Cholesky factor
c   vubar - (k,k) prior precision; refreshed in place when gami
c           indicates an element of gam has changed
c   gam   - (k) indicator vector
c   xpx   - x'x
c   xpy   - x'y
c   v     - (k,2) prior scale terms
c   r     - (k,k) prior correlation matrix
c   gami  - index of the changed element of gam, or -1 if vubar is
c           already up to date
c   ifo   - info flag from dpotrf (0 on success)
c   nuo   - posterior degrees-of-freedom parameter
c   k     - number of regressors
      real*8 function marg(ypy,ldr,vs,vxy,vobar,vubar,gam,xpx,xpy,v,r,
     + gami,ifo,nuo,k)
      implicit none
      integer k,gam(k),gami,ifo,nuo,i
      real*8 vobar(k,k),vubar(k,k),xpx(k,k),xpy(k),v(k,2),r(k,k)
c note: beta and lndetvu are declared but never used below
      real*8 alpha,beta,vxy(k),ddot,vso,ldr,lndetvu,ypy,vs
      real*8 lndetvo,logdetvu

c refresh the prior precision if an element of gamma has changed
      if (gami.ne.-1) then
         call update_vubar(vubar,gam,v,r,gami,k)
      endif
c posterior precision vobar = vubar + x'x, built column by column
      alpha=1.0
      do i=1,k
         call dcopy(k,vubar(:,i),1,vobar(:,i),1)
         call daxpy(k,alpha,xpx(:,i),1,vobar(:,i),1)
      enddo

c upper Cholesky factorisation of the posterior precision
      call dpotrf('u',k,vobar,k,ifo)
      if (ifo.eq.0) then
c vxy = inv(U') x'y, so vxy'vxy = x'y' inv(vobar) x'y
         call dcopy(k,xpy,1,vxy,1)
         call dtrsv('u','t','n',k,vobar,k,vxy,1)
         beta=0.0
c posterior scale: vs + y'y - x'y' inv(vobar) x'y
         vso=vs+ypy-ddot(k,vxy,1,vxy,1)

c log determinant of the Cholesky factor of vobar
         lndetvo=0.0
         do i=1,k
            lndetvo=lndetvo+log(vobar(i,i))
         enddo

         marg=0.5*logdetvu(v,gam,ldr,k)-lndetvo-dble(nuo)/2.0*
     + log(vso/2.0)
      else
c factorisation failed; return an effectively impossible value
         marg=-1.0D256
      endif
      return
      end
c function returns the log determinant of the prior precision
c vubar = D * R * D, computed as 2*sum(log(d(i))) + ldr, where
c ldr is the precomputed log determinant of R and
c d(i) = v(i,gam(i)+1).
      real*8 function logdetvu(v,gam,ldr,k)
      implicit none
      integer i,k,gam(k)
c v is declared (k,2) to match the actual argument supplied by
c every caller (it was previously over-declared as (k,k)); only
c columns 1 and 2 are ever referenced, via gam(i)+1
      real*8 v(k,2),ldr


      logdetvu=0.0
      do i=1,k
         logdetvu=logdetvu+log(v(i,gam(i)+1))
      enddo
      logdetvu=2.0*logdetvu+ldr
      return
      end
371 |
372 |
c Stochastic search gamma|beta, sigma normal-inverted gamma prior

c subroutine samples the indicator vector gamma element by element
c conditional on beta and sigma. For each i the conditional log
c probability of the current value of gam(i) (numerator) is
c compared with that of the flipped value (denominator) via
c probgambet; the flip is accepted with probability
c probdenom(num,denom), otherwise gam(i) and the prior precision
c vubar are restored. The loop starts at i=2, so the first
c regressor is always kept (presumably the intercept - confirm
c against the caller).
c
c   beta  - (k) current regression coefficients
c   sig   - current value of sigma
c   vub   - (k) workspace used by probgambet
c   ldr   - log determinant of the prior correlation matrix r
c   vubar - (k,k) prior precision, updated in place
c   gam   - (k) indicator vector, updated in place
c   v     - (k,2) prior scale terms for gam(i)=0 / gam(i)=1
c   r     - (k,k) prior correlation matrix
c   ru    - (k) uniform random draws, one per element of gamma
c   k     - number of regressors
      subroutine ssregcbeta_nig(beta,sig,vub,ldr,vubar,gam,v,r,ru,k)
      implicit none
      integer k,gam(k),gami,i
      real*8 v(k,2),r(k,k),beta(k),sig,vub(k),vubar(k,k)
      real*8 ldr,pgamnum,pgamdenom,ru(k),probdenom,probgambet

cf2py intent(in) beta
cf2py intent(in) sig
cf2py intent(in) vub
cf2py intent(in) ldr
cf2py intent(in) vubar
cf2py intent(in) gam
cf2py intent(in) v
cf2py intent(in) r
cf2py intent(in) gam
cf2py intent(in) ru
cf2py intent(in) k

      do i=2,k
         if (gam(i).eq.1) then
c numerator: probability with regressor i included
c (gami=-1 tells probgambet that vubar is up to date)
            gami=-1
            pgamnum=probgambet(ldr,beta,vub,sig,vubar,v,r,gami,gam,k)
c denominator: flip regressor i out; probgambet refreshes vubar
            gam(i)=0
            gami=i
            pgamdenom=probgambet(ldr,beta,vub,sig,vubar,v,r,gami,gam,
     + k)
            pgamdenom=probdenom(pgamnum,pgamdenom)
c reject the flip by restoring gam(i) and the prior precision
            if (ru(i)>pgamdenom) then
               gam(i)=1
               call update_vubar(vubar,gam,v,r,gami,k)
            endif
         else
c numerator: probability with regressor i excluded
            gami=-1
            pgamnum=probgambet(ldr,beta,vub,sig,vubar,v,r,gami,gam,k)
c denominator: flip regressor i in; probgambet refreshes vubar
            gam(i)=1
            gami=i
            pgamdenom=probgambet(ldr,beta,vub,sig,vubar,v,r,gami,gam,
     + k)
            pgamdenom=probdenom(pgamnum,pgamdenom)
            if (ru(i)>pgamdenom) then
               gam(i)=0
               call update_vubar(vubar,gam,v,r,gami,k)
            endif
         endif
      enddo
      end
423 |
424 |
c function calculates, up to an additive constant, the conditional
c log probability of gamma given beta and sigma: half the log
c determinant of the prior precision vubar plus the quadratic form
c -beta'*vubar*beta / (2*sig**2).
c
c   ldr   - log determinant of the prior correlation matrix r
c   beta  - (k) current regression coefficients
c   vub   - (k) workspace; overwritten with vubar*beta
c   sig   - current value of sigma
c   vubar - (k,k) prior precision; refreshed in place when gami
c           indicates an element of gam has changed
c   v     - (k,2) prior scale terms
c   r     - (k,k) prior correlation matrix
c   gami  - index of the changed element of gam, or -1 if vubar is
c           already up to date
c   gam   - (k) indicator vector
c   k     - number of regressors
      real*8 function probgambet(ldr,beta,vub,sig,vubar,v,r,gami,gam,k)
      implicit none
      integer k,gam(k),gami
      real*8 beta(k),v(k,2),r(k,k),vubar(k,k),ldr,logdetvu
      real*8 vub(k),alpha,beta1,sig,ddot

      if (gami.ne.-1) then
         call update_vubar(vubar,gam,v,r,gami,k)
      endif

c vub = vubar * beta
      alpha=1.0
      beta1=0.0
      call dgemv('n',k,k,alpha,vubar,k,beta,1,beta1,vub,1)
c 0.5*log|vubar| - beta'*vubar*beta/(2*sig**2)
      probgambet=0.5*logdetvu(v,gam,ldr,k)-0.5/sig**2
     + *ddot(k,beta,1,vub,1)
      return
      end
443 |
444 |
445 |
446 |
447 |
--------------------------------------------------------------------------------
/Src/testreg.py:
--------------------------------------------------------------------------------
1 | from numpy import*
2 | from pymcmc.regtools import BayesRegression
3 |
4 | X=random.randn(100,3)
5 | beta = random.randn(3)
6 |
7 | y=dot(X,beta) + 0.3 * random.randn(100)
8 |
9 | breg=BayesRegression(y,X,prior=['normal_inverted_gamma', zeros(3), eye(3)*0.1, 10, 0.01])
10 | print breg.log_marginal_likelihood()
11 |
12 | breg=BayesRegression(y,X)
13 | print breg.log_marginal_likelihood()
14 |
15 | breg=BayesRegression(y,X, prior = ['g_prior', 0.0, 100.0])
16 | print breg.log_marginal_likelihood()
17 |
--------------------------------------------------------------------------------
/Src/timeseriesfunc.f:
--------------------------------------------------------------------------------
1 | c Function calculates the autocorrelation function
2 | c Copyright (C) 2010 Chris Strickland
3 |
4 | c This program is free software: you can redistribute it and/or modify
5 | c it under the terms of the GNU General Public License as published by
6 | c the Free Software Foundation, either version 3 of the License, or
7 | c (at your option) any later version.
8 |
9 | c This program is distributed in the hope that it will be useful,
10 | c but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | c MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | c GNU General Public License for more details.
13 |
14 | c You should have received a copy of the GNU General Public License
16 | c along with this program. If not, see <http://www.gnu.org/licenses/>.
16 |
17 |
18 | c acf is a Fortran77 subroutine for calculating the autocorrelation
19 | c function. The computation is done in the time domain.
20 |
21 | c ts = the timeseries of interest.
22 | c n = the number of observations.
23 | c corr = a vector of length nlag that stores the autocorrelation
24 | c function for the timeseries of interest on exit.
25 | c nlag = the number of lags to be used in the calculation of the
26 | c autocorrelation function.
27 |
28 | subroutine acf(ts,n,corr,nlag)
29 | integer n,nlag
30 | double precision ts(n), corr(nlag),mean, var
31 |
32 | cf2py intent(in) ts
33 | cf2py intent(in,out) corr
34 | cf2py intent(in) n
35 | cf2py intent(in) nlag
36 | cf2py depend(ts) n
37 | cf2py depend(corr) nlag
38 |
39 |
40 | mean=0.0
41 | do i=1,n
42 | mean=mean+ts(i)
43 | enddo
44 |
45 | mean=mean/dble(n)
46 |
47 |
48 | do i=1,nlag
49 | corr(i)=0.0
50 | do j=1,n-i
51 | corr(i)=corr(i)+(ts(j)-mean)*(ts(j+i-1)-mean)
52 | enddo
53 | enddo
54 | var=corr(1)/dble(n)
55 |
56 | do i=1,nlag
57 | corr(i)=corr(i)/(var*dble(n))
58 | enddo
59 | end
60 | c end of subroutine acf
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
--------------------------------------------------------------------------------
/Src/wishart.f:
--------------------------------------------------------------------------------
1 | c Functions for simulating from the Wishart distribution
2 | c Copyright (C) 2010 Chris Strickland
3 |
4 | c This program is free software: you can redistribute it and/or modify
5 | c it under the terms of the GNU General Public License as published by
6 | c the Free Software Foundation, either version 3 of the License, or
7 | c (at your option) any later version.
8 |
9 | c This program is distributed in the hope that it will be useful,
10 | c but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | c MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | c GNU General Public License for more details.
13 |
14 | c You should have received a copy of the GNU General Public License
15 | c along with this program. If not, see <http://www.gnu.org/licenses/>.
16 |
17 |
c subroutine constructs the random upper triangular matrix used to
c simulate from the Wishart distribution via its Cholesky
c decomposition, then forms r = c * u, where c is supplied by the
c caller. See Strickland et al (2009); called from regtools.py in
c PyMCMC.
c
c   rn - p*(p-1)/2 standard normal draws (strict upper triangle)
c   rc - p draws whose square roots form the diagonal of u
c        (presumably chi-square variates - confirm in regtools.py)
c   u  - (p,p) workspace receiving the random triangular factor
c   c  - (p,p) matrix multiplied on the left of u
c   r  - (p,p) result, overwritten with c*u
      subroutine chol_wishart(rn,rc,u,c,r,p)
      implicit none
      integer p,row,col,idx
      real*8 rc(p),rn(p*(p-1)/2),u(p,p),c(p,p),r(p,p)
      real*8 one, zero

cf2py intent(in) rc
cf2py intent(in) rn
cf2py intent(in) u
cf2py intent(in) c
cf2py intent(inout) r
cf2py intent(in) p

c fill u row by row: zeros below the diagonal, sqrt(rc) on it and
c the normal draws above it
      idx=1
      do row=1,p
         do col=1,row-1
            u(row,col)=0.0
         enddo
         u(row,row)=sqrt(rc(row))
         do col=row+1,p
            u(row,col)=rn(idx)
            idx=idx+1
         enddo
      enddo

      one=1.0
      zero=0.0

      call dgemm('n','n',p,p,p,one,c,p,u,p,zero,r,p)
      end
52 |
c routine used to sample from the Wishart distribution. It fills u
c with a random upper triangular factor (square roots of rc on the
c diagonal, the normal draws rn above it), keeps a copy in uc, and
c then overwrites u with
c     uc' * inv(so) * inv(so') * u0
c where so has been replaced by its upper Cholesky factor. The rc
c draws are presumably chi-square variates - confirm against the
c caller in regtools.py.
c
c   rn  - p*(p-1)/2 standard normal draws
c   rc  - p draws; their square roots form the diagonal of u
c   u   - (p,p) workspace; holds the result on exit
c   uc  - (p,p) workspace holding the copy of the random factor
c   so  - (p,p) scale matrix; overwritten with its Cholesky factor
c   ifo - info flag from dpotrf (0 on success)
c   p   - dimension
      subroutine chol_wishart2(rn,rc,u,uc,so,ifo,p)
      implicit none
      integer p,i,j,k,ifo
      real*8 rc(p),rn(p*(p-1)/2),u(p,p),so(p,p)
      real*8 uc(p,p)
      real*8 alpha

cf2py intent(in) rc
cf2py intent(in) rn
cf2py intent(inout) u
cf2py intent(in) uc
cf2py intent(in) c
cf2py intent(in) so
cf2py intent(in) p

c build the random upper triangular factor
      k=1
      do i=1,p
         u(i,i)=sqrt(rc(i))
         do j=i+1,p
            u(i,j)=rn(k)
            k=k+1
         enddo
         do j=1,i-1
            u(i,j)=0.0
         enddo
      enddo

c copy u into uc column by column. (A per-element diagonal copy
c that previously ran inside the loop above was redundant, since
c these dcopy calls overwrite all of uc, and has been removed.)
      do j=1,p
         call dcopy(p,u(:,j),1,uc(:,j),1)
      enddo

c factor so, then apply the two triangular solves and the final
c triangular multiply by the saved copy
      call dpotrf('u',p,so,p,ifo)
      alpha=1.0
      call dtrsm('l','u','t','n',p,p,alpha,so,p,u,p)
      call dtrsm('l','u','n','n',p,p,alpha,so,p,u,p)
      call dtrmm('l','u','t','n',p,p,alpha,uc,p,u,p)
      end
92 |
93 |
94 |
95 |
96 |
c subroutine constructs the second shape parameter for the Wishart
c distribution from the residuals:
c     so = su + sum over t of res(t,:)' * res(t,:)
      subroutine calc_sobar(su,so,res,p,n)
      implicit none
      integer n,p,t,i,j
      real*8 su(p,p),so(p,p),res(n,p)
      real*8 one

cf2py intent(in) su
cf2py intent(inout) so
cf2py intent(in) res
cf2py intent(in) p
cf2py intent(in) n

c start from the prior shape matrix su
      do j=1,p
         do i=1,p
            so(i,j)=su(i,j)
         enddo
      enddo

c accumulate the outer product of each residual row with a BLAS
c rank-one update
      one=1.0
      do t=1,n
         call dger(p,p,one,res(t,:),1,res(t,:),1,so,p)
      enddo
      end
120 |
121 |
122 |
123 |
124 |
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
1 | ## Figures out which function we expose
2 | ## eg
3 | ## from stream_function_wrapper import zm_msf
4 |
--------------------------------------------------------------------------------
/data/count.txt:
--------------------------------------------------------------------------------
1 | Shootno Week
2 | 1 1
3 | 2 2
4 | 1 3
5 | 3 4
6 | 7 5
7 | 7 6
8 | 9 7
9 | 8 8
10 | 12 9
11 | 2 1
12 | 2 2
13 | 2 3
14 | 5 4
15 | 5 5
16 | 9 6
17 | 8 7
18 | 10 8
19 | 11 9
20 | 1 1
21 | 2 2
22 | 3 3
23 | 3 4
24 | 2 5
25 | 8 6
26 | 11 7
27 | 11 8
28 | 13 9
29 | 2 1
30 | 1 2
31 | 2 3
32 | 5 4
33 | 3 5
34 | 10 6
35 | 5 7
36 | 6 8
37 | 9 9
38 | 2 2
39 | 2 4
40 | 14 6
41 | 9 8
42 | 6 10
43 | 15 12
44 | 25 14
45 | 22 16
46 | 19 18
47 | 1 2
48 | 2 4
49 | 11 6
50 | 11 8
51 | 15 10
52 | 17 12
53 | 27 14
54 | 28 16
55 | 29 18
56 | 3 2
57 | 3 4
58 | 9 6
59 | 12 8
60 | 14 10
61 | 17 12
62 | 21 14
63 | 22 16
64 | 22 18
65 | 2 2
66 | 2 4
67 | 6 6
68 | 17 8
69 | 16 10
70 | 16 12
71 | 22 14
72 | 23 16
73 | 24 18
74 | 3 1
75 | 3 1
76 | 3 1
77 | 2 1
78 | 2 2
79 | 3 2
80 | 2 2
81 | 1 2
82 | 12 4
83 | 10 4
84 | 5 4
85 | 16 4
86 | 16 6
87 | 14 6
88 | 15 6
89 | 11 6
90 | 18 8
91 | 16 8
92 | 11 8
93 | 40 12
94 | 15 12
95 | 31 12
96 | 37 12
97 | 24 12
98 | 29 12
99 | 30 12
100 | 32 12
101 | 3 1
102 | 1 1
103 | 3 1
104 | 1 1
105 | 4 2
106 | 2 2
107 | 1 2
108 | 2 2
109 | 8 4
110 | 9 4
111 | 11 4
112 | 6 4
113 | 15 6
114 | 14 6
115 | 9 6
116 | 14 6
117 | 23 8
118 | 10 8
119 | 11 8
120 | 44 12
121 | 27 12
122 | 32 12
123 | 24 12
124 | 23 12
125 | 36 12
126 | 30 12
127 | 24 12
128 | 2 1
129 | 3 1
130 | 2 1
131 | 2 1
132 | 6 2
133 | 3 2
134 | 2 2
135 | 2 2
136 | 12 4
137 | 4 4
138 | 5 4
139 | 12 4
140 | 11 6
141 | 12 6
142 | 14 6
143 | 10 6
144 | 13 8
145 | 10 8
146 | 11 8
147 | 28 12
148 | 27 12
149 | 29 12
150 | 29 12
151 | 28 12
152 | 28 12
153 | 26 12
154 | 2 1
155 | 1 1
156 | 2 1
157 | 2 1
158 | 2 2
159 | 3 2
160 | 1 2
161 | 1 2
162 | 5 4
163 | 6 4
164 | 11 4
165 | 9 4
166 | 7 6
167 | 10 6
168 | 26 6
169 | 10 6
170 | 3 8
171 | 14 8
172 | 15 8
173 | 34 12
174 | 26 12
175 | 35 12
176 | 48 12
177 | 27 12
178 | 30 12
179 | 1 1
180 | 2 1
181 | 1 2
182 | 3 2
183 | 3 3
184 | 2 3
185 | 6 4
186 | 6 4
187 | 3 5
188 | 4 6
189 | 8 7
190 | 13 8
191 | 1 1
192 | 4 1
193 | 1 2
194 | 2 2
195 | 3 3
196 | 4 3
197 | 5 4
198 | 4 4
199 | 3 5
200 | 3 6
201 | 6 7
202 | 15 8
203 | 1 1
204 | 2 1
205 | 1 2
206 | 4 2
207 | 1 3
208 | 4 3
209 | 6 4
210 | 9 4
211 | 3 5
212 | 7 6
213 | 5 7
214 | 14 8
215 | 1 1
216 | 2 1
217 | 1 2
218 | 2 2
219 | 1 3
220 | 4 3
221 | 4 4
222 | 12 4
223 | 2 5
224 | 8 6
225 | 8 7
226 | 6 8
227 |
--------------------------------------------------------------------------------
/data/yld2.txt:
--------------------------------------------------------------------------------
1 | 2.22 1.82 26.72 14.68 132 390 205 213 1.72 5.1 5.92 2.83 3.09 58 5 52 2 82 126 44 30 156 14.3 11.8 0 209 0.46 3 2 1.5 3.6 9.7 9.7 6 12 21 0 1 3 6 0 0 0 0 0 0 0 0 0 18 42 84 50 1 3 6 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 4.91 0 1.13 18.95 8 1.01 5.75 1.08 0.02 9 5.34 5.55 0.6 19.62 27.41 0.11 0.02 48.08 51.41 0.92 5 0.87 5.49 11.05 0.08 0.07 48.72 50.19 0.68 1 0.4 6.6 5 0.04 0.03429 33.4 65.8 0.8 1 0.2 7.3 5 0.08 0.024363 42.3 57.5 0.2 2 0.2 7.5 5 0.09 0.02 43.2 56.1 0.7 1 60 64 15 14 132 104 12 15 130 100 60 16 177 10 1 4 72 46 46 82 47 50 15.79 0 0 0 32.3 25.73 9.31 4.63 4.53 76.5
2 | 1.57 3.84 18 4.69 55 260 210 199 1.25 1.93 3.26 1.16 2.1 85 5 30 1 63 107 44 32 139 0 0 2 217 0.32 2 1 2 4.6 4.6 10.5 5 5 19 2 2 2 7 0 2 2 0 0 0 0 0 0 12 24 54 27 1 1 6 0 0 0 1 0 0 0 0 0 0 0 0 0 0 2 7.22 0 1.01 9.48 8 0.87 19.05 3.51 0.9 12 5.43 20.4 0.4 18.47 37.82 0.1 0.37 43.13 56.91 0.47 2 0.79 7.58 2.52 0.17 0.06 43.67 55.69 0.13 8 0.6 8.2 5 0.46 0.05195 57.2 42.5 0.3 8 0.4 7.8 5 1.66 0.032022 59.2 40.2 0.6 8 0.2 7.9 5 0.61 0.015 57.5 41.7 0.8 0 93 94 27 1 290 34 18 27 78 33 84 53 170 0 0 0 10 5 3 10 5 3 35.24 22.54 0 0 14.12 14.16 25.66 8.98 9.01 71.92
3 | 3.54 3.12 17.28 5.54 82 567 498 440 2.48 9.05 8.67 3.69 4.98 59 5 30 1 89 132 43 27 159 10.8 12 0 264 0.45 2 3 0.67 0 3.8 3.8 0 9 9 0 0 3 3 0 0 0 1 1 6 0 0 0 12 30 30 21 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 7.85 0 1.62 13.69 8 0.89 30.65 3.5 0.71 61 8.76 28.97 0.9 6.55 23.25 0.13 0.26 40.75 58.41 1.28 2 1.18 8.15 6.31 0.14 0.1 41.25 57.19 1.12 2 1.3 8.1 5 0.38 0.11377 39 59 2 2 0.9 8.1 5 0.64 0.05117 40 56.1 3.9 2 0.8 7.7 5 2.48 0.013 51.9 42.4 5.7 0 118 121 25 1 259 95 13 8 116 92 108 33 232 0 0 0 6 0 0 6 0 0 36.51 30 0 0 72.44 50.6 62.77 12.84 8.93 207.58
4 | 1.91 3.94 10.66 2.71 70 398 240 274 2.13 3.98 4.67 1.88 2.79 60 6 35 2 66 121 55 47 168 0 11.4 2 115 0.24 1 0 1 3.6 3.6 6.9 2 2 9 1 1 1 3 1 4 6 0 0 0 0 0 0 6 6 30 30 1 1 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 7.64 0 1.37 12.64 1 0.96 23.54 4.37 0.38 19 5.39 24.02 0.5 7.24 19.09 0.12 0.21 45.61 54.31 0.38 5 1.03 7.96 5.36 0.13 0.08 46.19 53.09 0.02 5 0.7 8.1 5 0.21 0.06078 54.9 44.6 0.5 5 0.3 8.4 5 0.38 0.028193 77.3 22.3 0.4 5 0.2 8.6 5 0.82 0.011 76.7 23.1 0.2 1 31 62 17 1 128 71 41 29 141 39 86 46 171 11 6 2 30 18 23 41 24 25 0 18.43 0 0 32 25.61 9.53 4.77 14.07 85.97
5 | 2.44 3.76 14.8 3.94 155 465 392 348 2.64 4.96 6.32 2.84 3.48 79 4 45 6 64 118 54 41 159 12.6 14 2 169 0.24 0 1 0 3 3 11.1 3 5 13 1 1 2 6 0 2 2 1 1 2 0 0 0 6 12 36 70 1 1 4 1 2 3 0 0 0 0 0 0 0 0 0 0 0 5 6.9 0 1.5 27.36 1 1.43 22.53 3.02 0.02 6 7.45 21.22 0.9 30.16 50.3 0.12 0.35 33.82 63.2 3.27 5 1.1 7.3 18.64 0.16 0.09 34.18 62 3.53 5 1.1 7.7 5 0.14 0.09611 26 70.4 3.6 5 0.3 7.5 5 0.11 0.028193 34.5 64.1 1.4 5 0.1 7.4 5 0.06 0.024 40.1 58 1.9 1 62 59 5 2 262 122 4 41 167 125 58 44 227 48 48 48 40 46 48 88 94 96 35 13.31 0 0 25 21.81 34.07 8.67 8.72 98.27
6 | 0 2.42 27.9 11.53 76 456 192 0 1.45 2.11 2.11 0 0 57 6 28 11 85 106 21 38 144 5.9 6.6 1 122 0.11 3 0 3 1.6 1.6 6.5 5 5 12 1 1 1 5 0 3 4 0 0 0 0 0 0 18 18 42 11 1 1 5 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 5.02 0 0.76 10.53 1 1.22 4.69 1.38 0.16 11 3.4 5.55 0.3 15.26 27.41 0.09 0.01 35.11 62 3.18 5 0.64 5.58 3.47 0.07 0.05 35.49 60.8 3.42 5 0.5 6.8 5 0.08 0.04312 34.9 57 8.1 5 0.2 5.8 5 0.03 0.024363 37.2 54.2 8.5 5 0.2 6.3 5 0.04 0.01 16.4 43.5 40.1 0 11 0 0 0 98 35 5 14 54 45 5 14 65 4 0 0 4 0 0 8 0 0 30 22 0 0 11.05 12.17 9.89 9.56 4.81 47.48
7 | 1.13 2.92 22.7 7.77 90 312 176 180 0.87 1.27 2.61 1.02 1.56 76 3 25 7 81 120 39 29 149 3.3 4 0 107 0.3 2 0 3 1 1 1.6 2 2 2 2 2 2 2 0 3 6 0 0 0 0 0 0 12 12 18 20 2 2 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 4.6 0.03 0.64 8.43 2 0.84 4.38 0.87 0.02 5 5.04 4.07 0.3 12.74 19.09 0.08 0.03 42.64 57.31 0.56 1 0.56 5.2 1.57 0.07 0.05 43.16 56.09 0.24 1 0.4 4.4 5 0.03 0.03429 30.6 68.2 1.2 1 0.3 7.8 5 0.12 0.028193 54 44.9 1.1 1 0.1 8 5 0.11 0.022 73.9 25.9 0.2 0 68 42 3 0 228 34 26 21 81 60 65 23 149 5 2 4 8 9 8 13 11 12 35.24 32.83 0 0 16.9 15.9 4.78 9.18 8.7 55.47
8 | 3.88 3 12.52 4.17 99 629 409 424 1.41 4.04 7.91 2.91 5 86 5 35 1 90 118 28 41 159 27.6 0 0 121 0.25 3 1 3 4 4 5 6 9 17 2 2 2 4 0 2 5 0 0 0 0 0 0 12 24 36 35 2 2 3 0 0 0 0 0 1 0 0 0 0 0 0 0 0 2 5.54 0 1.74 111.49 4 2.03 21.23 3.54 0.05 5 5.99 21.63 1.3 29.24 66.95 0.13 0.04 55.42 44.51 0.47 2 1.26 6.06 94.51 0.08 0.1 56.19 43.29 0.13 2 1.3 6 78 0.07 0.11377 61.6 38.4 0 2 1 6.4 63 0.07 0.054999 78 21.7 0.3 2 0.8 7.2 56 0.1 0.022 73.2 26.4 0.4 2 131 95 15 3 534 53 24 34 111 90 104 46 239 13 15 13 3 4 7 16 19 20 21.56 30.72 0 0 12.36 12.18 25.06 25.08 17.25 91.93
9 | 4.3 3.1 16.02 5.17 212 933 714 490 3.15 5.39 11 5.13 5.87 50 6 55 1 90 148 58 28 176 32 10 0 901 0.92 4 1 4 0 4 4 3 7 11 0 0 1 4 0 0 0 0 0 0 0 0 1 24 48 84 32 0 1 4 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 6.9 0 1.01 12.64 2 0.83 15.11 4.58 0.38 6 3.3 17.92 0.2 6.78 25.33 0.1 0.14 57 42.81 0.56 2 0.79 7.3 5.36 0.11 0.06 57.8 41.59 0.24 2 0.6 8.1 5 0.19 0.05195 74.7 25.9 1.4 2 0.4 8.4 5 0.25 0.032022 81.8 18.2 0 11 0.1 8.5 5 0.31 0.017 81.9 17.3 0.8 1 96 105 14 0 105 133 21 13 167 124 111 27 263 89 0 0 0 2 1 89 2 1 27.13 24.12 0 0 61.56 44.28 26.2 13 17.74 162.79
10 | 2.39 3.24 14.94 4.61 167 812 403 412 1.62 5.45 6.1 2.7 3.41 59 7 40 2 74 133 59 29 162 39.1 0 7 90 0.44 1 1 1 3.4 3.4 3.4 4 4 4 1 1 4 9 0 0 0 0 0 0 0 0 0 18 36 66 13 1 4 9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 7.43 0 1.01 10.53 2 1.46 24.37 3.1 0.38 5 7.86 23.61 0.5 7.24 17.01 0.1 0.21 45.01 55.01 0.47 2 0.79 7.77 3.47 0.13 0.06 45.59 53.79 0.13 6 0.5 8 5 0.23 0.04312 61.4 38.3 0.3 6 0.4 8.2 5 0.41 0.032022 68.4 31.3 0.3 8 0.2 8.1 5 0.83 0.016 74.1 25.6 0.3 1 95 102 1 0 215 91 15 35 140 84 115 36 235 7 0 3 1 0 0 8 0 3 15.45 47.7 0 0 24.94 21.72 47.26 44.58 22.24 160.75
11 | 2.75 3.22 20.16 6.26 103 363 315 331 0.61 3.83 6.27 2.54 3.73 84 5 40 1 57 112 55 15 127 5.6 12.3 3 296 0.16 2 1 2 0 0 6.5 0 0 10 0 0 0 4 2 5 6 0 0 0 0 0 0 0 0 18 57 0 0 3 0 0 1 0 0 0 0 0 0 0 0 0 0 0 2 5.22 0 1.62 39.98 2 1.64 13.59 3.29 0.05 5 4.13 15.04 0.7 31.3 58.63 0.13 0.01 55.42 40.01 4.54 2 1.18 5.78 30.02 0.07 0.1 56.19 38.79 5.06 5 0.5 6.9 5 0.07 0.04312 64.5 34.3 1.2 5 0.4 7.1 5 0.07 0.032022 64.5 33.9 1.6 5 0.3 7.7 5 0.12 0.02 66.9 31.7 1.4 1 64 53 21 11 409 95 34 31 159 105 67 40 212 0 0 0 0 0 0 0 0 0 23.5 19.46 0 0 12.37 13.15 18.65 4.59 4.44 53.2
12 | 1.72 4.24 11.8 2.78 135 476 397 286 1.46 5.45 4.39 1.94 2.46 57 9 40 10 72 109 37 41 150 7.2 8 0 228 0.26 3 0 3 0 1.2 8.5 1 1 1 0 0 2 7 0 0 0 2 3 3 0 0 0 0 12 42 10 0 2 7 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 7.74 0 1.62 8.43 3 1.21 24.35 4.88 0.42 17 4.99 25.43 0.7 6.09 19.09 0.13 0.3 61.16 38.31 0.83 2 1.18 8.06 1.57 0.15 0.1 62.04 37.09 0.57 2 0.6 8.3 5 0.31 0.05195 79.8 19.4 0.8 2 0.5 8.4 5 0.57 0.035852 76.9 22.7 0.4 2 0.3 8 5 2.08 0.015 85.3 14.4 0.3 1 56 78 15 1 216 101 12 25 138 78 75 39 192 11 4 4 42 95 11 53 99 15 22.06 17.76 22.06 0 120.96 80.85 20.21 8.09 21.12 251.22
13 | 2.83 3.9 11.2 2.87 88 545 417 424 1.05 6.91 7.16 2.96 4.2 62 7 45 10 57 128 71 41 169 37.1 8 0 102 0.29 2 1 2 15.4 31.6 35.3 4 8 8 1 1 3 5 0 0 0 0 0 0 0 0 1 18 42 84 15 1 3 5 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 7.43 0 1.5 8.43 3 1 31.43 5.33 0.33 8 5.9 31.44 0.4 6.32 19.09 0.12 0.25 52.94 46.21 1.19 2 1.1 7.77 1.57 0.14 0.09 53.66 44.99 1.01 2 1 8.1 5 0.25 0.08728 62.8 36.1 1.1 2 0.7 8.2 5 0.28 0.043511 53.1 46.2 0.7 8 0.2 8.4 5 0.29 0.01 31.3 68 0.7 1 115 119 2 0 401 137 18 14 169 134 135 16 284 2 0 0 2 1 0 4 1 0 14.77 56.79 0 0 60.9 43.53 47.68 13.03 4.54 169.69
14 | 0 3.08 19.46 6.32 61 481 340 0 1.47 1.37 1.37 0 0 63 4 28 3 85 129 44 30 159 0 0 1 153 0.14 1 1 1 3 6.3 11.5 6 15 24 1 1 4 7 0 0 0 0 0 0 0 0 0 18 36 54 8 1 4 7 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 7.53 0 0.76 13.69 6 1.17 26.94 5.48 0.75 31 4.91 28.97 0.7 6.32 35.74 0.09 0.3 50.07 50.11 0.29 2 0.64 7.87 6.31 0.15 0.05 50.73 48.89 0.09 6 0.4 8.1 9 0.26 0.03429 62.7 36.5 0.8 6 0.3 8 7 0.39 0.028193 75.1 24.2 0.7 11 0.2 7.7 11 2.09 0.017 76.5 22.9 0.6 1 24 27 0 0 164 48 11 43 102 45 38 43 126 4 0 0 3 0 0 7 0 0 35.24 32.77 0 0 63.11 44.76 20.08 20.86 18.46 167.26
15 | 0 3.56 12.4 3.48 88 641 379 0 0.64 1.84 1.84 0 0 63 7 31 1 58 112 54 28 140 0 0 1 121 0.13 2 1 2 7.8 10.4 10.4 5 8 8 3 2 3 3 0 0 0 0 0 0 0 0 0 12 18 18 3 2 3 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 7.01 0 1.01 55.75 3 1.35 15.91 2.41 0.29 32 6.61 15.7 0.9 17.55 46.14 0.1 0.37 45.01 51.81 3.36 5 0.79 7.39 44.25 0.17 0.06 45.59 50.59 3.64 5 0.6 8.2 8 0.32 0.05195 57.9 42 0.1 2 0.4 8.3 15 0.71 0.032022 53.4 46.5 0.1 2 0.4 8.1 22 1.14 0.011 65.2 33.6 1.2 1 32 35 0 0 237 58 0 40 97 55 34 40 129 50 0 0 0 0 0 50 0 0 16.95 16 0 0 65.43 47.26 23.95 4.87 28.82 170.33
16 | 2.28 2.48 20.88 8.42 132 707 330 350 2.55 6.24 6.1 2.83 3.27 57 4 37 10 84 127 43 35 162 10.8 12 2 269 0.29 3 0 3 0 0 0 1 1 10 0 0 0 2 1 4 7 0 0 0 0 0 0 12 12 24 22 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 4.6 0.08 1.25 16.84 5 0.45 4.6 3.46 0.45 15 1.33 7.94 0.5 15.03 69.03 0.11 0.09 51.95 47.41 1.01 5 0.95 5.2 9.16 0.09 0.08 52.65 46.19 0.79 5 0.3 6 5 0.11 0.02546 60.9 38.1 1 5 0.2 7.1 5 0.28 0.024363 65 34.3 0.6 5 0.3 4.4 5 0.56 0.02 70.4 29.3 0.3 1 45 42 3 3 155 88 22 30 139 90 60 30 181 16 4 0 4 3 0 20 7 0 27.13 0 0 0 52.93 39.36 14.12 4.7 13.7 124.8
17 | 1.26 3.82 9.44 2.47 57 221 179 182 0.33 1.31 2.67 0.89 1.78 94 4 40 5 77 106 29 34 140 5 11 1 310 0.19 4 1 4 0 4.8 8.6 1 7 10 0 0 2 4 0 0 0 1 1 3 0 0 0 12 36 60 29 0 2 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 5.54 0 0.88 8.43 5 0.43 6.81 3.81 0.36 10 1.79 9.92 0.3 7.47 37.82 0.1 0.04 45.11 53.01 2.09 5 0.72 6.06 1.57 0.08 0.06 45.69 51.79 2.11 5 0.6 5.7 5 0.07 0.05195 65.2 25.4 9.4 5 0.3 6.1 5 0.11 0.028193 72.8 26.4 0.8 5 0.4 4.9 5 0.39 0.015 73.8 25.9 0.3 0 58 41 9 2 313 71 2 38 111 88 34 44 166 0 0 0 0 0 0 0 0 0 34.94 24.5 0 0 9.72 10.82 13.38 4.47 4.18 42.57
18 | 1.96 3.52 18.56 5.27 148 787 338 284 1.82 4.29 5.34 2.56 2.78 57 7 60 12 71 141 70 29 170 5 11 0 269 0.84 1 2 0.5 1.8 6.9 14.9 2 6 11 0 1 2 5 0 0 0 0 0 0 0 1 1 18 44 86 71 1 1 2 0 1 1 0 0 2 0 1 1 0 0 0 0 0 5 4.7 0.01 0.76 8.43 7 0.78 4.58 0.83 0.02 5 5.51 4.15 0.7 15.95 31.58 0.09 0.03 55.71 43.11 1.55 5 0.64 5.3 1.57 0.07 0.05 56.49 41.89 1.45 1 0.3 6.2 5 0.03 0.02546 56.6 41.3 2.2 1 0.2 5.4 5 0.06 0.024363 59.9 36.5 3.6 1 0.2 7.2 5 0.04 0.013 63.8 33.5 2.7 1 98 104 10 0 118 145 16 18 179 140 110 27 277 160 48 8 1 0 3 161 48 11 32.83 14.06 0 0 13.67 13.8 44.07 9.61 9.52 90.66
19 | 2.15 2.62 17.08 6.52 110 338 256 286 1.01 4.61 6.42 3.41 3.02 38 5 45 1 77 141 64 39 180 19.2 6 0 235 0.43 2 1 2 0 1.4 2.4 2 10 20 0 1 3 7 1 1 1 0 0 0 0 1 1 12 32 68 61 0 0 3 0 0 1 1 1 1 0 0 0 0 0 1 0 1 7 7.74 0 1.62 14.74 6 1 31.24 4.13 0.5 11 7.57 30.04 0.6 6.09 10.76 0.13 0.23 72.06 27.32 0.83 7 1.18 8.06 7.26 0.13 0.1 73.14 26.08 0.57 2 1.1 8.1 5 0.25 0.09611 79 20.8 0.2 2 0.5 8.3 5 0.68 0.035852 82 17.5 0.5 2 0.5 7.6 5 2.39 0.014 77.6 21.8 0.6 1 79 114 30 2 183 196 34 31 260 160 118 58 336 21 4 5 11 2 0 32 6 5 18.63 30.4 0 0 38.29 29.54 13.04 8.54 8.66 98.06
20 | 4.41 2.28 25.92 11.37 160 664 367 422 2.83 10.07 13.31 7.14 6.17 40 8 40 1 89 159 70 34 193 8 6.6 0 264 0.33 4 1 4 4.9 4.9 4.9 3 3 17 0 1 1 5 0 3 3 0 0 0 0 0 1 18 18 48 61 0 1 4 0 1 2 1 0 0 0 0 0 0 0 0 1 1 2 4.81 0 1.01 10 5 0.84 4.78 0.99 0.02 5 4.81 4.56 0.2 15 37 0.1 0.05 31.9 67 1.1 2 0.79 5.39 8 0.07 0.06 36.3 56.3 7.5 2 0.3 6.2 5 0.03 0.02546 35.4 50.9 13.7 8 0.3 7.6 6 0.13 0.028193 39.7 49.1 11.2 8 0.2 6.7 7 0.06 0.02 46.3 42.3 11.4 1 93 89 15 30 284 275 26 43 343 279 99 28 406 3 1 1 20 10 14 23 11 15 15.12 34.33 0 0 22.22 19.74 36.49 8.07 8.21 94.73
21 | 3.37 2.26 23.52 10.41 96 314 245 248 1.4 4.05 7.2 2.68 4.52 62 6 45 2 93 135 42 49 184 9 10 0 183 0.5 0 1 0 2.4 2.4 3.9 3 3 5 1 1 1 3 0 2 5 0 0 0 1 1 1 12 12 24 91 1 1 3 0 0 0 0 0 0 0 0 0 1 1 1 0 0 5 5.85 0 1.13 9.48 7 1.87 11.96 2.02 0.02 5 5.93 12.23 0.6 17.78 29.49 0.11 0.02 64.03 30.92 5.35 5 0.87 6.35 2.52 0.08 0.07 64.97 29.69 6.05 1 0.4 6.4 5 0.05 0.03429 67.9 26 6.1 1 0.3 6.7 5 0.04 0.028193 73.2 23.4 3.4 1 0.2 6.9 5 0.04 0.026 80.6 17.8 1.5 2 76 69 21 2 151 143 70 32 245 150 117 52 319 0 17 9 4 10 4 4 27 13 24.43 13.81 0 24.95 21.83 18.91 27.06 17.88 8.57 94.25
22 | 1.58 2.96 15.14 5.11 74 334 269 289 1.14 3.32 4.42 2.1 2.32 79 5 46 8 79 119 40 33 152 5.9 6.6 0 282 0.35 2 1 2 0 0 3 0 0 3 0 0 0 2 1 4 6 0 0 0 0 0 0 12 12 24 40 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 5 4.7 0 0.88 9.48 5 1.08 5.61 1.28 0.02 5 4.37 5.72 0.7 21.45 37.82 0.1 0.06 54.72 45.41 0.29 5 0.72 5.3 2.52 0.07 0.06 55.48 44.19 0.09 5 0.4 5.3 5 0.03 0.03429 55.6 44.3 0.1 5 0.3 8 5 0.14 0.028193 64.9 34.8 0.2 5 0.3 6.8 5 0.04 0.01 69.4 30.5 0.1 1 70 38 3 1 200 47 16 44 107 80 51 45 175 5 7 4 4 1 1 9 8 5 15.68 20.09 0 0 13.45 13.47 32.37 13.74 9.08 82.1
23 | 0 3.2 18.5 5.78 76 485 227 0 1.02 1.48 1.48 0 0 55 7 36 13 85 126 41 41 167 3 6.6 0 145 0.19 1 0 1 6.4 6.4 10.7 2 2 9 0 1 1 4 0 0 0 0 0 0 0 0 0 6 6 24 10 1 1 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 5.85 0 0.76 20 6 2.07 7.97 1.63 0.05 5 4.88 8.85 0.7 19.39 29.49 0.09 0.04 55.71 43.01 1.55 5 0.64 6.35 12 0.08 0.05 56.49 41.79 1.45 5 0.5 5.6 5 0.04 0.04312 61.2 37.7 1.1 5 0.1 7 5 0.04 0.020534 43.9 56 0.1 5 0.1 8 5 0.15 0.01 45.9 54 0.1 0 23 2 0 0 120 28 12 89 128 49 14 89 151 3 0 0 8 0 0 11 0 0 25 14 0 0 22.94 19.78 14.3 9.5 9.4 75.93
24 | 2.41 2 19.76 9.88 92 371 315 277 2.24 5.54 7.39 3.72 3.67 41 6 45 2 74 110 36 60 170 12.6 14 1 118 0.45 1 1 1 3.8 7.7 13.8 1 1 1 1 1 2 5 1 3 3 0 0 0 0 0 0 6 12 54 40 1 1 3 0 0 0 0 1 2 0 0 0 0 0 0 0 0 5 5.02 0 0.76 11.59 6 0.62 4.96 1.52 0.12 5 3.27 5.39 0.6 13.89 35.74 0.09 0.02 50.46 48.51 1.37 5 0.64 5.58 4.41 0.08 0.05 51.14 47.29 1.23 5 0.3 7.1 5 0.07 0.02546 60 38.8 1.2 5 0.2 8.1 5 0.18 0.024363 58.8 40.5 0.7 5 0.3 8.6 5 0.34 0.012 67 28.8 4.2 2 73 88 28 0 160 190 35 23 248 175 95 51 320 142 2 0 36 0 0 178 2 0 25.99 15.45 0 0 29.27 24.19 30.83 17.88 13.16 115.33
25 | 1.71 3.36 20.98 6.24 126 843 309 322 1.18 2.82 4.11 1.74 2.36 89 6 30 5 67 103 36 42 145 4 3.3 0 120 0.38 2 3 0.67 1.1 4.7 4.7 5 13 22 0 1 2 7 0 0 0 0 0 0 1 3 3 12 30 60 20 1 2 7 0 0 0 0 0 0 1 3 3 0 0 0 0 0 6 6.38 0 0.88 27 5 0.87 16.35 6.4 0.6 9 2.55 21.55 0.3 10 58 0.1 0.13 66.81 32.31 1.1 6 0.72 6.82 12 0.1 0.06 67.79 31.09 0.9 5 0.3 8.2 5 0.27 0.02546 64.2 33.1 2.7 8 0.2 7.6 5 1.16 0.024363 63.8 32.9 3.2 8 0.2 7.9 5 0.52 0.015 63.8 30.5 5.5 0 118 105 15 1 308 33 22 35 89 45 111 50 206 2 0 0 0 0 0 2 0 0 33.9 18.43 0 0 22.32 19.92 55.99 21.79 17.33 137.34
26 | 2.17 4.31 11.4 2.65 103 268 235 183 0.67 4.43 5.12 2.22 2.91 42 4 30 2 77 141 64 55 196 13.5 15 0 102 0.21 3 1 3 1.6 7.7 21 6 12 24 1 1 3 7 0 0 0 0 0 0 0 0 0 18 42 78 50 1 3 7 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 5.2 0 0.6 16 4 1.4 5.3 1.3 0.05 8 4.1 8.1 0.2 10 24 0.12 0.03 77.39 30.15 2.46 5 0.7 6.9 10 0.03 0.06 71.14 28.48 0.38 1 0.4 7.1 8 0.04 0.059 79.12 20.55 0.33 1 0.4 7.1 6 0.05 0.06 77.59 22.04 0.37 1 0.2 7.4 5 0.06 0.06 67.38 32.38 0.24 0 39 26 12 31 184 45 90 101 236 57 105 82 244 7 3 4 52 57 36 59 61 40 29.13 16.54 0 0 9.24 7.04 13.29 8.75 8.65 46.98
27 | 2.25 4.4 8.47 1.92 77 245 232 208 1.21 3.61 4.54 1.53 3.01 94 5 29 1 66 113 47 33 146 0 0 1 194 0.84 2 0 2 4.2 4.2 8.6 5 9 9 3 2 3 7 0 1 2 0 0 0 0 0 0 12 30 54 30 1 3 6 0 0 0 1 1 1 0 0 0 0 0 0 0 0 5 7.1 0 0.8 10 2 0.7 15.8 4 1.11 19 4 21.7 0.5 10 27 0.12 0.14 80.43 19.51 0.6 6 0.9 7.8 5 0.19 0.13 70.2 29 0.8 6 0.7 8.1 5 0.33 0.11 77.3 22.5 0.2 6 0.4 7.9 5 1.53 0.07 74.3 25.3 0.3 5 0.2 8 5 1.32 0.05 73.6 26 0.4 2 98 88 37 15 116 97 24 71 192 106 76 93 274 10 4 2 60 22 2 70 22 4 21.39 30.13 0 4.44 19.23 32.15 38.53 17.85 31.23 138.99
28 | 2.39 3.15 12.43 3.94 72 295 276 247 1.48 5.44 6.53 3.08 3.44 36 7 40 3 97 140 43 56 196 14.4 16 0 244 0.15 0 5 0 3 5.2 6.8 3 6 9 0 1 3 4 0 0 0 0 0 4 0 0 0 18 42 48 22 1 3 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 7.5 0 1.1 14 7 0.7 36.9 3.8 0.35 7 9.7 41.8 0.7 4 16 0.19 0.16 70.4 28.7 0.9 3 1.1 7.8 7 0.18 0.08 65.72 33.38 0.9 9 0.9 8 5 0.3 0.069 73.6 25.8 0.5 4 0.9 7.8 5 2.4 0.041 71.1 27.1 1.8 8 0.5 7.7 5 3.11 0.03 70.8 27.8 1.4 1 38 72 19 45 249 108 19 87 214 74 72 61 207 0 0 0 24 15 7 24 25 7 38.65 21.56 0 0 35.49 19.9 17.28 8.84 8.97 90.49
29 | 2.91 3.73 11.67 3.13 110 310 295 268 2.01 3.59 5.95 2.18 3.77 111 5 33 1 73 100 27 40 140 0 0 0 218 0.25 3 1 3 4 7 7 5 12 12 1 1 4 4 0 0 0 0 0 5 1 1 1 12 30 30 32 1 3 3 0 1 1 0 0 0 1 1 1 0 0 0 0 0 5 7.8 0 1.2 11 1 1 21.8 3.3 0.18 16 6.6 26.3 0.5 5 13 0.15 0.13 73.6 25.8 0.7 8 0.9 7.9 7 0.14 0.13 65.6 33.3 1.2 8 0.4 8.2 5 0.21 0.079 81.2 18.7 0.1 5 0.4 8.5 5 0.39 0.06 82.2 17.5 0.3 10 0.4 8.6 5 0.66 0.05 84.2 15.7 0.1 0 75 69 21 3 299 86 78 62 226 92 126 79 297 0 1 0 2 14 1 2 15 1 19 20.27 0 0 35.11 24.34 34.5 21.99 13.31 129.25
30 | 5.13 2.49 22.67 9.1 156 537 380 376 2.21 7.53 12.65 6.03 6.62 54 4 40 14 98 158 60 40 198 9.7 8.9 0 176 0.24 3 0 3 1.9 1.9 4.9 3 3 6 0 0 0 1 0 2 6 0 0 0 2 3 3 6 6 12 72 0 0 1 0 0 0 0 0 0 0 0 0 1 1 1 0 0 5 5.3 0 0.9 13 5 1.2 7.7 4.3 0.15 11 1.8 13.4 0.4 28 104 0.18 0.06 62.3 33.9 3.8 5 0.9 6 7 0.08 0.16 63.4 33.1 3.5 1 0.6 6.7 6 0.08 0.096 80.6 17.9 1.5 1 0.4 7.6 5 0.21 0.078 77.3 19.9 2.8 1 0.2 7.4 6 0.14 0.059 70.1 24 5.9 1 64 62 51 25 262 113 68 110 291 115 79 136 330 3 3 0 101 67 10 104 70 10 30.79 8.33 0 6.8 38.49 39.16 67.26 21.31 8.44 174.67
31 | 3.32 2.99 17.95 6.01 87 346 372 395 1.2 7.21 8.53 3.99 4.54 42 7 48 10 83 148 65 50 198 27.6 0 0 106 0.31 3 0 3 8.2 14.4 18.2 6 12 18 3 2 3 4 0 1 4 0 0 1 0 0 0 12 30 36 36 2 3 3 0 0 0 0 0 1 0 0 0 0 0 0 0 0 2 5.4 0 1.3 77 4 1.5 17.4 4 0.05 7 4.4 22.9 1.3 23 51 0.17 0.09 76.6 23.2 0.2 2 1.2 5.5 87 0.08 0.12 83.1 16.8 0.1 2 0.8 6.2 49 0.05 0.092 85.6 14.3 0.1 2 0.7 7.4 45 0.12 0.07 83.6 16.2 0.2 5 0.5 7.7 44 0.13 0.049 72.4 26.9 0.7 1 58 40 8 29 186 56 135 102 293 74 168 81 322 30 27 16 18 8 18 48 35 34 34.57 22.54 0 0 60.2 35.6 22.03 8.93 9.52 136.27
32 | 3.65 3.48 13.24 3.8 54 413 407 428 1.79 8.96 9.85 4.97 4.88 34 8 28 10 86 150 64 53 203 6.3 7 1 127 0.34 3 0 3 2.2 4.7 7.2 6 13 20 0 1 4 6 0 0 0 0 0 0 1 1 2 12 30 72 25 1 2 3 0 2 3 0 0 0 1 1 1 0 0 0 0 1 4 7.5 0 0.7 14 4 0.8 18.4 3.3 0.27 11 3.4 32.5 0.6 3 11 0.12 0.13 61.3 38.6 0.2 4 0.6 7.7 5 0.15 0.08 66.5 33.2 0.4 4 0.5 8.6 5 0.2 0.05 68.4 30.6 1 8 0.5 8.2 5 0.28 0.051 79 19.7 1.3 8 0.3 8.1 5 0.81 0.02 79.3 20.1 0.7 2 91 74 17 103 267 36 61 188 285 53 118 102 273 0 0 0 0 0 0 0 0 0 19.82 22.14 0 0 61 30.54 41.66 33.64 13.26 180.1
33 | 1.84 3.02 32.67 10.8 96 319 216 222 1.38 2.47 4.09 1.5 2.59 113 10 55 1 60 107 47 36 143 8.1 9 0 197 0.21 2 0 2 3.4 7.4 14.5 4 11 18 1 1 2 4 0 0 0 0 0 0 0 0 1 18 48 90 30 1 2 4 0 0 0 0 0 0 0 0 0 0 0 0 0 1 5 7.6 0 0.9 12 2 0.8 16.3 4.4 0.7 20 5.6 22.8 0.9 8 17 0.14 0.14 66.4 32.7 0.9 5 0.7 7.7 7 0.12 0.12 75.3 23.4 1.3 2 0.4 7.9 5 0.18 0.07 69.4 29.4 1.2 8 0.6 8.1 5 0.3 0.03 81.1 18.3 0.7 10 0.3 8 5 1.24 0.03 83.6 16.3 0.1 0 74 61 32 108 345 90 103 172 365 103 132 96 331 0 0 0 19 39 0 19 39 0 29.33 9.28 0 0 15.21 9.08 21.99 13.27 13.17 72.72
34 | 4.51 2.57 20.83 8.12 138 616 431 412 2.3 6.63 9.43 3.56 5.87 93 8 40 1 81 124 43 36 160 8 10 3 136 0.41 3 1 3 2.5 2.5 6.5 5 0 12 1 1 1 4 1 4 6 0 0 0 0 0 0 6 6 24 58 1 1 3 0 0 1 0 0 0 0 0 0 0 0 0 0 0 5 5.3 0 1.9 42 2 2.1 17.1 4.1 0.06 24 5.3 8.5 1 48 70 0.3 0.15 51.84 43.78 4.38 5 1.6 5.6 33 0.09 0.28 58 39 2.9 5 1.2 6.2 13 0.06 0.161 62.2 33.3 4.4 5 0.4 6.8 5 0.06 0.079 53.8 26.5 19.7 5 0.4 6.5 6 0.05 0.1 66.5 21.7 11.8 1 46 26 16 13 110 99 58 99 256 119 69 101 288 17 0 0 1 0 0 18 0 0 26.06 12.38 0 0 116.85 60.54 29.87 12.57 16.79 236.62
35 | 2.86 2.66 22.24 8.35 112 258 487 282 0.8 4.74 6.44 2.63 3.81 33 10 40 10 87 151 64 59 210 0 0 0 139 0.16 3 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 2 5 10 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 5.5 0 0.7 10 4 1.5 5.8 1.1 0.05 8 5 29.7 0.4 20 44 0.14 0.08 49.3 50.4 0.4 1 0.6 5.7 8 0.04 0.07 54.6 45 0.4 1 0.3 6.6 9 0.05 0.06 56.3 43.4 0.3 1 0.3 7.1 9 0.06 0.049 64.5 35.2 0.3 1 0.2 6.8 15 0.04 0.05 63.9 35.8 0.3 1 41 24 5 48 260 33 92 166 291 50 111 123 284 16 1 2 0 7 2 16 8 5 29.73 22.46 0 9.28 60.07 22.56 43.83 32.66 17.98 177.11
36 | 2.79 3.29 18.42 5.6 59 459 496 450 3.13 8.64 9.63 5.52 4.1 38 8 30 3 94 137 43 48 185 0 0 2 141 0.69 3 0 3 3 5.1 11.5 4 13 24 2 2 4 8 0 0 0 0 0 1 0 0 0 12 36 60 9 2 4 8 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 6.8 0 0.5 21 4 1.2 23.5 7 0.75 22 4.2 23.4 0.8 5 23 0.08 0.14 69.9 30.1 0 4 0.8 7.3 18 0.16 0.07 47.1 52.9 0 4 0.5 7.9 15 0.22 0.049 78 21.4 0.5 4 0.5 8 14 0.41 0.029 85.6 13.6 0.8 4 0.3 8 16 0.84 0.02 84.1 15.7 0.2 0 127 119 66 1 184 106 0 27 133 114 53 92 259 0 0 0 0 0 0 0 0 0 35.24 32.77 0 0 71.43 44.11 65.8 41.66 48.29 271.29
37 | 3.12 3.92 14.6 3.72 67 436 378 374 1 8.43 9.52 4.8 4.72 33 8 34 3 63 127 64 63 190 0 0 3 82 0.49 1 0 1 4.2 13.1 15.6 5 14 16 0 1 3 4 0 0 4 0 0 0 1 2 2 12 30 36 27 1 3 4 0 0 0 0 0 0 1 2 2 0 0 0 0 0 2 5.9 0 0.8 80 2 1.6 10.1 1.8 0.05 5 5.6 13.5 1.4 23 25 0.14 0.06 50.9 49 0.1 1 0.6 6.2 68 0.05 0.08 56.1 43.7 0.1 5 0.3 7 38 0.1 0.061 56.4 43.5 0.1 1 0.4 7.6 25 0.11 0.05 59.6 40 0.4 1 0.3 7.8 23 0.1 0.031 50.4 48.5 1.1 1 44 29 17 9 88 34 93 92 219 48 105 101 254 16 2 0 1 3 0 17 5 0 16.95 16 0 0 38.84 15.02 28.23 13.99 9.52 105.6
38 | 2.72 3.13 23.83 7.61 117 311 352 375 1.52 7.28 6.82 2.98 3.84 31 4 35 10 88 152 64 50 202 13 14.4 1 153 0.1 3 0 3 0 0 5.5 3 3 3 0 0 0 2 1 4 7 0 0 0 0 0 0 12 12 24 52 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 4.9 0.06 0.6 8 4 0.7 2.8 2.6 0.7 27 1.1 6.9 0.2 22 72 0.08 0.06 51.8 47 1.2 5 0.6 5.3 6 0.04 0.04 51.9 46.8 1.3 5 0.6 6.4 5 0.11 0.029 57.1 42 0.9 5 0.2 7.9 5 0.46 0.04 65.5 32.6 1.9 2 0.2 8.1 5 0.66 0.02 63.4 31.6 5 1 32 26 12 41 324 54 94 115 263 59 109 85 253 25 20 0 6 5 6 30 25 6 52.06 4.55 0 0 20.1 4.96 14.37 9.42 9.24 58.1
39 | 2.12 2.34 33.06 14.11 115 411 291 290 0.8 4.2 4.91 2.08 2.82 31 4 45 5 76 139 63 67 206 5 11 1 97 0.13 3 0 3 0.6 3 9.2 4 8 13 1 1 2 5 0 0 0 0 1 2 0 0 0 18 30 60 30 1 2 5 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 4.8 0.06 0.8 10 5 0.4 2.7 1.7 0.2 9 1.6 5.1 0.8 9 33 0.05 0.03 56.29 30.23 13.48 5 0.6 5.3 7 0.05 0.05 55.34 29.18 15.48 5 0.3 6.1 5 0.08 0.02 68.09 24.15 7.76 5 0.3 6.8 5 0.09 0.03 71.91 22.22 5.87 2 0.3 6.7 5 0.12 0.029 74.41 22.32 4.27 0 27 19 4 38 202 26 75 181 282 34 91 147 271 1 0 0 0 0 0 1 0 0 45.17 14.06 0 0 19.14 14.26 27.64 9.1 8.62 78.76
40 | 2.41 3.34 13.26 3.97 130 546 374 327 2.94 6.46 7.16 3.91 3.25 32 6 50 12 87 151 64 59 210 6 9 0 243 0.17 2 2 1 1.4 5.7 7 0 0 0 0 1 2 4 0 0 0 0 0 3 0 1 1 18 42 54 72 1 2 4 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 6.5 0 0.2 5 4 1 4.1 3.9 0.81 10 1.1 9.8 0.2 3 50 0.03 0.06 53.6 43.9 2.5 1 0.4 5.5 5 0.03 0.03 52.1 45.1 2.8 1 0.7 5 10 0.05 0.088 59.6 37.1 3.3 1 0.2 7 5 0.08 0.029 67.9 28.4 3.7 1 0.2 7.6 5 0.11 0.02 80.7 19 0.4 1 36 18 11 31 211 56 116 152 324 74 123 132 329 1 1 0 18 23 0 19 24 0 34.51 12.66 0 8.33 9.99 10.09 77.44 13.96 9.06 120.53
41 | 4.68 4.19 11.21 2.67 55 291 345 368 0.87 8.02 10.68 4.61 6.07 46 15 25 1 87 144 57 50 194 0 0 1 502 0.12 0 6 0 3.5 7.9 20.1 3 10 20 0 1 3 5 0 0 0 0 0 0 0 0 1 18 42 84 24 1 3 5 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 7.7 0 0.7 17 3 1 33.9 5.2 0.87 5 6.5 41 0.8 3 23 0.09 0.14 74.9 24.5 0.5 2 0.7 7.8 9 0.16 0.07 74.1 25.5 0.4 2 0.5 7.9 7 0.23 0.051 74.8 24.7 0.4 2 0.6 8 8 0.58 0.049 82.2 17.6 0.3 5 0.4 7.9 10 1.8 0.04 80.4 19.4 0.1 0 86 116 41 3 724 110 53 128 291 80 128 166 374 0 0 0 0 0 0 0 0 0 43.45 20.27 0 0 31.55 15.64 29.12 21.24 13.78 111.34
42 | 3.19 3.25 9.61 2.96 63 392 362 372 2.71 8.87 9.64 4.86 4.78 38 15 25 10 95 152 57 42 194 0 0 0 494 0.15 0 8 0 2.6 8.2 16.8 0 15 20 0 1 3 5 0 0 0 0 0 0 0 0 1 18 40 84 12 1 3 5 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 6.7 0 0.9 19 4 1.2 24.3 5.4 0.51 9 4.5 31.4 0.7 7 26 0.14 0.12 71.3 28.3 0.4 2 1 7.4 21 0.16 0.11 74.5 25.1 0.4 2 0.6 7.9 14 0.25 0.069 77.2 22.5 0.3 2 0.5 8.1 16 0.34 0.05 87.1 12.8 0.1 2 0.3 8.2 15 0.76 0.029 83.4 16.4 0.2 0 107 127 14 18 712 122 53 105 280 101 166 101 369 2 2 0 56 18 3 58 20 3 46.26 10.3 0 0 54.68 33.61 24.67 36.83 25.62 175.43
43 | 2.14 4.17 12.7 3.04 119 301 293 256 2.78 4.14 8.61 4.68 3.93 70 8 35 1 65 116 51 44 160 5.6 5.1 0 161 0.22 3 0 3 4 4 14 4 4 11 2 2 2 5 0 0 0 0 3 5 0 0 0 12 12 36 92 0 0 1 2 2 4 0 0 1 0 0 0 0 0 0 0 0 2 7.8 0 1 7 2 0.6 20.5 3.7 0.59 20 5.5 25.4 0.4 4 9 0.17 0.15 70.7 25.5 3.8 2 0.9 7.8 5 0.14 0.14 72.5 26.2 1.4 6 0.9 8 5 0.24 0.12 75 23.2 1.8 2 0.6 8.2 5 0.61 0.08 72.2 25.5 2.3 5 0.3 8.2 5 1.02 0.04 78.2 20.8 1.1 1 49 78 17 20 226 73 32 126 231 44 93 123 260 0 0 5 185 325 172 162 325 177 12.25 26.49 0 0 45.24 22.54 21.23 8.65 8.99 106.65
44 | 3.55 2.08 26.53 12.75 138 805 494 489 2.34 8.23 9.82 5.12 4.7 33 5 42 4 102 168 66 52 220 9.7 8.9 0 246 0.2 3 0 3 2.5 2.5 9.9 5 5 5 2 2 2 5 0 3 5 0 0 0 0 0 0 12 12 30 62 1 1 3 1 1 2 0 0 0 0 0 0 0 0 0 0 0 2 5.4 0 0.9 10 5 0.6 2.9 0.8 0.05 5 3.6 4.4 0.2 15 37 0.1 0.04 31.9 67 1.1 2 0.7 5.6 8 0.04 0.03 36.3 56.3 7.5 2 0.3 6.2 5 0.03 0.039 35.4 50.9 13.7 8 0.3 7.6 6 0.13 0.02 39.7 49.1 11.2 8 0.2 6.7 7 0.06 0.02 46.3 42.3 11.4 1 81 79 32 76 411 94 64 120 278 96 110 76 282 4 8 0 60 54 0 64 62 0 6.55 15.12 0 0 31.68 15.72 12.88 8.38 8.35 77.01
45 | 5.69 2.78 20.92 7.51 86 484 432 366 3.05 12.08 13.38 6.04 7.34 49 5 60 1 103 149 46 58 207 10.4 9.5 2 232 0.13 2 4 0.5 0 0 0 0 0 4 0 0 0 3 2 5 6 0 0 0 0 0 1 0 0 24 72 0 0 3 0 0 0 0 0 0 0 0 0 0 1 3 1 3 2 5 0 0.5 20 4 2.1 5.6 1.1 0.21 7 5.1 9 0.4 38 61 0.14 0.09 44.4 43.3 12.3 5 0.4 5.7 9 0.04 0.1 39.9 42.2 17.9 5 0.4 6.1 9 0.05 0.08 52.9 36.1 11 1 0.2 6.6 11 0.04 0.069 57 32.8 10.2 1 0.3 7 6 0.06 0.059 57.6 36.5 5.9 1 53 56 30 36 400 94 54 198 346 91 80 192 363 23 1 0 4 3 7 27 5 8 48.85 4.55 0 9.28 82.64 26.75 78.25 39.96 21.82 249.42
46 | 4.93 3.34 6.81 2.04 73 496 283 327 1.75 9 11.23 4.8 6.43 55 4 37 2 83 134 51 53 187 27.5 9.9 0 287 0.08 4 1 4 0 1.9 6.7 0 3 9 0 0 0 0 0 0 0 1 3 6 0 1 3 12 12 36 65 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 3 1 6.4 0 0.7 15 3 0.8 7.9 2.2 0.07 22 3.6 11 1 11 12 0.1 0.88 65.01 34.48 0.51 1 1 6.3 11 0.1 0.04 60.4 39.3 0.3 1 0.5 6.5 6 0.09 0.04 66.1 33.7 0.2 1 0.3 7.1 5 0.12 0.031 71.8 28.1 0.1 1 0.2 6.9 5 0.09 0.041 77.3 22.6 0.1 1 48 52 7 21 571 89 64 278 431 86 109 264 458 117 129 39 0 0 0 118 129 39 37.41 4.66 0 16.43 29.95 25.78 32.26 18.18 17.39 123.58
47 | 2.41 4.74 7.47 1.58 107 389 333 319 1.58 3.89 5.55 2.37 3.18 70 7.5 45 5 61 117 56 57 174 35.2 11 0 109 0.67 3 1 3 5.8 9.7 15.8 5 8 17 1 2 3 6 0 1 3 0 0 0 0 0 0 12 18 48 40 2 2 4 0 0 0 0 1 2 0 0 0 0 0 0 0 0 5 5.1 0 0.5 10 2 0.2 1.8 0.4 0.05 11 4.5 2.5 0.7 6 11 0.12 0.03 27.3 70.9 1.9 5 0.4 6.1 6 0.04 0.06 29 69.4 1.5 5 0.4 5.7 5 0.05 0.07 34.5 63.6 1.9 1 0.3 5 5 0.06 0.081 50.7 47 2.3 1 0.2 5.2 5 0.05 0.06 52.5 43.1 4.4 0 39 81 17 3 59 71 91 69 231 30 155 83 268 6 32 1 16 23 3 22 55 5 21.56 0 0 0 19.97 12.57 23.76 18.2 21.86 96.37
48 | 2.98 2.41 30.27 12.54 76 542 500 488 1.88 5.99 8.23 4 4.22 31 4 26 4 92 143 51 69 212 5.4 6.1 0 122 0.2 1 2 0.5 2.1 2.1 9.1 4 12 24 1 1 2 7 0 0 0 0 0 0 1 3 3 12 30 60 21 1 3 7 0 0 0 0 0 0 1 3 3 0 0 0 2 2 6 6.1 0 0.6 27 5 1 11.5 6.8 0.68 9 1.7 20 0.3 10 58 0.09 0.06 56.07 42.76 0.17 6 0.6 6.3 12 0.06 0.07 67.36 32.59 0.05 6 0.5 7.6 5 0.18 0.02 71.62 28.2 0.18 2 0.3 8.1 5 0.37 0.03 73.39 26.09 0.52 2 0.2 8.1 5 0.46 0.02 77.15 22.73 0.12 1 45 33 36 55 224 43 75 154 272 54 72 135 262 5 1 0 1 2 0 6 3 1 27.83 18.81 0 0 19.42 8.88 26.83 13.42 9 77.55
49 | 2.72 3.62 9.21 2.54 85 332 200 200 0.87 3.89 6.3 2.5 3.81 55 4 25 2 77 115 38 60 175 70.4 11.8 0 121 0.37 4 1 4 2 2 2 3 9 15 1 1 3 5 0 0 0 0 0 0 0 0 0 18 42 54 55 1 3 5 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 5.2 0 1.2 25 3 1.1 6.6 1.6 0.06 5 4.1 9.4 0.6 38 31 0.06 0.03 77.39 30.15 2.46 5 0.5 5.8 12 0.03 0.01 71.14 28.48 0.38 1 0.4 6.7 8 0.03 0.01 79.12 20.55 0.33 1 0.3 6.8 6 0.03 0.01 77.59 22.04 0.37 1 0.2 7.1 5 0.03 0.01 67.38 32.38 0.24 0 43 69 54 36 118 136 88 115 339 110 102 134 346 16 12 5 28 16 0 44 28 5 6.55 0 6.55 0 18.61 16.7 14.14 9.31 9.25 68.02
50 | 4.28 4.44 12.99 2.93 62 335 288 308 1.79 4.81 8.54 2.71 5.83 78 5 35 1 78 107 29 54 161 14.4 16 0 172 0.35 0 4 0 3.3 5.6 8.8 5 8 8 3 2 3 6 0 0 0 0 1 2 0 0 0 12 30 48 26 1 2 5 0 0 0 1 1 1 0 0 0 0 0 0 0 0 5 6.8 0 0.9 9 14 0.9 21.7 5 1.22 21 4.3 28.9 0.5 17 41 0.06 0.12 80.43 19.51 0.6 2 0.7 7.8 5 0.22 0.03 89.31 10.29 0.4 5 0.6 8.1 5 0.24 0.01 86.62 12.98 0.4 5 0.4 7.9 5 0.45 0.01 89.67 9.6 0.73 5 0.3 7.9 7 0.56 0.01 89.71 9.8 0.49 0 91 95 99 31 264 121 97 112 330 117 92 180 389 1 3 0 7 5 0 8 8 0 14.65 13.69 11.68 0 26.25 31.87 47 53.49 26.87 185.49
51 | 2.81 2.94 13.8 4.7 75 261 208 225 1.48 4.64 6.05 2.4 3.65 55 7 43 9 76 130 54 57 187 14.9 13.6 2 145 0.61 3 0 3 4.7 6.7 15.2 5 8 16 3 2 3 7 0 2 2 0 0 0 0 0 1 12 18 48 22 2 3 6 0 0 0 0 0 1 0 0 0 0 0 0 0 1 5 5.8 0 1.2 46 3 0.8 6.3 1.9 0.14 17 3.3 9.2 0.6 26 76 0.1 0.03 53.48 45.46 1.06 5 1.4 6.5 22 0.04 0.02 56.59 44.2 1.21 5 0.4 7 18 0.05 0.01 57.35 42.32 0.33 5 0.3 7.7 7 0.16 0.01 65.14 33.4 1.46 5 0.2 8 8 0.14 0.01 67.63 29.17 3.2 1 121 146 126 99 197 152 88 148 387 127 107 175 409 6 72 82 10 0 7 16 77 82 6.55 11.54 8.13 0 8.89 13.57 13.34 8.9 8.4 53.09
52 | 3.7 4.84 3.14 0.65 54 203 222 224 0.83 3.25 8.13 3.15 4.99 47 8 40 3 79 115 36 68 183 14.4 16 0 482 0.43 3 3 1 2.4 3.6 6.2 2 5 11 0 1 2 4 0 0 0 0 1 4 0 1 1 12 30 42 23 1 2 4 0 0 0 0 0 0 1 1 1 0 0 0 0 0 2 8.1 0 1.4 12 5 1 37 3.4 0.33 16 10.9 41.8 0.5 4 9 0.09 0.1 68.1 28.25 3.65 2 1.2 8 7 0.13 0.08 65.72 33.38 0.9 2 1 8 5 0.17 0.04 62.68 37.09 0.23 2 0.9 8.1 5 0.18 0.02 75.65 23.1 0.34 2 0.8 7.8 5 0.21 0.03 66.69 30.35 2.96 2 104 134 101 77 245 135 64 148 347 105 97 172 374 13 3 0 2 0 0 14 3 0 9.97 9.46 0 0 25.04 48.13 100.02 34.05 21.74 228.98
53 | 3.02 4.8 3.55 0.74 65 214 195 205 0.86 2.45 6.73 2.61 4.12 47 8 40 3 79 115 36 68 183 14.4 16 0 482 0.43 0 0 0 2.4 3.6 6.2 2 5 11 0 1 2 4 0 0 0 0 1 4 0 1 1 12 30 42 23 1 2 4 0 0 0 0 0 0 1 1 1 0 0 0 0 0 6 7.9 0 1.7 30 4 1.1 37.2 4.7 0.51 8 7.9 43.6 0.5 3 7 0.11 0.12 79.32 19.73 0.95 6 1.3 7.8 5 0.13 0.08 55.04 43.14 1.82 9 1.1 8.2 5 0.23 0.07 56.52 43.19 0.29 9 1 8 5 0.36 0.06 75.27 23.67 1.06 2 0.8 8.2 21 0.55 0.02 75.79 21.17 3.04 2 104 133 103 81 245 135 64 148 347 107 94 170 371 2 2 0 0 0 0 2 2 0 0 4.44 0 0 39.65 41.66 74.52 29.55 13.17 198.55
54 | 4.88 3.86 10.85 2.81 78 323 311 335 2.13 6.79 10.89 4.39 6.5 45 5 34 1 91 121 30 62 183 5.1 10.1 0 288 0.09 3 1 3 0 0 0 5 5 5 0 1 1 5 0 3 3 0 0 1 0 0 0 18 12 36 30 0 0 3 1 1 2 0 0 0 0 0 0 0 0 0 0 0 2 7.8 0 1.1 10 8 1.3 26.2 4.6 0.34 15 5.7 32.5 0.5 5 6 0.11 0.16 79.5 19.9 0.6 2 1 8 7 0.15 0.07 73.66 26.18 0.16 8 0.4 8.3 5 0.22 0.02 68.38 30.68 0.94 8 0.4 8.4 5 0.35 0.01 88.09 11.32 0.59 8 0.2 8.4 5 0.62 0.01 90.87 8.38 0.75 1 70 64 90 12 757 110 97 89 295 115 71 167 352 35 9 0 22 12 0 57 21 0 4.8 9.8 0 0 73.33 63.35 34.89 25.92 22.29 219.78
55 | 6.02 4.62 10.04 2.18 101 462 403 428 1.84 7.98 12.9 5 7.9 38 5 45 1 84 125 41 51 176 65.6 9.5 0 114 0.54 3 1 3 1.1 1.1 4 2 2 5 0 0 0 1 1 4 8 0 0 0 1 1 1 6 6 12 93 0 0 1 0 0 0 0 0 0 0 0 0 1 1 1 0 0 5 5.3 0 0.8 55 5 1.4 6.6 2.9 0.05 5 2.3 11 0.5 24 86 0.06 0.05 48.85 40.56 10.59 5 0.4 5.9 33 0.04 0.04 49.99 45.06 4.95 5 0.4 6.7 25 0.03 0.02 65.96 31.92 2.12 5 0.2 6.9 16 0.03 0.02 69.68 27.94 3.38 5 1.5 7.6 19 0.08 0.01 54.5 35 10.5 1 67 64 49 0 124 110 149 104 363 114 164 153 430 5 7 0 1 0 0 7 0 0 8.13 0 19.64 0 37.67 22.38 17.92 9.3 9.39 96.66
56 | 5.84 4.61 7.9 1.71 56 499 470 594 0.95 6.71 11.71 4.21 7.51 70 5 30 5 66 105 39 56 161 30 16.3 0 145 0.11 2 2 1 2 2 2 4 4 4 2 0 0 0 0 0 0 0 3 8 0 0 0 12 12 12 2 1 1 1 0 0 0 1 1 1 0 0 0 0 0 0 0 0 5 5.4 0 0.9 20 5 1 9.7 3.6 0.49 12 2.7 14.8 1.5 24 45 0.1 0.1 81.61 18 0.39 5 1 6 8 0.06 0.05 88.02 11.06 0.92 5 0.4 8 5 0.19 0.05 87.37 12.31 0.32 8 0.3 8.3 8 0.37 0.02 87.29 11.62 1.09 8 0.2 8.3 8 0.45 0.01 87.91 10.67 1.42 1 22 30 31 42 189 92 73 138 303 83 73 126 282 0 0 0 0 0 0 0 0 0 6.55 22.63 7.03 0 61.95 20.78 27.7 23.23 18.23 151.88
57 | 7.05 4.39 5.11 1.17 116 445 462 464 1.53 10.43 18.59 8.83 9.76 47 5 42 1 77 133 56 64 197 16.2 18 3 175 0.24 1 0 1 3.2 3.2 16.3 6 6 18 0 0 0 5 1 4 4 0 0 0 1 1 1 6 6 36 50 0 0 3 0 0 1 0 0 1 0 0 0 1 1 1 0 0 5 5.4 0 2.6 23 8 2.4 11 2.1 0.05 18 5.2 15.6 0.5 42 29 0.15 0.07 51.52 33.4 15.28 5 1 5.7 7 0.12 0.07 52.29 32.46 15.25 5 0.4 7.4 5 0.16 0.05 51.38 39.39 9.23 8 0.3 7.3 6 0.11 0.01 67.85 25.9 6.25 5 0.1 7.2 5 0.07 0.01 66.66 24.49 8.85 1 85 103 87 49 348 185 118 187 489 167 134 224 525 54 2 0 3 3 0 56 5 0 8.13 0 19.64 0 34.79 90.99 82.95 21.57 8.56 238.85
58 | 5.08 3.38 11.31 3.34 89 443 356 322 1.94 7.6 10.64 4.12 6.52 48 4 48 1 79 125 46 59 184 56.6 0 0 141 0.11 5 0 5 0 0 0 3 8 11 1 1 2 3 1 3 3 0 0 0 0 0 0 6 12 18 35 1 2 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 5.5 0 1 34 4 0.8 14.3 6.2 0.59 15 2.3 21.9 0.5 28 45 0.06 0.06 73.18 26.56 0.26 2 0.8 6.7 30 0.09 0.04 92.3 7.59 0.11 2 0.6 8.1 6 0.17 0.01 83.34 16.35 0.31 5 0.6 8 15 0.21 0.01 88.3 11.02 0.68 2 0.2 8 16 0.25 0.01 88.48 10.42 1.1 2 88 115 91 92 798 188 84 115 386 161 107 115 382 22 0 0 5 6 0 27 6 0 0 8.13 0 0 22.83 27.21 42.46 77.45 9.59 179.54
59 | 4.1 4.14 8.01 1.94 75 519 450 399 2.02 8.01 10.67 5.01 5.66 57 5 48 1 73 125 52 59 184 56.6 0 0 269 0.19 4 2 2 0 0 0 0 0 8 0 0 0 4 1 4 5 0 0 0 0 0 0 12 12 36 40 0 0 3 0 0 0 0 0 1 0 0 0 0 0 0 0 0 2 6.7 0 1.7 67 4 1.8 31 5.7 0.17 10 5.4 38.7 0.8 27 24 0.13 0.13 74.16 25.08 0.76 2 1.3 6.8 45 0.1 0.09 67.15 32 0.85 2 0.7 6.6 38 0.07 0.05 64.95 33.16 1.89 5 0.5 7.6 35 0.09 0.03 61.42 37.8 0.78 5 0.2 7.9 44 0.1 0.02 62.28 35.22 2.5 0 87 117 89 65 468 179 108 166 452 148 135 190 473 4 3 0 12 3 0 16 6 0 0 6.55 9.28 0 56.11 54.64 76.75 49.16 23.13 259.78
60 | 3.41 4.67 8.33 1.78 68 362 254 252 1.6 4.25 7.15 2.64 4.51 58 8 32 10 78 110 32 63 173 6.7 7.4 4 148 0.55 4 1 4 1.2 1.8 3.5 6 12 25 0 1 2 5 0 0 0 0 0 0 1 2 3 12 36 72 24 1 2 5 0 0 0 0 0 0 1 2 2 0 0 0 1 2 4 7.8 0 0.7 15 3 1.1 18.9 3.9 0.55 10 4.8 24.5 0.5 13 34 0.04 0.07 54.48 44.88 0.64 4 0.5 7.9 5 0.12 0.02 71.98 27.58 0.44 6 0.4 8 5 0.18 0.01 68.8 30.58 1.62 2 0.2 8.3 5 0.34 0.01 75.65 23.05 1.3 8 0.1 8 5 1 0.01 72.9 26.62 0.48 1 91 99 97 67 164 78 72 105 255 69 75 135 278 102 20 0 0 0 0 102 20 0 12.25 28.32 22.38 0 10.54 19.26 17.11 8.81 9.09 64.82
61 | 0.82 2.57 18.03 7.01 90 124 110 127 0.52 1.22 1.86 0.68 1.18 51 5 50 1 85 125 40 59 184 5.4 6 0 110 0.21 1 0 1 0 0 0 2 11 20 2 2 4 6 0 0 0 0 0 2 0 0 0 12 36 60 32 2 3 5 0 0 0 0 1 1 0 0 0 0 0 0 0 0 5 7.3 0 0.9 18 3 1 16.8 6.9 0.89 8 2.4 25.6 2.5 14 18 0.06 0.07 79.81 19.67 0.72 5 0.6 8 5 0.15 0.05 78.36 21.46 0.18 5 0.4 8.1 5 0.25 0.02 86.57 12.15 1.34 5 0.2 8.4 5 0.58 0.01 84.17 13.61 2.12 8 0.1 8.3 5 0.9 0.001 85.48 13.37 1.25 0 25 61 84 95 120 117 68 150 335 81 45 138 264 0 0 0 0 0 0 0 0 0 6.55 11.68 8.33 0 6.53 4.65 9.28 9.17 8.89 38.52
62 | 3.23 3.53 9.5 2.7 48 258 232 198 2.18 5.07 7.08 2.71 4.37 57 6 45 16 80 128 48 57 185 9 10 3 113 0.26 5 1 5 0 0 8.3 0 0 8 0 0 0 3 1 4 6 0 0 0 0 0 0 0 0 18 63 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 5.6 0 1.6 13 4 0.8 8.1 3.4 0.12 11 2.4 12.5 0.5 26 46 0.12 0.04 51.84 43.78 4.38 5 1.1 6.7 7 0.07 0.05 62.55 33.55 3.9 5 0.3 7.5 5 0.07 0.03 58.91 39.94 2.25 5 0.2 8 5 0.16 0.01 59.53 37.47 3 5 0.2 8.2 5 0.2 0.01 62.96 34.3 2.74 1 34 77 37 43 131 149 104 131 384 107 144 124 375 8 5 0 2 2 0 10 6 0 17.46 6.55 0 0 10.28 37.93 13.49 9.17 8.94 79.81
63 | 5.08 3.79 2.83 0.75 83 513 408 368 1.84 8.06 12.02 5 7.02 47 8 15 10 71 122 51 61 183 5.8 1.8 1 187 0.1 3 0 3 0 3 5 2 7 7 0 0 2 3 0 0 0 1 2 6 0 0 0 12 24 30 6 0 2 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 5.7 0 1.3 8 1 1 5.1 0.8 0.05 20 6.4 7 0.5 15 15 0.1 0.04 60.15 38.54 0.91 5 0.8 6.4 5 0.11 0.05 62 37.35 0.65 5 0.3 7.4 5 0.09 0.01 61.2 38.22 0.58 1 0.2 7.1 6 0.06 0.02 77.5 32.65 0.85 1 0.2 7.1 9 0.05 0.01 64.96 32.54 2.5 1 36 62 21 10 345 139 83 102 324 112 124 114 349 24 7 0 5 2 0 30 9 0 6.55 4.44 0 0 14.06 105.6 81.99 28.41 13.94 244
64 | 5.72 4 7.8 1.95 43 444 408 357 1.71 7.71 13.63 6.31 7.31 47 6 30 15 79 125 46 55 180 0 0 1 137 0.29 6 2 3 0 3.7 3.7 3 3 3 0 1 2 2 0 0 0 0 0 0 0 0 0 18 0 0 8 1 2 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 6 7.5 0 0.7 38 7 1.5 28.5 7.8 0.95 15 3.6 38.8 0.5 3 10 0.03 0.11 80.01 19.91 0.08 6 0.5 7.8 25 0.13 0.02 72.02 26.54 0.48 4 0.4 8 21 0.15 0.02 73.7 26.07 0.23 4 0.4 8.1 18 0.23 0.01 81.91 17.5 0.59 4 0.3 8.3 15 0.38 0.01 82.95 16.42 0.63 0 162 171 126 60 552 170 76 81 327 161 121 147 429 0 0 0 0 0 0 0 0 0 0 26.99 0 4.44 20.66 43.45 52.86 28.75 12.73 158.45
65 | 3.44 2.9 10.68 3.68 57 386 306 296 1.49 5.03 7.6 2.96 4.64 49 7 26 5 77 115 38 59 174 0 0 3 85 0.3 2 0 2 1.6 10.2 18.5 5 11 11 0 1 3 6 0 0 0 0 0 0 1 2 4 12 30 60 28 1 3 6 0 0 0 0 0 0 1 2 4 0 0 0 0 0 5 6.2 0 0.9 52 2 1.6 10.6 1.8 0.05 9 5.9 14.1 0.9 18 29 0.05 0.03 60.77 38.74 0.49 5 0.5 6.5 41 0.05 0.03 57.81 42.13 0.06 5 0.5 6.9 15 0.04 0.03 71.02 28.89 0.09 5 0.3 8.1 14 0.1 0.02 62.48 36.62 0.9 5 0.3 8.5 21 0.13 0.01 69.7 29.12 1.18 1 38 60 11 58 128 147 24 227 398 125 73 179 377 55 17 0 11 5 0 66 23 0 10.47 12.38 4.44 0 9.08 32.46 28.31 9.65 9.8 89.3
66 | 2.72 4.42 5.15 1.16 106 265 258 230 1.21 4.42 6.29 2.62 3.67 45 7 45 10 85 118 33 65 183 12.6 14 2 196 0.33 5 1 5 0 0 0 3 3 3 0 0 0 2 1 4 7 0 0 0 0 0 0 12 12 24 53 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 4.8 0.1 1.2 10 1 0.2 2.9 2.2 0.28 19 1.3 5.7 0.5 12 40 0.06 0.03 56.06 39.44 4.6 2 0.6 5.6 5 0.05 0.02 58.07 39.48 2.45 5 0.4 7 5 0.18 0.01 66.56 31.81 1.63 5 0.2 7.6 6 0.54 0.01 70.53 27.51 1.96 5 0.1 7.4 5 0.61 0.01 71.3 26 2.7 0 43 65 53 54 130 172 81 100 353 150 94 99 342 117 107 0 2 56 0 119 163 0 0 0 4.44 0 10.75 13.62 8.93 8.99 8.8 51.09
67 | 1.91 2.51 19.66 7.82 54 249 240 232 0.85 3.41 3.57 1.04 2.52 84 3 40 5 78 105 27 49 154 6 13.1 0 269 0.18 4 1 4 0 1.1 7.7 1 6 13 0 0 1 3 10 0 0 1 1 2 0 0 0 12 42 78 29 0 1 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 5.1 0 1.2 12 5 0.8 3.9 3.4 0.31 8 1.2 8.5 0.5 16 85 0.05 0.03 56.29 30.23 13.48 5 0.9 6 5 0.05 0.05 55.34 29.18 15.48 5 0.4 6.1 5 0.09 0.02 68.09 24.15 7.76 5 0.6 5.2 5 0.14 0.02 71.91 22.22 5.87 5 0.3 4.4 5 0.22 0.02 74.41 22.32 4.27 0 100 124 60 93 568 162 59 114 335 138 123 80 341 0 4 0 0 1 0 0 5 0 8.33 6.8 0 0 6.05 6.55 8.47 8.43 8.27 37.77
68 | 5.17 4.54 8.09 1.78 78 383 399 363 2.47 7.02 11.95 5.06 6.89 44 6 50 1 85 124 39 68 192 9.9 11 0 256 0.2 2 2 1 0 0 5.6 0 0 0 0 0 0 4 2 5 6 0 0 0 0 0 0 0 0 24 73 0 0 3 0 0 1 0 0 0 0 0 0 0 0 0 0 0 5 4.9 0 1.5 12 2 0.8 3 0.7 0.05 27 4.6 4.6 0.5 28 52 0.1 0.04 64.1 34 1.9 5 0.9 5.5 5 0.07 0.03 71.95 25.6 2.45 5 0.2 7.2 5 0.08 0.02 74.28 23.9 1.82 5 0.2 7.4 5 0.07 0.01 72.18 25.62 2.2 5 0.1 7.7 5 0.09 0.01 72.36 24.64 3 1 70 67 73 27 353 170 107 166 443 172 101 211 485 1 0 0 16 8 0 17 9 0 0 0 16.11 0 18.49 71.17 66.37 23.66 23.89 203.58
69 | 4.99 4.71 7.5 1.59 44 587 490 401 2.89 9.02 11.84 5.4 6.44 30 12 30 4 92 135 43 49 184 0 0 0 517 0.12 0 12 0 1.9 7 12.9 0 12 24 0 1 2 3 0 0 0 0 0 0 0 1 3 18 42 84 13 1 2 3 0 0 0 0 0 0 0 0 0 0 1 1 0 2 2 6.6 0 1 43 7 1.7 24.2 5.8 1.2 21 4.2 33 1 9 13 0.09 0.18 77.87 22.87 0.36 2 0.7 7.6 16 0.19 0.05 66.66 32.99 0.35 2 0.6 8.2 12 0.25 0.04 82.06 17.71 0.23 2 0.4 8 12 0.28 0.02 91.06 8.83 0.11 2 0.3 8.2 16 0.78 0.01 88.84 10.82 0.34 3 98 136 89 45 786 203 79 90 372 164 126 134 425 0 0 0 0 0 0 0 0 0 0 12.11 9.28 0 82.96 33.76 54.37 65.54 83.56 320.2
70 | 5.54 4.63 6.97 1.51 40 658 511 466 3.03 10.28 12.99 5.81 7.18 29 12 30 4 92 135 43 49 184 0 0 0 516 0.14 0 12 0 1.9 7.6 15.3 0 11 29 0 1 2 4 0 0 0 0 0 0 0 1 3 18 42 78 13 1 2 4 0 0 0 0 0 0 0 0 0 0 1 1 0 2 2 6.8 0 1 41 9 2 29.6 6.4 0.89 19 4.6 38.9 0.5 7 14 0.08 0.14 77.66 22.16 0.18 2 0.8 7.6 24 0.18 0.05 66.66 32.99 0.35 2 0.5 8 15 0.24 0.03 69.54 30.14 0.32 2 0.5 7.9 18 0.28 0.02 84.35 15.38 0.27 5 0.3 8 18 0.29 0.01 92.52 7.25 0.23 3 108 128 90 33 786 198 82 89 368 178 119 146 443 1 0 0 3 0 0 4 0 0 0 9.28 15.68 0 62.63 41.84 57.36 58.01 39.89 259.72
71 | 3.57 4.93 3.9 0.79 83 318 247 235 1.78 5.23 8.31 3.55 4.76 57 3 44 2 79 111 32 68 179 11.2 10.2 0 143 0.29 3 1 3 3.4 5.2 5.2 6 9 9 0 1 2 2 0 0 0 0 1 6 1 1 1 12 30 30 93 0 1 1 1 1 1 0 0 0 1 1 1 0 0 0 0 0 5 5 0 1 12 4 0.5 2.4 0.5 0.05 5 4.8 3.5 0.5 22 37 0.03 0.03 58.18 37.14 4.68 5 0.6 5.3 5 0.05 0.03 55.12 40.54 4.24 5 0.3 6.8 5 0.06 0.01 66.36 28.61 5.03 10 0.2 7.7 5 0.09 0.01 65.35 26.22 8.43 10 0.1 7.8 5 0.11 0.01 58.07 39.28 2.65 1 49 44 51 46 168 148 184 120 451 153 176 125 454 0 0 14 7 6 3 7 7 17 12.38 0 17.95 0 18.27 47.16 36.21 13.38 8.74 123.77
72 | 3.54 3.53 11.94 3.38 68 302 237 241 1.13 6.05 7.31 2.71 4.61 51 7 35 1 82 120 38 67 187 4 8.8 0 172 0.41 3 0 3 1.7 1.7 4 6 10 22 0 1 2 7 0 1 1 0 0 0 1 2 2 12 24 54 63 1 1 4 0 0 2 0 1 1 1 1 1 0 0 0 0 0 2 7.8 0 1.3 14 4 0.9 29.9 3.9 0.44 23 7.7 35.2 0.5 4 7 0.08 0.14 67.52 31.28 1.2 2 0.9 7.9 7 0.2 0.05 56.82 42.57 0.61 2 0.8 8.1 5 0.29 0.02 62.93 36.64 0.43 8 0.5 8.1 5 1.02 0.01 74.14 24.14 1.72 8 0.4 8 5 1.26 0.01 80.53 17.82 1.65 1 88 116 49 100 215 117 73 149 339 89 140 98 326 0 0 0 138 20 0 138 20 0 4.44 20 0 0 31.95 38.29 46.19 8.63 8.71 133.78
73 | 4.01 4.61 8.77 1.9 48 307 314 276 1.51 5.79 10.18 4.66 5.52 57 5 50 1 75 123 48 71 194 13.9 12.7 1 119 0.48 0 1 0 2.9 2.9 12.9 3 3 8 0 0 0 4 0 3 4 0 0 0 1 1 1 18 18 42 38 0 0 4 0 0 0 0 0 0 0 0 0 1 1 1 0 0 5 5.2 0 1.5 18 3 1.4 6.6 1.2 0.05 5 5.5 9.3 1.5 49 29 0.12 0.04 59.68 33.36 6.96 5 1.2 5.7 7 0.04 0.06 58.31 28.07 13.62 5 0.6 8 5 0.09 0.04 60.06 29.5 10.44 5 0.3 7.8 5 0.09 0.03 60.28 30.3 9.42 5 0.2 7.7 7 0.05 0.02 72.46 23.82 3.72 1 36 62 41 38 74 109 102 220 431 82 123 223 428 14 9 0 1 1 0 14 10 0 4.44 6.55 4.44 0 28.55 30.25 23.21 9.23 9.02 100.26
74 | 3.34 4.25 7.85 1.85 84 276 240 199 2.12 5.32 7.22 2.7 4.52 46 4 43 1 85 121 36 68 189 4.5 9.9 0 242 0.1 3 0 3 0.7 1.9 5.7 0 3 9 0 1 2 4 0 0 0 0 2 5 0 0 0 18 24 36 73 0 0 2 1 2 2 0 0 0 0 0 0 0 0 0 0 0 5 5.4 0 1.2 16 2 1.1 6.5 1.1 0.05 25 5.9 8.8 0.5 17 16 0.09 0.05 65.01 34.48 0.51 5 0.8 5.7 6 0.05 0.04 71.68 28.06 0.26 1 0.3 6.4 7 0.03 0.01 69.14 30.66 0.2 1 0.3 7.5 7 0.11 0.02 75.99 23.76 0.25 1 0.2 7.5 7 0.09 0.01 52.57 45.43 2 2 26 24 53 33 250 83 96 135 313 84 67 155 306 2 3 0 45 25 0 47 28 0 15.79 9.28 17.26 0 20.12 32.79 22.71 13.39 8.85 97.87
75 | 4.22 5.09 1.06 0.21 57 334 342 246 2.73 7.41 9.37 3.72 5.66 48 11 40 3 80 115 35 61 176 12.6 14 0 111 0.31 3 0 3 0 2.6 2.6 0 0 0 0 0 1 4 2 4 4 0 0 1 0 0 0 0 6 36 40 0 0 3 0 0 0 0 1 1 0 0 0 0 0 0 0 0 2 4.7 0.08 1 15 1 0.5 3.5 1.4 0.05 22 2.5 5.5 0.5 30 30 0.05 0.03 46.79 51.96 1.25 5 0.7 5.2 7 0.05 0.04 70.06 29.69 0.25 5 0.4 5.3 5 0.03 0.01 57.53 41.24 1.23 5 0.2 5.5 5 0.05 0.01 61.36 38.81 0.83 1 0.2 5.8 6 0.07 0.01 68.98 30.41 0.61 1 64 61 64 46 205 128 72 142 341 131 69 159 359 56 0 0 13 7 0 68 8 0 17.95 6.8 19.64 0 19.63 46.11 22.32 13.32 8.94 110.33
76 | 3 3.27 9.94 3.04 52 220 197 196 0.83 4.7 6.34 2.37 3.97 57 5 30 1 83 123 40 55 178 32 10 1 120 0.87 2 2 1 1.8 1.8 1.8 2 2 2 2 2 2 2 0 0 0 0 3 8 0 0 0 12 12 12 32 2 2 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 7.8 0 0.8 5 15 0.9 23.1 5 0.58 5 4.6 29.6 0.5 4 18 0.06 0.15 65.48 34.05 0.47 2 0.6 7.9 5 0.11 0.04 64.56 34.57 0.87 2 0.5 8 5 0.16 0.03 63.78 36.02 0.2 2 0.3 8.2 5 0.5 0.01 80.07 19.03 0.9 8 0.3 8.4 5 0.5 0.01 81.48 17.85 0.67 3 91 115 106 104 105 127 73 138 338 103 82 140 324 1 0 0 97 27 0 98 27 0 4.44 32.52 0 0 14.9 21.39 29.59 17.66 18.14 101.68
77 | 4.72 4.37 11.03 2.52 59 351 316 368 1.16 5.36 9.46 3.36 6.1 71 5 33 5 85 122 37 50 172 4.2 3.8 1 130 0.46 2 2 1 0.8 0.8 0.8 1 5 7 1 1 2 4 0 0 0 0 0 0 1 3 6 12 30 60 17 0 1 3 0 0 0 0 0 0 1 2 3 0 0 0 1 4 2 6.1 0 1.1 13 4 0.9 10.5 5.3 0.31 5 2 17.1 0.5 8 27 0.09 0.04 56.07 42.76 0.17 5 0.7 7.8 5 0.14 0.05 67.36 32.59 0.05 6 0.5 7.8 5 0.14 0.04 71.62 28.2 0.18 6 0.2 8.3 5 0.2 0.01 73.39 26.09 0.52 5 0.2 8 5 0.32 0.01 77.15 22.73 0.12 0 127 115 89 56 274 125 81 125 331 136 107 157 401 0 0 0 0 0 0 0 0 0 0 31.95 0 0 6.13 18.44 44.66 20.97 13.11 103.31
78 |
--------------------------------------------------------------------------------
/dist/pymcmc-1.0.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rdenham/pymcmc/d9563fdf9e4adb747e5e1a1cde424975b8673b9d/dist/pymcmc-1.0.tar.gz
--------------------------------------------------------------------------------
/dist/pymcmc-1.0.win32-py2.6.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rdenham/pymcmc/d9563fdf9e4adb747e5e1a1cde424975b8673b9d/dist/pymcmc-1.0.win32-py2.6.exe
--------------------------------------------------------------------------------
/doc/.gitignore:
--------------------------------------------------------------------------------
1 | PyMCMC.aux
2 | PyMCMC.bbl
3 | PyMCMC.blg
4 | PyMCMC.log
5 | PyMCMC.out
6 | PyMCMC.pdf
7 |
8 |
--------------------------------------------------------------------------------
/doc/chris.bib:
--------------------------------------------------------------------------------
1 |
2 | @Article{ CarterKohn1994,
3 | title = "On Gibbs sampling for state space models",
4 | author = "C. Carter and R. Kohn",
5 | journal = "Biometrika",
6 | pages = "541--553",
7 | volume = "81",
8 | year = "1994"
9 | }
10 |
11 | @Article{ ChibGreenberg1996,
12 | author = "Siddhartha Chib and Edward Greenberg",
13 | title = "{M}arkov chain {M}onte {C}arlo simulation methods in
14 | econometrics",
15 | journal = "Econometric Theory",
16 | year = "1996",
17 | month = "",
18 | volume = "12",
19 | pages = "409--431",
20 | otherinfo = ""
21 | }
22 |
23 | @Article{ deJongShepard1995,
24 |   author =	"Piet de Jong and Neil Shephard",
25 | title = "The simulation smoother for time series models",
26 | journal = "Biometrika",
27 | year = "1995",
28 | month = "",
29 | volume = "82",
30 | pages = "339--350",
31 | otherinfo = ""
32 | }
33 |
34 | @Book{ FruwirthSchnatter2004,
35 | title = "Efficient Bayesian parameter estimation for State
36 | Space Models Based on Reparameterisations, State
37 | Space and Unobserved Component Models: Theory and
38 | Applications",
39 |   author =	{Sylvia Fr{\"u}hwirth-Schnatter},
40 | publisher = "Cambridge University Press",
41 | address = "Cambridge",
42 | year = "2004"
43 | }
44 |
45 | @Article{ F2PY,
46 | author = "Pearu Peterson",
47 | title = "\pkg{F2PY}: a tool for connecting \proglang{Fortran}
48 | and \proglang{Python} programs",
49 | journal = "International Journal of Computational Science and
50 | Engineering",
51 | year = "2009",
52 | volume = "4",
53 |   pages =	"296--305",
54 | otherinfo = ""
55 | }
56 |
57 | @TechReport{ GarthwaiteYanScisson2010,
58 |   author =	"P. H. Garthwaite and Yanan Fan and Scott A. Sisson",
59 |   title =	"Adaptive optimal scaling of Metropolis-Hastings
60 | 		algorithms using the Robbins-Monro process",
61 | institution = "University of New South Wales",
62 | year = "2010"
63 | }
64 |
65 | @Article{ GelfandSmith1990,
66 | title = "Sampling-based approaches to calculating marginal
67 | densities",
68 | author = "Alan E. Gelfand and Adrian F. M. Smith",
69 | journal = "Journal of the American Statistical Association",
70 | year = "1990",
71 | volume = "85",
72 | pages = "398--409"
73 | }
74 |
75 | @Article{ GelfandSahuCarlin1995,
76 | title = "Efficient parametrisations for normal linear mixed
77 | models",
78 | author = "Alan E. Gelfand and Sujit K. Sahu and Bradley
79 | P. Carlin",
80 | journal = "Biometrika",
81 | pages = "479--488",
82 |   volume =	"82",
83 | number = "3",
84 | year = "1995"
85 | }
86 |
87 | @Book{ GelmanCarlinSternRubin2004,
88 | title = "Bayesian Data Analysis",
89 | author = "Andrew Gelman and John B. Carlin and Hal S. Stern
90 | and Donald B. Rubin",
91 | publisher = "Chapman and Hall/CRC",
92 | address = "Florida",
93 | year = "2004"
94 | }
95 |
96 | @Article{ GeorgeMcCulloch1993,
97 | author = "Edward I. George and Robert E. McCulloch",
98 | title = "Variable selection via Gibbs sampling",
99 | journal = "Journal of the American Statistical Association",
100 | year = "1993",
101 | month = "",
102 | volume = "88",
103 |   pages =	"881--889",
104 | otherinfo = ""
105 | }
106 |
107 | @Article{GeorgeMcCulloch1997,
108 | author = "Edward I. George and Robert E. McCulloch",
109 | title = "Approaches for {B}ayesian variable selection",
110 | journal = "Statistica Sinica",
111 | year = "1997",
112 | number = "7",
113 | pages = "339-373",
114 | }
115 |
116 |
117 |
118 | @Article{ Ipython,
119 |   author =	"Fernando P{\'e}rez and Brian E. Granger",
120 | title = "IPython: A System for Interactive Scientific
121 | Computing",
122 | journal = "Computing in Science and Engineering",
123 | year = "2007",
124 | month = "",
125 | number = "9",
126 | pages = "21--29",
127 | otherinfo = ""
128 | }
129 |
130 | @BOOK{JudgeHillGriffithsLutkepohlLee1988,
131 | author = {George G. Judge and R. Carter Hill and William
132 | 		E. Griffiths and Helmut L{\"u}tkepohl and
133 | Tsoung-Chao Lee},
134 | editor = {},
135 | title = {Introduction to theory and practice of
136 | {E}conometrics},
137 | publisher = {John Wiley \& Sons},
138 | address = {New York},
139 | year = {1988},
140 | otherinfo = {}
141 | }
142 |
143 |
144 | @Article{ KassRaftery1995,
145 | author = "Robert E. Kass and Adrian E. Raftery",
146 | title = "{B}ayes factors",
147 | journal = "Journal of the American Statistical Association",
148 | year = "1995",
149 | month = "",
150 | volume = "90",
151 | pages = "773--795",
152 | otherinfo = ""
153 | }
154 |
155 | @Article{ KimShephardChib1998,
156 | title = "Stochastic Volatility: Likelihood Inference and
157 | Comparison with ARCH Models",
158 | author = "Sangjoon Kim and Neil Shephard and Siddhartha Chib",
159 | journal = "Review of Economic Studies",
160 | pages = "361--393",
161 | volume = "65",
162 | number = "3",
163 | year = "1998"
164 | }
165 |
166 | @Book{ Koop2003,
167 | title = "Bayesian Econometrics",
168 | author = "Gary Koop",
169 | publisher = "John Wiley \& Sons",
170 | address = "West Sussex",
171 | year = "2003"
172 | }
173 |
174 | @Article{ LuiLiangWong2000,
175 | 	author = "F. Liang and Jun S. Liu and W. H. Wong",
176 | title = "The use of multiple-try method and local
177 | optimization in Metropolis sampling",
178 | journal = "Journal of the American Statistical Association",
179 | year = "2000",
180 | month = "",
181 | volume = "95",
182 | pages = "121--134",
183 | otherinfo = ""
184 | }
185 |
186 | @Article{ LuiKongWong1994,
187 | title = "Covariance structure of the Gibbs sampler with
188 | applications to the comparisons of estimators and
189 | augmentations schemes",
190 | 	author = "Jun S. Liu and W. H. Wong and A. Kong",
191 | journal = "Journal of the Royal Statistical Society B",
192 | pages = "157--169",
193 | volume = "57",
194 | number = "1",
195 | year = "1994"
196 | }
197 |
198 | @Book{ MarinRobert2007,
199 | author = "Jean-Michel Marin and Christian P. Robert",
200 | title = {{B}ayesian Core},
201 | publisher = "Springer--Verlag",
202 | year = "2007",
203 | address = "New York",
204 | 	otherinfo = ""
205 | }
206 |
207 | @Article{ NumpyScipy,
208 | author = "Travis E. Oliphant",
209 | title = "\proglang{Python} for Scientific Computing",
210 | journal = "Computing in Science and Engineering",
211 | year = "2007",
212 | month = "",
213 | volume = "9",
214 | pages = "10--20",
215 | otherinfo = ""
216 | }
217 |
218 | @Article{ Matplotlib,
219 | author = "John D. Hunter",
220 | title = "\pkg{Matplotlib}: A 2D Graphics Environment",
221 | journal = "Computing in Science and Engineering",
222 | year = "2007",
223 | month = "",
224 | volume = "9",
225 | pages = "90--95",
226 | otherinfo = ""
227 | }
228 |
229 | @Article{ PittShepard1999,
230 | title = "Analytic convergence rates and parameterisation
231 | issues for the Gibbs sampler applied to state space
232 | models",
233 | author = "Michael Pitt and Neil Shephard",
234 | journal = "Journal of Time Series Analysis",
235 | pages = "63--85",
236 | volume = "20",
237 | year = "1999"
238 | }
239 |
240 | @TechReport{ Python,
241 | author = "Guido van Rossum",
242 | title = "\proglang{Python} Tutorial, Technical Report
243 | CS-R9526",
244 | institution = "Centrum voor Wiskunde en Informatica (CWI),
245 | Amsterdam",
246 | year = "1995",
247 | otherinfo = ""
248 | }
249 |
250 | @Manual{ R,
251 | title = "\proglang{R}: A Language and Environment for
252 | Statistical Computing",
253 | author = "{R Development Core Team}",
254 | organization = "R Foundation for Statistical Computing",
255 | address = "Vienna, Austria",
256 | year = 2010,
257 | note = "{ISBN} 3-900051-07-0",
258 | url = "http://www.R-project.org"
259 | }
260 |
261 | @Article{ Rnews:Plummer+Best+Cowles+Vines:2006,
262 | AUTHOR = "Martyn Plummer and Nicky Best and Kate Cowles and
263 | Karen Vines",
264 | TITLE = "\pkg{CODA}: Convergence Diagnosis and Output
265 | Analysis for {MCMC}",
266 | JOURNAL = "R News",
267 | YEAR = 2006,
268 | VOLUME = 6,
269 | NUMBER = 1,
270 | PAGES = "7--11",
271 | MONTH = "March",
272 | URL = "http://CRAN.R-project.org/doc/Rnews/",
273 | PDF =
274 | "http://CRAN.R-project.org/doc/Rnews/Rnews\_2006-1.pdf"
275 | }
276 |
277 | @Article{ Radford2003,
278 | title = "Slice sampling",
279 | author = "Radford M. Neal",
280 | journal = "The Annals of Statistics",
281 | pages = "705--741",
282 | volume = "31",
283 | number = "3",
284 | month = "June",
285 | year = "2003"
286 | }
287 |
288 |
289 | @Book{ RobertCassela1999,
290 | title = {Monte {C}arlo Statistical Methods},
291 | author = "C. P. Robert and G. Casella",
292 | publisher = "Springer--Verlag",
293 | address = "New York",
294 | year = 1999
295 | }
296 |
297 | @Article{ RobertMengersen1999,
298 | title = "Reparameterisation issues in mixture modelling and
299 | their bearing on MCMC algorithms",
300 | author = "Christian P. Robert and Kerrie L. Mengersen",
301 | journal = "Computational Statistics and Data Analysis",
302 | pages = "325--343",
303 | volume = "29",
304 | number = "3",
305 | year = "1999"
306 | }
307 |
308 | @Article{ RobersSahu1997,
309 | title = {Updating schemes, correlation structure, blocking
310 | and parameterisation for the {G}ibbs sampler},
311 | author = "S. K. Sahu and Gareth O. Roberts",
312 | journal = "Journal of the Royal Statistical Society B",
313 | pages = "291--317",
314 | volume = 59,
315 | year = 1997
316 | }
317 |
318 | @Misc{gautier:_rpy2,
319 | year = 2011,
320 | author = {Laurent Gautier},
321 | title = {\pkg{RPy2}: A Simple and Efficient Access to
322 | \proglang{R} from \proglang{Python}},
323 | howpublished = {\url{http://rpy.sourceforge.net/rpy2.html}},
324 | note = {Accessed 6 March, 2011}
325 | }
326 |
327 |
328 | @Misc{geus11,
329 | author = {Roman Geus},
330 | title = {\pkg{Pysparse}},
331 | howpublished = {\url{http://pysparse.sourceforge.net/}},
332 | year = 2011,
333 | note = {Accessed 6 March, 2011}
334 | }
335 |
336 | @Article{ StricklandMartinForbes2008,
337 | title = "Parameterisation and efficient MCMC estimation of
338 | non-Gaussian state space models",
339 | author = "Chris M. Strickland and Gael M. Martin and Catherine
340 | S. Forbes",
341 | journal = "Computational Statistics and Data Analysis",
342 | pages = "2911--2930",
343 | volume = "52",
344 | year = "2008"
345 | }
346 |
347 | @Book{ Zellner1971,
348 | author = "Arnold Zellner",
349 | title = "An Introduction to {B}ayesian Inference in
350 | {E}conometrics",
351 | publisher = "John Wiley \& Sons",
352 | year = 1971,
353 | address = "New York",
354 | otherinfo = ""
355 | }
356 |
357 | @INCOLLECTION{Zellner1986,
358 | author = {Arnold Zellner},
359 | title = {On assessing prior distributions and {B}ayesian
360 |                    regression analysis with g-prior distributions},
361 | booktitle = {{B}ayesian inference and decision techniques: Essays
362 | in honor of {B}runo de {F}inetti},
363 | publisher = {Elsevier},
364 | year = 1986,
365 | 	editor = {P. K. Goel and Arnold Zellner},
366 | address = {Amsterdam: North-Holland},
367 | otherinfo = {}
368 | }
369 |
370 |
--------------------------------------------------------------------------------
/doc/ex_loglinear.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rdenham/pymcmc/d9563fdf9e4adb747e5e1a1cde424975b8673b9d/doc/ex_loglinear.pdf
--------------------------------------------------------------------------------
/doc/example1_section3.1.py:
--------------------------------------------------------------------------------
1 | ### More detailed examples are available in
2 | ### when installing the pymcmc package
3 | ### see the directory $PREFIX/pymcmc/examples/
4 | ### where PREFIX is where the package was
5 | ### installed (eg /usr/local/lib/python2.6/dist-packages/)
6 |
7 | ### Empirical illustrations ###
8 | ### Example 1: Linear regression model:
9 | ### Variable selection and estimation
10 | import os
11 | from numpy import loadtxt, hstack, ones, random, zeros, asfortranarray, log
12 | from pymcmc.mcmc import MCMC, CFsampler
13 | from pymcmc.regtools import StochasticSearch, BayesRegression
14 | import pymcmc
15 |
16 | """ get the path for the data. If this was installed using setup.py
17 | it will be in the data directory of the module"""
18 | datadir = os.path.join(os.path.dirname(pymcmc.__file__),'data')
19 |
def samplegamma(store):
    """Draw the vector of variable-inclusion indicators.

    Delegates to the StochasticSearch instance kept under the 'SS'
    key, handing it the full MCMC store.
    """
    searcher = store['SS']
    return searcher.sample_gamma(store)
23 |
24 | data = loadtxt(os.path.join(datadir,'yld2.txt'))
25 | yvec = data[:, 0]
# column 0 is the response; columns 1-19 are the candidate regressors,
# to which an intercept column of ones is prepended
26 | xmat = data[:, 1:20]
27 | xmat = hstack([ones((xmat.shape[0], 1)), xmat])
28 | 
29 | """data is a dictionary whose elements are accessible from the functions
30 | in the MCMC sampler"""
31 | data ={'yvec':yvec, 'xmat':xmat}
# g-prior on the regression coefficients: zero prior mean, g = 100
32 | prior = ['g_prior',zeros(xmat.shape[1]), 100.]
33 | SSVS = StochasticSearch(yvec, xmat, prior);
34 | data['SS'] = SSVS
35 | 
36 | """initialise gamma"""
# start with only the intercept included (integer indicator vector)
37 | initgamma = zeros(xmat.shape[1], dtype ='i')
38 | initgamma[0] = 1
39 | simgam = CFsampler(samplegamma, initgamma, 'gamma', store ='all')
40 | 
41 | 
42 | # initialise class for MCMC sampler
43 | random.seed(12346)
# NOTE(review): 20000 total iterations, 5000 presumably burn-in --
# confirm the argument order against pymcmc.mcmc.MCMC
44 | ms = MCMC(20000, 5000, data, [simgam])
45 | ms.sampler()
46 | ms.output()
47 | ms.output(custom = SSVS.output)
48 | 
# NOTE(review): extract_regressors(0) presumably returns the design
# matrix of the most-visited model -- confirm against
# pymcmc.regtools.StochasticSearch
49 | txmat = SSVS.extract_regressors(0)
50 | g_prior = ['g_prior', 0.0, 100.]
51 | breg = BayesRegression(yvec,txmat,prior = g_prior)
52 | breg.output()
53 | 
54 | breg.plot()
55 |
--------------------------------------------------------------------------------
/doc/example2_section3.2.py:
--------------------------------------------------------------------------------
1 | ### More detailed examples are available in
2 | ### when installing the pymcmc package
3 | ### see the directory $PREFIX/pymcmc/examples/
4 | ### where PREFIX is where the package was
5 | ### installed (eg /usr/local/lib/python2.6/dist-packages/)
6 |
7 | ### Empirical illustrations ###
8 | ### Example 2: Log-linear model
9 | import os
10 | from numpy import random, loadtxt, hstack, ones, dot, exp, zeros, outer, diag
11 | from numpy import linalg
12 | from pymcmc.mcmc import MCMC, RWMH, OBMC
13 | from pymcmc.regtools import BayesRegression
14 | from scipy.optimize.minpack import leastsq
15 | import pymcmc
16 |
17 |
18 | """ get the path for the data. If this was installed using setup.py
19 | it will be in the data directory of the module"""
20 | datadir = os.path.join(os.path.dirname(pymcmc.__file__),'data')
21 |
def minfunc(beta, yvec, xmat):
    """Residuals y - exp(X beta) for the least-squares initialiser.

    Passed to scipy's leastsq to obtain starting values for beta.
    """
    fitted = exp(dot(xmat, beta))
    return yvec - fitted
25 |
def prior(store):
    """Log of the zero-mean Gaussian prior pdf for beta, up to a constant.

    The precision matrix is diagonal with 0.005 on the diagonal (a
    vague prior).  The normalising constant is omitted as it cancels
    in the Metropolis-Hastings acceptance ratio.

    Note: the original also built an unused `mu` vector; removed.
    """
    Prec = diag(0.005 * ones(store['beta'].shape[0]))
    return -0.5 * dot(store['beta'].transpose(), dot(Prec, store['beta']))
31 |
def logl(store):
    """Log-likelihood of the Poisson log-linear model.

    With rate lambda_i = exp(x_i'beta), returns
    sum_i (y_i * x_i'beta - lambda_i); the log(y_i!) term is constant
    in beta and therefore dropped.
    """
    linpred = dot(store['xmat'], store['beta'])
    return sum(store['yvec'] * linpred - exp(linpred))
40 |
def posterior(store):
    """Log posterior (up to a constant) for the log-linear model.

    Simply the log-likelihood plus the log prior for beta.
    """
    loglikelihood = logl(store)
    logprior = prior(store)
    return loglikelihood + logprior
47 |
def llhessian(store, beta):
    """Hessian of the Poisson log-linear log-likelihood at beta.

    d2 logL / dbeta dbeta' = -sum_i lambda_i * x_i x_i',  where
    lambda_i = exp(x_i'beta).

    Fixes over the original: the explicit Python loop over
    observations is replaced by the equivalent vectorised expression,
    and the builtin `sum` is no longer shadowed.
    """
    xmat = store['xmat']
    lamb = exp(dot(xmat, beta))
    # (X' * lambda) X  ==  sum_i lambda_i x_i x_i'  via broadcasting
    return -dot(xmat.T * lamb, xmat)
57 |
58 |
59 | # main program
60 | random.seed(12345) # seed for the random number generator
61 | 
62 | # loads data from file
63 | data = loadtxt(os.path.join(datadir,'count.txt'), skiprows = 1)
64 | yvec = data[:, 0]
65 | xmat = data[:, 1:data.shape[1]]
66 | xmat = hstack([ones((data.shape[0], 1)), xmat])
67 | 
68 | data ={'yvec':yvec, 'xmat':xmat}
69 | 
70 | # use bayesian regression to initialise
71 | bayesreg = BayesRegression(yvec, xmat)
72 | sig, beta0 = bayesreg.posterior_mean()
73 | 
# refine the starting value with nonlinear least squares on the
# exponential mean function, then invert minus the log-likelihood
# Hessian there to get a scale matrix for the random-walk proposal
74 | init_beta, info = leastsq(minfunc, beta0, args = (yvec, xmat))
75 | data['betaprec'] =-llhessian(data, init_beta)
76 | scale = linalg.inv(data['betaprec'])
77 | 
78 | # Initialise the random walk MH algorithm
79 | samplebeta = RWMH(posterior, scale, init_beta, 'beta')
80 | 
# NOTE(review): loglike tuple is (function, parameter count, data
# key); its exact use is defined in pymcmc.mcmc -- confirm there
81 | ms = MCMC(20000, 4000, data, [samplebeta],
82 |           loglike = (logl, xmat.shape[1], 'yvec'))
83 | ms.sampler()
84 | 
85 | ms.output()
86 | ms.plot('beta')
87 |
--------------------------------------------------------------------------------
/doc/example3_section3.3.py:
--------------------------------------------------------------------------------
1 | ### More detailed examples are available in
2 | ### when installing the pymcmc package
3 | ### see the directory $PREFIX/pymcmc/examples/
4 | ### where PREFIX is where the package was
5 | ### installed (eg /usr/local/lib/python2.6/dist-packages/)
6 | ### More detailed examples are available in
7 | ### when installing the pymcmc package
8 | ### see the directory $PREFIX/pymcmc/examples/
9 | ### where PREFIX is where the package was
10 | ### installed (eg /usr/local/lib/python2.6/dist-packages/)
11 |
12 | ### Empirical illustrations ###
13 | ### Example 3: First order
14 | ### autoregressive regression
15 |
16 |
17 | from numpy import random, ones, zeros, dot, hstack, eye, log
18 | from scipy import sparse
19 | from pysparse import spmatrix
20 | from pymcmc.mcmc import MCMC, SliceSampler, RWMH, OBMC, MH, CFsampler
21 | from pymcmc.regtools import BayesRegression
22 |
def simdata(nobs, kreg):
    """Simulate data from a regression with AR(1) errors.

    y = X beta + eps,  eps_t = rho * eps_{t-1} + sig * e_t, with an
    intercept plus kreg - 1 standard-normal regressors, beta drawn
    from N(0, I), sig = 0.2 and rho = 0.9.

    Returns (yvec, xmat).

    Fixes over the original: eps[0] was assigned the stationary
    *variance* sig**2 / (1 - rho**2) as a constant; it is now a draw
    from the stationary distribution N(0, sig**2 / (1 - rho**2)).
    The dead store `yvec = zeros(nobs)` and py2-only `xrange` were
    also removed.
    """
    xmat = hstack((ones((nobs, 1)), random.randn(nobs, kreg - 1)))
    beta = random.randn(kreg)
    sig = 0.2
    rho = 0.90
    eps = zeros(nobs)
    eps[0] = (sig**2 / (1. - rho**2))**0.5 * random.randn()
    for i in range(nobs - 1):
        eps[i + 1] = rho * eps[i] + sig * random.randn()
    yvec = dot(xmat, beta) + eps
    return yvec, xmat
36 |
def calcweighted(store):
    """Apply the upper-triangular AR(1) transform to y and X.

    Writes U y into store['yvectil'] and U X into store['xmattil'],
    where U has ones on the diagonal and -rho on the first
    superdiagonal, so a weighted least-squares fit of the transformed
    data accounts for the AR(1) error structure.
    """
    upper = store['Upper']
    n = store['yvec'].shape[0]
    # refresh the superdiagonal with the current value of rho
    upper.put(-store['rho'], range(0, n - 1), range(1, n))
    upper.matvec(store['yvec'], store['yvectil'])
    ncols = store['xmat'].shape[1]
    for col in range(ncols):
        upper.matvec(store['xmat'][:, col], store['xmattil'][:, col])
44 |
def WLS(store):
    """Draw (sigma, beta) from the weighted least-squares posterior.

    Re-weights the data for the current rho, pushes the transformed
    observations into the Bayesian regression sampler and samples
    from it.
    """
    calcweighted(store)
    sampler = store['regsampler']
    sampler.update_yvec(store['yvectil'])
    sampler.update_xmat(store['xmattil'])
    return sampler.sample()
51 |
52 |
def loglike(store):
    """Log-likelihood of the first-order autoregressive regression.

    Transforms yvec and xmat for the current value of rho, then
    evaluates the Gaussian regression log-likelihood at
    (sigma, beta) via the stored regression sampler.

    Note: the original computed an unused local `nobs`; removed.
    """
    calcweighted(store)
    store['regsampler'].update_yvec(store['yvectil'])
    store['regsampler'].update_xmat(store['xmattil'])
    return store['regsampler'].loglike(store['sigma'], store['beta'])
63 |
def prior_rho(store):
    """Log of the Beta(1, 1) prior for the AR(1) coefficient rho.

    Outside the open unit interval a very large negative number
    (effectively -infinity) is returned, which rejects such values in
    the sampler.
    """
    rho = store['rho']
    if not (0. < rho < 1.0):
        return -1E256
    alpha = 1.0
    beta = 1.0
    return (alpha - 1.) * log(rho) + (beta - 1.) * log(1. - rho)
76 |
def post_rho(store):
    """Log of the (unnormalised) posterior distribution for rho."""
    ll = loglike(store)
    lp = prior_rho(store)
    return ll + lp
82 |
83 |
84 | # Main program
85 | random.seed(12345)
86 | nobs = 1000
87 | kreg = 3
88 | yvec, xmat = simdata(nobs, kreg)
89 | 
90 | # we use a g - prior for the regression coefficients.
91 | priorreg = ('g_prior', zeros(kreg), 1000.0)
92 | regs = BayesRegression(yvec, xmat, prior = priorreg)
93 | 
94 | """A dictionary is set up. The contents of the dictionary will be
95 | available for use for by the functions that make up the MCMC sampler.
96 | Note that we pass in storage space as well as the class intance used
97 | to sample the regression from."""
98 | data ={'yvec':yvec, 'xmat':xmat, 'regsampler':regs}
# U starts as the identity; calcweighted() writes -rho onto the first
# superdiagonal each iteration (2 * nobs - 1 nonzeros in total)
99 | U = spmatrix.ll_mat(nobs, nobs, 2 * nobs - 1)
100 | U.put(1.0, range(0, nobs), range(0, nobs))
101 | data['yvectil'] = zeros(nobs)
102 | data['xmattil'] = zeros((nobs, kreg))
103 | data['Upper'] = U
104 | 
105 | # Use Bayesian regression to initialise MCMC sampler
106 | bayesreg = BayesRegression(yvec, xmat)
107 | sig, beta = bayesreg.posterior_mean()
108 | 
109 | simsigbeta = CFsampler(WLS, [sig, beta], ['sigma', 'beta'])
110 | 
111 | rho = 0.9
112 | simrho = SliceSampler([post_rho], 0.1, 5, rho, 'rho')
113 | blocks = [simrho, simsigbeta]
114 | 
# NOTE(review): kreg + 2 is presumably the parameter count
# (kreg betas + sigma + rho) -- confirm against pymcmc.mcmc.MCMC
115 | loglikeinfo = (loglike, kreg + 2, 'yvec')
116 | ms = MCMC(10000, 2000, data, blocks, loglike = loglikeinfo)
117 | ms.sampler()
118 | 
119 | ms.output()
120 | ms.plot('rho')
121 |
--------------------------------------------------------------------------------
/doc/example4_section4.py:
--------------------------------------------------------------------------------
1 | ## Using PyMCMC efficiently
2 |
3 | ## we use the same program as for example2
4 | ## but replace logl function:
5 | import os
6 | from numpy import random, loadtxt, hstack
7 | from numpy import ones, dot, exp, zeros, outer, diag
8 | from numpy import linalg, asfortranarray
9 | from pymcmc.mcmc import MCMC, RWMH, OBMC
10 | from pymcmc.regtools import BayesRegression
11 | from scipy.optimize.minpack import leastsq
12 | from scipy import weave
13 | from scipy.weave import converters
14 | import loglinear
15 | import pymcmc
16 |
17 | datadir = os.path.join(os.path.dirname(pymcmc.__file__),'data')
18 |
def minfunc(beta, yvec, xmat):
    """Return the residuals y - exp(X beta).

    Used by scipy's leastsq to find starting values for the MH
    sampler.
    """
    mean = exp(dot(xmat, beta))
    residuals = yvec - mean
    return residuals
22 |
def prior(store):
    """Log of the zero-mean Gaussian prior pdf for beta, up to a constant.

    Diagonal precision matrix with 0.005 on the diagonal (a vague
    prior); the normalising constant cancels in the MH acceptance
    ratio and is omitted.

    Note: the original also built an unused `mu` vector; removed.
    """
    Prec = diag(0.005 * ones(store['beta'].shape[0]))
    return -0.5 * dot(store['beta'].transpose(), dot(Prec, store['beta']))
28 |
29 |
def posterior(store):
    """Log posterior (up to a constant): log-likelihood plus log prior.

    NOTE(review): relies on a module-level `logl`; this script defines
    several likelihood variants (loglnumpy, loglloop, ...) and
    presumably binds one of them to `logl` further down -- confirm
    before running.
    """
    ll = logl(store)
    lp = prior(store)
    return ll + lp
36 |
def llhessian(store, beta):
    """Hessian of the Poisson log-linear log-likelihood at beta.

    d2 logL / dbeta dbeta' = -sum_i lambda_i * x_i x_i',  with
    lambda_i = exp(x_i'beta).

    Fixes over the original: the per-observation Python loop is
    replaced by the equivalent vectorised expression, and the builtin
    `sum` is no longer shadowed.
    """
    xmat = store['xmat']
    lamb = exp(dot(xmat, beta))
    # (X' * lambda) X  ==  sum_i lambda_i x_i x_i'  via broadcasting
    return -dot(xmat.T * lamb, xmat)
46 |
47 | ## Here we demonstrate four different versions of the
48 | ## loglikelihood function.
49 |
50 | # Numpy
def loglnumpy(store):
    """Vectorised (numpy) log-likelihood for the log-linear model.

    Returns sum_i (y_i * x_i'beta - exp(x_i'beta)); the log(y_i!)
    term is constant in beta and dropped.
    """
    linpred = dot(store['xmat'], store['beta'])
    return sum(store['yvec'] * linpred - exp(linpred))
56 |
57 | # Loop
def loglloop(store):
    """Pure-Python loop version of the log-linear log-likelihood.

    Deliberately iterates observation by observation; kept as the
    slow baseline for the timing comparison in this example.
    """
    total = 0.0
    nobs = store['yvec'].shape[0]
    for i in range(nobs):
        xbeta = dot(store['xmat'][i, :], store['beta'])
        total += store['yvec'][i] * xbeta - exp(xbeta)
    return total
65 |
66 | # weave
67 | def loglweave(store):
68 | """function evaluates the log - likelihood for the log - linear model"""
69 | code = """
70 | double sum = 0.0, xbeta;
71 | for(int i=0; i> Jones et al. (1990)
43 | % \citet*{key} ==>> Jones, Baker, and Smith (1990)
44 | % \citep{key} ==>> (Jones et al., 1990)
45 | % \citep*{key} ==>> (Jones, Baker, and Smith, 1990)
46 | % \citep[chap. 2]{key} ==>> (Jones et al., 1990, chap. 2)
47 | % \citep[e.g.][]{key} ==>> (e.g. Jones et al., 1990)
48 | % \citep[e.g.][p. 32]{key} ==>> (e.g. Jones et al., p. 32)
49 | % \citeauthor{key} ==>> Jones et al.
50 | % \citeauthor*{key} ==>> Jones, Baker, and Smith
51 | % \citeyear{key} ==>> 1990
52 | %---------------------------------------------------------------------
53 |
54 | ENTRY
55 | { address
56 | archive
57 | author
58 | booktitle
59 | chapter
60 | collaboration
61 | doi
62 | edition
63 | editor
64 | eid
65 | eprint
66 | howpublished
67 | institution
68 | isbn
69 | issn
70 | journal
71 | key
72 | month
73 | note
74 | number
75 | numpages
76 | organization
77 | pages
78 | publisher
79 | school
80 | series
81 | title
82 | type
83 | url
84 | volume
85 | year
86 | }
87 | {}
88 | { label extra.label sort.label short.list }
89 | INTEGERS { output.state before.all mid.sentence after.sentence after.block }
90 | FUNCTION {init.state.consts}
91 | { #0 'before.all :=
92 | #1 'mid.sentence :=
93 | #2 'after.sentence :=
94 | #3 'after.block :=
95 | }
96 | STRINGS { s t}
% output.nonnull: writes the previously accumulated string with the
% punctuation implied by the current output.state (", " mid-sentence,
% ". \newblock" after a block, a plain period plus space after a
% sentence, nothing before the first item), then leaves the new
% string s on the stack and sets the state to mid.sentence.
97 | FUNCTION {output.nonnull}
98 | { 's :=
99 |   output.state mid.sentence =
100 |   { ", " * write$ }
101 |   { output.state after.block =
102 |     { add.period$ write$
103 |       newline$
104 |       "\newblock " write$
105 |     }
106 |     { output.state before.all =
107 |       'write$
108 |       { add.period$ " " * write$ }
109 |       if$
110 |     }
111 |     if$
112 |     mid.sentence 'output.state :=
113 |   }
114 |   if$
115 |   s
116 | }
117 | FUNCTION {output}
118 | { duplicate$ empty$
119 | 'pop$
120 | 'output.nonnull
121 | if$
122 | }
123 | FUNCTION {output.check}
124 | { 't :=
125 | duplicate$ empty$
126 | { pop$ "empty " t * " in " * cite$ * warning$ }
127 | 'output.nonnull
128 | if$
129 | }
130 | FUNCTION {fin.entry}
131 | { add.period$
132 | write$
133 | newline$
134 | }
135 |
136 | FUNCTION {new.block}
137 | { output.state before.all =
138 | 'skip$
139 | { after.block 'output.state := }
140 | if$
141 | }
142 | FUNCTION {new.sentence}
143 | { output.state after.block =
144 | 'skip$
145 | { output.state before.all =
146 | 'skip$
147 | { after.sentence 'output.state := }
148 | if$
149 | }
150 | if$
151 | }
152 | FUNCTION {add.blank}
153 | { " " * before.all 'output.state :=
154 | }
155 |
156 | FUNCTION {date.block}
157 | {
158 | new.block
159 | }
160 |
161 | FUNCTION {not}
162 | { { #0 }
163 | { #1 }
164 | if$
165 | }
166 | FUNCTION {and}
167 | { 'skip$
168 | { pop$ #0 }
169 | if$
170 | }
171 | FUNCTION {or}
172 | { { pop$ #1 }
173 | 'skip$
174 | if$
175 | }
176 | FUNCTION {non.stop}
177 | { duplicate$
178 | "}" * add.period$
179 | #-1 #1 substring$ "." =
180 | }
181 |
182 | STRINGS {z}
% remove.dots: rebuilds the string on the stack character by
% character, dropping every "." (used to strip periods from
% formatted initials in names).
183 | FUNCTION {remove.dots}
184 | { 'z :=
185 |   ""
186 |   { z empty$ not }
187 |   { z #1 #1 substring$
188 |     z #2 global.max$ substring$ 'z :=
189 |     duplicate$ "." = 'pop$
190 |       { * }
191 |     if$
192 |   }
193 |   while$
194 | }
195 | FUNCTION {new.block.checkb}
196 | { empty$
197 | swap$ empty$
198 | and
199 | 'skip$
200 | 'new.block
201 | if$
202 | }
203 | FUNCTION {field.or.null}
204 | { duplicate$ empty$
205 | { pop$ "" }
206 | 'skip$
207 | if$
208 | }
209 | FUNCTION {emphasize}
210 | { duplicate$ empty$
211 | { pop$ "" }
212 | { "\emph{" swap$ * "}" * }
213 | if$
214 | }
215 | FUNCTION {bolden}
216 | { duplicate$ empty$
217 | { pop$ "" }
218 | { "\textbf{" swap$ * "}" * }
219 | if$
220 | }
% tie.or.space.prefix: leaves a separator below the value on the
% stack -- a non-breaking "~" when the value's text is shorter than
% three characters (e.g. "volume~3"), a plain space otherwise.
221 | FUNCTION {tie.or.space.prefix}
222 | { duplicate$ text.length$ #3 <
223 |   { "~" }
224 |   { " " }
225 |   if$
226 |   swap$
227 | }
228 |
229 | FUNCTION {capitalize}
230 | { "u" change.case$ "t" change.case$ }
231 |
232 | FUNCTION {space.word}
233 | { " " swap$ * " " * }
234 | % Here are the language-specific definitions for explicit words.
235 | % Each function has a name bbl.xxx where xxx is the English word.
236 | % The language selected here is ENGLISH
237 | FUNCTION {bbl.and}
238 | { "and"}
239 |
240 | FUNCTION {bbl.etal}
241 | { "et~al." }
242 |
243 | FUNCTION {bbl.editors}
244 | { "eds." }
245 |
246 | FUNCTION {bbl.editor}
247 | { "ed." }
248 |
249 | FUNCTION {bbl.edby}
250 | { "edited by" }
251 |
252 | FUNCTION {bbl.edition}
253 | { "edition" }
254 |
255 | FUNCTION {bbl.volume}
256 | { "volume" }
257 |
258 | FUNCTION {bbl.of}
259 | { "of" }
260 |
261 | FUNCTION {bbl.number}
262 | { "number" }
263 |
264 | FUNCTION {bbl.nr}
265 | { "no." }
266 |
267 | FUNCTION {bbl.in}
268 | { "in" }
269 |
270 | FUNCTION {bbl.pages}
271 | { "pp." }
272 |
273 | FUNCTION {bbl.page}
274 | { "p." }
275 |
276 | FUNCTION {bbl.eidpp}
277 | { "pages" }
278 |
279 | FUNCTION {bbl.chapter}
280 | { "chapter" }
281 |
282 | FUNCTION {bbl.techrep}
283 | { "Technical Report" }
284 |
285 | FUNCTION {bbl.mthesis}
286 | { "Master's thesis" }
287 |
288 | FUNCTION {bbl.phdthesis}
289 | { "Ph.D. thesis" }
290 |
291 | MACRO {jan} {"January"}
292 |
293 | MACRO {feb} {"February"}
294 |
295 | MACRO {mar} {"March"}
296 |
297 | MACRO {apr} {"April"}
298 |
299 | MACRO {may} {"May"}
300 |
301 | MACRO {jun} {"June"}
302 |
303 | MACRO {jul} {"July"}
304 |
305 | MACRO {aug} {"August"}
306 |
307 | MACRO {sep} {"September"}
308 |
309 | MACRO {oct} {"October"}
310 |
311 | MACRO {nov} {"November"}
312 |
313 | MACRO {dec} {"December"}
314 |
315 | MACRO {acmcs} {"ACM Computing Surveys"}
316 |
317 | MACRO {acta} {"Acta Informatica"}
318 |
319 | MACRO {cacm} {"Communications of the ACM"}
320 |
321 | MACRO {ibmjrd} {"IBM Journal of Research and Development"}
322 |
323 | MACRO {ibmsj} {"IBM Systems Journal"}
324 |
325 | MACRO {ieeese} {"IEEE Transactions on Software Engineering"}
326 |
327 | MACRO {ieeetc} {"IEEE Transactions on Computers"}
328 |
329 | MACRO {ieeetcad}
330 | {"IEEE Transactions on Computer-Aided Design of Integrated Circuits"}
331 |
332 | MACRO {ipl} {"Information Processing Letters"}
333 |
334 | MACRO {jacm} {"Journal of the ACM"}
335 |
336 | MACRO {jcss} {"Journal of Computer and System Sciences"}
337 |
338 | MACRO {scp} {"Science of Computer Programming"}
339 |
340 | MACRO {sicomp} {"SIAM Journal on Computing"}
341 |
342 | MACRO {tocs} {"ACM Transactions on Computer Systems"}
343 |
344 | MACRO {tods} {"ACM Transactions on Database Systems"}
345 |
346 | MACRO {tog} {"ACM Transactions on Graphics"}
347 |
348 | MACRO {toms} {"ACM Transactions on Mathematical Software"}
349 |
350 | MACRO {toois} {"ACM Transactions on Office Information Systems"}
351 |
352 | MACRO {toplas} {"ACM Transactions on Programming Languages and Systems"}
353 |
354 | MACRO {tcs} {"Theoretical Computer Science"}
355 | FUNCTION {bibinfo.check}
356 | { swap$
357 | duplicate$ missing$
358 | {
359 | pop$ pop$
360 | ""
361 | }
362 | { duplicate$ empty$
363 | {
364 | swap$ pop$
365 | }
366 | { swap$
367 | pop$
368 | }
369 | if$
370 | }
371 | if$
372 | }
373 | FUNCTION {bibinfo.warn}
374 | { swap$
375 | duplicate$ missing$
376 | {
377 | swap$ "missing " swap$ * " in " * cite$ * warning$ pop$
378 | ""
379 | }
380 | { duplicate$ empty$
381 | {
382 | swap$ "empty " swap$ * " in " * cite$ * warning$
383 | }
384 | { swap$
385 | pop$
386 | }
387 | if$
388 | }
389 | if$
390 | }
391 | FUNCTION {format.eprint}
392 | { eprint duplicate$ empty$
393 | 'skip$
394 | { "\eprint"
395 | archive empty$
396 | 'skip$
397 | { "[" * archive * "]" * }
398 | if$
399 | "{" * swap$ * "}" *
400 | }
401 | if$
402 | }
403 | FUNCTION {format.url}
404 | { url empty$
405 | { "" }
406 | { "\urlprefix\url{" url * "}" * }
407 | if$
408 | }
409 |
410 | STRINGS { bibinfo}
411 | INTEGERS { nameptr namesleft numnames }
412 |
413 | FUNCTION {format.names}
414 | { 'bibinfo :=
415 | duplicate$ empty$ 'skip$ {
416 | 's :=
417 | "" 't :=
418 | #1 'nameptr :=
419 | s num.names$ 'numnames :=
420 | numnames 'namesleft :=
421 | { namesleft #0 > }
422 | { s nameptr
423 | "{vv~}{ll}{ jj}{ f{}}"
424 | format.name$
425 | remove.dots
426 | bibinfo bibinfo.check
427 | 't :=
428 | nameptr #1 >
429 | {
430 | namesleft #1 >
431 | { ", " * t * }
432 | {
433 | s nameptr "{ll}" format.name$ duplicate$ "others" =
434 | { 't := }
435 | { pop$ }
436 | if$
437 | "," *
438 | t "others" =
439 | {
440 | " " * bbl.etal emphasize *
441 | }
442 | { " " * t * }
443 | if$
444 | }
445 | if$
446 | }
447 | 't
448 | if$
449 | nameptr #1 + 'nameptr :=
450 | namesleft #1 - 'namesleft :=
451 | }
452 | while$
453 | } if$
454 | }
455 | FUNCTION {format.names.ed}
456 | {
457 | 'bibinfo :=
458 | duplicate$ empty$ 'skip$ {
459 | 's :=
460 | "" 't :=
461 | #1 'nameptr :=
462 | s num.names$ 'numnames :=
463 | numnames 'namesleft :=
464 | { namesleft #0 > }
465 | { s nameptr
466 | "{f{}~}{vv~}{ll}{ jj}"
467 | format.name$
468 | remove.dots
469 | bibinfo bibinfo.check
470 | 't :=
471 | nameptr #1 >
472 | {
473 | namesleft #1 >
474 | { ", " * t * }
475 | {
476 | s nameptr "{ll}" format.name$ duplicate$ "others" =
477 | { 't := }
478 | { pop$ }
479 | if$
480 | "," *
481 | t "others" =
482 | {
483 |
484 | " " * bbl.etal emphasize *
485 | }
486 | { " " * t * }
487 | if$
488 | }
489 | if$
490 | }
491 | 't
492 | if$
493 | nameptr #1 + 'nameptr :=
494 | namesleft #1 - 'namesleft :=
495 | }
496 | while$
497 | } if$
498 | }
499 | FUNCTION {format.key}
500 | { empty$
501 | { key field.or.null }
502 | { "" }
503 | if$
504 | }
505 |
506 | FUNCTION {format.authors}
507 | { author "author" format.names
508 | duplicate$ empty$ 'skip$
509 | { collaboration "collaboration" bibinfo.check
510 | duplicate$ empty$ 'skip$
511 | { " (" swap$ * ")" * }
512 | if$
513 | *
514 | }
515 | if$
516 | }
517 | FUNCTION {get.bbl.editor}
518 | { editor num.names$ #1 > 'bbl.editors 'bbl.editor if$ }
519 |
520 | FUNCTION {format.editors}
521 | { editor "editor" format.names duplicate$ empty$ 'skip$
522 | {
523 | " " *
524 | get.bbl.editor
525 | "(" swap$ * ")" *
526 | *
527 | }
528 | if$
529 | }
530 | FUNCTION {format.isbn}
531 | { isbn "isbn" bibinfo.check
532 | duplicate$ empty$ 'skip$
533 | {
534 | new.block
535 | "ISBN " swap$ *
536 | }
537 | if$
538 | }
539 |
540 | FUNCTION {format.issn}
541 | { issn "issn" bibinfo.check
542 | duplicate$ empty$ 'skip$
543 | {
544 | new.block
545 | "ISSN " swap$ *
546 | }
547 | if$
548 | }
549 |
550 | FUNCTION {format.doi}
551 | { doi "doi" bibinfo.check
552 | duplicate$ empty$ 'skip$
553 | {
554 | new.block
555 | "\doi{" swap$ * "}" *
556 | }
557 | if$
558 | }
559 | FUNCTION {format.note}
560 | {
561 | note empty$
562 | { "" }
563 | { note #1 #1 substring$
564 | duplicate$ "{" =
565 | 'skip$
566 | { output.state mid.sentence =
567 | { "l" }
568 | { "u" }
569 | if$
570 | change.case$
571 | }
572 | if$
573 | note #2 global.max$ substring$ * "note" bibinfo.check
574 | }
575 | if$
576 | }
577 |
578 | FUNCTION {format.title}
579 | { title
580 | "title" bibinfo.check
581 | duplicate$ empty$ 'skip$
582 | {
583 | "\enquote{" swap$ *
584 | add.period$ "}" *
585 | }
586 | if$
587 | }
588 | FUNCTION {format.full.names}
589 | {'s :=
590 | "" 't :=
591 | #1 'nameptr :=
592 | s num.names$ 'numnames :=
593 | numnames 'namesleft :=
594 | { namesleft #0 > }
595 | { s nameptr
596 | "{vv~}{ll}" format.name$
597 | 't :=
598 | nameptr #1 >
599 | {
600 | namesleft #1 >
601 | { ", " * t * }
602 | {
603 | s nameptr "{ll}" format.name$ duplicate$ "others" =
604 | { 't := }
605 | { pop$ }
606 | if$
607 | t "others" =
608 | {
609 | " " * bbl.etal emphasize *
610 | }
611 | {
612 | numnames #2 >
613 | { "," * }
614 | 'skip$
615 | if$
616 | bbl.and
617 | space.word * t *
618 | }
619 | if$
620 | }
621 | if$
622 | }
623 | 't
624 | if$
625 | nameptr #1 + 'nameptr :=
626 | namesleft #1 - 'namesleft :=
627 | }
628 | while$
629 | }
630 |
631 | FUNCTION {author.editor.key.full}
632 | { author empty$
633 | { editor empty$
634 | { key empty$
635 | { cite$ #1 #3 substring$ }
636 | 'key
637 | if$
638 | }
639 | { editor format.full.names }
640 | if$
641 | }
642 | { author format.full.names }
643 | if$
644 | }
645 |
646 | FUNCTION {author.key.full}
647 | { author empty$
648 | { key empty$
649 | { cite$ #1 #3 substring$ }
650 | 'key
651 | if$
652 | }
653 | { author format.full.names }
654 | if$
655 | }
656 |
657 | FUNCTION {editor.key.full}
658 | { editor empty$
659 | { key empty$
660 | { cite$ #1 #3 substring$ }
661 | 'key
662 | if$
663 | }
664 | { editor format.full.names }
665 | if$
666 | }
667 |
668 | FUNCTION {make.full.names}
669 | { type$ "book" =
670 | type$ "inbook" =
671 | or
672 | 'author.editor.key.full
673 | { type$ "proceedings" =
674 | 'editor.key.full
675 | 'author.key.full
676 | if$
677 | }
678 | if$
679 | }
680 |
681 | FUNCTION {output.bibitem}
682 | { newline$
683 | "\bibitem[{" write$
684 | label write$
685 | ")" make.full.names duplicate$ short.list =
686 | { pop$ }
687 | { * }
688 | if$
689 | "}]{" * write$
690 | cite$ write$
691 | "}" write$
692 | newline$
693 | ""
694 | before.all 'output.state :=
695 | }
696 |
% n.dashify: normalises page ranges -- a single "-" is widened to
% "--", while an existing run of two or more hyphens is copied
% through unchanged; all other characters pass through as-is.
697 | FUNCTION {n.dashify}
698 | {
699 |   't :=
700 |   ""
701 |     { t empty$ not }
702 |     { t #1 #1 substring$ "-" =
703 |       { t #1 #2 substring$ "--" = not
704 |         { "--" *
705 |           t #2 global.max$ substring$ 't :=
706 |         }
707 |         { { t #1 #1 substring$ "-" = }
708 |           { "-" *
709 |             t #2 global.max$ substring$ 't :=
710 |           }
711 |           while$
712 |         }
713 |       if$
714 |       }
715 |       { t #1 #1 substring$ *
716 |         t #2 global.max$ substring$ 't :=
717 |       }
718 |     if$
719 |     }
720 |   while$
721 | }
722 |
723 | FUNCTION {word.in}
724 | { bbl.in capitalize
725 | " " * }
726 |
727 | FUNCTION {format.date}
728 | { year "year" bibinfo.check duplicate$ empty$
729 | {
730 | "empty year in " cite$ * "; set to ????" * warning$
731 | pop$ "????"
732 | }
733 | 'skip$
734 | if$
735 | extra.label *
736 | before.all 'output.state :=
737 | " (" swap$ * ")" *
738 | }
739 | FUNCTION {format.btitle}
740 | { title "title" bibinfo.check
741 | duplicate$ empty$ 'skip$
742 | {
743 | emphasize
744 | }
745 | if$
746 | }
747 | FUNCTION {either.or.check}
748 | { empty$
749 | 'pop$
750 | { "can't use both " swap$ * " fields in " * cite$ * warning$ }
751 | if$
752 | }
753 | FUNCTION {format.bvolume}
754 | { volume empty$
755 | { "" }
756 | { bbl.volume volume tie.or.space.prefix
757 | "volume" bibinfo.check * *
758 | series "series" bibinfo.check
759 | duplicate$ empty$ 'pop$
760 | { swap$ bbl.of space.word * swap$
761 | emphasize * }
762 | if$
763 | "volume and number" number either.or.check
764 | }
765 | if$
766 | }
767 | FUNCTION {format.number.series}
768 | { volume empty$
769 | { number empty$
770 | { series field.or.null }
771 | { series empty$
772 | { number "number" bibinfo.check }
773 | { output.state mid.sentence =
774 | { bbl.number }
775 | { bbl.number capitalize }
776 | if$
777 | number tie.or.space.prefix "number" bibinfo.check * *
778 | bbl.in space.word *
779 | series "series" bibinfo.check *
780 | }
781 | if$
782 | }
783 | if$
784 | }
785 | { "" }
786 | if$
787 | }
788 |
789 | FUNCTION {format.edition}
790 | { edition duplicate$ empty$ 'skip$
791 | {
792 | output.state mid.sentence =
793 | { "l" }
794 | { "t" }
795 | if$ change.case$
796 | "edition" bibinfo.check
797 | " " * bbl.edition *
798 | }
799 | if$
800 | }
801 | INTEGERS { multiresult }
802 | FUNCTION {multi.page.check}
803 | { 't :=
804 | #0 'multiresult :=
805 | { multiresult not
806 | t empty$ not
807 | and
808 | }
809 | { t #1 #1 substring$
810 | duplicate$ "-" =
811 | swap$ duplicate$ "," =
812 | swap$ "+" =
813 | or or
814 | { #1 'multiresult := }
815 | { t #2 global.max$ substring$ 't := }
816 | if$
817 | }
818 | while$
819 | multiresult
820 | }
821 | FUNCTION {format.pages}
822 | { pages duplicate$ empty$ 'skip$
823 | { duplicate$ multi.page.check
824 | {
825 | bbl.pages swap$
826 | n.dashify
827 | }
828 | {
829 | bbl.page swap$
830 | }
831 | if$
832 | tie.or.space.prefix
833 | "pages" bibinfo.check
834 | * *
835 | }
836 | if$
837 | }
838 | FUNCTION {format.journal.pages}
839 | { pages duplicate$ empty$ 'pop$
840 | { swap$ duplicate$ empty$
841 | { pop$ pop$ format.pages }
842 | {
843 | ", " *
844 | swap$
845 | n.dashify
846 | "pages" bibinfo.check
847 | *
848 | }
849 | if$
850 | }
851 | if$
852 | }
853 | FUNCTION {format.journal.eid}
854 | { eid "eid" bibinfo.check
855 | duplicate$ empty$ 'pop$
856 | { swap$ duplicate$ empty$ 'skip$
857 | {
858 | ", " *
859 | }
860 | if$
861 | swap$ *
862 | numpages empty$ 'skip$
863 | { bbl.eidpp numpages tie.or.space.prefix
864 | "numpages" bibinfo.check * *
865 | " (" swap$ * ")" * *
866 | }
867 | if$
868 | }
869 | if$
870 | }
871 | FUNCTION {format.vol.num.pages}
872 | { volume field.or.null
873 | duplicate$ empty$ 'skip$
874 | {
875 | "volume" bibinfo.check
876 | }
877 | if$
878 | bolden
879 | number "number" bibinfo.check duplicate$ empty$ 'skip$
880 | {
881 | swap$ duplicate$ empty$
882 | { "there's a number but no volume in " cite$ * warning$ }
883 | 'skip$
884 | if$
885 | swap$
886 | "(" swap$ * ")" *
887 | }
888 | if$ *
889 | eid empty$
890 | { format.journal.pages }
891 | { format.journal.eid }
892 | if$
893 | }
894 |
895 | FUNCTION {format.chapter.pages}
896 | { chapter empty$
897 | 'format.pages
898 | { type empty$
899 | { bbl.chapter }
900 | { type "l" change.case$
901 | "type" bibinfo.check
902 | }
903 | if$
904 | chapter tie.or.space.prefix
905 | "chapter" bibinfo.check
906 | * *
907 | pages empty$
908 | 'skip$
909 | { ", " * format.pages * }
910 | if$
911 | }
912 | if$
913 | }
914 |
915 | FUNCTION {format.booktitle}
916 | {
917 | booktitle "booktitle" bibinfo.check
918 | emphasize
919 | }
920 | FUNCTION {format.in.ed.booktitle}
921 | { format.booktitle duplicate$ empty$ 'skip$
922 | {
923 | editor "editor" format.names.ed duplicate$ empty$ 'pop$
924 | {
925 | " " *
926 | get.bbl.editor
927 | "(" swap$ * "), " *
928 | * swap$
929 | * }
930 | if$
931 | word.in swap$ *
932 | }
933 | if$
934 | }
935 | FUNCTION {format.thesis.type}
936 | { type duplicate$ empty$
937 | 'pop$
938 | { swap$ pop$
939 | "t" change.case$ "type" bibinfo.check
940 | }
941 | if$
942 | }
943 | FUNCTION {format.tr.number}
944 | { number "number" bibinfo.check
945 | type duplicate$ empty$
946 | { pop$ bbl.techrep }
947 | 'skip$
948 | if$
949 | "type" bibinfo.check
950 | swap$ duplicate$ empty$
951 | { pop$ "t" change.case$ }
952 | { tie.or.space.prefix * * }
953 | if$
954 | }
955 | FUNCTION {format.article.crossref}
956 | {
957 | word.in
958 | " \cite{" * crossref * "}" *
959 | }
960 | FUNCTION {format.book.crossref}
961 | { volume duplicate$ empty$
962 | { "empty volume in " cite$ * "'s crossref of " * crossref * warning$
963 | pop$ word.in
964 | }
965 | { bbl.volume
966 | capitalize
967 | swap$ tie.or.space.prefix "volume" bibinfo.check * * bbl.of space.word *
968 | }
969 | if$
970 | " \cite{" * crossref * "}" *
971 | }
972 | FUNCTION {format.incoll.inproc.crossref}
973 | {
974 | word.in
975 | " \cite{" * crossref * "}" *
976 | }
977 | FUNCTION {format.org.or.pub}
978 | { 't :=
979 | ""
980 | address empty$ t empty$ and
981 | 'skip$
982 | {
983 | t empty$
984 | { address "address" bibinfo.check *
985 | }
986 | { t *
987 | address empty$
988 | 'skip$
989 | { ", " * address "address" bibinfo.check * }
990 | if$
991 | }
992 | if$
993 | }
994 | if$
995 | }
996 | FUNCTION {format.publisher.address}
997 | { publisher "publisher" bibinfo.warn format.org.or.pub
998 | }
999 |
1000 | FUNCTION {format.organization.address}
1001 | { organization "organization" bibinfo.check format.org.or.pub
1002 | }
1003 |
1004 | FUNCTION {article}
1005 | { output.bibitem
1006 | format.authors "author" output.check
1007 | author format.key output
1008 | format.date "year" output.check
1009 | date.block
1010 | format.title "title" output.check
1011 | new.block
1012 | crossref missing$
1013 | {
1014 | journal
1015 | "journal" bibinfo.check
1016 | emphasize
1017 | "journal" output.check
1018 | format.vol.num.pages output
1019 | }
1020 | { format.article.crossref output.nonnull
1021 | format.pages output
1022 | }
1023 | if$
1024 | format.issn output
1025 | format.doi output
1026 | new.block
1027 | format.note output
1028 | format.eprint output
1029 | format.url output
1030 | fin.entry
1031 | }
1032 | FUNCTION {book}
1033 | { output.bibitem
1034 | author empty$
1035 | { format.editors "author and editor" output.check
1036 | editor format.key output
1037 | }
1038 | { format.authors output.nonnull
1039 | crossref missing$
1040 | { "author and editor" editor either.or.check }
1041 | 'skip$
1042 | if$
1043 | }
1044 | if$
1045 | format.date "year" output.check
1046 | date.block
1047 | format.btitle "title" output.check
1048 | crossref missing$
1049 | { format.bvolume output
1050 | new.block
1051 | format.number.series output
1052 | format.edition output
1053 | new.sentence
1054 | format.publisher.address output
1055 | }
1056 | {
1057 | new.block
1058 | format.book.crossref output.nonnull
1059 | }
1060 | if$
1061 | format.isbn output
1062 | format.doi output
1063 | new.block
1064 | format.note output
1065 | format.eprint output
1066 | format.url output
1067 | fin.entry
1068 | }
1069 | FUNCTION {booklet}
1070 | { output.bibitem
1071 | format.authors output
1072 | author format.key output
1073 | format.date "year" output.check
1074 | date.block
1075 | format.title "title" output.check
1076 | new.block
1077 | howpublished "howpublished" bibinfo.check output
1078 | address "address" bibinfo.check output
1079 | format.isbn output
1080 | format.doi output
1081 | new.block
1082 | format.note output
1083 | format.eprint output
1084 | format.url output
1085 | fin.entry
1086 | }
1087 |
1088 | FUNCTION {inbook}
1089 | { output.bibitem
1090 | author empty$
1091 | { format.editors "author and editor" output.check
1092 | editor format.key output
1093 | }
1094 | { format.authors output.nonnull
1095 | crossref missing$
1096 | { "author and editor" editor either.or.check }
1097 | 'skip$
1098 | if$
1099 | }
1100 | if$
1101 | format.date "year" output.check
1102 | date.block
1103 | format.btitle "title" output.check
1104 | crossref missing$
1105 | {
1106 | format.bvolume output
1107 | format.chapter.pages "chapter and pages" output.check
1108 | new.block
1109 | format.number.series output
1110 | format.edition output
1111 | new.sentence
1112 | format.publisher.address output
1113 | }
1114 | {
1115 | format.chapter.pages "chapter and pages" output.check
1116 | new.block
1117 | format.book.crossref output.nonnull
1118 | }
1119 | if$
1120 | crossref missing$
1121 | { format.isbn output }
1122 | 'skip$
1123 | if$
1124 | format.doi output
1125 | new.block
1126 | format.note output
1127 | format.eprint output
1128 | format.url output
1129 | fin.entry
1130 | }
1131 |
1132 | FUNCTION {incollection}
1133 | { output.bibitem
1134 | format.authors "author" output.check
1135 | author format.key output
1136 | format.date "year" output.check
1137 | date.block
1138 | format.title "title" output.check
1139 | new.block
1140 | crossref missing$
1141 | { format.in.ed.booktitle "booktitle" output.check
1142 | format.bvolume output
1143 | format.number.series output
1144 | format.edition output
1145 | format.chapter.pages output
1146 | new.sentence
1147 | format.publisher.address output
1148 | format.isbn output
1149 | }
1150 | { format.incoll.inproc.crossref output.nonnull
1151 | format.chapter.pages output
1152 | }
1153 | if$
1154 | format.doi output
1155 | new.block
1156 | format.note output
1157 | format.eprint output
1158 | format.url output
1159 | fin.entry
1160 | }
1161 | FUNCTION {inproceedings}
1162 | { output.bibitem
1163 | format.authors "author" output.check
1164 | author format.key output
1165 | format.date "year" output.check
1166 | date.block
1167 | format.title "title" output.check
1168 | new.block
1169 | crossref missing$
1170 | { format.in.ed.booktitle "booktitle" output.check
1171 | format.bvolume output
1172 | format.number.series output
1173 | format.pages output
1174 | new.sentence
1175 | publisher empty$
1176 | { format.organization.address output }
1177 | { organization "organization" bibinfo.check output
1178 | format.publisher.address output
1179 | }
1180 | if$
1181 | format.isbn output
1182 | format.issn output
1183 | }
1184 | { format.incoll.inproc.crossref output.nonnull
1185 | format.pages output
1186 | }
1187 | if$
1188 | format.doi output
1189 | new.block
1190 | format.note output
1191 | format.eprint output
1192 | format.url output
1193 | fin.entry
1194 | }
1195 | FUNCTION {conference} { inproceedings }
1196 | FUNCTION {manual}
1197 | { output.bibitem
1198 | format.authors output
1199 | author format.key output
1200 | format.date "year" output.check
1201 | date.block
1202 | format.btitle "title" output.check
1203 | organization address new.block.checkb
1204 | organization "organization" bibinfo.check output
1205 | address "address" bibinfo.check output
1206 | format.edition output
1207 | format.doi output
1208 | new.block
1209 | format.note output
1210 | format.eprint output
1211 | format.url output
1212 | fin.entry
1213 | }
1214 |
1215 | FUNCTION {mastersthesis}
1216 | { output.bibitem
1217 | format.authors "author" output.check
1218 | author format.key output
1219 | format.date "year" output.check
1220 | date.block
1221 | format.btitle
1222 | "title" output.check
1223 | new.block
1224 | bbl.mthesis format.thesis.type output.nonnull
1225 | school "school" bibinfo.warn output
1226 | address "address" bibinfo.check output
1227 | format.doi output
1228 | new.block
1229 | format.note output
1230 | format.eprint output
1231 | format.url output
1232 | fin.entry
1233 | }
1234 |
1235 | FUNCTION {misc}
1236 | { output.bibitem
1237 | format.authors output
1238 | author format.key output
1239 | format.date "year" output.check
1240 | date.block
1241 | format.title output
1242 | new.block
1243 | howpublished "howpublished" bibinfo.check output
1244 | format.doi output
1245 | new.block
1246 | format.note output
1247 | format.eprint output
1248 | format.url output
1249 | fin.entry
1250 | }
1251 | FUNCTION {phdthesis}
1252 | { output.bibitem
1253 | format.authors "author" output.check
1254 | author format.key output
1255 | format.date "year" output.check
1256 | date.block
1257 | format.btitle
1258 | "title" output.check
1259 | new.block
1260 | bbl.phdthesis format.thesis.type output.nonnull
1261 | school "school" bibinfo.warn output
1262 | address "address" bibinfo.check output
1263 | format.doi output
1264 | new.block
1265 | format.note output
1266 | format.eprint output
1267 | format.url output
1268 | fin.entry
1269 | }
1270 |
1271 | FUNCTION {proceedings}
1272 | { output.bibitem
1273 | format.editors output
1274 | editor format.key output
1275 | format.date "year" output.check
1276 | date.block
1277 | format.btitle "title" output.check
1278 | format.bvolume output
1279 | format.number.series output
1280 | new.sentence
1281 | publisher empty$
1282 | { format.organization.address output }
1283 | { organization "organization" bibinfo.check output
1284 | format.publisher.address output
1285 | }
1286 | if$
1287 | format.isbn output
1288 | format.issn output
1289 | format.doi output
1290 | new.block
1291 | format.note output
1292 | format.eprint output
1293 | format.url output
1294 | fin.entry
1295 | }
1296 |
1297 | FUNCTION {techreport}
1298 | { output.bibitem
1299 | format.authors "author" output.check
1300 | author format.key output
1301 | format.date "year" output.check
1302 | date.block
1303 | format.title
1304 | "title" output.check
1305 | new.block
1306 | format.tr.number emphasize output.nonnull
1307 | institution "institution" bibinfo.warn output
1308 | address "address" bibinfo.check output
1309 | format.doi output
1310 | new.block
1311 | format.note output
1312 | format.eprint output
1313 | format.url output
1314 | fin.entry
1315 | }
1316 |
1317 | FUNCTION {unpublished}
1318 | { output.bibitem
1319 | format.authors "author" output.check
1320 | author format.key output
1321 | format.date "year" output.check
1322 | date.block
1323 | format.title "title" output.check
1324 | format.doi output
1325 | new.block
1326 | format.note "note" output.check
1327 | format.eprint output
1328 | format.url output
1329 | fin.entry
1330 | }
1331 |
1332 | FUNCTION {default.type} { misc }
1333 | READ
1334 | FUNCTION {sortify}
1335 | { purify$
1336 | "l" change.case$
1337 | }
1338 | INTEGERS { len }
1339 | FUNCTION {chop.word}
1340 | { 's :=
1341 | 'len :=
1342 | s #1 len substring$ =
1343 | { s len #1 + global.max$ substring$ }
1344 | 's
1345 | if$
1346 | }
1347 | FUNCTION {format.lab.names}
1348 | { 's :=
1349 | "" 't :=
1350 | s #1 "{vv~}{ll}" format.name$
1351 | s num.names$ duplicate$
1352 | #2 >
1353 | { pop$
1354 | " " * bbl.etal emphasize *
1355 | }
1356 | { #2 <
1357 | 'skip$
1358 | { s #2 "{ff }{vv }{ll}{ jj}" format.name$ "others" =
1359 | {
1360 | " " * bbl.etal emphasize *
1361 | }
1362 | { bbl.and space.word * s #2 "{vv~}{ll}" format.name$
1363 | * }
1364 | if$
1365 | }
1366 | if$
1367 | }
1368 | if$
1369 | }
1370 |
1371 | FUNCTION {author.key.label}
1372 | { author empty$
1373 | { key empty$
1374 | { cite$ #1 #3 substring$ }
1375 | 'key
1376 | if$
1377 | }
1378 | { author format.lab.names }
1379 | if$
1380 | }
1381 |
1382 | FUNCTION {author.editor.key.label}
1383 | { author empty$
1384 | { editor empty$
1385 | { key empty$
1386 | { cite$ #1 #3 substring$ }
1387 | 'key
1388 | if$
1389 | }
1390 | { editor format.lab.names }
1391 | if$
1392 | }
1393 | { author format.lab.names }
1394 | if$
1395 | }
1396 |
1397 | FUNCTION {editor.key.label}
1398 | { editor empty$
1399 | { key empty$
1400 | { cite$ #1 #3 substring$ }
1401 | 'key
1402 | if$
1403 | }
1404 | { editor format.lab.names }
1405 | if$
1406 | }
1407 |
1408 | FUNCTION {calc.short.authors}
1409 | { type$ "book" =
1410 | type$ "inbook" =
1411 | or
1412 | 'author.editor.key.label
1413 | { type$ "proceedings" =
1414 | 'editor.key.label
1415 | 'author.key.label
1416 | if$
1417 | }
1418 | if$
1419 | 'short.list :=
1420 | }
1421 |
1422 | FUNCTION {calc.label}
1423 | { calc.short.authors
1424 | short.list
1425 | "("
1426 | *
1427 | year duplicate$ empty$
1428 | short.list key field.or.null = or
1429 | { pop$ "" }
1430 | 'skip$
1431 | if$
1432 | *
1433 | 'label :=
1434 | }
1435 |
1436 | FUNCTION {sort.format.names}
1437 | { 's :=
1438 | #1 'nameptr :=
1439 | ""
1440 | s num.names$ 'numnames :=
1441 | numnames 'namesleft :=
1442 | { namesleft #0 > }
1443 | { s nameptr
1444 | "{vv{ } }{ll{ }}{ f{ }}{ jj{ }}"
1445 | format.name$ 't :=
1446 | nameptr #1 >
1447 | {
1448 | " " *
1449 | namesleft #1 = t "others" = and
1450 | { "zzzzz" * }
1451 | { t sortify * }
1452 | if$
1453 | }
1454 | { t sortify * }
1455 | if$
1456 | nameptr #1 + 'nameptr :=
1457 | namesleft #1 - 'namesleft :=
1458 | }
1459 | while$
1460 | }
1461 |
1462 | FUNCTION {sort.format.title}
1463 | { 't :=
1464 | "A " #2
1465 | "An " #3
1466 | "The " #4 t chop.word
1467 | chop.word
1468 | chop.word
1469 | sortify
1470 | #1 global.max$ substring$
1471 | }
1472 | FUNCTION {author.sort}
1473 | { author empty$
1474 | { key empty$
1475 | { "to sort, need author or key in " cite$ * warning$
1476 | ""
1477 | }
1478 | { key sortify }
1479 | if$
1480 | }
1481 | { author sort.format.names }
1482 | if$
1483 | }
1484 | FUNCTION {author.editor.sort}
1485 | { author empty$
1486 | { editor empty$
1487 | { key empty$
1488 | { "to sort, need author, editor, or key in " cite$ * warning$
1489 | ""
1490 | }
1491 | { key sortify }
1492 | if$
1493 | }
1494 | { editor sort.format.names }
1495 | if$
1496 | }
1497 | { author sort.format.names }
1498 | if$
1499 | }
1500 | FUNCTION {editor.sort}
1501 | { editor empty$
1502 | { key empty$
1503 | { "to sort, need editor or key in " cite$ * warning$
1504 | ""
1505 | }
1506 | { key sortify }
1507 | if$
1508 | }
1509 | { editor sort.format.names }
1510 | if$
1511 | }
1512 | FUNCTION {presort}
1513 | { calc.label
1514 | label sortify
1515 | " "
1516 | *
1517 | type$ "book" =
1518 | type$ "inbook" =
1519 | or
1520 | 'author.editor.sort
1521 | { type$ "proceedings" =
1522 | 'editor.sort
1523 | 'author.sort
1524 | if$
1525 | }
1526 | if$
1527 | #1 entry.max$ substring$
1528 | 'sort.label :=
1529 | sort.label
1530 | *
1531 | " "
1532 | *
1533 | title field.or.null
1534 | sort.format.title
1535 | *
1536 | #1 entry.max$ substring$
1537 | 'sort.key$ :=
1538 | }
1539 |
1540 | ITERATE {presort}
1541 | SORT
1542 | STRINGS { last.label next.extra }
1543 | INTEGERS { last.extra.num number.label }
1544 | FUNCTION {initialize.extra.label.stuff}
1545 | { #0 int.to.chr$ 'last.label :=
1546 | "" 'next.extra :=
1547 | #0 'last.extra.num :=
1548 | #0 'number.label :=
1549 | }
1550 | FUNCTION {forward.pass}
1551 | { last.label label =
1552 | { last.extra.num #1 + 'last.extra.num :=
1553 | last.extra.num int.to.chr$ 'extra.label :=
1554 | }
1555 | { "a" chr.to.int$ 'last.extra.num :=
1556 | "" 'extra.label :=
1557 | label 'last.label :=
1558 | }
1559 | if$
1560 | number.label #1 + 'number.label :=
1561 | }
1562 | FUNCTION {reverse.pass}
1563 | { next.extra "b" =
1564 | { "a" 'extra.label := }
1565 | 'skip$
1566 | if$
1567 | extra.label 'next.extra :=
1568 | extra.label
1569 | duplicate$ empty$
1570 | 'skip$
1571 | { "{\natexlab{" swap$ * "}}" * }
1572 | if$
1573 | 'extra.label :=
1574 | label extra.label * 'label :=
1575 | }
1576 | EXECUTE {initialize.extra.label.stuff}
1577 | ITERATE {forward.pass}
1578 | REVERSE {reverse.pass}
1579 | FUNCTION {bib.sort.order}
1580 | { sort.label
1581 | " "
1582 | *
1583 | year field.or.null sortify
1584 | *
1585 | " "
1586 | *
1587 | title field.or.null
1588 | sort.format.title
1589 | *
1590 | #1 entry.max$ substring$
1591 | 'sort.key$ :=
1592 | }
1593 | ITERATE {bib.sort.order}
1594 | SORT
1595 | FUNCTION {begin.bib}
1596 | { preamble$ empty$
1597 | 'skip$
1598 | { preamble$ write$ newline$ }
1599 | if$
1600 | "\begin{thebibliography}{" number.label int.to.str$ * "}" *
1601 | write$ newline$
1602 | "\newcommand{\enquote}[1]{``#1''}"
1603 | write$ newline$
1604 | "\providecommand{\natexlab}[1]{#1}"
1605 | write$ newline$
1606 | "\providecommand{\url}[1]{\texttt{#1}}"
1607 | write$ newline$
1608 | "\providecommand{\urlprefix}{URL }"
1609 | write$ newline$
1610 | "\expandafter\ifx\csname urlstyle\endcsname\relax"
1611 | write$ newline$
1612 | " \providecommand{\doi}[1]{doi:\discretionary{}{}{}#1}\else"
1613 | write$ newline$
1614 | " \providecommand{\doi}{doi:\discretionary{}{}{}\begingroup \urlstyle{rm}\Url}\fi"
1615 | write$ newline$
1616 | "\providecommand{\eprint}[2][]{\url{#2}}"
1617 | write$ newline$
1618 | }
1619 | EXECUTE {begin.bib}
1620 | EXECUTE {init.state.consts}
1621 | ITERATE {call.type$}
1622 | FUNCTION {end.bib}
1623 | { newline$
1624 | "\end{thebibliography}" write$ newline$
1625 | }
1626 | EXECUTE {end.bib}
1627 | %% End of customized bst file
1628 | %%
1629 | %% End of file `jss.bst'.
1630 |
--------------------------------------------------------------------------------
/doc/jsslogo.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rdenham/pymcmc/d9563fdf9e4adb747e5e1a1cde424975b8673b9d/doc/jsslogo.jpg
--------------------------------------------------------------------------------
/doc/mpdreg.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rdenham/pymcmc/d9563fdf9e4adb747e5e1a1cde424975b8673b9d/doc/mpdreg.pdf
--------------------------------------------------------------------------------
/doc/rho.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rdenham/pymcmc/d9563fdf9e4adb747e5e1a1cde424975b8673b9d/doc/rho.pdf
--------------------------------------------------------------------------------
/examples/ex_AR1.py:
--------------------------------------------------------------------------------
1 | # example linear regression model with first order autocorrelation in the errors
2 |
3 | from numpy import random, ones, zeros, dot, hstack, eye, log
4 | from scipy import sparse
5 | from pysparse import spmatrix
6 | from pymcmc.mcmc import MCMC, SliceSampler, RWMH, OBMC, MH, CFsampler
7 | from pymcmc.regtools import BayesRegression
8 |
9 | def simdata(nobs, kreg):
10 | """function simulates data from a first order autoregressive regression"""
11 | xmat = hstack((ones((nobs, 1)), random.randn(nobs, kreg - 1)))
12 | beta = random.randn(kreg)
13 | sig = 0.2
14 | rho = 0.90
15 | yvec = zeros(nobs)
16 | eps = zeros(nobs)
17 | eps[0] = sig**2/(1.-rho**2)
18 | for i in xrange(nobs - 1):
19 | eps[i + 1] = rho * eps[i] + sig * random.randn(1)
20 | yvec = dot(xmat, beta) + eps
21 | return yvec, xmat
22 |
23 | def calcweighted(store):
24 | """re - weights yvec and xmat, for use in weighted least squares regression"""
25 | nobs = store['yvec'].shape[0]
26 | store['Upper'].put(-store['rho'], range(0, nobs - 1), range(1, nobs))
27 | store['Upper'].matvec(store['yvec'], store['yvectil'])
28 | for i in xrange(store['xmat'].shape[1]):
29 | store['Upper'].matvec(store['xmat'][:, i], store['xmattil'][:, i])
30 |
31 | def WLS(store):
32 | """computes weighted least square regression"""
33 | calcweighted(store)
34 | store['regsampler'].update_yvec(store['yvectil'])
35 | store['regsampler'].update_xmat(store['xmattil'])
36 | return store['regsampler'].sample()
37 |
38 | def loglike(store):
39 | """calculates log - likelihood for the the first order autoregressive regression model"""
40 | nobs = store['yvec'].shape[0]
41 | calcweighted(store)
42 | store['regsampler'].update_yvec(store['yvectil'])
43 | store['regsampler'].update_xmat(store['xmattil'])
44 | return store['regsampler'].loglike(store['sigma'], store['beta'])
45 |
46 | def prior_rho(store):
47 | """evaulates the log of the prior distribution for rho. the beta distribution is used"""
48 | if store['rho'] > 0. and store['rho'] < 1.0:
49 | alpha = 1.0
50 | beta = 1.0
51 | return (alpha - 1.) * log(store['rho']) + (beta - 1.) * log(1.-store['rho'])
52 | else:
53 | return -1E256
54 |
55 | def post_rho(store):
56 | """evaulates the log of the posterior distrbution for rho"""
57 | return loglike(store) + prior_rho(store)
58 |
59 |
60 | # testfunctions used to test generic MH algorithm
61 | def gencand(store):
62 | return store['rho'] + 0.02 * random.randn(1)[0]
63 |
64 | def probcandgprev(store):
65 | res = store['rho'] - store['previous_rho']
66 | return -0.5/(0.02**2) * res**2
67 |
68 | def probprevgcand(store):
69 | return probcandgprev(store)
70 |
71 | # Main program
72 | random.seed(12345)
73 | nobs = 1000
74 | kreg = 3
75 |
76 | yvec, xmat = simdata(nobs, kreg)
77 |
78 | # we use a g - prior for the regression coefficients.
79 | priorreg = ('g_prior', zeros(kreg), 1000.0)
80 | regs = BayesRegression(yvec, xmat, prior = priorreg)
81 |
82 | """A dictionary is set up. The contents of the dictionary will be
83 | available for use for by the functions that make up the MCMC sampler.
84 | Note that we pass in storage space as well as the class instance used
85 | to sample the regression from."""
86 | data ={'yvec':yvec, 'xmat':xmat, 'regsampler':regs}
87 | U = spmatrix.ll_mat(nobs, nobs, 2 * nobs - 1)
88 | U.put(1.0, range(0, nobs), range(0, nobs))
89 | data['yvectil'] = zeros(nobs)
90 | data['xmattil'] = zeros((nobs, kreg))
91 | data['Upper'] = U
92 |
93 | # Use Bayesian regression to initialise MCMC sampler
94 | bayesreg = BayesRegression(yvec, xmat)
95 | sig, beta = bayesreg.posterior_mean()
96 |
97 | simsigbeta = CFsampler(WLS, [sig, beta], ['sigma', 'beta'])
98 | scale = 0.002 # tuning parameter for RWMH
99 | rho = 0.9
100 | ##rho = [1] ## to test exception handling
101 | # simrho = RWMH(post_rho, scale, rho, 'rho')
102 | simrho = SliceSampler([post_rho], 0.1, 5, rho, 'rho')
103 | #simrho = OBMC(post_rho, 3, scale, rho, 'rho')
104 | # simrho = MH(gencand, post_rho, probcandgprev, probprevgcand, rho, 'rho')
105 | blocks = [simrho, simsigbeta]
106 | loglikeinfo = (loglike, kreg + 2, 'yvec')
107 | ms = MCMC(10000, 2000, data, blocks, loglike = loglikeinfo)
108 | ms.sampler()
109 | ms.output()
110 | #ms.plot('sigbeta')
111 | ms.plot('rho', filename ='rho')
112 | ms.CODAoutput(parameters = ['rho'])
113 |
114 |
115 |
--------------------------------------------------------------------------------
/examples/ex_loglinear.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # bayesian MCMC estimation of the log - linear model
3 |
4 | import os
5 | from numpy import random, loadtxt, hstack, ones, dot, exp, zeros, outer, diag
6 | from numpy import linalg
7 | from pymcmc.mcmc import MCMC, RWMH, OBMC
8 | from pymcmc.regtools import BayesRegression
9 | from scipy.optimize.minpack import leastsq
10 | import pymcmc
11 |
12 | """ Get the path for the data. If this was installed using setup.py
13 | it will be in the data directory of the module"""
14 | datadir = os.path.join(os.path.dirname(pymcmc.__file__), 'data')
15 |
16 | def minfunc(beta, yvec, xmat ):
17 | """function used by nonlinear least squares routine"""
18 | return yvec - exp(dot(xmat, beta))
19 |
20 | def prior(store):
21 | """function evaluates the prior pdf for beta"""
22 | mu = zeros(store['beta'].shape[0])
23 | Prec = diag(0.005 * ones(store['beta'].shape[0]))
24 | return -0.5 * dot(store['beta'].transpose(), dot(Prec, store['beta']))
25 |
26 | def logl(store):
27 | """function evaluates the log - likelihood for the log - linear model"""
28 | xbeta = dot(store['xmat'], store['beta'])
29 | lamb = exp(xbeta)
30 | return sum(store['yvec'] * xbeta - lamb)
31 |
32 | def posterior(store):
33 | """function evaluates the posterior probability for the log - linear model"""
34 | return logl(store) + prior(store)
35 |
36 | def llhessian(store, beta):
37 | """function returns the hessian for the log - linear model"""
38 | nobs = store['yvec'].shape[0]
39 | kreg = store['xmat'].shape[1]
40 | lamb = exp(dot(store['xmat'], beta))
41 | sum = zeros((kreg, kreg))
42 | for i in xrange(nobs):
43 | sum = sum + lamb[i] * outer(store['xmat'][i], store['xmat'][i])
44 | return -sum
45 |
46 | # main program
47 | random.seed(12345) # seed for the random number generator
48 |
49 | data = loadtxt(os.path.join(datadir,'count.txt'), skiprows = 1) # loads data from file
50 | yvec = data[:, 0]
51 | xmat = data[:, 1:data.shape[1]]
52 | xmat = hstack([ones((data.shape[0], 1)), xmat])
53 |
54 | data ={'yvec':yvec, 'xmat':xmat}
55 | bayesreg = BayesRegression(yvec, xmat) # use bayesian regression to initialise
56 | # nonlinear least squares algorithm
57 | sig, beta0 = bayesreg.posterior_mean()
58 | init_beta, info = leastsq(minfunc, beta0, args = (yvec, xmat))
59 | data['betaprec'] =-llhessian(data, init_beta)
60 | scale = linalg.inv(data['betaprec'])
61 |
62 | samplebeta = RWMH(posterior, scale, init_beta, 'beta')
63 | ms = MCMC(20000, 4000, data, [samplebeta], loglike = (logl, xmat.shape[1], 'yvec'))
64 | ms.sampler()
65 | ms.output(filename='example1c.out')
66 | ms.plot('beta', filename='ex_loglinear.pdf')
67 | # ms.CODAoutput('beta')
68 | # ms.plot('beta', elements = [0], plottypes ="trace", filename ="xx.pdf")
69 | # ms.plot('beta', elements = [0], plottypes ="density", filename ="xx.png")
70 | ## ms.plot('beta', elements = [0], plottypes ="acf", filename ="yy.ps")
71 |
--------------------------------------------------------------------------------
/examples/ex_variable_selection.py:
--------------------------------------------------------------------------------
1 | # example code for variable selection in regression
2 |
3 | import os
4 | from numpy import loadtxt, hstack, ones, random, zeros, asfortranarray, log
5 | from pymcmc.mcmc import MCMC, CFsampler
6 | from pymcmc.regtools import StochasticSearch, BayesRegression
7 | import pymcmc
8 |
9 | """ get the path for the data. If this was installed using setup.py
10 | it will be in the data directory of the module"""
11 | datadir = os.path.join(os.path.dirname(pymcmc.__file__),'data')
12 |
13 | def samplegamma(store):
14 | """function that samples vector of indicators"""
15 | return store['SS'].sample_gamma(store)
16 |
17 | # main program
18 | random.seed(12346)
19 |
20 | # loads data
21 | data = loadtxt(os.path.join(datadir,'yld2.txt'))
22 | yvec = data[:, 0]
23 | xmat = data[:, 1:20]
24 | xmat = hstack([ones((xmat.shape[0], 1)), xmat])
25 |
26 | """data is a dictionary whose elements are accessible from the functions
27 | in the MCMC sampler"""
28 | data ={'yvec':yvec, 'xmat':xmat}
29 | prior = ['g_prior',zeros(xmat.shape[1]), 100.]
30 | SSVS = StochasticSearch(yvec, xmat, prior);
31 | data['SS'] = SSVS
32 |
33 | """initialise gamma"""
34 | initgamma = zeros(xmat.shape[1], dtype ='i')
35 | initgamma[0] = 1
36 | simgam = CFsampler(samplegamma, initgamma, 'gamma', store ='none')
37 |
38 | # initialise class for MCMC sampler
39 | ms = MCMC(20000, 5000, data, [simgam])
40 | ms.sampler()
41 | ms.output(filename ='vs.txt')
42 | ms.output(custom = SSVS.output, filename = 'SSVS.out')
43 | ms.output(custom = SSVS.output)
44 |
45 | txmat = SSVS.extract_regressors(0)
46 | g_prior = ['g_prior', 0.0, 100.]
47 | breg = BayesRegression(yvec,txmat,prior = g_prior)
48 | breg.output(filename = 'SSVS1.out')
49 | breg.plot()
50 |
51 |
--------------------------------------------------------------------------------
/examples/loglinear.f:
--------------------------------------------------------------------------------
c fortran 77 code used to calculate the likelihood of a log
c linear model. Function uses BLAS.
c
c Computes xb = xm * bv with DGEMV, then accumulates the Poisson
c log-likelihood llike = sum_i ( yv(i)*xb(i) - exp(xb(i)) ),
c up to an additive constant (the -log(y!) term is omitted).

      subroutine logl(xb,xm,bv,yv,llike,n,k)
      implicit none
      integer n, k, i
      real*8 xb(n),xm(n,k), bv(k), yv(n), llike
      real*8 alpha, beta

c     f2py directives: llike is returned to Python.  Fixed the
c     malformed directive 'intent(ini bv' and the directive that
c     named 'xmat' instead of the actual dummy argument 'xm'.
cf2py intent(in,out) llike
cf2py intent(in) yv
cf2py intent(in) bv
cf2py intent(in) xm
cf2py intent(in) xb

      alpha=1.0d0
      beta=0.0d0
      call dgemv('n',n,k,alpha,xm,n,bv,1,beta,xb,1)

      llike=0.0d0
      do i=1,n
         llike=llike+yv(i)*xb(i)-exp(xb(i))
      enddo
      end
25 |
26 |
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/examples/matplotlibrc:
--------------------------------------------------------------------------------
1 | interactive : True
2 | ##backend : pdf
3 | ##backend : Agg
4 | ##backend : CocoaAgg
5 | ##backend : GD
6 | ##backend : Paint
7 | backend : Qt4Agg
8 | ##backend : TkAgg
9 | ##backend : WxAgg
10 | figure.figsize : 14, 5 # figure size in inches
11 |
12 | ##font.size : 6.0
13 | font.size : 10.0
14 |
15 | figure.subplot.wspace : 0.2
16 | figure.subplot.hspace : 0.5
17 |
18 |
--------------------------------------------------------------------------------
/examples/using_pymcmc_efficiently.py:
--------------------------------------------------------------------------------
1 | ## Using PyMCMC efficiently
2 |
3 | ## we use the same program as for example2
4 | ## but replace logl function:
5 | import os
6 | from numpy import random, loadtxt, hstack
7 | from numpy import ones, dot, exp, zeros, outer, diag
8 | from numpy import linalg, asfortranarray
9 | from pymcmc.mcmc import MCMC, RWMH, OBMC
10 | from pymcmc.regtools import BayesRegression
11 | from scipy.optimize.minpack import leastsq
12 | from scipy import weave
13 | from scipy.weave import converters
14 | import loglinear
15 | import pymcmc
16 |
17 | datadir = os.path.join(os.path.dirname(pymcmc.__file__),'data')
18 |
def minfunc(beta, yvec, xmat):
    """Residuals of the log-linear mean function.

    Used by the nonlinear least squares routine to obtain starting
    values for beta.
    """
    fitted = exp(dot(xmat, beta))
    return yvec - fitted
22 |
def prior(store):
    """Evaluate the log of the normal prior pdf for beta, up to an
    additive constant.

    The prior precision is 0.005 * I (i.e. variance 200 on each
    coefficient), so the kernel is -0.5 * beta' Prec beta.
    The unused local `mu` from the original was removed.
    """
    kreg = store['beta'].shape[0]
    prec = diag(0.005 * ones(kreg))
    return -0.5 * dot(store['beta'].transpose(), dot(prec, store['beta']))
28 |
29 |
def posterior(store):
    """Log-posterior for the log-linear model.

    Sum of the log-likelihood and the log-prior, each evaluated up
    to an additive constant.
    """
    log_like = logl(store)
    log_prior = prior(store)
    return log_like + log_prior
36 |
def llhessian(store, beta):
    """Return the Hessian of the log-linear model's log-likelihood
    at beta.

    The Hessian is -sum_i lambda_i * x_i x_i' with
    lambda_i = exp(x_i' beta).  It is computed as the matrix product
    -X' diag(lambda) X instead of the original per-observation Python
    loop (which also shadowed the builtin `sum`); the result is the
    same kreg x kreg matrix.
    """
    xmat = store['xmat']
    lamb = exp(dot(xmat, beta))
    # xmat.T * lamb scales column i of X' by lambda_i (broadcasting)
    return -dot(xmat.T * lamb, xmat)
46 |
47 | ## Here we demonstrate four different versions of the
48 | ## loglikelihood function.
49 |
50 | # Numpy
def loglnumpy(store):
    """Vectorised log-likelihood for the log-linear model, up to an
    additive constant.

    Uses the ndarray `.sum()` reduction rather than the Python
    builtin `sum`, which would iterate over the array element by
    element at Python speed.
    """
    xbeta = dot(store['xmat'], store['beta'])
    lamb = exp(xbeta)
    return (store['yvec'] * xbeta - lamb).sum()
56 |
57 | # Loop
def loglloop(store):
    """Log-likelihood for the log-linear model, accumulated one
    observation at a time (pure-Python loop version, kept for the
    timing comparison)."""
    total = 0.0
    for y, row in zip(store['yvec'], store['xmat']):
        xb = dot(row, store['beta'])
        total += y * xb - exp(xb)
    return total
65 |
66 | # weave
67 | def loglweave(store):
68 | """function evaluates the log - likelihood for the log - linear model"""
69 | code = """
70 | double sum = 0.0, xbeta;
71 | for(int i=0; i