├── .gitignore ├── DPMM_Gibbs ├── 1.png ├── 10.png ├── 11.png ├── 12.png ├── 13.png ├── 14.png ├── 2.png ├── 3.png ├── 4.png ├── 5.png ├── 6.png ├── 7.png ├── 8.png ├── 9.png ├── main.py ├── original1.0.png └── training_set_1.0.pkl ├── DPMM_MaxFilter ├── .DS_Store ├── 100PTS_putative_result_10particles_30path.pkl copy ├── FIGURES │ ├── .DS_Store │ └── seed174_300pts_putative_10particles │ │ └── .DS_Store ├── SpikingSorting │ ├── .DS_Store │ └── thirdparty │ │ ├── .DS_Store │ │ ├── @dirichlet │ │ ├── char.m │ │ ├── dimension.m │ │ ├── dirichlet.m │ │ ├── display.m │ │ ├── fit.m │ │ ├── get.m │ │ ├── length.m │ │ ├── lnpdf.m │ │ ├── pdf.m │ │ ├── plot.m │ │ ├── sample.m │ │ ├── set.m │ │ ├── subsasgn.m │ │ └── subsref.m │ │ ├── @distribution │ │ ├── distribution.m │ │ ├── lp.m │ │ ├── p.m │ │ └── plot.m │ │ ├── @gaussian │ │ ├── char.m │ │ ├── dimension.m │ │ ├── display.m │ │ ├── fit.m │ │ ├── gaussian.m │ │ ├── get.m │ │ ├── length.m │ │ ├── lp.m │ │ ├── multinomial.m │ │ ├── p.m │ │ ├── plus.m │ │ ├── private │ │ │ └── fvnlp.m │ │ ├── sample.m │ │ ├── set.m │ │ ├── subsasgn.m │ │ └── subsref.m │ │ ├── @gaussian_mixture_model │ │ ├── gaussian_mixture_model.m │ │ ├── plot.m │ │ └── train.m │ │ ├── @multinomial │ │ ├── char.m │ │ ├── display.m │ │ ├── fit.m │ │ ├── get.m │ │ ├── length.m │ │ ├── lp.m │ │ ├── multinomial.m │ │ ├── p.m │ │ ├── private │ │ │ ├── fvnlp.m │ │ │ └── msum.m │ │ ├── sample.m │ │ ├── set.m │ │ ├── size.m │ │ ├── subsasgn.m │ │ └── subsref.m │ │ ├── collapsed_gibbs_sampler.m │ │ ├── example.m │ │ ├── find_optimal_c.m │ │ ├── heldoutLikelihood.m │ │ ├── heldout_helper.m │ │ ├── logsumexp.m │ │ ├── lp_crp.m │ │ ├── lp_mvniw.m │ │ ├── lp_tpp_helper.m │ │ ├── maxfilter_resample.m │ │ ├── maxfilter_resample_nonunique.m │ │ ├── maxfilter_resample_original.m │ │ ├── mccExcludedFiles.log │ │ ├── multinomial_resample.m │ │ ├── particle_filter.m │ │ ├── plot_mixture.m │ │ ├── readme.txt │ │ ├── results │ │ ├── .DS_Store │ │ ├── 
aug27_maxfilter_100particles.fig │ │ ├── aug27_maxfilter_100particles.mat │ │ ├── aug27_multinomial_100particles.mat │ │ ├── aug27_stratified_100particles.mat │ │ └── useless_maxfilter_5000particles_aug18_13.mat │ │ ├── run_test_compile.sh │ │ ├── run_ucla_exp.m │ │ ├── sample_crp.m │ │ ├── sample_igmm_prior.m │ │ ├── secs2hmsstr.m │ │ ├── spike_plot.m │ │ ├── stratified_resample.m │ │ ├── test_compile.app │ │ └── Contents │ │ │ ├── Info.plist │ │ │ ├── MacOS │ │ │ ├── applauncher │ │ │ ├── prelaunch │ │ │ └── test_compile │ │ │ └── Resources │ │ │ ├── English.lproj │ │ │ ├── About.nib │ │ │ ├── MWOpenAccessoryView.nib │ │ │ ├── MWSaveAccessoryView.nib │ │ │ └── MainMenu.nib │ │ │ └── membrane.icns │ │ ├── test_compile.m │ │ ├── times_CSC4.mat │ │ ├── ucla_big_runner.m │ │ └── untitled.fig ├── dataset.jl ├── deprecated_runner.jl ├── maxfilter.jl ├── picloud_runner.py ├── plotter.py ├── putative_runner.jl ├── results │ └── .DS_Store ├── results_100pts_putative_10particles_200rep_30perdatasetorseed │ └── .DS_Store └── smoothing_runner.jl ├── DPMM_SMC ├── .DS_Store ├── backup │ ├── original.png │ ├── runner_v0.jl │ ├── runner_v1.jl │ ├── runner_v2.jl │ ├── runner_v3.jl │ ├── runner_v4.jl │ ├── runner_v4_one_particle.jl │ └── runner_v5_works_gibbs_only_no_lookahead.jl ├── dataset.jl ├── original.png ├── picloud_runner.py ├── plotter.py ├── result.pkl ├── result_1_1particle_10delta_2path.pdf ├── result_1_1particle_10delta_2path.pkl ├── result_1_1particle_10delta_2path.png ├── result_1particles_20delta_1path.pdf ├── result_1particles_20delta_1path.pkl ├── result_1particles_20delta_1path.png ├── result_1particles_2delta_2path.pdf ├── result_1particles_2delta_2path.pkl ├── result_1particles_2delta_2path.png ├── result_1particles_3delta_1path.pdf ├── result_1particles_3delta_1path.pkl ├── result_1particles_3delta_1path.png ├── result_1particles_5delta_2path.pdf ├── result_1particles_5delta_2path.pkl ├── result_1particles_5delta_2path.png ├── 
result_50particles_3delta_1path.pdf ├── result_50particles_3delta_1path.pkl ├── result_50particles_3delta_1path.png └── runner.jl ├── DPMM_Variational ├── .DS_Store ├── backup │ ├── .DS_Store │ ├── 150_20_50_result_variational_july_20_2013_particles_delta_path.pkl copy │ └── variational_no_lookahead │ │ ├── dataset.jl │ │ ├── gradient.jl │ │ ├── variational_lookahead.jl │ │ └── variational_runner.jl ├── branching_lookahead.jl ├── branching_runner.jl ├── dataset.jl ├── gradient.jl ├── no_lookahead_runner.jl ├── picloud_runner.py ├── plotter.py ├── variational_lookahead.jl └── variational_runner.jl ├── DPMM_Variational_full_support ├── .DS_Store ├── RESULTS │ ├── .DS_Store │ └── aug12_2013_test1 │ │ └── .DS_Store ├── branching_lookahead.jl ├── branching_runner.jl ├── dataset.jl ├── gradient.jl ├── maxfilter.jl ├── media │ └── .DS_Store ├── no_lookahead_runner.jl ├── picloud_runner.py ├── plotter.py ├── variational_lookahead.jl └── variational_runner.jl ├── IHMM_MaxFilter ├── .DS_Store ├── ComputeInferenceError.jl ├── DidacticExample.jl ├── GenerateData.jl ├── HMMSMC.jl ├── HemmingDistance.py ├── HemmingDistance.pyc ├── RunnerMaxFilterIhmm.jl ├── SMCihmm.jl ├── alice_dataprep.jl ├── alice_in_wonderland.txt └── maxFilterihmm.jl ├── README.md ├── nonlinearGaussianSSM ├── .DS_Store ├── README.md ├── lookaheadParticleHMM.jl ├── lookaheadPartilce.jl ├── picloud_runner_nonlinearModel.py ├── plotter.py ├── pmapLoading.jl └── test.jl ├── random_sum.jl ├── test.jl └── thirdparty ├── .DS_Store └── vdpgm ├── README.txt ├── gamma_multivariate_ln.m ├── generate_data.m ├── log_no_w.m ├── log_sum_exp.m ├── logmvtpdf.m ├── mkopts_avdp.m ├── mkopts_bj.m ├── mkopts_bjrnd.m ├── mkopts_cdp.m ├── mkopts_csb.m ├── mkopts_vb.m ├── mkopts_vdp.m ├── variational_lookahead_proposal.m └── vdpgm.m /.gitignore: -------------------------------------------------------------------------------- 1 | *.pdf 2 | *.png 3 | *.pkl 4 | 
-------------------------------------------------------------------------------- /DPMM_Gibbs/1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/1.png -------------------------------------------------------------------------------- /DPMM_Gibbs/10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/10.png -------------------------------------------------------------------------------- /DPMM_Gibbs/11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/11.png -------------------------------------------------------------------------------- /DPMM_Gibbs/12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/12.png -------------------------------------------------------------------------------- /DPMM_Gibbs/13.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/13.png -------------------------------------------------------------------------------- /DPMM_Gibbs/14.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/14.png -------------------------------------------------------------------------------- /DPMM_Gibbs/2.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/2.png -------------------------------------------------------------------------------- /DPMM_Gibbs/3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/3.png -------------------------------------------------------------------------------- /DPMM_Gibbs/4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/4.png -------------------------------------------------------------------------------- /DPMM_Gibbs/5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/5.png -------------------------------------------------------------------------------- /DPMM_Gibbs/6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/6.png -------------------------------------------------------------------------------- /DPMM_Gibbs/7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/7.png -------------------------------------------------------------------------------- /DPMM_Gibbs/8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/8.png -------------------------------------------------------------------------------- 
/DPMM_Gibbs/9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/9.png -------------------------------------------------------------------------------- /DPMM_Gibbs/original1.0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Gibbs/original1.0.png -------------------------------------------------------------------------------- /DPMM_MaxFilter/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/.DS_Store -------------------------------------------------------------------------------- /DPMM_MaxFilter/FIGURES/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/FIGURES/.DS_Store -------------------------------------------------------------------------------- /DPMM_MaxFilter/FIGURES/seed174_300pts_putative_10particles/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/FIGURES/seed174_300pts_putative_10particles/.DS_Store -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/.DS_Store 
-------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/.DS_Store -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/char.m: -------------------------------------------------------------------------------- 1 | function s = char(g) 2 | % DIRICHLET/CHAR 3 | % CHAR(p) is the string representation 4 | 5 | % Author: Frank Wood fwood@gatsby.ucl.ac.uk 6 | 7 | 8 | s = [ 'DIRICHLET: alpha: ' mat2str(g.alpha) ]; 9 | 10 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/dimension.m: -------------------------------------------------------------------------------- 1 | function d = dimension(g) 2 | % POISSON\DIMENSION dimension of lnp model 3 | 4 | error('Semantics of poisson dimension ambiguous') 5 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/dirichlet.m: -------------------------------------------------------------------------------- 1 | function d = dirichlet(l,k) 2 | %Dirichlet distribution class constructor. 
3 | % d = DIRICHLET(alpha,k) creates a dirichlet distribution with a vector of 4 | % parameters alpha if alpha is a vector or a vector of weights equal to alpha/k 5 | % (with length k) if alpha is a scalar 6 | 7 | % Author: Frank Wood fwood@gatsby.ucl.ac.uk 8 | 9 | switch nargin 10 | case 1 11 | if(isa(l,'dirichlet')) 12 | d.alpha = l.alpha; 13 | else 14 | 15 | d.alpha = l; 16 | end 17 | 18 | case 2 19 | d.alpha = ones(1,k)*l/k; 20 | otherwise 21 | error('Incorrect arguments to Dirichlet constructor'); 22 | end 23 | if sum(d.alpha<0) > 0 24 | error('Dirichlet parameters must be positive') 25 | end 26 | bc = distribution(); 27 | d.Z = prod(gamma(d.alpha))/gamma(sum(d.alpha)); 28 | d.logZ = sum(gammaln(d.alpha)) - gammaln(sum(d.alpha)); 29 | d = class(d,'dirichlet',bc); 30 | 31 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/display.m: -------------------------------------------------------------------------------- 1 | function s = display(g) 2 | % DIRICHLET/DISPLAY 3 | % DISPLAY() is the string representation of 4 | 5 | % Author: Frank Wood fwood@gatsby.ucl.ac.uk 6 | 7 | % char(g) 8 | s = char(g); 9 | disp(s) 10 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/fit.m: -------------------------------------------------------------------------------- 1 | function g = fit(d,rate) 2 | % DIRICHLET/FIT Fit DIRICHLET model to that of data. 
Data expected 3 | % in datum per row format 4 | 5 | % Author: Frank Wood fwood@gatsby.ucl.ac.uk 6 | 7 | 8 | error('Dirichlet distribution fit procedure not implemented') -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/get.m: -------------------------------------------------------------------------------- 1 | function val = get(s,propName) 2 | % DIRICHLET/GET Get property from the specified Dirichlet 3 | % and return the value. Property names are: alpha, and "parameter vector" 4 | 5 | switch propName 6 | case 'parameter vector' 7 | val = s.alpha; 8 | case 'parameter vector upper bounds' 9 | val = ones(size(s.alpha))*Inf; 10 | case 'parameter vector lower bounds' 11 | val = zeros(size(s.alpha)); 12 | case 'alpha' 13 | val = s.alpha; 14 | otherwise 15 | error([propName ,'Is not a valid Dirichlet distribution property']) 16 | end 17 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/length.m: -------------------------------------------------------------------------------- 1 | function l = length(m) 2 | % DIRICHLET/LENGTH returns the number of categories or multinomial states 3 | 4 | l = length(m.alpha); 5 | 6 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/lnpdf.m: -------------------------------------------------------------------------------- 1 | function [ret] = lnpdf(d,x) 2 | % DIRICHLET/LNPDF 3 | % 4 | tol = 1e-20; 5 | if sum(x) > 1+tol 6 | error('Argument must sum to one.') 7 | end 8 | 9 | 10 | ret = -d.logZ+sum(log(x).*(d.alpha-1)); -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/pdf.m: -------------------------------------------------------------------------------- 1 | function [ret] = pdf(d,x) 2 | % DIRICHLET/PDF 3 | % 4 
| tol = 1e-10; 5 | if max(sum(x,2)) > 1+tol || min(sum(x,2)) < 1-tol 6 | error('Argument must sum to one.') 7 | end 8 | 9 | 10 | ret = (1/d.Z)*prod(x.^(repmat(d.alpha,size(x,1),1)-1),2); -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/plot.m: -------------------------------------------------------------------------------- 1 | function plot(d) 2 | 3 | if length(d.alpha) > 2 4 | error('plotting not supported for Dirichlet distributions greater than 2D') 5 | end 6 | 7 | X = linspace(0,1,500)'; 8 | Y = 1-X; 9 | 10 | data = [X Y]; 11 | 12 | Z = pdf(d,data); 13 | scatter3(X,Y,Z); -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/sample.m: -------------------------------------------------------------------------------- 1 | function samples = sample(d,cases) 2 | % DIRICHLET/SAMPLE 3 | % samples = p(dist,cases) returns cases number of samples 4 | % from the Dirichlet dist. 
5 | if nargin ==1 6 | cases = 1; 7 | end 8 | num = gamrnd(repmat(d.alpha,cases,1),1); 9 | samples = num./repmat(sum(num,2),1,length(d.alpha)); -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/set.m: -------------------------------------------------------------------------------- 1 | function s = set(s,varargin) 2 | % Dirichlet/SET Set Dirichlet distribution properties to the specified values 3 | % and return the updated object 4 | 5 | 6 | propertyArgIn = varargin; 7 | while length(propertyArgIn) >= 2, 8 | prop = propertyArgIn{1}; 9 | val = propertyArgIn{2}; 10 | propertyArgIn = propertyArgIn(3:end); 11 | switch prop 12 | case 'alpha' 13 | s.alpha = val; 14 | d.Z = prod(gamma(d.alpha))/gamma(sum(d.alpha)); 15 | d.logZ = sum(lngamma(d.alpha)) - lngamma(sum(d.alpha)); 16 | case 'parameter vector' 17 | s.alpha = val; 18 | d.Z = prod(gamma(d.alpha))/gamma(sum(d.alpha)); 19 | d.logZ = sum(lngamma(d.alpha)) - lngamma(sum(d.alpha)); 20 | otherwise 21 | error('Invalid Dirichlet property') 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/subsasgn.m: -------------------------------------------------------------------------------- 1 | function a = subsasgn(a,index,val) 2 | % POISSON/SUBSASGN Define index assignment for Dirichlet distribution objects 3 | 4 | switch index.type 5 | case '()' 6 | % ps = a.ps; 7 | % ps(index.subs{:}) = val; 8 | % if(sum(ps)~=1) 9 | % warning('New multinomial does not sum to 1') 10 | % end 11 | % a.ps(index.subs{:}) = val; 12 | otherwise 13 | error('Cell and name indexing not supported by Dirichlet distribution objects') 14 | 15 | end 16 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@dirichlet/subsref.m: 
-------------------------------------------------------------------------------- 1 | function b = subsref(a,index) 2 | %POISSON/SUBSREF Field name indexing for Dirichlet distribution objects 3 | 4 | switch index.type 5 | % case '()' 6 | % b = a.ps(index.subs{:}); 7 | % case '.' 8 | % if(strcmp(index.subs,'covariance')) 9 | % b = a.c; 10 | % elseif(strcmp(index.subs,'mean')) 11 | % b = a.m; 12 | % end 13 | otherwise 14 | error('Cell and name indexing not supported by Dirichlet distribution objects') 15 | end 16 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@distribution/distribution.m: -------------------------------------------------------------------------------- 1 | function d = distribution() 2 | %DISTRIBUTION Distribution base class constructor. 3 | % d = GAUSSIAN(m,c) creates a Gaussian distribution with 4 | % mean m and covariance matrix c; may also be called 5 | % like d = GAUSSIAN(p) where p is a gaussian to be copied 6 | 7 | % Copyright October, 2006, Brown University, Providence, RI. 8 | % All Rights Reserved 9 | 10 | % Permission to use, copy, modify, and distribute this software and its 11 | % documentation for any purpose other than its incorporation into a commercial 12 | % product is hereby granted without fee, provided that the above copyright 13 | % notice appear in all copies and that both that copyright notice and this 14 | % permission notice appear in supporting documentation, and that the name of 15 | % Brown University not be used in advertising or publicity pertaining to 16 | % distribution of the software without specific, written prior permission. 17 | 18 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 19 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 20 | % PARTICULAR PURPOSE. 
IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 21 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 22 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 23 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 24 | % CONNECTION WITH THE USE. 25 | 26 | % Author: Frank Wood fwood@cs.brown.edu 27 | 28 | d.Z = 1; 29 | d = class(d,'distribution'); 30 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@distribution/lp.m: -------------------------------------------------------------------------------- 1 | function p = lp(d,data) 2 | % DISTRIBUTION/P 3 | % -- base class 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 
23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | % p = fvnlp(data',d.m',d.c')'; 27 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@distribution/p.m: -------------------------------------------------------------------------------- 1 | function p = p(d,data) 2 | % DISTRIBUTION/P 3 | % 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | p = exp(fvnlp(data',d.m',d.c')'); 27 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@distribution/plot.m: -------------------------------------------------------------------------------- 1 | function plot(d) 2 | % DISTRIBUTION\PLOT 3 | 4 | % Copyright October, 2006, Brown University, Providence, RI. 
5 | % All Rights Reserved 6 | 7 | % Permission to use, copy, modify, and distribute this software and its 8 | % documentation for any purpose other than its incorporation into a commercial 9 | % product is hereby granted without fee, provided that the above copyright 10 | % notice appear in all copies and that both that copyright notice and this 11 | % permission notice appear in supporting documentation, and that the name of 12 | % Brown University not be used in advertising or publicity pertaining to 13 | % distribution of the software without specific, written prior permission. 14 | 15 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 16 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 17 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 18 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 19 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 20 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 21 | % CONNECTION WITH THE USE. 22 | 23 | % Author: Frank Wood fwood@cs.brown.edu 24 | 25 | 26 | pts = sample(d,10000); 27 | dim = size(pts,2); 28 | 29 | if(dim==1) 30 | hist(sample(d,10000),100) 31 | elseif(dim==2) 32 | plot(pts(:,1),pts(:,2),'.') 33 | else 34 | error('Plots for samples with higher than 2 dimensions not implemented') 35 | end 36 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/char.m: -------------------------------------------------------------------------------- 1 | function s = char(g) 2 | % GAUSSIAN/CHAR 3 | % CHAR(p) is the string representation of 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 
6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | 27 | if(size(g.c) == [1 1]) 28 | s = [ sprintf('Mean: ') mat2str(g.m) ... 29 | sprintf(' Variance: ') mat2str(g.c)]; 30 | else 31 | s = [ sprintf('Mean: ') mat2str(g.m) ... 32 | sprintf('\nCovariance: \n\n') mat2str(g.c)]; 33 | end 34 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/dimension.m: -------------------------------------------------------------------------------- 1 | function d = dimension(g) 2 | % GAUSSIAN\DIMENSION dimension of Gaussian 3 | 4 | % Copyright October, 2006, Brown University, Providence, RI. 
5 | % All Rights Reserved 6 | 7 | % Permission to use, copy, modify, and distribute this software and its 8 | % documentation for any purpose other than its incorporation into a commercial 9 | % product is hereby granted without fee, provided that the above copyright 10 | % notice appear in all copies and that both that copyright notice and this 11 | % permission notice appear in supporting documentation, and that the name of 12 | % Brown University not be used in advertising or publicity pertaining to 13 | % distribution of the software without specific, written prior permission. 14 | 15 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 16 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 17 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 18 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 19 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 20 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 21 | % CONNECTION WITH THE USE. 22 | 23 | % Author: Frank Wood fwood@cs.brown.edu 24 | 25 | d = size(g.m,1); 26 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/display.m: -------------------------------------------------------------------------------- 1 | function s = display(g) 2 | % GAUSSIAN/DISPLAY 3 | % DISPLAY() is the string representation of 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 
6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | % char(g) 27 | s = char(g); 28 | disp(s) 29 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/fit.m: -------------------------------------------------------------------------------- 1 | function g = fit(d,data) 2 | % GAUSSIAN/FIT Fit mean and covariance to that of data. Data expected 3 | % in datum per row format 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 
6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | g.m = mean(data); 27 | g.c = cov(data); 28 | g = gaussian(g.m,g.c); 29 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/gaussian.m: -------------------------------------------------------------------------------- 1 | function d = gaussian(m,c) 2 | %GAUSSIAN Gaussian distribution class constructor. 3 | % d = GAUSSIAN(m,c) creates a Gaussian distribution with 4 | % mean m and covariance matrix c; may also be called 5 | % like d = GAUSSIAN(p) where p is a gaussian to be copied 6 | 7 | % Copyright October, 2006, Brown University, Providence, RI. 
8 | % All Rights Reserved 9 | 10 | % Permission to use, copy, modify, and distribute this software and its 11 | % documentation for any purpose other than its incorporation into a commercial 12 | % product is hereby granted without fee, provided that the above copyright 13 | % notice appear in all copies and that both that copyright notice and this 14 | % permission notice appear in supporting documentation, and that the name of 15 | % Brown University not be used in advertising or publicity pertaining to 16 | % distribution of the software without specific, written prior permission. 17 | 18 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 19 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 20 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 21 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 22 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 23 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 24 | % CONNECTION WITH THE USE. 25 | 26 | % Author: Frank Wood fwood@cs.brown.edu 27 | 28 | if nargin == 1 29 | if(isa(m,'gaussian')) 30 | d.m = m.m; 31 | d.c = m.c; 32 | d = class(d,'gaussian',dist); 33 | else 34 | d.m = 0; 35 | d.c = 1; 36 | bc = distribution(); 37 | d = class(d,'gaussian',bc); 38 | d = fit(d,m); 39 | 40 | end 41 | elseif nargin == 2 42 | d.m = m; 43 | d.c = c; 44 | bc = distribution(); 45 | d = class(d,'gaussian',bc); 46 | else 47 | error; 48 | end 49 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/get.m: -------------------------------------------------------------------------------- 1 | function val = get(s,propName) 2 | % GAUSSIAN/GET Get property from the specified Gaussian 3 | % and return the value. Property names are: Mean and Covariance 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 
6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | switch propName 27 | case 'mean' 28 | val = s.m; 29 | case 'covariance' 30 | val = s.c; 31 | otherwise 32 | error([propName ,'Is not a valid stock property']) 33 | end 34 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/length.m: -------------------------------------------------------------------------------- 1 | function l = length(m) 2 | % GAUSSIAN/LENGTH returns the number of categories or multinomial states 3 | 4 | % Copyright October, 2006, Brown University, Providence, RI. 
5 | % All Rights Reserved 6 | 7 | % Permission to use, copy, modify, and distribute this software and its 8 | % documentation for any purpose other than its incorporation into a commercial 9 | % product is hereby granted without fee, provided that the above copyright 10 | % notice appear in all copies and that both that copyright notice and this 11 | % permission notice appear in supporting documentation, and that the name of 12 | % Brown University not be used in advertising or publicity pertaining to 13 | % distribution of the software without specific, written prior permission. 14 | 15 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 16 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 17 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 18 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 19 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 20 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 21 | % CONNECTION WITH THE USE. 22 | 23 | % Author: Frank Wood fwood@cs.brown.edu 24 | 25 | l = length(m.ps); 26 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/lp.m: -------------------------------------------------------------------------------- 1 | function lp = lp(d,data) 2 | % GAUSSIAN/LP 3 | % 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 
6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | lp = fvnlp(data,d.m,d.c); 27 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/multinomial.m: -------------------------------------------------------------------------------- 1 | function d = multinomial(ps) 2 | %GAUSSIAN multinomial distribution class constructor. 3 | % d = MULTINOMIAL(ps) creates a multinomial distribution with 4 | % length(ps) bins and ps probabilities 5 | 6 | % Copyright October, 2006, Brown University, Providence, RI. 
7 | % All Rights Reserved 8 | 9 | % Permission to use, copy, modify, and distribute this software and its 10 | % documentation for any purpose other than its incorporation into a commercial 11 | % product is hereby granted without fee, provided that the above copyright 12 | % notice appear in all copies and that both that copyright notice and this 13 | % permission notice appear in supporting documentation, and that the name of 14 | % Brown University not be used in advertising or publicity pertaining to 15 | % distribution of the software without specific, written prior permission. 16 | 17 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 18 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 19 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 20 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 21 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 22 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 23 | % CONNECTION WITH THE USE. 24 | 25 | % Author: Frank Wood fwood@cs.brown.edu 26 | 27 | if(nargin == 0) 28 | d.ps = []; 29 | elseif(nargin == 1) 30 | d.ps = ps; 31 | if(sum(d.ps) ~=1) 32 | warning('Argument probabilities don''t sum to 1 (not a multinomial distribution)'); 33 | end 34 | end 35 | bc = distribution(); 36 | d = class(d,'multinomial',bc); 37 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/p.m: -------------------------------------------------------------------------------- 1 | function [p,lp] = p(d,data) 2 | % GAUSSIAN/P 3 | % 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 
6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | lp = fvnlp(data,d.m,d.c); 27 | p = exp(lp); 28 | 29 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/plus.m: -------------------------------------------------------------------------------- 1 | function r = plus(p,q) 2 | % GAUSSIAN/PLUS Implement p + q for gaussian distributions. 3 | % produced a Gaussian Mixture Model 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 
6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | p = gaussian(p); 27 | q = gaussian(q); 28 | error('Not yet implemented -- Need Gaussian mixture model.'); 29 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/private/fvnlp.m: -------------------------------------------------------------------------------- 1 | function [lp, md] = fvnlp(x,mu,covariance) 2 | % fast_vectorized_normal_probability(points,mean,covariance_matrix) 3 | % 4 | % INPUT: 5 | % points - the points to evaluate (column vectors) 6 | % mean - mean of normal distribution (column vector of same length) 7 | % covariance - covariance matrix 8 | % OUTPUT: 9 | % p - probability 10 | % md - Mahalanobis distance 11 | 12 | % Copyright October, 2006, Brown University, Providence, RI. 
13 | % All Rights Reserved 14 | 15 | % Permission to use, copy, modify, and distribute this software and its 16 | % documentation for any purpose other than its incorporation into a commercial 17 | % product is hereby granted without fee, provided that the above copyright 18 | % notice appear in all copies and that both that copyright notice and this 19 | % permission notice appear in supporting documentation, and that the name of 20 | % Brown University not be used in advertising or publicity pertaining to 21 | % distribution of the software without specific, written prior permission. 22 | 23 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 24 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 25 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 26 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 27 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 28 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 29 | % CONNECTION WITH THE USE. 
30 | 31 | % Author: Frank Wood fwood@cs.brown.edu 32 | 33 | num_points = size(x,1); 34 | [scale,inv_cov] =pcgf(covariance); 35 | 36 | y=x-repmat(mu,num_points,1); 37 | 38 | md2 = sum(y.*(inv_cov*(y'))',2); 39 | md = (md2).^(.5); 40 | 41 | if(scale == 0) 42 | warning('@gaussian private fvnlp: bad covariance matrix') 43 | lp = -Inf; 44 | else 45 | lp = -md2/2 +log(scale); 46 | end 47 | 48 | function [scale,inverse] = pcgf(covariance) 49 | 50 | dimension = size(covariance,1); 51 | 52 | % SINGULAR 53 | if(rcond(covariance)= 2, 28 | prop = propertyArgIn{1}; 29 | val = propertyArgIn{2}; 30 | propertyArgIn = propertyArgIn(3:end); 31 | switch prop 32 | case 'mean' 33 | s.m = val; 34 | case 'covariance' 35 | s.c = val; 36 | otherwise 37 | error('Invalid property') 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/subsasgn.m: -------------------------------------------------------------------------------- 1 | function a = subsasgn(a,index,val) 2 | % SUBSASGN Define index assignment for multinomial objects 3 | 4 | % Copyright October, 2006, Brown University, Providence, RI. 5 | % All Rights Reserved 6 | 7 | % Permission to use, copy, modify, and distribute this software and its 8 | % documentation for any purpose other than its incorporation into a commercial 9 | % product is hereby granted without fee, provided that the above copyright 10 | % notice appear in all copies and that both that copyright notice and this 11 | % permission notice appear in supporting documentation, and that the name of 12 | % Brown University not be used in advertising or publicity pertaining to 13 | % distribution of the software without specific, written prior permission. 14 | 15 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 16 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 17 | % PARTICULAR PURPOSE. 
IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 18 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 19 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 20 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 21 | % CONNECTION WITH THE USE. 22 | 23 | % Author: Frank Wood fwood@cs.brown.edu 24 | 25 | switch index.type 26 | case '()' 27 | ps = a.ps; 28 | ps(index.subs{:}) = val; 29 | if(sum(ps)~=1) 30 | warning('New multinomial does not sum to 1') 31 | end 32 | a.ps(index.subs{:}) = val; 33 | otherwise 34 | error('Indexing method not supported by multinomial') 35 | 36 | end 37 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian/subsref.m: -------------------------------------------------------------------------------- 1 | function b = subsref(a,index) 2 | %SUBSREF Define field name indexing for multinomial objects 3 | 4 | % Copyright October, 2006, Brown University, Providence, RI. 5 | % All Rights Reserved 6 | 7 | % Permission to use, copy, modify, and distribute this software and its 8 | % documentation for any purpose other than its incorporation into a commercial 9 | % product is hereby granted without fee, provided that the above copyright 10 | % notice appear in all copies and that both that copyright notice and this 11 | % permission notice appear in supporting documentation, and that the name of 12 | % Brown University not be used in advertising or publicity pertaining to 13 | % distribution of the software without specific, written prior permission. 14 | 15 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 16 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 17 | % PARTICULAR PURPOSE. 
IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 18 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 19 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 20 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 21 | % CONNECTION WITH THE USE. 22 | 23 | % Author: Frank Wood fwood@cs.brown.edu 24 | 25 | switch index.type 26 | case '()' 27 | b = a.ps(index.subs{:}); 28 | case '.' 29 | if(strcmp(index.subs,'covariance')) 30 | b = a.c; 31 | elseif(strcmp(index.subs,'mean')) 32 | b = a.m; 33 | end 34 | otherwise 35 | error('Cell and name indexing not supported by Gaussian objects') 36 | end 37 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian_mixture_model/gaussian_mixture_model.m: -------------------------------------------------------------------------------- 1 | function d = gaussian_mixture_model(varargin) 2 | %GAUSSIAN_MIXTURE_MODEL mixture model class constructor. 3 | % d = MIXTURE_MODEL(dist1, dist2, dist3, ..., distn) creates a mixture 4 | % distribution with n component densities 5 | 6 | % Copyright October, 2006, Brown University, Providence, RI. 7 | % All Rights Reserved 8 | 9 | % Permission to use, copy, modify, and distribute this software and its 10 | % documentation for any purpose other than its incorporation into a commercial 11 | % product is hereby granted without fee, provided that the above copyright 12 | % notice appear in all copies and that both that copyright notice and this 13 | % permission notice appear in supporting documentation, and that the name of 14 | % Brown University not be used in advertising or publicity pertaining to 15 | % distribution of the software without specific, written prior permission. 
16 | 17 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 18 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 19 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 20 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 21 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 22 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 23 | % CONNECTION WITH THE USE. 24 | 25 | % Author: Frank Wood fwood@cs.brown.edu 26 | 27 | par = mixture_model(varargin{:}); 28 | this.Z = 1; 29 | d = class(this,'gaussian_mixture_model',par); 30 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian_mixture_model/plot.m: -------------------------------------------------------------------------------- 1 | function hndls = plot(d,colors) 2 | % GMM\PLOT 3 | 4 | % Copyright October, 2006, Brown University, Providence, RI. 5 | % All Rights Reserved 6 | 7 | % Permission to use, copy, modify, and distribute this software and its 8 | % documentation for any purpose other than its incorporation into a commercial 9 | % product is hereby granted without fee, provided that the above copyright 10 | % notice appear in all copies and that both that copyright notice and this 11 | % permission notice appear in supporting documentation, and that the name of 12 | % Brown University not be used in advertising or publicity pertaining to 13 | % distribution of the software without specific, written prior permission. 14 | 15 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 16 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 17 | % PARTICULAR PURPOSE. 
IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 18 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 19 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 20 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 21 | % CONNECTION WITH THE USE. 22 | 23 | % Author: Frank Wood fwood@cs.brown.edu 24 | 25 | if(nargin < 2) 26 | colors = {'k','r','g','b','m','c'}; 27 | end 28 | 29 | 30 | nc = d.mixture_model.num_components; 31 | hndls = zeros(nc,1); 32 | invp = 1.96; 33 | hold on 34 | for(i=1:nc) 35 | g = d.mixture_model(i); 36 | g = g{1}; 37 | mu = g.mean; 38 | sigma = g.covariance; 39 | 40 | [vec, val] = eig(sigma(1:2,1:2)); 41 | axes = invp*sqrt(svd(val)); 42 | % angles = -atan2(vec(1,:),vec(2,:)); 43 | 44 | t= linspace(0,2*pi); 45 | ellip = vec*invp*sqrt(val)*[cos(t);sin(t)] + repmat([mu(1); mu(2)],1,100); 46 | ellip = ellip'; 47 | axes = axes'; 48 | color_index = i; 49 | if(color_index>length(colors)) 50 | color_index=length(colors); 51 | end 52 | hndls(i) = plot(ellip(:,1),ellip(:,2),colors{color_index}); 53 | end 54 | hold off 55 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@gaussian_mixture_model/train.m: -------------------------------------------------------------------------------- 1 | function [gmm,cluster_assignments, log_likelihood] = train(gmmin,data,num_mixture_components,max_em_iterations,use_spectral_clustering) 2 | % GAUSSIAN_MIXTURE_MODEL/TRAIN 3 | % gmm = TRAIN(gmmin,data,num_mixture_components,max_em_iterations) 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 
6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 
23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | if(nargin<4) 27 | max_em_iterations = 100; 28 | end 29 | if(nargin<3) 30 | error('Need at least num_mixture_components, data, and gmmin'); 31 | end 32 | if(nargin<5) 33 | use_spectral_clustering=1; 34 | end 35 | if(use_spectral_clustering) 36 | [cluster_assignments, log_likelihood, mixture_means, mixture_covariances, alpha] = cluster_em_gaussian_mixture(data,num_mixture_components,[],0,max_em_iterations); 37 | else 38 | n = size(data,1); 39 | k = num_mixture_components; 40 | initial_cluster_inds = ceil(rand(n,1)*k); 41 | initial_clusters = ones(n,k)*.01; 42 | for(i=1:n) 43 | initial_clusters(i,initial_cluster_inds(i)) = .95; 44 | end 45 | [cluster_assignments, log_likelihood, mixture_means, mixture_covariances, alpha] = cluster_em_gaussian_mixture(data,num_mixture_components,initial_clusters,0,max_em_iterations); 46 | end 47 | 48 | evalstr = 'gmm = gaussian_mixture_model('; 49 | 50 | for(i=1:num_mixture_components) 51 | if(i2) 27 | s = 'Multinomial'; 28 | warning('Can''t display probability table of greater than dimension two'); 29 | else 30 | s = [ sprintf('Multinomial probabilities: ') mat2str(p.ps)]; 31 | end 32 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@multinomial/display.m: -------------------------------------------------------------------------------- 1 | function s = display(g) 2 | % GAUSSIAN/DISPLAY 3 | % DISPLAY() is the string representation of 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 
6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | disp(char(g)) 27 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@multinomial/fit.m: -------------------------------------------------------------------------------- 1 | function g = fit(d,data) 2 | % MULTINOMIAL/FIT Fit multinomial to a list of integers. This function 3 | % assumes that the integers are contiguous and start from 1 (i.e. 1...N) 4 | % this creates a multinomial with N bins where each probability is the 5 | % normalized count of that integer 6 | 7 | % Copyright October, 2006, Brown University, Providence, RI. 
8 | % All Rights Reserved 9 | 10 | % Permission to use, copy, modify, and distribute this software and its 11 | % documentation for any purpose other than its incorporation into a commercial 12 | % product is hereby granted without fee, provided that the above copyright 13 | % notice appear in all copies and that both that copyright notice and this 14 | % permission notice appear in supporting documentation, and that the name of 15 | % Brown University not be used in advertising or publicity pertaining to 16 | % distribution of the software without specific, written prior permission. 17 | 18 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 19 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 20 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 21 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 22 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 23 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 24 | % CONNECTION WITH THE USE. 25 | 26 | % Author: Frank Wood fwood@cs.brown.edu 27 | 28 | 29 | dim = size(data,2); 30 | uvals = cell(dim,1); 31 | nuvals = zeros(dim,1)'; 32 | 33 | for(d = 1:dim) 34 | uvals{d} = sort(unique(data(:,d))); 35 | nuvals(d) = max(uvals{d}); % could be length, max better for neural data 36 | end 37 | 38 | counts = zeros(nuvals); 39 | 40 | inds = num2cell(data,1); 41 | cii = sub2ind(size(counts),inds{:}); 42 | [N,I] = hist(cii,1:prod(size(counts))); 43 | 44 | counts(I) = N; 45 | 46 | counts = counts./length(data); 47 | g = multinomial(counts); 48 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@multinomial/get.m: -------------------------------------------------------------------------------- 1 | function val = get(s,propName) 2 | % MULTINOMIAL/GET Get property from the specified Gaussian 3 | % and return the value. 
Property names are: Mean and Covariance 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | 27 | switch propName 28 | case 'Probabilities' 29 | val = s.ps; 30 | otherwise 31 | error([propName ,'Is not a valid multinomial property (Probabilities)']) 32 | end 33 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@multinomial/length.m: -------------------------------------------------------------------------------- 1 | function l = length(m) 2 | % MULTINOMIAL/LENGTH returns the number of categories or multinomial 3 | % states, i.e. prod(size(m)) 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 
6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | l = prod(size(m.ps)); 27 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@multinomial/lp.m: -------------------------------------------------------------------------------- 1 | function pr = lp(d,varargin) 2 | % MULTINOMIAL/LP 3 | % 4 | 5 | % Copyright October, 2006, Brown University, Providence, RI. 
6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | pr = log(p(d,varargin{:})); 27 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@multinomial/multinomial.m: -------------------------------------------------------------------------------- 1 | function d = multinomial(ps) 2 | %MULTINOMIAL multinomial distribution class constructor. 3 | % d = MULTINOMIAL(ps) creates a multinomial distribution with 4 | % length(ps) bins and ps probabilities 5 | 6 | % Copyright October, 2006, Brown University, Providence, RI. 
7 | % All Rights Reserved 8 | 9 | % Permission to use, copy, modify, and distribute this software and its 10 | % documentation for any purpose other than its incorporation into a commercial 11 | % product is hereby granted without fee, provided that the above copyright 12 | % notice appear in all copies and that both that copyright notice and this 13 | % permission notice appear in supporting documentation, and that the name of 14 | % Brown University not be used in advertising or publicity pertaining to 15 | % distribution of the software without specific, written prior permission. 16 | 17 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 18 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 19 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 20 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 21 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 22 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 23 | % CONNECTION WITH THE USE. 24 | 25 | % Author: Frank Wood fwood@cs.brown.edu 26 | 27 | d.ps = []; 28 | d.dim = -1; 29 | 30 | if(nargin == 0) 31 | d.ps = []; 32 | elseif(nargin == 1) 33 | d.ps = ps; 34 | if(~isvector(ps)) 35 | d.dim = length(size(ps)); 36 | else 37 | d.dim = 1; 38 | end 39 | ss = msum(d.ps); 40 | if(~(1-5*eps <=ss & ss <= 1+5*eps)) 41 | warning('Argument probabilities don''t sum to 1 (not a multinomial distribution)'); 42 | end 43 | end 44 | bc = distribution(); 45 | d = class(d,'multinomial',bc); 46 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@multinomial/p.m: -------------------------------------------------------------------------------- 1 | function p = p(d,varargin) 2 | % MULTINOMIAL/P 3 | % p = p(d,varargin) where varargin has the same semantics as sub2ind 4 | % i.e. p(d,1,2,1,...) 
function p = p(d,varargin)
% MULTINOMIAL/P probability of the given categories.
%   p = p(d,...) where the trailing arguments have the same semantics
%   as SUB2IND, i.e. p(d,1,2,1,...) gives the probability of row 1,
%   column 2, page 1, and p(d,[1;1],[2;2],[1;1],...) gives the same,
%   twice.  The single-argument form p(d,X) takes a matrix X whose
%   columns hold the subscripts for each dimension.

% Copyright October, 2006, Brown University, Providence, RI.
% All Rights Reserved.  See the original distribution for the full
% license text.

% Author: Frank Wood fwood@cs.brown.edu

if nargin == 2
    % one matrix argument: split its columns into one subscript
    % vector per dimension
    subs = num2cell(varargin{1}, 1);
else
    % one subscript argument per dimension, sub2ind style
    subs = varargin;
end
p = d.ps(sub2ind(size(d.ps), subs{:}));
function s = set(s,varargin)
% MULTINOMIAL/SET Set multinomial properties to the specified values
% and return the updated object.
% Property names are: Probabilities
%
% (Fixed: header previously said "Set Gaussian properties",
% copy-pasted from @gaussian/set.m.)

% Copyright October, 2006, Brown University, Providence, RI.
% All Rights Reserved.  See the original distribution for the full
% license text.

% Author: Frank Wood fwood@cs.brown.edu

% consume (name, value) pairs from the argument list
propertyArgIn = varargin;
while length(propertyArgIn) >= 2
    prop = propertyArgIn{1};
    val = propertyArgIn{2};
    propertyArgIn = propertyArgIn(3:end);
    switch prop
        case 'Probabilities'
            s.ps = val;
        otherwise
            % space + lowercase so the message reads "<name> is not ..."
            error([prop, ' is not a valid multinomial property (Probabilities)'])
    end
end
6 | % All Rights Reserved 7 | 8 | % Permission to use, copy, modify, and distribute this software and its 9 | % documentation for any purpose other than its incorporation into a commercial 10 | % product is hereby granted without fee, provided that the above copyright 11 | % notice appear in all copies and that both that copyright notice and this 12 | % permission notice appear in supporting documentation, and that the name of 13 | % Brown University not be used in advertising or publicity pertaining to 14 | % distribution of the software without specific, written prior permission. 15 | 16 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 17 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 18 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 19 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 20 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 21 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 22 | % CONNECTION WITH THE USE. 23 | 24 | % Author: Frank Wood fwood@cs.brown.edu 25 | 26 | l = size(m.ps); 27 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@multinomial/subsasgn.m: -------------------------------------------------------------------------------- 1 | function a = subsasgn(a,index,val) 2 | % SUBSASGN Define index assignment for multinomial objects 3 | 4 | % Copyright October, 2006, Brown University, Providence, RI. 
5 | % All Rights Reserved 6 | 7 | % Permission to use, copy, modify, and distribute this software and its 8 | % documentation for any purpose other than its incorporation into a commercial 9 | % product is hereby granted without fee, provided that the above copyright 10 | % notice appear in all copies and that both that copyright notice and this 11 | % permission notice appear in supporting documentation, and that the name of 12 | % Brown University not be used in advertising or publicity pertaining to 13 | % distribution of the software without specific, written prior permission. 14 | 15 | % BROWN UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, 16 | % INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY 17 | % PARTICULAR PURPOSE. IN NO EVENT SHALL BROWN UNIVERSITY BE LIABLE FOR ANY 18 | % SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER 19 | % RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF 20 | % CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN 21 | % CONNECTION WITH THE USE. 22 | 23 | % Author: Frank Wood fwood@cs.brown.edu 24 | 25 | switch index.type 26 | case '()' 27 | ps = a.ps; 28 | ps(index.subs{:}) = val; 29 | if(sum(ps)~=1) 30 | warning('New multinomial does not sum to 1') 31 | end 32 | a.ps(index.subs{:}) = val; 33 | otherwise 34 | error('Indexing method not supported by multinomial') 35 | 36 | end 37 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/@multinomial/subsref.m: -------------------------------------------------------------------------------- 1 | function b = subsref(a,index) 2 | %SUBSREF Define field name indexing for multinomial objects 3 | 4 | % Copyright October, 2006, Brown University, Providence, RI. 
function c = find_optimal_c(Q,N)
% FIND_OPTIMAL_C solve for the optimal-resampling threshold c.
%
% Given M particle weights Q and a target of N surviving particles,
% finds the scalar c satisfying sum(min(c*Q, 1)) = N.  Weights with
% c*q > 1 are kept deterministically; the remainder are resampled.
% This is the threshold used by Fearnhead & Clifford's "optimal"
% resampling scheme for particle filters.
%
% NOTE(review): assumes Q are nonnegative weights and N <= length(Q);
% neither is checked here — confirm against callers before reuse.

Q = sort(Q,'descend');
c = 0;
k = 0;
M = length(Q);

k_old = -inf;

% Fixed-point iteration: k counts the (largest) weights that exceed
% the keep threshold 1/c; c is re-solved over the remaining mass so
% that the kept-plus-resampled total equals N.  Terminates when k
% stops growing (k is nondecreasing across iterations).
while k_old ~=k
    k_old = k;
    c = (N-k)/sum(Q(k+1:end));
    k = k+ sum(Q(k+1:M)*c > 1);
end
= in_sample_training_data(hits,:); 41 | % d = size(data,1); 42 | % mean_Y = mean(data)'; 43 | % 44 | % mu_n = k_0/(k_0+n)*mu_0 + n/(k_0+n)*mean_Y; 45 | % k_n = k_0 + n; 46 | % v_n = v_0 + n; 47 | % S = sumsqr(data); 48 | % lambda_n = lambda_0 + S ... 49 | % + k_0*n/(k_0+n)*(mean_Y-mu_0)*(mean_Y-mu_0)'; 50 | % sufficient_stats{kid} = [mu_n, (lambda_n*(k_n+1))/(k_n*(v_n-d+1))]; 51 | % end 52 | 53 | 54 | 55 | heldout_loglikelihood = 0; 56 | for i=1:size(out_of_sample_training_data, 1) 57 | %disp(i) 58 | y = out_of_sample_training_data(i,:)'; 59 | for kid=1:K 60 | 61 | hits = find(new_spike_sortings(index,:) ==kid ); 62 | n = length(hits); 63 | data = in_sample_training_data(hits,:); 64 | d = size(data,2); 65 | m_Y = mean(data,1); 66 | 67 | mean_y_vec = ones(size(data)); 68 | mean_y_vec(:,1) = mean_y_vec(:,1).*m_Y(:,1); 69 | mean_y_vec(:,2) = mean_y_vec(:,2).*m_Y(:,2); 70 | 71 | tmp = data - mean_y_vec; 72 | SS = zeros(d,d); 73 | for i=1:size(data,1) 74 | SS = SS + ( transpose(tmp(i,:))*tmp(i,:) ); 75 | end 76 | 77 | [lp ldc ic] = heldout_helper(pc_max_ind,pc_gammaln_by_2,pc_log_pi,pc_log,y,n,m_Y',SS,k_0,mu_0,v_0,lambda_0); 78 | heldout_loglikelihood = heldout_loglikelihood + lp; 79 | end 80 | end 81 | 82 | disp('------------'); 83 | disp(index); 84 | %lp_mvniw(map_spike_sorting(:,1001:8195),inspk(1001:9195,:)', mu_0, k_0,3,lambda_0) 85 | 86 | if USE_PWEIGHTS == 1 87 | heldout_loglikelihood = heldout_loglikelihood*final_wts(index); 88 | end 89 | 90 | total_heldout_loglikelihood = total_heldout_loglikelihood + heldout_loglikelihood; 91 | disp(heldout_loglikelihood); 92 | %figure(index+10); 93 | %scatter(in_sample_training_data(:,1),in_sample_training_data(:,2),50,new_spike_sortings(index, :),'.'); hold all; 94 | end 95 | 96 | disp('FINAL: '); 97 | disp(total_heldout_loglikelihood/TOTAL_RUNNINGS); 98 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/heldout_helper.m: 
function [lp, log_det_Sigma, inv_Sigma] = heldout_helper(pc_max_ind,pc_gammaln_by_2,pc_log_pi,pc_log,y,n,m_Y,SS,k_0,mu_0,v_0,lambda_0, log_det_Sigma, inv_Sigma)
% HELDOUT_HELPER posterior predictive log probability under MVN-IW.
%
% Variant of lp_tpp_helper used by heldoutLikelihood.m to score a
% held-out point y against a cluster of n already-observed points.
%
%   pc_max_ind      : max argument covered by the precomputed tables
%   pc_gammaln_by_2 : precomputed gammaln((1:pc_max_ind)/2)
%   pc_log_pi       : precomputed log(pi)
%   pc_log          : precomputed log(1:pc_max_ind)
%   y               : d x 1 datapoint to score
%   n               : number of datapoints already in the cluster
%   m_Y             : mean of those datapoints
%   SS              : scatter matrix of those datapoints (see note below)
%   k_0,mu_0,v_0,lambda_0 : MVN-IW prior parameters (Gelman pg 87)
%   log_det_Sigma, inv_Sigma : optional precomputed values from an
%       earlier call with the same cluster statistics
%
% returns:
%   lp : log posterior predictive probability of y (Student-t)
%   log_det_Sigma, inv_Sigma : reusable partial computations, unique
%       per (m_Y, SS, k_0, mu_0, v_0, lambda_0) tuple

d = size(y,1);
if n~=0
    % standard MVN-IW posterior updates (Gelman pg 87)
    mu_n = k_0/(k_0+n)*mu_0 + n/(k_0+n)*m_Y;
    k_n = k_0+n;
    v_n = v_0+n;

    %S = (SS - n*m_Y*m_Y');
    % NOTE(review): unlike lp_tpp_helper, the raw-sum-of-squares
    % correction above is commented out.  The only caller visible
    % here (heldoutLikelihood.m) builds SS from mean-subtracted data,
    % i.e. SS is already the centered scatter matrix, so subtracting
    % n*m_Y*m_Y' again would be wrong for that caller.  Confirm
    % before reusing this function with uncentered SS.
    S = SS; %%%THIS IS BAD

    zm_Y = m_Y-mu_0;
    lambda_n = lambda_0 + S + ...
        k_0*n/(k_0+n)*(zm_Y)*(zm_Y)';
else
    % empty cluster: posterior equals the prior
    mu_n = mu_0;
    k_n = k_0;
    v_n = v_0;
    lambda_n = lambda_0;
end

% set up variables for Gelman's formulation of the Student-t
% posterior predictive distribution
Sigma = lambda_n*(k_n+1)/(k_n*(v_n-2+1));
v = v_n-2+1;
mu = mu_n;

% compute (or reuse) the determinant and inverse of Sigma; callers
% scoring many points against the same cluster pass these back in
if nargin<13
    log_det_Sigma = reallog(det(Sigma));
    inv_Sigma = inv(Sigma);
end

% Student-t log density of y.  The first branch uses the precomputed
% gammaln/log tables: pc_gammaln_by_2(k) = gammaln(k/2), so indexing
% by vd yields gammaln((v+d)/2).
% NOTE(review): this indexing assumes v and d are positive integers
% — confirm v_0 is integral when reusing.
vd = v+d;
if vd < pc_max_ind
    d2 = d/2;
    lp = pc_gammaln_by_2(vd) - (pc_gammaln_by_2(v) + d2*pc_log(v) + ...
        d2*pc_log_pi) - .5*log_det_Sigma-...
        (vd/2)*reallog(1+(1/v)*(y-mu)'*inv_Sigma*(y-mu));
else
    % fall back to direct evaluation beyond the table range
    lp = gammaln((v+d)/2)-(gammaln(v/2) + (d/2)*log(v) + ...
        (d/2)*pc_log_pi)-.5*log_det_Sigma-...
        ((v+d)/2)*reallog(1+(1/v)*(y-mu)'*inv_Sigma*(y-mu));
end
92 | (vd/2)*reallog(1+(1/v)*(y-mu)'*inv_Sigma*(y-mu)); 93 | else 94 | lp = gammaln((v+d)/2)-(gammaln(v/2) + (d/2)*log(v) + ... 95 | (d/2)*pc_log_pi)-.5*log_det_Sigma-... 96 | ((v+d)/2)*reallog(1+(1/v)*(y-mu)'*inv_Sigma*(y-mu)); 97 | end 98 | 99 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/logsumexp.m: -------------------------------------------------------------------------------- 1 | function s = logsumexp(x, dim) 2 | % Returns log(sum(exp(x),dim)) while avoiding numerical underflow. 3 | % Default is dim = 1 (columns). 4 | % Written by Mo Chen (mochen@ie.cuhk.edu.hk). March 2009. 5 | if nargin == 1, 6 | % Determine which dimension sum will use 7 | dim = find(size(x)~=1,1); 8 | if isempty(dim), dim = 1; end 9 | end 10 | 11 | % subtract the largest in each column 12 | y = max(x,[],dim); 13 | x = bsxfun(@minus,x,y); 14 | s = y + log(sum(exp(x),dim)); 15 | i = find(~isfinite(y)); 16 | if ~isempty(i) 17 | s(i) = y(i); 18 | end -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/lp_crp.m: -------------------------------------------------------------------------------- 1 | function lp = lp_crp(c,alpha) 2 | % function lp = lp_crp(c,alpha) 3 | % 4 | % takes a compact vector of class id's and the CRP parameter alpha 5 | % and returns the log probability of that partitioning 6 | % 7 | table_identifier = unique(c); 8 | K_plus = length(table_identifier); 9 | N = length(c); 10 | m_k = zeros(K_plus,1); 11 | 12 | for k = 1:K_plus 13 | m_k(k) = sum(c==table_identifier(k)); 14 | end 15 | 16 | %the m_k ==0 case requires some care 17 | foo = gammaln(m_k-1); 18 | foo((m_k-1)==0)=0; % definition of log(0!) 
function lZ = lp_mvniw(class_id, training_data, mu_0, k_0, v_0, lambda_0)
%
% function lZ = lp_mvniw(class_id, training_data, mu_0, k_0, v_0, lambda_0)
% computes the marginal likelihood of the data under the MVN-IW prior
%
% class_id: a vector of compact class_ids (values 1..K with no gaps)
% training_data: a dxn matrix of n, d-dimensional datapoints (columns)
% mu_0, k_0, v_0, lambda_0: MVN-IW prior parameters
%
% returns log(prod_{j=1}^{K^+} P(Y^{(j)}; H)), i.e. the sum over
% clusters of the log marginal likelihood of each cluster's data.

lZ = 0;
d = size(training_data,1);

% the Inverse-Wishart normalizer is undefined for v_0 < d
if d>v_0
    error('v_0 must be equal to or larger than the dimension of the data')
end

K_plus = length(unique(class_id));

for l = 1:K_plus
    % get the class ids of the points sitting at table l
    hits= class_id==l;
    % how many are sitting at table l?
    n = sum(hits);
    % get those points
    Y = training_data(:,hits);

    % check for a problem. the classid's passed into this routine
    % should be compact (i.e. no classid's with no associated data)
    if n~=0

        % calculate the mean of the points at this table
        mean_Y = mean(Y,2);

        % posterior updates per pg. 87 of Gelman (mu_n not needed for
        % the marginal likelihood, so it is skipped)
        k_n = k_0+n;
        v_n = v_0+n;

        % centered scatter matrix; cov(Y')*(n-1) is an efficient
        % one-liner for sum_i (y_i - mean)(y_i - mean)'
        S = cov(Y')'*(n-1);

        lambda_n = lambda_0 + S ...
            + k_0*n/(k_0+n)*(mean_Y-mu_0)*(mean_Y-mu_0)';
    else
        error('Should always have one element')
    end

    % ratio of multivariate-gamma normalization constants.  The
    % even/odd split is an optimization: for even n the ratio of
    % gamma functions telescopes into a finite product of logs,
    % avoiding gammaln calls.
    if mod(n,2)~=0
        ls = 0;
        for j=1:d
            ls = ls + gammaln((v_n+1-j)/2) - gammaln((v_0+1-j)/2);
        end
    else
        ls = 0;
        for j=1:d
            for ii=1:floor(n/2)
                ls = ls + log((v_n+1-j)/2-ii);
            end
        end
    end

    % accumulate this cluster's log marginal likelihood
    lZ=lZ-n*d/2 * log(2*pi) + d/2 * (log(k_0) - log(k_n)) + ...
        d/2*(v_n-v_0) * log(2) + v_0/2 * log(det(lambda_0)) ...
        - v_n/2 * log(det(lambda_n)) +ls;

end
function [lp, log_det_Sigma, inv_Sigma] = lp_tpp_helper(pc_max_ind,pc_gammaln_by_2,pc_log_pi,pc_log,y,n,m_Y,SS,k_0,mu_0,v_0,lambda_0, log_det_Sigma, inv_Sigma)
% LP_TPP_HELPER posterior predictive log probability under MVN-IW.
%
% Tightly coupled with particle_filter.m and collapsed_gibbs_sampler.m;
% not intended for standalone use.
%
%   pc_max_ind      : max argument covered by the precomputed tables
%   pc_gammaln_by_2 : precomputed gammaln((1:pc_max_ind)/2)
%   pc_log_pi       : precomputed log(pi)
%   pc_log          : precomputed log(1:pc_max_ind)
%   y               : d x 1 datapoint to score
%   n               : number of datapoints already observed
%   m_Y             : arithmetic mean of the observed datapoints
%   SS              : raw sum of squares sum_i y_i*y_i' of the
%                     observed datapoints (centered internally below)
%   k_0,mu_0,v_0,lambda_0 : MVN-IW prior parameters (Gelman pg 87)
%   log_det_Sigma, inv_Sigma : optional precomputed values from an
%       earlier call with the same observed-data statistics
%
% returns:
%   lp : log posterior predictive probability of y (Student-t)
%   log_det_Sigma, inv_Sigma : reusable partial computations, unique
%       per (m_Y, SS, k_0, mu_0, v_0, lambda_0) tuple

d = size(y,1);
if n~=0
    % standard MVN-IW posterior updates (Gelman pg 87)
    mu_n = k_0/(k_0+n)*mu_0 + n/(k_0+n)*m_Y;
    k_n = k_0+n;
    v_n = v_0+n;

    % convert the raw sum of squares into the centered scatter matrix
    S = (SS - n*m_Y*m_Y');

    zm_Y = m_Y-mu_0;
    lambda_n = lambda_0 + S + ...
        k_0*n/(k_0+n)*(zm_Y)*(zm_Y)';
else
    % no observations yet: posterior equals the prior
    mu_n = mu_0;
    k_n = k_0;
    v_n = v_0;
    lambda_n = lambda_0;
end

% set up variables for Gelman's formulation of the Student-t
% posterior predictive distribution
Sigma = lambda_n*(k_n+1)/(k_n*(v_n-2+1));
v = v_n-2+1;
mu = mu_n;

% compute (or reuse) the determinant and inverse of Sigma; callers
% scoring many points against the same data pass these back in
if nargin<13
    log_det_Sigma = reallog(det(Sigma));
    inv_Sigma = inv(Sigma);
end

% Student-t log density of y.  The first branch uses the precomputed
% tables: pc_gammaln_by_2(k) = gammaln(k/2), so indexing by vd yields
% gammaln((v+d)/2).
% NOTE(review): this indexing assumes v and d are positive integers
% — confirm v_0 is integral when reusing.
vd = v+d;
if vd < pc_max_ind
    d2 = d/2;
    lp = pc_gammaln_by_2(vd) - (pc_gammaln_by_2(v) + d2*pc_log(v) + ...
        d2*pc_log_pi) - .5*log_det_Sigma-...
        (vd/2)*reallog(1+(1/v)*(y-mu)'*inv_Sigma*(y-mu));
else
    % fall back to direct evaluation beyond the table range
    lp = gammaln((v+d)/2)-(gammaln(v/2) + (d/2)*log(v) + ...
        (d/2)*pc_log_pi)-.5*log_det_Sigma-...
        ((v+d)/2)*reallog(1+(1/v)*(y-mu)'*inv_Sigma*(y-mu));
end
95 | ((v+d)/2)*reallog(1+(1/v)*(y-mu)'*inv_Sigma*(y-mu)); 96 | end 97 | 98 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/maxfilter_resample.m: -------------------------------------------------------------------------------- 1 | function [rx,rw] = maxfilter_resample(x,w,N) 2 | % function [rx] = multinomial_resample(x,w,N) 3 | % 4 | % returns N samples from the weighted particle set 5 | % x,w with x (DxM) being M, D-dimensional samples and w being a vector of 6 | % M real weights that sum to one 7 | % 8 | [D, M] = size(x); 9 | rx = zeros(D,N); 10 | rw = zeros(1,N); 11 | 12 | [sorted_w, sorted_indx_array] = sort(w, 'descend'); 13 | 14 | 15 | 16 | hash = containers.Map; 17 | for i=1:N 18 | hash(int2str(i)) = []; 19 | end 20 | 21 | for i=1:length(sorted_indx_array) 22 | sampled_ind = sorted_indx_array(i); 23 | pid = x(1,sampled_ind); 24 | tmp = hash(int2str(pid)); 25 | tmp(end+1)=sampled_ind; 26 | hash(int2str(pid))=tmp; 27 | end 28 | 29 | 30 | for ii=1:length(hash) 31 | tmp = hash(int2str(ii)); 32 | offset=ii; 33 | if ii > 1 && offset <= length(tmp) 34 | offset=ii-1; 35 | tmp=tmp([offset+1:end 1:offset]); %left shift by offset 36 | end 37 | hash(int2str(ii))=tmp; 38 | end 39 | 40 | for i=1:N 41 | tmp = hash(int2str(i)); 42 | if isempty(tmp) == 0 43 | sampled_ind = tmp(1); 44 | %tmp(1)=[]; 45 | tmp=tmp([2:end 1]); 46 | hash(int2str(i))=tmp; 47 | rx(:,i) = x(:,sampled_ind); 48 | rw(i) = w(sampled_ind); 49 | end 50 | 51 | 52 | end 53 | 54 | 55 | if sum(rw) > 0 56 | rw = rw./sum(rw); 57 | end 58 | 59 | if isnan(rw) 60 | 'notallowed' 61 | end 62 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/maxfilter_resample_nonunique.m: -------------------------------------------------------------------------------- 1 | function [rx] = maxfilter_resample_nonunique(x,w,N) 2 | % function [rx] = multinomial_resample(x,w,N) 3 | % 4 | % 
returns N samples from the weighted particle set 5 | % x,w with x (DxM) being M, D-dimensional samples and w being a vector of 6 | % M real weights that sum to one 7 | % 8 | [D, M] = size(x); 9 | rx = zeros(D,N); 10 | rw = zeros(1,N); 11 | 12 | [sorted_w, sorted_indx_array] = sort(w, 'descend'); 13 | 14 | rind=1; 15 | for i=1:N 16 | sampled_ind = sorted_indx_array(rind); 17 | rx(:,rind) = x(:,sampled_ind); 18 | rw(rind) = w(i); 19 | rind=rind+1; 20 | end 21 | rw = rw./sum(rw); 22 | 23 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/maxfilter_resample_original.m: -------------------------------------------------------------------------------- 1 | function [rx,rw] = maxfilter_resample_original(x,w,N) 2 | % function [rx] = multinomial_resample(x,w,N) 3 | % 4 | % returns N samples from the weighted particle set 5 | % x,w with x (DxM) being M, D-dimensional samples and w being a vector of 6 | % M real weights that sum to one 7 | % 8 | [D, M] = size(x); 9 | rx = zeros(D,N); 10 | rw = zeros(1,N); 11 | 12 | [sorted_w, sorted_indx_array] = sort(w, 'descend'); 13 | 14 | hash = containers.Map; 15 | cnt=1; 16 | record_indx = []; 17 | 18 | for i=1:length(sorted_w) 19 | sampled_ind = sorted_indx_array(i); 20 | val = x(:,sampled_ind); %first dim is particle num. 
second is cluster-id 21 | val = val(2); 22 | if isKey(hash, int2str(val)) == 0 23 | hash(int2str(val)) = 1; 24 | record_indx(cnt)=sampled_ind; 25 | cnt=cnt+1; 26 | end 27 | end 28 | 29 | 30 | len_rec = length(record_indx); 31 | cnt = 1; 32 | for i = 1:N 33 | if i <= len_rec 34 | sampled_ind = record_indx(i); 35 | rx(:,i) = x(:,sampled_ind); 36 | rw(i) = w(sampled_ind); 37 | else 38 | sampled_ind = record_indx(1); 39 | %sampled_ind = sorted_indx_array(cnt); 40 | cnt=cnt+1; 41 | rx(:,i) = x(:,sampled_ind); 42 | rw(i) = 0;%%w(sampled_ind); 43 | end 44 | end 45 | 46 | 47 | 48 | % len_rind = length(record_indx); 49 | % pind=1; 50 | % for i = 1:N 51 | % if pind > len_rind 52 | % pind=1; 53 | % end 54 | % sampled_ind = record_indx(pind); 55 | % rx(:,i) = x(:,sampled_ind); 56 | % rw(i) = w(sampled_ind); 57 | % pind=pind+1; 58 | % end 59 | 60 | 61 | if sum(rw) > 0 62 | rw = rw./sum(rw); 63 | end 64 | 65 | if isnan(rw) 66 | 'notallowed' 67 | end 68 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/mccExcludedFiles.log: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/mccExcludedFiles.log -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/multinomial_resample.m: -------------------------------------------------------------------------------- 1 | function [rx] = multinomial_resample(x,w,N) 2 | % function [rx] = multinomial_resample(x,w,N) 3 | % 4 | % returns N samples from the weighted particle set 5 | % x,w with x (DxM) being M, D-dimensional samples and w being a vector of 6 | % M real weights that sum to one 7 | % 8 | [D, M] = size(x); 9 | rx = zeros(D,N); 10 | rw = zeros(1,N); 11 | 12 | w = w/sum(w); 13 | 14 | rind=1; 15 | for i=1:N 16 | sampled_ind = 
mnrnd(1,w); 17 | sampled_ind = find(sampled_ind == 1); 18 | rx(:,rind) = x(:,sampled_ind); 19 | rw(rind) = w(i); 20 | rind=rind+1; 21 | end 22 | rw = rw./sum(rw); 23 | 24 | 25 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/plot_mixture.m: -------------------------------------------------------------------------------- 1 | function plot_mixture(data,class_id,color) 2 | % function plot_mixture(data,class_id,color) 3 | % 4 | % plots a mixture model given data (Dim x # points), class_id 5 | % (vector), and a vector of colors. The defaults color vector is 6 | % 7 | % color = {'k.', 'r.', 'g.', 'b.', 'm.', 'c.'}; 8 | % 9 | % and if more than 6 classes are present, all classes with higher class_id 10 | % are plotted as 'k.' unless more color arguments are given. Additionally, 11 | % only the first two or three components of 12 | % the training data are plotted 13 | % 14 | % this function should be proceeded by a clf 15 | 16 | [D,N] = size(data); 17 | 18 | if D>3 19 | warning('Only the first three dimensions of the data will be plotted.'); 20 | data = data(1:3,:); 21 | end 22 | 23 | if nargin < 3 24 | color = {'k.', 'r.', 'g.', 'b.', 'm.', 'c.'}; 25 | end 26 | 27 | 28 | ucids = unique(class_id); 29 | hold_state_modified = 0; 30 | old_hold_state = get(gca,'NextPlot'); 31 | if ~strcmp(old_hold_state,'add') 32 | hold on; 33 | hold_state_modified = 1; 34 | end 35 | 36 | for i = 1:length(ucids) 37 | hits = class_id==ucids(i); 38 | if ucids(i)>mcrinstaller 12 | 13 | at MATLAB prompt. The MCRINSTALLER command displays the 14 | location of the MCR Installer. 15 | 16 | (2) run the MCR Installer. 
17 | 18 | Or download the Macintosh version of the MCR for R2013a 19 | from the MathWorks Web site by navigating to 20 | 21 | http://www.mathworks.com/products/compiler/mcr/index.html 22 | 23 | 24 | For more information about the MCR and the MCR Installer, see 25 | Distribution to End Users in the MATLAB Compiler documentation 26 | in the MathWorks Documentation Center. 27 | 28 | 29 | NOTE: You will need administrator rights to run MCRInstaller. 30 | 31 | 32 | 2. Files to Deploy and Package 33 | 34 | Files to package for Standalone 35 | ================================ 36 | -run_test_compile.sh (shell script for temporarily setting environment variables and 37 | executing the application) 38 | -to run the shell script, type 39 | 40 | ./run_test_compile.sh 41 | 42 | at Linux or Mac command prompt. is the directory 43 | where version 8.1 of MCR is installed or the directory where 44 | MATLAB is installed on the machine. is all the 45 | arguments you want to pass to your application. For example, 46 | 47 | If you have version 8.1 of the MCR installed in 48 | /mathworks/home/application/v81, run the shell script as: 49 | 50 | ./run_test_compile.sh /mathworks/home/application/v81 51 | 52 | If you have MATLAB installed in /mathworks/devel/application/matlab, 53 | run the shell script as: 54 | 55 | ./run_test_compile.sh /mathworks/devel/application/matlab 56 | -MCRInstaller.zip 57 | -if end users are unable to download the MCR using the above 58 | link, include it when building your component by clicking 59 | the "Add MCR" link in the Deployment Tool 60 | -The Macintosh bundle directory structure test_compile.app 61 | -this can be gathered up using the zip command 62 | zip -r test_compile.zip test_compile.app 63 | or the tar command 64 | tar -cvf test_compile.tar test_compile.app 65 | -This readme file 66 | 67 | 3. Definitions 68 | 69 | For information on deployment terminology, go to 70 | http://www.mathworks.com/help. 
Select MATLAB Compiler > 71 | Getting Started > About Application Deployment > 72 | Application Deployment Terms in the MathWorks Documentation 73 | Center. 74 | 75 | 76 | 4. Appendix 77 | 78 | A. Mac systems: 79 | On the target machine, add the MCR directory to the environment variable 80 | DYLD_LIBRARY_PATH by issuing the following commands: 81 | 82 | NOTE: is the directory where MCR is installed 83 | on the target machine. 84 | 85 | setenv DYLD_LIBRARY_PATH 86 | $DYLD_LIBRARY_PATH: 87 | /v81/runtime/maci64: 88 | /v81/sys/os/maci64: 89 | /v81/bin/maci64: 90 | /System/Library/Frameworks/JavaVM.framework/JavaVM: 91 | /System/Library/Frameworks/JavaVM.framework/Libraries 92 | setenv XAPPLRESDIR /v81/X11/app-defaults 93 | 94 | 95 | For more detail information about setting MCR paths, see Distribution to End Users in 96 | the MATLAB Compiler documentation in the MathWorks Documentation Center. 97 | 98 | 99 | 100 | NOTE: To make these changes persistent after logout on Linux 101 | or Mac machines, modify the .cshrc file to include this 102 | setenv command. 103 | NOTE: The environment variable syntax utilizes forward 104 | slashes (/), delimited by colons (:). 105 | NOTE: When deploying standalone applications, it is possible 106 | to run the shell script file run_test_compile.sh 107 | instead of setting environment variables. See 108 | section 2 "Files to Deploy and Package". 109 | 110 | 111 | 112 | 5. Launching of application using Macintosh finder. 113 | 114 | If the application is purely graphical, that is, it doesn't read from standard in or 115 | write to standard out or standard error, it may be launched in the finder just like any 116 | other Macintosh application. 
117 | 118 | 119 | 120 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/results/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/results/.DS_Store -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/results/aug27_maxfilter_100particles.fig: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/results/aug27_maxfilter_100particles.fig -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/results/aug27_maxfilter_100particles.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/results/aug27_maxfilter_100particles.mat -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/results/aug27_multinomial_100particles.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/results/aug27_multinomial_100particles.mat -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/results/aug27_stratified_100particles.mat: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/results/aug27_stratified_100particles.mat -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/results/useless_maxfilter_5000particles_aug18_13.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/results/useless_maxfilter_5000particles_aug18_13.mat -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/run_test_compile.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # script for execution of deployed applications 3 | # 4 | # Sets up the MCR environment for the current $ARCH and executes 5 | # the specified command. 
6 | # 7 | exe_name=$0 8 | exe_dir=`dirname "$0"` 9 | echo "------------------------------------------" 10 | if [ "x$1" = "x" ]; then 11 | echo Usage: 12 | echo $0 \ args 13 | else 14 | echo Setting up environment variables 15 | MCRROOT="$1" 16 | echo --- 17 | DYLD_LIBRARY_PATH=.:${MCRROOT}/runtime/maci64 ; 18 | DYLD_LIBRARY_PATH=${DYLD_LIBRARY_PATH}:${MCRROOT}/bin/maci64 ; 19 | DYLD_LIBRARY_PATH=${DYLD_LIBRARY_PATH}:${MCRROOT}/sys/os/maci64; 20 | XAPPLRESDIR=${MCRROOT}/X11/app-defaults ; 21 | export DYLD_LIBRARY_PATH; 22 | export XAPPLRESDIR; 23 | echo DYLD_LIBRARY_PATH is ${DYLD_LIBRARY_PATH}; 24 | shift 1 25 | args= 26 | while [ $# -gt 0 ]; do 27 | token=$1 28 | args="${args} \"${token}\"" 29 | shift 30 | done 31 | eval "\"${exe_dir}/test_compile.app/Contents/MacOS/test_compile\"" $args 32 | fi 33 | exit 34 | 35 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/sample_crp.m: -------------------------------------------------------------------------------- 1 | function seating = sample_crp(n,alpha) 2 | % 3 | % function seating = sample_crp(n,alpha) 4 | % 5 | % Returns the "seating" or class id's for n draws from a Chinese 6 | % restuarant process with parameter alpha 7 | % 8 | 9 | m_k = zeros(1,n); 10 | m_k(1) = 1; 11 | seating = zeros(1,n); 12 | seating(1) = 1; 13 | 14 | for i = 2:n 15 | p_table = [m_k(m_k~= 0) alpha]; 16 | p_table = p_table/sum(p_table); 17 | seating(i) = find(cumsum(p_table) > rand,1,'first'); 18 | m_k(seating(i)) = m_k(seating(i))+1; 19 | end -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/sample_igmm_prior.m: -------------------------------------------------------------------------------- 1 | function [datapoints labels means covariances] = sample_igmm_prior(n,a_0,b_0,mu_0,lambda_0,k_0,v_0) 2 | % 3 | % function [datapoints labels means covariances] = 
sample_igmm_prior(n,a_0,b_0,mu_0,lambda_0,k_0,v_0) 4 | % 5 | % This function draws n samples from the infinite Gaussian mixture model parameterized by 6 | % 7 | % alpha ~ Gamma(a_0, b_0) 8 | % labels ~ CRP(alpha) 9 | % class_i_covariance ~ Inverse Wishart (lambda_0, v_0) 10 | % class_i_mean ~ Normal(m_0, class_i_covariance / k_0) 11 | % datapoints(label == i) ~ Normal(class_i_mean, class_i_covariance) 12 | % 13 | % The dimensionality of the data is implicit in the mu_0 and lambda_0 14 | % arguments (which are not themselves checked for consistency) 15 | % 16 | % In addition to the class labels and the datapoints, the means (KxD) and 17 | % the covariances (KxDxD) are returned where K is the number of classes 18 | % and D is the dimensionality of the datapoint 19 | % 20 | % 21 | alpha = gamrnd(a_0,b_0); 22 | labels = sample_crp(n,alpha); 23 | 24 | dim = length(mu_0); 25 | num_tables = length(unique(labels)); 26 | 27 | means = zeros(num_tables,dim); 28 | covariances = zeros(num_tables,dim,dim); 29 | datapoints = zeros(n,dim); 30 | 31 | for t = 1:num_tables 32 | covariances(t,:,:) =iwishrnd(lambda_0,v_0); 33 | means(t,:) = mvnrnd(mu_0,squeeze(covariances(t,:,:))/k_0); 34 | num_points_at_table_t = sum(labels == t); 35 | datapoints(labels == t,:) = mvnrnd(means(t,:),squeeze(covariances(t,:,:)),num_points_at_table_t); 36 | 37 | end 38 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/secs2hmsstr.m: -------------------------------------------------------------------------------- 1 | function str = secs2hmsstr(secs) 2 | 3 | days = floor(secs/(3600*24)); 4 | rem = (secs-(days*3600*24)); 5 | hours = floor(rem/3600); 6 | rem = rem -(hours*3600); 7 | minutes = floor(rem/60); 8 | rem = rem - minutes*60; 9 | secs = round(rem); 10 | 11 | switch days 12 | case 0 13 | str = [ num2str(hours) ':' sprintf('%02d',minutes) ':' sprintf('%02d',secs)]; 14 | case 1 15 | str = [ '1 Day + ' num2str(hours) ':' 
sprintf('%02d',minutes) ':' sprintf('%02d',secs)]; 16 | otherwise 17 | str = [ num2str(days) ' Days + ' num2str(hours) ':' sprintf('%02d',minutes) ':' sprintf('%02d',secs)]; 18 | end -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/spike_plot.m: -------------------------------------------------------------------------------- 1 | %colors = ['r' 'b' 'm' 'g' 'y' 'm' 'y' 'b' 'r' 'g' 'k' 'm' 'y' 'b' 'k' 'm' 'y' 'b' 'k' 'm' 'y' 'b' 'k' 'm' 'y' 'b']; 2 | load times_CSC4 3 | spikes=spikes(1:number_of_spikes-200,:); 4 | cluster_class = cluster_class(1:number_of_spikes-200,:); 5 | cluster_class(:,1) = cluster_class(:,1) + 1; 6 | 7 | map_spike_sorting = spike_sortings(1,:); 8 | map_spike_sorting = cluster_class(:,1)'; 9 | 10 | 11 | colors={}; 12 | DIV=size(unique(map_spike_sorting),2); 13 | elm = [1:255/DIV:255]; 14 | elm=elm/255; 15 | 16 | colors{1}=[0.8 0 0]; colors{2} = [0 0.8 0]; colors{3} = [0 0 0.8]; colors{4} = [0.8 0.8 0.2]; 17 | colors{5} = [0.8 0.4 0.3]; colors{6} = [1 1 1]; 18 | 19 | colors{7}=[0.2 0 0]; colors{8} = [0 0.2 0]; colors{9} = [0 0 0.2]; colors{10} = [0.2 0.8 0.4]; 20 | colors{11} = [0.5 1 0.3]; colors{12} = [1 0 1]; 21 | 22 | colors{13}=[0.2 0.3 1]; colors{14} = [1 0.2 0]; colors{15} = [0 1 0.6]; colors{16} = [0.9 0.8 0.4]; 23 | colors{17} = [0.5 1 1];colors{18} = [0.9 0.8 0]; 24 | 25 | 26 | for i=1:DIV 27 | colors{i} = colors{i}; 28 | end 29 | 30 | figure(8) 31 | for i=1:size(map_spike_sorting,2) 32 | plot(spikes(i,:),'Color', colors{map_spike_sorting(i)});hold all; 33 | end 34 | 35 | % 36 | % %selective plot 37 | % for ii=0:length(unique(map_spike_sorting)) 38 | % figure(9+ii) 39 | % for i=1:size(map_spike_sorting,2) 40 | % if map_spike_sorting(i) == ii 41 | % plot(spikes(i,:),'Color', colors{map_spike_sorting(i)});hold all; 42 | % end 43 | % end 44 | % end -------------------------------------------------------------------------------- 
/DPMM_MaxFilter/SpikingSorting/thirdparty/stratified_resample.m: -------------------------------------------------------------------------------- 1 | function [rx,rw] = stratified_resample(x,w,N) 2 | % function [rx] = stratified_resample(x,w,N) 3 | % 4 | % returns N samples from the weighted particle set 5 | % x,w with x (DxM) being M, D-dimensional samples and w being a vector of 6 | % M real weights that sum to one 7 | % 8 | 9 | [D, M] = size(x); 10 | ni = randperm(M); 11 | x = x(:,ni); 12 | w = w(ni); 13 | rx = zeros(D,N); 14 | rw = zeros(1,N); 15 | cdf = cumsum(w); 16 | cdf(end) = 1; 17 | p = linspace(rand*(1/N),1,N); 18 | % p = sort(p); 19 | 20 | picked = zeros(1,M); 21 | j=1; 22 | for i=1:N 23 | while j0) 32 | for j=1:picked(i) 33 | rx(:,rind) = x(:,i); 34 | rw(rind) = w(i); 35 | rind=rind+1; 36 | end 37 | end 38 | end 39 | 40 | rw = rw./sum(rw); 41 | 42 | 43 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | English 7 | CFBundleExecutable 8 | prelaunch 9 | CFBundleIconFile 10 | membrane.icns 11 | CFBundleIdentifier 12 | test_compile 13 | CFBundleInfoDictionaryVersion 14 | 6.0 15 | CFBundleName 16 | test_compile 17 | CFBundlePackageType 18 | APPL 19 | CFBundleSignature 20 | test_compile 21 | CFBundleVersion 22 | 1 23 | CFBundleVersionString 24 | 1.0 25 | CFResourcesFileMapped 26 | 27 | LSMinimumSystemVersion 28 | 10.7.0 29 | NSMainNibFile 30 | MainMenu 31 | NSPrincipalClass 32 | NSApplication 33 | 34 | 35 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/MacOS/applauncher: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/MacOS/applauncher -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/MacOS/prelaunch: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/MacOS/prelaunch -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/MacOS/test_compile: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/MacOS/test_compile -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/Resources/English.lproj/About.nib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/Resources/English.lproj/About.nib -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/Resources/English.lproj/MWOpenAccessoryView.nib: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/Resources/English.lproj/MWOpenAccessoryView.nib -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/Resources/English.lproj/MWSaveAccessoryView.nib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/Resources/English.lproj/MWSaveAccessoryView.nib -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/Resources/English.lproj/MainMenu.nib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/Resources/English.lproj/MainMenu.nib -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/Resources/membrane.icns: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.app/Contents/Resources/membrane.icns -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/test_compile.m: -------------------------------------------------------------------------------- 1 | a=1; 2 | b=6+b; 3 | b -------------------------------------------------------------------------------- 
/DPMM_MaxFilter/SpikingSorting/thirdparty/times_CSC4.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/times_CSC4.mat -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/ucla_big_runner.m: -------------------------------------------------------------------------------- 1 | for i=1:100 2 | clear; 3 | run_ucla_exp; 4 | heldoutLikelihood; 5 | end -------------------------------------------------------------------------------- /DPMM_MaxFilter/SpikingSorting/thirdparty/untitled.fig: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/SpikingSorting/thirdparty/untitled.fig -------------------------------------------------------------------------------- /DPMM_MaxFilter/dataset.jl: -------------------------------------------------------------------------------- 1 | 2 | function dataset1() 3 | mu={[0,0], [2,2], [4,4]} 4 | std={[0.25,0.25], [0.25,0.25], [0.25,0.25]} 5 | mixture_weight = [1/2,1/6,1/3] 6 | return mu, std, mixture_weight 7 | end 8 | 9 | function dataset2() 10 | mu={[0,0], [2,2], [4,4]} 11 | std={[0.5,0.5], [0.5,0.5], [0.5,0.5]} 12 | mixture_weight = [1/2,1/6,1/3] 13 | return mu, std,mixture_weight 14 | end 15 | 16 | 17 | function dataset3() 18 | mu={[0,0], [1,1], [2,2]} 19 | std={[0.25,0.25], [0.25,0.25], [0.25,0.25]} 20 | mixture_weight = [1/2,1/6,1/3] 21 | return mu, std,mixture_weight 22 | end 23 | 24 | function dataset4() 25 | mu={[0,0], [1,1], [2,2]} 26 | std={[0.5,0.5], [0.5,0.5], [0.5,0.5]} 27 | mixture_weight = [1/2,1/6,1/3] 28 | return mu, std,mixture_weight 29 | end 30 | 31 | 32 | function dataset5() 33 | mu={[0,0], [0.5,0.5], [1,1]} 34 | std={[0.25,0.25], 
[0.25,0.25], [0.25,0.25]} 35 | mixture_weight = [1/2,1/6,1/3] 36 | return mu, std,mixture_weight 37 | end 38 | 39 | function dataset6() 40 | mu={[0,0], [0.5,0.5], [1,1]} 41 | std={[0.5,0.5], [0.5,0.5], [0.5,0.5]} 42 | mixture_weight = [1/2,1/6,1/3] 43 | return mu, std,mixture_weight 44 | end 45 | 46 | 47 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/maxfilter.jl: -------------------------------------------------------------------------------- 1 | 2 | using Debug 3 | @debug begin 4 | 5 | #Tejas D K : tejask@mit.edu 6 | function maxFilter(particles_t, particles_t_minus_1, maxfilter_probability_array, maxfilter_cid_array, maxfilter_particle_struct, NUM_PARTICLES) 7 | #Algorithm proposed by Sam Gershman 8 | perm = sortperm(maxfilter_probability_array, Sort.Reverse) 9 | maxfilter_cid_array = maxfilter_cid_array[perm] 10 | maxfilter_particle_struct = maxfilter_particle_struct[perm] 11 | 12 | #println(maxfilter_cid_array) 13 | for i=1:NUM_PARTICLES 14 | state=Dict() 15 | sampled_cid = maxfilter_cid_array[i] 16 | state["c"] = sampled_cid 17 | state["c_aggregate"] = myappend(particles_t_minus_1[maxfilter_particle_struct[i]]["hidden_state"]["c_aggregate"], sampled_cid) 18 | particles_t[i]["hidden_state"]=state 19 | end 20 | end 21 | 22 | 23 | function stratifiedMaxFiltering(time, particles_t, particles_t_minus_1, maxfilter_probability_array, maxfilter_cid_array, maxfilter_particle_struct, NUM_PARTICLES) 24 | 25 | ## BAD [buy why?]logperm = sortperm(log_maxfilter_probability_array, Sort.Reverse) 26 | #_maxfilter_cid_array = maxfilter_cid_array[logperm] 27 | #_maxfilter_particle_struct = maxfilter_particle_struct[logperm] 28 | 29 | # normalized_maxfilter_prob_array = maxfilter_probability_array/sum(maxfilter_probability_array) 30 | # prev_normalized_prob_array = Float64[ float(particles_t_minus_1[pid]["weight"]) for pid in [1:NUM_PARTICLES] ] 31 | # @bp 32 | # maxfilter_probability_array = normalized_maxfilter_prob_array .* 
prev_normalized_prob_array 33 | 34 | # perm = sortperm(maxfilter_probability_array, Sort.Reverse) 35 | 36 | #NONEED - maxfilter_probability_array = maxfilter_probability_array/sum(maxfilter_probability_array) 37 | perm = sortperm(maxfilter_probability_array, Sort.Reverse) 38 | 39 | #println( sum([i<0 for i in log_maxfilter_probability_array]) - length(log_maxfilter_probability_array)) 40 | #println(log_maxfilter_probability_array) 41 | #if perm != logperm 42 | # @bp 43 | #end 44 | 45 | 46 | maxfilter_cid_array = maxfilter_cid_array[perm] 47 | maxfilter_particle_struct = maxfilter_particle_struct[perm] 48 | maxfilter_probability_array = maxfilter_probability_array[perm] 49 | 50 | state=Dict() 51 | state["c"] = maxfilter_cid_array[1] 52 | state["c_aggregate"] = myappend(particles_t_minus_1[maxfilter_particle_struct[1]]["hidden_state"]["c_aggregate"], state["c"]) 53 | particles_t[1]["hidden_state"]=state 54 | particles_t[1]["weight"] = maxfilter_probability_array[1] 55 | 56 | particle_cnt = 2 57 | unique_indices = [] 58 | 59 | for i=2:length(maxfilter_cid_array) 60 | last = maxfilter_cid_array[i-1] 61 | cur = maxfilter_cid_array[i] 62 | if cur != last 63 | if particle_cnt > NUM_PARTICLES 64 | break 65 | else 66 | unique_indices = myappend(unique_indices, i) 67 | state=Dict() 68 | state["c"] = maxfilter_cid_array[i] 69 | state["c_aggregate"] = myappend(particles_t_minus_1[maxfilter_particle_struct[i]]["hidden_state"]["c_aggregate"], state["c"]) 70 | particles_t[particle_cnt]["hidden_state"]=state 71 | particles_t[particle_cnt]["weight"] = maxfilter_probability_array[i] 72 | particle_cnt+=1 73 | end 74 | end 75 | i+=1 76 | end 77 | 78 | if NUM_PARTICLES >= particle_cnt 79 | len_unique_indices = length(unique_indices) 80 | for p=particle_cnt:NUM_PARTICLES 81 | state=Dict() 82 | #indx = unique_indices[p%len_unique_indices + 1] 83 | #indx = unique_indices[randi(len_unique_indices)] 84 | indx = unique_indices[1] 85 | state["c"] = maxfilter_cid_array[indx] 86 | 
state["c_aggregate"] = myappend(particles_t_minus_1[maxfilter_particle_struct[indx]]["hidden_state"]["c_aggregate"], state["c"]) 87 | particles_t[p]["hidden_state"]=state 88 | particles_t[p]["weight"] = 0#maxfilter_probability_array[indx] 89 | end 90 | end 91 | 92 | #Normalizing particle weights 93 | Z=0 94 | for i=1:NUM_PARTICLES 95 | Z+=particles_t[i]["weight"] 96 | end 97 | for i=1:NUM_PARTICLES 98 | particles_t[i]["weight"] = particles_t[i]["weight"]/Z 99 | end 100 | 101 | 102 | #println(particles_t) 103 | """ 104 | unique_maxfilter_cid_array = unique(maxfilter_cid_array) 105 | unique_total_cids = length(unique_maxfilter_cid_array) 106 | unique_total_cids = min(unique_total_cids, NUM_PARTICLES) 107 | 108 | END = NaN 109 | if unique_total_cids >= NUM_PARTICLES 110 | END = NUM_PARTICLES 111 | end 112 | if NUM_PARTICLES > unique_total_cids 113 | END = unique_total_cids 114 | end 115 | 116 | for i=1:END 117 | state=Dict() 118 | state["c"] = maxfilter_cid_array[i] 119 | 120 | state["c_aggregate"] = myappend(particles_t_minus_1[maxfilter_particle_struct[i]]["hidden_state"]["c_aggregate"], state["c"]) 121 | particles_t[i]["hidden_state"]=state 122 | #println(state["c_aggregate"]) 123 | #print("(",maxfilter_particle_struct[i], ") ") 124 | end 125 | 126 | IND_CNT = 1 127 | if NUM_PARTICLES > unique_total_cids 128 | for i=END+1:NUM_PARTICLES 129 | state=Dict() 130 | #state["c"] = maxfilter_cid_array[(IND_CNT%unique_total_cids)+1]#[randi(unique_total_cids)] ##random vs deterministic stratified 131 | state["c"] = maxfilter_cid_array[randi(unique_total_cids)] ##random vs deterministic stratified 132 | IND_CNT +=1 133 | state["c_aggregate"] = myappend(particles_t_minus_1[maxfilter_particle_struct[i]]["hidden_state"]["c_aggregate"], state["c"]) 134 | particles_t[i]["hidden_state"]=state 135 | #println(state["c_aggregate"]) 136 | #print("(",maxfilter_particle_struct[i], ") ") 137 | end 138 | end""" 139 | 140 | 141 | end 142 | 143 | 144 | end 
-------------------------------------------------------------------------------- /DPMM_MaxFilter/picloud_runner.py: -------------------------------------------------------------------------------- 1 | 2 | import sys 3 | import pickle 4 | 5 | import cloud 6 | cloud.setkey(7513, api_secretkey='ca43a3535fa17e28b687f0f1691c67db261392ae') 7 | cloud_environment = 'Julia' 8 | 9 | """ 10 | number_of_clusters = int(sys.argv[1]) 11 | if_zero_shortlearning = sys.argv[2] # Should be "yes" or "no" 12 | experiment_name = sys.argv[3]""" 13 | 14 | # Usage: python picloud_runner.py 200 10 20 15 | 16 | 17 | TRIALS = int(sys.argv[1]) 18 | NUM_PARTICLES = int(sys.argv[2]) 19 | REPETITIONS = int(sys.argv[3]) 20 | DATASET = int(sys.argv[4]) 21 | 22 | 23 | def run_on_instance(trial_id): 24 | global number_of_clusters 25 | global if_zero_shortlearning 26 | global experiment_name 27 | import subprocess 28 | import os 29 | os.environ['DISPLAY'] = ":1" 30 | print "Starting" 31 | ls_output = subprocess.Popen(["/home/picloud/julia/julia", "putative_runner.jl", str(NUM_PARTICLES), str(trial_id), str(REPETITIONS), str(DATASET)], \ 32 | cwd = "/home/picloud/DPMixtureModel/DPMM_MaxFilter/", \ 33 | stdout=subprocess.PIPE, stderr=subprocess.PIPE) 34 | 35 | out, err = ls_output.communicate() 36 | return out 37 | 38 | #result = run_on_instance([1]) 39 | 40 | jids = cloud.map(run_on_instance, range(TRIALS), _env=cloud_environment, _type='c2', _cores=1) 41 | print jids 42 | result = cloud.result(jids) 43 | pickle.dump(result, open("DATASET="+str(DATASET)+"_putative_result_"+str(NUM_PARTICLES)+"particles_"+str(REPETITIONS)+"path.pkl","wb")) 44 | print "RESULT:", result 45 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/plotter.py: -------------------------------------------------------------------------------- 1 | #tejask@mit.edu 2 | 3 | from pylab import imread,imshow,figure,show,subplot 4 | from numpy import reshape,uint8,flipud 5 | from 
scipy.cluster.vq import kmeans,vq 6 | import numpy 7 | import scipy.misc 8 | import Image 9 | import copy 10 | import glob 11 | import pdb 12 | import pylab 13 | import pickle 14 | from matplotlib import pyplot 15 | from matplotlib.backends.backend_pdf import PdfPages 16 | import math 17 | import operator 18 | 19 | def saveAsPDF(fname,plot): 20 | pp = PdfPages(fname) 21 | pp.savefig(plot) 22 | pp.close() 23 | 24 | 25 | def genericPlot(X,Y,xlab,ylab,fname): 26 | f = pylab.figure() 27 | ax=f.add_subplot(111,title='') 28 | pyplot.plot( X,Y,'-',color='blue', linewidth=2) 29 | pyplot.xlabel(xlab,fontsize=30) 30 | pyplot.ylabel(ylab,fontsize=30) 31 | pylab.savefig(fname+'.png') 32 | #ax.grid(True) 33 | saveAsPDF(fname+'.pdf',f) 34 | 35 | 36 | 37 | 38 | fname = 'results/K=1_putative_result_1particles_1path' 39 | data = pickle.load(open(fname+".pkl","rb")) 40 | 41 | f = pylab.figure() 42 | ax=f.add_subplot(111,title='') 43 | X=[] 44 | Y=[] 45 | CNT=0 46 | 47 | with_maxf=[] 48 | without_maxf = [] 49 | with_eqmaxf = [] 50 | 51 | for i in range(len(data)): 52 | if len(data[i]) > 0: 53 | if len(data[i])>1: 54 | print i, float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[0]), float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[1]), float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[2]) 55 | without_maxf.append(float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[0])) 56 | with_maxf.append(float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[1])) 57 | with_eqmaxf.append(float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[2])) 58 | X.append(CNT);CNT+=1 59 | 60 | print 'Average (without_maxf):', sum(without_maxf)/len(without_maxf) 61 | print 'Average (with_maxf):', sum(with_maxf)/len(with_maxf) 62 | print 'Average (with_eqmaxf):', sum(with_eqmaxf)/len(with_eqmaxf) 63 | 64 | 65 | ax.bar(X,map(operator.sub, with_eqmaxf, with_maxf),0.05,color='black') 66 | 67 | 
"""ax.plot(X,without_maxf, color="grey") 68 | ax.plot(X,with_maxf, color="black") 69 | ax.plot(X,with_eqmaxf, color="blue")""" 70 | 71 | 72 | pylab.xlabel('Dataset',fontsize=35) 73 | pylab.ylabel('V-Measure Diff',fontsize=35)# (30 samples avg/dataset) 74 | pylab.savefig(fname+'.png') 75 | #pylab.ylim([-0.35, 0.35]) 76 | #ax.grid(True) 77 | saveAsPDF(fname+'.pdf',f) 78 | 79 | 80 | -------------------------------------------------------------------------------- /DPMM_MaxFilter/results/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/results/.DS_Store -------------------------------------------------------------------------------- /DPMM_MaxFilter/results_100pts_putative_10particles_200rep_30perdatasetorseed/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_MaxFilter/results_100pts_putative_10particles_200rep_30perdatasetorseed/.DS_Store -------------------------------------------------------------------------------- /DPMM_SMC/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/.DS_Store -------------------------------------------------------------------------------- /DPMM_SMC/backup/original.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/backup/original.png -------------------------------------------------------------------------------- /DPMM_SMC/dataset.jl: -------------------------------------------------------------------------------- 1 | 2 | function dataset1() 3 | 
mu={[0,0], [2,2], [4,4]} 4 | std={[0.25,0.25], [0.25,0.25], [0.25,0.25]} 5 | mixture_weight = [1/2,1/6,1/3] 6 | return mu, std, mixture_weight 7 | end 8 | 9 | 10 | function dataset2() 11 | mu={[0,0], [1,1], [2,2]} 12 | std={[0.3,0.3], [0.3,0.3], [0.3,0.3]} 13 | mixture_weight = [1/3,1/3,1/3] 14 | return mu, std,mixture_weight 15 | end 16 | -------------------------------------------------------------------------------- /DPMM_SMC/original.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/original.png -------------------------------------------------------------------------------- /DPMM_SMC/picloud_runner.py: -------------------------------------------------------------------------------- 1 | 2 | import sys 3 | import pickle 4 | 5 | import cloud 6 | cloud.setkey(7513, api_secretkey='ca43a3535fa17e28b687f0f1691c67db261392ae') 7 | cloud_environment = 'Julia' 8 | 9 | """ 10 | number_of_clusters = int(sys.argv[1]) 11 | if_zero_shortlearning = sys.argv[2] # Should be "yes" or "no" 12 | experiment_name = sys.argv[3]""" 13 | 14 | # Usage: python picloud_runner.py 100 50 10 2 15 | 16 | 17 | TRIALS = int(sys.argv[1]) 18 | NUM_PARTICLES = int(sys.argv[2]) 19 | DELTA = int(sys.argv[3]) 20 | INTEGRAL_PATHS = int(sys.argv[4]) 21 | 22 | 23 | def run_on_instance(trial_id): 24 | global number_of_clusters 25 | global if_zero_shortlearning 26 | global experiment_name 27 | import subprocess 28 | import os 29 | os.environ['DISPLAY'] = ":1" 30 | print "Starting" 31 | ls_output = subprocess.Popen(["/home/picloud/julia/julia", "runner.jl", str(NUM_PARTICLES), str(DELTA), str(INTEGRAL_PATHS)], \ 32 | cwd = "/home/picloud/DPMixtureModel/DPMM_SMC/", \ 33 | stdout=subprocess.PIPE, stderr=subprocess.PIPE) 34 | 35 | out, err = ls_output.communicate() 36 | return out 37 | 38 | #result = run_on_instance([1]) 39 | 40 | jids = cloud.map(run_on_instance, 
range(TRIALS), _env=cloud_environment, _type='c2', _cores=1) 41 | print jids 42 | result = cloud.result(jids) 43 | pickle.dump(result, open("result_"+str(NUM_PARTICLES)+"particles_"+str(DELTA)+"delta_"+str(INTEGRAL_PATHS)+"path.pkl","wb")) 44 | print "RESULT:", result 45 | -------------------------------------------------------------------------------- /DPMM_SMC/plotter.py: -------------------------------------------------------------------------------- 1 | #tejask@mit.edu 2 | 3 | from pylab import imread,imshow,figure,show,subplot 4 | from numpy import reshape,uint8,flipud 5 | from scipy.cluster.vq import kmeans,vq 6 | import numpy 7 | import scipy.misc 8 | import Image 9 | import copy 10 | import glob 11 | import pdb 12 | import pylab 13 | import pickle 14 | from matplotlib import pyplot 15 | from matplotlib.backends.backend_pdf import PdfPages 16 | import math 17 | 18 | 19 | def saveAsPDF(fname,plot): 20 | pp = PdfPages(fname) 21 | pp.savefig(plot) 22 | pp.close() 23 | 24 | 25 | def genericPlot(X,Y,xlab,ylab,fname): 26 | f = pylab.figure() 27 | ax=f.add_subplot(111,title='') 28 | pyplot.plot( X,Y,'-',color='blue', linewidth=2) 29 | pyplot.xlabel(xlab,fontsize=30) 30 | pyplot.ylabel(ylab,fontsize=30) 31 | pylab.savefig(fname+'.png') 32 | #ax.grid(True) 33 | saveAsPDF(fname+'.pdf',f) 34 | 35 | 36 | 37 | 38 | fname = 'result_1particles_5delta_2path' 39 | data = pickle.load(open(fname+".pkl","rb")) 40 | 41 | f = pylab.figure() 42 | ax=f.add_subplot(111,title='') 43 | X=[] 44 | Y=[] 45 | CNT=0 46 | nolookArr =[] 47 | lookArr = [] 48 | for i in range(len(data)): 49 | if len(data[i]) > 0: 50 | nolook = float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[0]) 51 | look = float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[1]) 52 | nolookArr.append(nolook) 53 | lookArr.append(look) 54 | X.append(i) 55 | Y.append(look-nolook) 56 | if look > nolook: 57 | CNT+=1 58 | 59 | print len(data), CNT 60 | print 'Average (Look):', 
sum(lookArr)/len(data) 61 | print 'Average (Nolook):', sum(nolookArr)/len(data) 62 | 63 | ax.bar(X,Y,0.4,color='black') 64 | 65 | pylab.xlabel('Run Number',fontsize=30) 66 | pylab.ylabel('ARI Difference',fontsize=30) 67 | pylab.savefig(fname+'.png') 68 | pylab.ylim([-0.35, 0.35]) 69 | #ax.grid(True) 70 | saveAsPDF(fname+'.pdf',f) 71 | 72 | 73 | -------------------------------------------------------------------------------- /DPMM_SMC/result.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | S'[0.25916946770654387,0.3486808218826324]\n' 3 | p1 4 | aS'[0.35270611330460927,0.34227523446293456]\n' 5 | p2 6 | aS'[0.6255617723409441,0.6032104739563502]\n' 7 | p3 8 | aS'[0.3069819779591688,0.41299401294237226]\n' 9 | p4 10 | aS'[0.3063689231126936,0.33339044484193714]\n' 11 | p5 12 | aS'[0.40425560388089643,0.46669239439440097]\n' 13 | p6 14 | aS'[0.5407725963037724,0.5028641904654964]\n' 15 | p7 16 | aS'[0.45923060995089604,0.47724078887709187]\n' 17 | p8 18 | aS'[0.3812188407352182,0.46475664372766534]\n' 19 | p9 20 | aS'[0.41749383194357437,0.39553806526644225]\n' 21 | p10 22 | aS'[0.2896281800391387,0.6278010046700395]\n' 23 | p11 24 | aS'[0.253337516746445,0.4785976569861326]\n' 25 | p12 26 | aS'[0.5173927711791197,0.5472351352743627]\n' 27 | p13 28 | aS'[0.5040812219066572,0.5502622757015717]\n' 29 | p14 30 | aS'[0.44415773716712387,0.47037951529981964]\n' 31 | p15 32 | aS'[0.4888863307748689,0.49902154699202644]\n' 33 | p16 34 | aS'[0.2693409095991358,0.3954041530125205]\n' 35 | p17 36 | aS'[0.3945232247960148,0.406355111890192]\n' 37 | p18 38 | aS'[0.348978250956774,0.4193340724580663]\n' 39 | p19 40 | aS'[0.28995967202632744,0.2653041881561088]\n' 41 | p20 42 | aS'[0.4839966009418845,0.461936856435326]\n' 43 | p21 44 | aS'[0.4967358993908696,0.541607127468974]\n' 45 | p22 46 | aS'[0.29697451159290195,0.32683266506381364]\n' 47 | p23 48 | aS'[0.5579275888114524,0.5265242094389071]\n' 49 | p24 50 | 
aS'[0.43765805118051065,0.47029266933524794]\n' 51 | p25 52 | aS'[0.3644735957757375,0.455107051790299]\n' 53 | p26 54 | aS'[0.32964460523515615,0.3620630353390029]\n' 55 | p27 56 | aS'[0.4932731127780282,0.43900987342622744]\n' 57 | p28 58 | aS'[0.5556014794650954,0.5928702640248306]\n' 59 | p29 60 | aS'[0.40752092032643583,0.470135046243892]\n' 61 | p30 62 | aS'[0.3738730163964401,0.34210626246485504]\n' 63 | p31 64 | aS'[0.3752212372583945,0.3840786867992487]\n' 65 | p32 66 | aS'[0.4331195602382041,0.4653112490085995]\n' 67 | p33 68 | aS'[0.5880904979859913,0.41898969545666653]\n' 69 | p34 70 | aS'[0.5957478177537063,0.37717809626912036]\n' 71 | p35 72 | aS'[0.22295939044494328,0.2769598657829878]\n' 73 | p36 74 | aS'[0.4194614101707379,0.4289857497696582]\n' 75 | p37 76 | aS'[0.4611861776371851,0.41054978751826876]\n' 77 | p38 78 | aS'[0.519548841857151,0.43956141931337545]\n' 79 | p39 80 | aS'[0.4615916537756413,0.5203160776818099]\n' 81 | p40 82 | aS'[0.4294825002881399,0.4691321829452167]\n' 83 | p41 84 | aS'[0.4793210416340655,0.4834990938107903]\n' 85 | p42 86 | aS'[0.5113931587639091,0.6259397509307341]\n' 87 | p43 88 | aS'[0.4162974879535849,0.5029066466304831]\n' 89 | p44 90 | aS'[0.35638651470799,0.419538135289126]\n' 91 | p45 92 | aS'[0.4110447236402809,0.4270404837968649]\n' 93 | p46 94 | aS'[0.5090539497559852,0.49642425545212954]\n' 95 | p47 96 | aS'[0.4385224494290725,0.4635970617767345]\n' 97 | p48 98 | aS'[0.40366498223482417,0.483882989397296]\n' 99 | p49 100 | aS'[0.34068910294152255,0.3995705919048395]\n' 101 | p50 102 | aS'[0.5090539497559852,0.5090539497559852]\n' 103 | p51 104 | aS'[0.4322030428651459,0.4643804204404246]\n' 105 | p52 106 | aS'[0.364920788152272,0.5556167268849045]\n' 107 | p53 108 | aS'[0.49615923211833624,0.4333369808194335]\n' 109 | p54 110 | aS'[0.3169139363374149,0.3836992677559333]\n' 111 | p55 112 | aS'[0.4008783516477468,0.3951369499554639]\n' 113 | p56 114 | aS'[0.5014061625937759,0.5441582968624743]\n' 115 | p57 
116 | aS'[0.48298328237519783,0.4771668886023778]\n' 117 | p58 118 | aS'[0.39345968376492907,0.45080039023928037]\n' 119 | p59 120 | aS'[0.3948356888886568,0.37375513957723444]\n' 121 | p60 122 | aS'[0.3452223478198126,0.454977407183118]\n' 123 | p61 124 | aS'[0.49634747074616486,0.5023086946137154]\n' 125 | p62 126 | aS'[0.36271116500136774,0.3765494182681391]\n' 127 | p63 128 | aS'[0.3679377919360679,0.49485843835928606]\n' 129 | p64 130 | aS'[0.3680986309625222,0.37311865380082876]\n' 131 | p65 132 | aS'[0.4024908857185001,0.4471225250560108]\n' 133 | p66 134 | aS'[0.4971080991900186,0.3921408826519915]\n' 135 | p67 136 | aS'[0.2903861661141472,0.39409978783292204]\n' 137 | p68 138 | aS'[0.3696186560224028,0.36061795326559043]\n' 139 | p69 140 | aS'[0.45883238988362746,0.49704557613750466]\n' 141 | p70 142 | aS'[0.38852238185871224,0.4134242125924186]\n' 143 | p71 144 | aS'[0.4132877344936028,0.46303431157336244]\n' 145 | p72 146 | aS'[0.37473290430079154,0.7153263509850287]\n' 147 | p73 148 | aS'[0.4381313875692322,0.4553770708245573]\n' 149 | p74 150 | aS'[0.5086339339894967,0.4933505829432244]\n' 151 | p75 152 | aS'[0.3517990420586061,0.46802407260027895]\n' 153 | p76 154 | aS'[0.36188420614184524,0.4267778828790091]\n' 155 | p77 156 | aS'[0.3392980948643695,0.3539936658523134]\n' 157 | p78 158 | aS'[0.33289051092181865,0.39762911968602693]\n' 159 | p79 160 | aS'[0.38655418394147845,0.39236030072557554]\n' 161 | p80 162 | aS'[0.41259081101997325,0.48246255014710776]\n' 163 | p81 164 | aS'[0.45227542113055136,0.400120884730804]\n' 165 | p82 166 | aS'[0.3086823491067158,0.3002802118596022]\n' 167 | p83 168 | aS'[0.4687844202820907,0.48353254794049333]\n' 169 | p84 170 | aS'[0.39895433052869145,0.42861997544794866]\n' 171 | p85 172 | aS'[0.3222091331162079,0.41543771572011273]\n' 173 | p86 174 | aS'[0.3579289768244223,0.33766621444849165]\n' 175 | p87 176 | aS'[0.4743013221045081,0.42480515600576724]\n' 177 | p88 178 | 
aS'[0.22949967511370994,0.38127400754096447]\n' 179 | p89 180 | aS'[0.39402391446654966,0.39884369853881685]\n' 181 | p90 182 | aS'[0.3900663141858587,0.43433129266954057]\n' 183 | p91 184 | aS'[0.3371700997813804,0.3800033909644542]\n' 185 | p92 186 | aS'[0.3633170745405555,0.4059786387565934]\n' 187 | p93 188 | aS'[0.3868881613516024,0.46922656914657196]\n' 189 | p94 190 | aS'[0.3802355643861736,0.38578483955806975]\n' 191 | p95 192 | aS'[0.46467820526685344,0.5140767567994174]\n' 193 | p96 194 | aS'[0.36127454612819543,0.5656909297626589]\n' 195 | p97 196 | aS'[0.3808376054761672,0.3421571830053249]\n' 197 | p98 198 | aS'[0.4387633583711785,0.720776424674833]\n' 199 | p99 200 | aS'[0.25135945890010597,0.3982577510035806]\n' 201 | p100 202 | a. -------------------------------------------------------------------------------- /DPMM_SMC/result_1_1particle_10delta_2path.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/result_1_1particle_10delta_2path.pdf -------------------------------------------------------------------------------- /DPMM_SMC/result_1_1particle_10delta_2path.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/result_1_1particle_10delta_2path.png -------------------------------------------------------------------------------- /DPMM_SMC/result_1particles_20delta_1path.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/result_1particles_20delta_1path.pdf -------------------------------------------------------------------------------- /DPMM_SMC/result_1particles_20delta_1path.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/result_1particles_20delta_1path.png -------------------------------------------------------------------------------- /DPMM_SMC/result_1particles_2delta_2path.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/result_1particles_2delta_2path.pdf -------------------------------------------------------------------------------- /DPMM_SMC/result_1particles_2delta_2path.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/result_1particles_2delta_2path.png -------------------------------------------------------------------------------- /DPMM_SMC/result_1particles_3delta_1path.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/result_1particles_3delta_1path.pdf -------------------------------------------------------------------------------- /DPMM_SMC/result_1particles_3delta_1path.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/result_1particles_3delta_1path.png -------------------------------------------------------------------------------- /DPMM_SMC/result_1particles_5delta_2path.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/result_1particles_5delta_2path.pdf 
-------------------------------------------------------------------------------- /DPMM_SMC/result_1particles_5delta_2path.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | S'[0.33118422392272334,0.3589827990426114]\n' 3 | p1 4 | aS'[0.4368262893289247,0.35820293012114235]\n' 5 | p2 6 | aS'[0.349786938046532,0.4861953999728876]\n' 7 | p3 8 | aS'[0.33968944940274703,0.4344547348016063]\n' 9 | p4 10 | aS'[0.5033627494707598,0.4011844167185746]\n' 11 | p5 12 | aS'[0.49504296748979054,0.4425415813051673]\n' 13 | p6 14 | aS'[0.41848357850605644,0.425653077142489]\n' 15 | p7 16 | aS'[0.38486360769135025,0.3242673738993008]\n' 17 | p8 18 | aS'[0.3998674387881318,0.46876525028201677]\n' 19 | p9 20 | aS'[0.4651893023980306,0.47668925583306243]\n' 21 | p10 22 | aS'[0.3305875101912423,0.4661296583361052]\n' 23 | p11 24 | aS'[0.37434782954457557,0.3824906306051669]\n' 25 | p12 26 | aS'[0.4893536647195118,0.4610190712698464]\n' 27 | p13 28 | aS'[0.4006456701579321,0.4432730167609783]\n' 29 | p14 30 | aS'[0.4141952767321662,0.42207892519069]\n' 31 | p15 32 | aS'[0.4561135522585141,0.43643233005473225]\n' 33 | p16 34 | aS'[0.30722033533414156,0.40502803368409757]\n' 35 | p17 36 | aS'[0.36300048277195995,0.4271360027540501]\n' 37 | p18 38 | aS'[0.41740557246432825,0.4289143475586346]\n' 39 | p19 40 | aS'[0.40505533710020425,0.43723007762395316]\n' 41 | p20 42 | aS'[0.3670678199582293,0.4538166621307404]\n' 43 | p21 44 | aS'[0.4683442834239726,0.46935327564470875]\n' 45 | p22 46 | aS'[0.3792720264380919,0.4865851505504329]\n' 47 | p23 48 | aS'[0.4422400354896148,0.4997609627797584]\n' 49 | p24 50 | aS'[0.402841278490105,0.4422716453059943]\n' 51 | p25 52 | aS'[0.33991635303647266,0.3846485118677389]\n' 53 | p26 54 | aS'[0.3002327532591484,0.28246148137528543]\n' 55 | p27 56 | aS'[0.40795847869020724,0.5158006847441055]\n' 57 | p28 58 | aS'[0.3442035866971446,0.37702967933145115]\n' 59 | p29 60 | 
aS'[0.443302583541451,0.39468078108317334]\n' 61 | p30 62 | aS'[0.3494125114141685,0.42228795887687287]\n' 63 | p31 64 | aS'[0.45096441288677025,0.4507310163453036]\n' 65 | p32 66 | aS'[0.36672672655502314,0.419205238170643]\n' 67 | p33 68 | aS'[0.44359479981313427,0.4752109232533184]\n' 69 | p34 70 | aS'[0.346764358097133,0.3473433357770626]\n' 71 | p35 72 | aS'[0.4498699171574943,0.42961284854013954]\n' 73 | p36 74 | aS'[0.4076367975453149,0.42612650736683627]\n' 75 | p37 76 | aS'[0.5488839021261305,0.5366885139294741]\n' 77 | p38 78 | aS'[0.6005827283738152,0.36905588720588034]\n' 79 | p39 80 | aS'[0.485103195843673,0.5549997373456531]\n' 81 | p40 82 | aS'[0.4582555829771863,0.558838761495814]\n' 83 | p41 84 | aS'[0.3269413684946418,0.41612032010300276]\n' 85 | p42 86 | aS'[0.5317895681361569,0.4824814982719943]\n' 87 | p43 88 | aS'[0.4312191289252166,0.5140767567994174]\n' 89 | p44 90 | aS'[0.244325199188498,0.3254980169512978]\n' 91 | p45 92 | aS'[0.4681655961059662,0.40701632049027997]\n' 93 | p46 94 | aS'[0.3283091531914124,0.4038256658832407]\n' 95 | p47 96 | aS'[0.3650141664511938,0.421054728881073]\n' 97 | p48 98 | aS'[0.36870352150674834,0.41736324133496583]\n' 99 | p49 100 | aS'[0.34105192765204123,0.33208227973737053]\n' 101 | p50 102 | aS'[0.3761068460180856,0.36630939240492577]\n' 103 | p51 104 | aS'[0.38339439000247305,0.4123156950731033]\n' 105 | p52 106 | aS'[0.4091577407988104,0.3631037573416922]\n' 107 | p53 108 | aS'[0.3529286030995124,0.3111051872113303]\n' 109 | p54 110 | aS'[0.4102054351775448,0.4202929464255502]\n' 111 | p55 112 | aS'[0.45514236420450416,0.4330759947063852]\n' 113 | p56 114 | aS'[0.3973922920044381,0.5101095046178685]\n' 115 | p57 116 | aS'[0.5369415842921603,0.31325431354025873]\n' 117 | p58 118 | aS'[0.4392971026921626,0.5950434167878244]\n' 119 | p59 120 | aS'[0.4091573465152572,0.47946534324470597]\n' 121 | p60 122 | aS'[0.48077455499464494,0.5263354866403448]\n' 123 | p61 124 | 
aS'[0.4087728090388378,0.3079616109689061]\n' 125 | p62 126 | aS'[0.4673418791374464,0.4044011296402257]\n' 127 | p63 128 | aS'[0.5139549700876269,0.4356205578844814]\n' 129 | p64 130 | aS'[0.35059449751259025,0.4209042100967433]\n' 131 | p65 132 | aS'[0.4247714897837373,0.45683758099272426]\n' 133 | p66 134 | aS'[0.41447733115672836,0.4814147944782491]\n' 135 | p67 136 | aS'[0.3441522287361213,0.38146168521290763]\n' 137 | p68 138 | aS'[0.40084454527077085,0.3319452252060329]\n' 139 | p69 140 | aS'[0.5195557593958647,0.37263389666663976]\n' 141 | p70 142 | aS'[0.5960843287146291,0.38496905985389246]\n' 143 | p71 144 | aS'[0.2135384797523179,0.4144205334778315]\n' 145 | p72 146 | aS'[0.4152405196232622,0.48058087909263386]\n' 147 | p73 148 | aS'[0.4701201527233848,0.4098055059979603]\n' 149 | p74 150 | aS'[0.4509974420530754,0.40528279029702735]\n' 151 | p75 152 | aS'[0.44153071013335604,0.38220569955867745]\n' 153 | p76 154 | aS'[0.23937963380323454,0.2665490091944437]\n' 155 | p77 156 | aS'[0.41378928448886604,0.48405809898164265]\n' 157 | p78 158 | aS'[0.46359166020488246,0.3865183495987672]\n' 159 | p79 160 | aS'[0.3564940012152653,0.3975745617324748]\n' 161 | p80 162 | aS'[0.37409014904654353,0.3619342823932305]\n' 163 | p81 164 | aS'[0.5243333674023786,0.4866684144949391]\n' 165 | p82 166 | aS'[0.3414447979267046,0.34380953622502824]\n' 167 | p83 168 | aS'[0.30365238308676706,0.4542695619298679]\n' 169 | p84 170 | aS'[0.3876457608854537,0.6643484712589162]\n' 171 | p85 172 | aS'[0.3628443464833737,0.4571076366912752]\n' 173 | p86 174 | aS'[0.2801994806118452,0.36718297081005397]\n' 175 | p87 176 | aS'[0.3835641843130992,0.34962262245238834]\n' 177 | p88 178 | aS'[0.37861334827728893,0.3672272638600124]\n' 179 | p89 180 | aS'[0.49539383987285013,0.4420050101747468]\n' 181 | p90 182 | aS'[0.3775708602567989,0.4552990361458802]\n' 183 | p91 184 | aS'[0.2870468688324508,0.32322675003573]\n' 185 | p92 186 | aS'[0.2895917624664987,0.46855190313033873]\n' 187 | p93 
188 | aS'[0.44041216660131316,0.46657007853273824]\n' 189 | p94 190 | aS'[0.3668421518014431,0.4678627906836551]\n' 191 | p95 192 | aS'[0.5159373904819006,0.5606430931355088]\n' 193 | p96 194 | aS'[0.39423957919589553,0.393267371773001]\n' 195 | p97 196 | aS'[0.6211913114561021,0.31558666185389117]\n' 197 | p98 198 | aS'[0.3448002264596875,0.38010779120787347]\n' 199 | p99 200 | aS'[0.3509245158964882,0.4041543524924413]\n' 201 | p100 202 | a. -------------------------------------------------------------------------------- /DPMM_SMC/result_1particles_5delta_2path.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/result_1particles_5delta_2path.png -------------------------------------------------------------------------------- /DPMM_SMC/result_50particles_3delta_1path.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/result_50particles_3delta_1path.pdf -------------------------------------------------------------------------------- /DPMM_SMC/result_50particles_3delta_1path.pkl: -------------------------------------------------------------------------------- 1 | (lp0 2 | S'[0.49545094190041117,0.5562444532601536]\n' 3 | p1 4 | aS'[0.3999850699543991,0.3621059624166896]\n' 5 | p2 6 | aS'[0.5573902557111101,0.39731430761811704]\n' 7 | p3 8 | aS'[0.5068420996545837,0.5311467982353553]\n' 9 | p4 10 | aS'[0.3622272350836108,0.4198847966658681]\n' 11 | p5 12 | aS'[0.4403492309473993,0.3578799478327709]\n' 13 | p6 14 | aS'[0.6952785452831527,0.40236020184167903]\n' 15 | p7 16 | aS'[0.4253031257411687,0.5575782835169395]\n' 17 | p8 18 | aS'[0.3136620109148087,0.46366915904182926]\n' 19 | p9 20 | aS'[0.4860213813617175,0.5151796861909984]\n' 21 | p10 22 | 
aS'[0.4546165273608252,0.4411097646310893]\n' 23 | p11 24 | aS'[0.4303719073014967,0.45414886311270947]\n' 25 | p12 26 | aS'[0.284793862972716,0.3552124094248099]\n' 27 | p13 28 | aS'[0.47173139381445106,0.39342160827387274]\n' 29 | p14 30 | aS'[0.41521893342024735,0.520563833788808]\n' 31 | p15 32 | aS'[0.3610194596583743,0.37364918580790046]\n' 33 | p16 34 | aS'[0.331342145982653,0.3443540855533331]\n' 35 | p17 36 | aS'[0.4199397041799408,0.30616096546191424]\n' 37 | p18 38 | aS'[0.4180195453334848,0.3973026763851588]\n' 39 | p19 40 | aS'[0.5187564754267819,0.588738524143161]\n' 41 | p20 42 | aS'[0.5294902035468884,0.5696553295436885]\n' 43 | p21 44 | aS'[0.35508001810387335,0.48353254794049333]\n' 45 | p22 46 | aS'[0.3523202652296186,0.6605838676918891]\n' 47 | p23 48 | aS'[0.3870550528137084,0.3539862723999068]\n' 49 | p24 50 | aS'[0.39773582362784826,0.4397000947316326]\n' 51 | p25 52 | aS'[0.4610491434657828,0.46454499794529897]\n' 53 | p26 54 | aS'[0.32614217463894296,0.41707909641134727]\n' 55 | p27 56 | aS'[0.455999501604124,0.5283846571785197]\n' 57 | p28 58 | aS'[0.44929126424169713,0.5448001000685813]\n' 59 | p29 60 | aS'[0.2921131530756392,0.3095005040892124]\n' 61 | p30 62 | aS'[0.5562350754465001,0.5099467388271466]\n' 63 | p31 64 | aS'[0.3401007683961768,0.3351743759270599]\n' 65 | p32 66 | aS'[0.3800644809475061,0.43251558283095065]\n' 67 | p33 68 | aS'[0.4107549210275361,0.45617265119473355]\n' 69 | p34 70 | aS'[0.30401550794457266,0.323213354568632]\n' 71 | p35 72 | aS'[0.39286976087632525,0.5757152402356799]\n' 73 | p36 74 | aS'[0.3402831926450782,0.28657698064441667]\n' 75 | p37 76 | aS'[0.3463169525355522,0.2959368766761413]\n' 77 | p38 78 | aS'[0.4301755675278426,0.37801016471089083]\n' 79 | p39 80 | aS'[0.3113835612203515,0.4885913716749271]\n' 81 | p40 82 | aS'[0.4985985560659634,0.4886240600251336]\n' 83 | p41 84 | aS'[0.447449309261313,0.5074311396288205]\n' 85 | p42 86 | aS'[0.49732767994858273,0.450040705187422]\n' 87 | p43 88 | 
aS'[0.5313176817118598,0.5594264113439933]\n' 89 | p44 90 | aS'[0.3070035749444207,0.36061099919475165]\n' 91 | p45 92 | aS'[0.38540847935641936,0.3914399743107479]\n' 93 | p46 94 | aS'[0.4804572179626709,0.794897104519654]\n' 95 | p47 96 | aS'[0.4858132254435004,0.4391381928844897]\n' 97 | p48 98 | aS'[0.4586458583503547,0.47714441248351064]\n' 99 | p49 100 | aS'[0.42413394593680387,0.47775157729982715]\n' 101 | p50 102 | aS'[0.4519731006598093,0.338235671930459]\n' 103 | p51 104 | aS'[0.661577465402098,0.36181518832230375]\n' 105 | p52 106 | aS'[0.4198273027284422,0.4374037230093973]\n' 107 | p53 108 | aS'[0.7338053992516815,0.45205874316427247]\n' 109 | p54 110 | aS'[0.3384717274262096,0.392513014846452]\n' 111 | p55 112 | aS'[0.41208001045888326,0.3615582970739501]\n' 113 | p56 114 | aS'[0.5248595342227935,0.4849794604519679]\n' 115 | p57 116 | aS'[0.4694698195759323,0.5999077759052468]\n' 117 | p58 118 | aS'[0.25505234497833307,0.3328640804635861]\n' 119 | p59 120 | aS'[0.3785869645720881,0.3892320101313899]\n' 121 | p60 122 | aS'[0.35510109226121284,0.41000850580748055]\n' 123 | p61 124 | aS'[0.3112754762131365,0.4160762307059442]\n' 125 | p62 126 | aS'[0.4814413002456255,0.40367937493487244]\n' 127 | p63 128 | aS'[0.5413995087301152,0.5868867591097827]\n' 129 | p64 130 | aS'[0.548623364956905,0.5859566922088273]\n' 131 | p65 132 | aS'[0.3802406396017818,0.3857357938787377]\n' 133 | p66 134 | aS'[0.27507046324112316,0.3596223752925506]\n' 135 | p67 136 | aS'[0.38772940064958494,0.3728347323482443]\n' 137 | p68 138 | aS'[0.4122067053295235,0.3975274658717145]\n' 139 | p69 140 | aS'[0.3888297300094444,0.5230498376039094]\n' 141 | p70 142 | aS'[0.4113289000392047,0.37275250038825475]\n' 143 | p71 144 | aS'[0.3518784354958614,0.4463832197474921]\n' 145 | p72 146 | aS'[0.4130057320225849,0.476655512934785]\n' 147 | p73 148 | aS'[0.34867300710731414,0.3447858978150089]\n' 149 | p74 150 | aS'[0.5303861396919367,0.5096553697998213]\n' 151 | p75 152 | 
aS'[0.34307518862197534,0.3732369808198397]\n' 153 | p76 154 | aS'[0.3338859421792676,0.3639359627507594]\n' 155 | p77 156 | aS'[0.38407427030015623,0.39836662197224276]\n' 157 | p78 158 | aS'[0.4701662472867572,0.4396941384369537]\n' 159 | p79 160 | aS'[0.5157796728898388,0.45958683033398984]\n' 161 | p80 162 | aS'[0.3989355028091756,0.29110047683633145]\n' 163 | p81 164 | aS'[0.41289864224550904,0.4071330124980519]\n' 165 | p82 166 | aS'[0.3101135671706149,0.3161008973221611]\n' 167 | p83 168 | aS'[0.3191565539989789,0.3276370333737268]\n' 169 | p84 170 | aS'[0.4750936243099576,0.5022424889913195]\n' 171 | p85 172 | aS'[0.4262373794498311,0.4299062934322347]\n' 173 | p86 174 | aS'[0.4767894418021077,0.484593780273836]\n' 175 | p87 176 | aS'[0.5927272727272724,0.586994158859602]\n' 177 | p88 178 | aS'[0.4680402208104023,0.4521408371845924]\n' 179 | p89 180 | aS'[0.39310318919228104,0.3447525641100711]\n' 181 | p90 182 | aS'[0.7196463568114507,0.42487055370966903]\n' 183 | p91 184 | aS'[0.43929338594724465,0.49289834698725066]\n' 185 | p92 186 | aS'[0.3602824259862192,0.3088955840672854]\n' 187 | p93 188 | aS'[0.4868373829874865,0.4952346882106956]\n' 189 | p94 190 | aS'[0.4028655710071636,0.39676607079682824]\n' 191 | p95 192 | aS'[0.3824617405022584,0.41110318179050515]\n' 193 | p96 194 | aS'[0.4149257276722267,0.41746807085939575]\n' 195 | p97 196 | aS'[0.4256501960570185,0.5666641266709438]\n' 197 | p98 198 | aS'[0.49358469605613825,0.47033987791629833]\n' 199 | p99 200 | aS'[0.46599336316603845,0.4963092445529988]\n' 201 | p100 202 | a. 
-------------------------------------------------------------------------------- /DPMM_SMC/result_50particles_3delta_1path.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_SMC/result_50particles_3delta_1path.png -------------------------------------------------------------------------------- /DPMM_Variational/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Variational/.DS_Store -------------------------------------------------------------------------------- /DPMM_Variational/backup/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Variational/backup/.DS_Store -------------------------------------------------------------------------------- /DPMM_Variational/backup/variational_no_lookahead/dataset.jl: -------------------------------------------------------------------------------- 1 | 2 | function dataset1() 3 | """theta = { 4 | [1,0,0], 5 | [0,1,0], 6 | [0,0,1]}""" 7 | theta={ 8 | [1/3,2/3,0], 9 | [4/5,1/10,1/10], 10 | [1/8,1/8,6/8]} #3 states""" 11 | 12 | 13 | pi = [1/3,1/3,1/3] 14 | 15 | V = 3 16 | NUM_TOPICS = 3 17 | return theta, pi, NUM_TOPICS, V 18 | end 19 | 20 | -------------------------------------------------------------------------------- /DPMM_Variational/backup/variational_no_lookahead/gradient.jl: -------------------------------------------------------------------------------- 1 | 2 | using Debug 3 | 4 | @debug begin 5 | 6 | function gradient_v(prev_soft_v, cid, z_posterior_array_probability, z_posterior_array_cid, LRATE, alpha, NUM_DOCS, time, N, is_new_cid) 7 | 8 | normalizing_constant = 
logsumexp(z_posterior_array_probability) 9 | 10 | EXP_z_posterior_array_probability = deepcopy(z_posterior_array_probability) 11 | EXP_z_posterior_array_probability -= normalizing_constant 12 | EXP_z_posterior_array_probability = exp(EXP_z_posterior_array_probability) 13 | 14 | sufficient_stats = 0 15 | for i=1:length(z_posterior_array_cid) 16 | if z_posterior_array_cid[i] > cid 17 | sufficient_stats += EXP_z_posterior_array_probability[i] 18 | end 19 | end 20 | 21 | if is_new_cid == false 22 | particles[time][N]["hidden_state"]["cache"]["soft_v"][cid] = prev_soft_v[cid] + LRATE*(-prev_soft_v[cid] + alpha + NUM_DOCS*sufficient_stats) 23 | else 24 | particles[time][N]["hidden_state"]["cache"]["soft_v"][cid] = LRATE*(alpha + NUM_DOCS*sufficient_stats) 25 | end 26 | end 27 | 28 | 29 | function gradient_lambda_u(cid, document, wordArr, posterior, time, N, eta, is_new_cid) 30 | if is_new_cid == false #existing cluster 31 | prev_soft_lambda_kw = particles[time-1][N]["hidden_state"]["soft_lambda"] 32 | prev_soft_u = particles[time-1][N]["hidden_state"]["soft_u"] 33 | end 34 | 35 | particles[time][N]["hidden_state"]["cache"]["soft_lambda"][cid] = Dict() 36 | particles[time][N]["hidden_state"]["cache"]["soft_u"][cid] = Dict() 37 | for word = 1:V 38 | if is_new_cid == false #existing cluster 39 | particles[time][N]["hidden_state"]["cache"]["soft_lambda"][cid][word] = prev_soft_lambda_kw[cid][word] + LRATE*(-prev_soft_lambda_kw[cid][word] + eta + NUM_DOCS*(posterior*wordArr[word])) 40 | else 41 | particles[time][N]["hidden_state"]["cache"]["soft_lambda"][cid][word] = LRATE*(eta + NUM_DOCS*(posterior*wordArr[word])) 42 | end 43 | end 44 | 45 | if is_new_cid == false 46 | particles[time][N]["hidden_state"]["cache"]["soft_u"][cid] = prev_soft_u[cid] + LRATE*(-prev_soft_u[cid] + 1 + NUM_DOCS*posterior) 47 | else 48 | particles[time][N]["hidden_state"]["cache"]["soft_u"][cid] = LRATE*(1 + NUM_DOCS*posterior) 49 | end 50 | end 51 | 52 | 53 | end 54 | 55 | 56 | 
-------------------------------------------------------------------------------- /DPMM_Variational/backup/variational_no_lookahead/variational_lookahead.jl: -------------------------------------------------------------------------------- 1 | #Variational Lookahead 2 | #Ardavan Saeedi & Tejas Kulkarni 3 | 4 | 5 | function get_weight_lookahead(prev_weight, prev_support, prev_c_aggregate, time, prev_cid, N, prev_lambda_kw) 6 | VARIATIONAL_ITERATIONS 7 | for iter=1:VARIATIONAL_ITERATIONS 8 | for t=time:time+LOOKAHEAD_DELTA 9 | 10 | end 11 | end 12 | end 13 | """ 14 | if LOOKAHEAD_DELTA == 0 || LOOKAHEAD_DELTA == 1 15 | return prev_weight 16 | end 17 | 18 | PATH_QUEUE = PriorityQueue() 19 | PCNT = 1 20 | 21 | #time is already t+1 22 | if prev_cid == max(prev_support) 23 | t_1_support = unique(myappend(prev_support, prev_cid + 1)) 24 | else 25 | t_1_support = deepcopy(prev_support) 26 | end 27 | 28 | println("====================[LAMBDA FROM TOP LEVEL]====================") 29 | println(prev_lambda_kw) 30 | println("====================[[get_weight_lookahead time:", time ," prev_cid: ", prev_cid ,"]]====================") 31 | 32 | z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr = generateCandidateChildren(t_1_support, time, prev_c_aggregate, N, prev_lambda_kw) 33 | current = node(t_1_support, prev_weight, 1, time, prev_c_aggregate, prev_lambda_kw) 34 | 35 | PATH_QUEUE, PCNT = pickNewChildren(current, z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr, PATH_QUEUE, PCNT) 36 | 37 | 38 | #Now we propagate t+2 onwards ... 
39 | while true 40 | current = dequeue!(PATH_QUEUE) 41 | if current.depth == LOOKAHEAD_DELTA 42 | wARR = [] 43 | #terminate and return with weight 44 | #weight = exp(current.weight) 45 | wARR = myappend(wARR, current.weight) 46 | while length(PATH_QUEUE) > 0 47 | elm = dequeue!(PATH_QUEUE) 48 | if elm.depth != LOOKAHEAD_DELTA 49 | #return log(weight) 50 | break 51 | end 52 | #weight += exp(elm.weight) 53 | wARR = myappend(wARR, elm.weight) 54 | end 55 | #return log(weight) 56 | #println("ESCAPE: ", wARR) 57 | return logsumexp(wARR) 58 | end 59 | z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr = generateCandidateChildren(current.support, current.time, current.prev_c_aggregate, N, current.prev_lambda_kw) 60 | PATH_QUEUE, PCNT = pickNewChildren(current, z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr, PATH_QUEUE, PCNT) 61 | #println("*********") 62 | end 63 | end 64 | """ 65 | -------------------------------------------------------------------------------- /DPMM_Variational/branching_lookahead.jl: -------------------------------------------------------------------------------- 1 | #tejasdkulkarni@gmail.com | tejask@mit.edu 2 | 3 | type node 4 | support 5 | weight 6 | depth 7 | time 8 | prev_c_aggregate 9 | prev_lambda_kw 10 | end 11 | 12 | using Debug 13 | using NumericExtensions 14 | @debug begin 15 | 16 | function pickNewChildren(current, z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr, PATH_QUEUE, PCNT) 17 | # Now choose 'p' children and add to queue 18 | time = current.time 19 | DEPTH = current.depth 20 | 21 | if ENUMERATION == 1 22 | normalizing_constant = sum(z_posterior_array_probability) 23 | z_posterior_array_probability /= normalizing_constant 24 | 25 | for ind=1:length(z_posterior_array_cid) 26 | child_support = unique(myappend(current.support, z_posterior_array_cid[ind])) 27 | child_c_aggregate = myappend(current.prev_c_aggregate, z_posterior_array_cid[ind]) 28 | weight = 
z_posterior_array_probability[ind] 29 | child = node(unique(child_support), current.weight+weight, DEPTH+1, time+1, child_c_aggregate) 30 | enqueue!(PATH_QUEUE, child, PCNT) 31 | PCNT+=1 32 | end 33 | else 34 | #print("ADDING: ") 35 | for p=1:INTEGRAL_PATHS 36 | #println(z_posterior_array_probability, " ", PCNT) 37 | weight, sampled_cid = sample_cid(z_posterior_array_probability, z_posterior_array_cid) 38 | if sampled_cid == max(current.support) 39 | child_support = myappend(current.support, sampled_cid+1) 40 | else 41 | #indx=findin(current.support, max(current.support))[1] 42 | #delete!(current.support, indx) 43 | child_support = deepcopy(current.support) 44 | end 45 | # Adding cluster for each data point until current time for child 46 | child_c_aggregate = myappend(current.prev_c_aggregate, sampled_cid) 47 | #print(" (curr:",current.weight, " weight:",weight,") ") 48 | child = node(unique(child_support), current.weight+weight, DEPTH+1, time+1, child_c_aggregate, deepcopy(lambda_kw_arr[sampled_cid])) 49 | enqueue!(PATH_QUEUE, child, PCNT) 50 | PCNT+=1 51 | end 52 | #println() 53 | end 54 | return PATH_QUEUE, PCNT 55 | end 56 | 57 | 58 | function generateCandidateChildren(current_support, time, prev_c_aggregate, N, prev_lambda_kw) 59 | z_posterior_array_probability = [] 60 | z_posterior_array_cid = [] 61 | lambda_kw_arr = [] 62 | for j in current_support 63 | 64 | current_c_aggregate = myappend(prev_c_aggregate, j) 65 | zj_probability, lambda_kw = get_posterior_zj(j, current_c_aggregate, time, N, current_support, 1,prev_lambda_kw) 66 | lambda_kw_arr = myappend(lambda_kw_arr, lambda_kw) 67 | 68 | """println("-=-=-[ ", j ," ]=-=-=-") 69 | println(prev_lambda_kw) 70 | println(lambda_kw) 71 | println("-=-=-=-=-=-")""" 72 | 73 | z_posterior_array_probability = myappend(z_posterior_array_probability, zj_probability) 74 | z_posterior_array_cid = myappend(z_posterior_array_cid, j) 75 | end 76 | #println("L [time:",time,"]", " ", z_posterior_array_probability) 77 | return 
z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr 78 | end 79 | 80 | 81 | 82 | function get_weight_lookahead(prev_weight, prev_support, prev_c_aggregate, time, prev_cid, N, prev_lambda_kw) 83 | 84 | if LOOKAHEAD_DELTA == 0 || LOOKAHEAD_DELTA == 1 85 | return prev_weight 86 | end 87 | 88 | PATH_QUEUE = PriorityQueue() 89 | PCNT = 1 90 | 91 | #time is already t+1 92 | if prev_cid == max(prev_support) 93 | t_1_support = unique(myappend(prev_support, prev_cid + 1)) 94 | else 95 | t_1_support = deepcopy(prev_support) 96 | end 97 | 98 | """println("====================[LAMBDA FROM TOP LEVEL]====================") 99 | println(prev_lambda_kw) 100 | println("====================[[get_weight_lookahead time:", time ," prev_cid: ", prev_cid ,"]]====================")""" 101 | 102 | z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr = generateCandidateChildren(t_1_support, time, prev_c_aggregate, N, prev_lambda_kw) 103 | current = node(t_1_support, prev_weight, 1, time, prev_c_aggregate, prev_lambda_kw) 104 | 105 | PATH_QUEUE, PCNT = pickNewChildren(current, z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr, PATH_QUEUE, PCNT) 106 | 107 | 108 | #Now we propagate t+2 onwards ... 
109 | while true 110 | current = dequeue!(PATH_QUEUE) 111 | if current.depth == LOOKAHEAD_DELTA 112 | wARR = [] 113 | #terminate and return with weight 114 | #weight = exp(current.weight) 115 | wARR = myappend(wARR, current.weight) 116 | while length(PATH_QUEUE) > 0 117 | elm = dequeue!(PATH_QUEUE) 118 | if elm.depth != LOOKAHEAD_DELTA 119 | #return log(weight) 120 | break 121 | end 122 | #weight += exp(elm.weight) 123 | wARR = myappend(wARR, elm.weight) 124 | end 125 | #return log(weight) 126 | #println("ESCAPE: ", wARR) 127 | return logsumexp(wARR) 128 | end 129 | z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr = generateCandidateChildren(current.support, current.time, current.prev_c_aggregate, N, current.prev_lambda_kw) 130 | PATH_QUEUE, PCNT = pickNewChildren(current, z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr, PATH_QUEUE, PCNT) 131 | #println("*********") 132 | end 133 | end 134 | 135 | 136 | end 137 | -------------------------------------------------------------------------------- /DPMM_Variational/dataset.jl: -------------------------------------------------------------------------------- 1 | 2 | function dataset1() 3 | """theta = { 4 | [1,0,0], 5 | [0,1,0], 6 | [0,0,1]} 7 | """ 8 | theta={ 9 | [1/4,2/4,1/4], 10 | [1/3,1/3,1/3], 11 | [1/8,1/8,6/8]} #3 states """ 12 | 13 | pi = [1/2,1/2,0] 14 | V = 3 15 | NUM_TOPICS = 3 16 | return theta, pi, NUM_TOPICS, V 17 | end 18 | 19 | -------------------------------------------------------------------------------- /DPMM_Variational/gradient.jl: -------------------------------------------------------------------------------- 1 | 2 | using Debug 3 | 4 | @debug begin 5 | 6 | function gradient_v(sampled_cid, time, N, is_new_cid) 7 | alpha = hyperparameters["a"] 8 | v_sufficient_stats = 0 9 | 10 | if is_new_cid == true 11 | #creation of new cluster 12 | particles[time][N]["hidden_state"]["soft_v"][sampled_cid] = LRATE*(alpha) 13 | else 14 | prev_soft_v = 
particles[time-1][N]["hidden_state"]["soft_v"] 15 | particles[time][N]["hidden_state"]["soft_v"][sampled_cid] = prev_soft_v[sampled_cid] + LRATE*(-prev_soft_v[sampled_cid] + alpha + NUM_DOCS*v_sufficient_stats) 16 | end 17 | end 18 | 19 | 20 | function gradient_soft_lambda_u(sampled_cid, wordArr, posterior, time, N, is_new_cid) 21 | eta = hyperparameters["eta"] 22 | posterior = 1 23 | 24 | if is_new_cid == true 25 | particles[time][N]["hidden_state"]["soft_lambda"][sampled_cid] = Dict() 26 | for word=1:V 27 | particles[time][N]["hidden_state"]["soft_lambda"][sampled_cid][word] = LRATE*(eta + NUM_DOCS*wordArr[word]) 28 | end 29 | particles[time][N]["hidden_state"]["soft_u"][sampled_cid] = LRATE*(1 + NUM_DOCS) 30 | else 31 | prev_soft_lambda_kw = particles[time-1][N]["hidden_state"]["soft_lambda"] 32 | prev_soft_u = particles[time-1][N]["hidden_state"]["soft_u"] 33 | particles[time][N]["hidden_state"]["soft_lambda"][sampled_cid] = Dict() 34 | for word=1:V 35 | particles[time][N]["hidden_state"]["soft_lambda"][sampled_cid][word] = prev_soft_lambda_kw[sampled_cid][word] + LRATE*(-prev_soft_lambda_kw[sampled_cid][word] + eta + NUM_DOCS*(posterior*wordArr[word])) 36 | end 37 | particles[time][N]["hidden_state"]["soft_u"][sampled_cid] = prev_soft_u[sampled_cid] + LRATE*(-prev_soft_u[sampled_cid] + 1 + NUM_DOCS*posterior) 38 | end 39 | end 40 | 41 | 42 | 43 | function update_newcluster_statistics(sampled_cid, data, time, wordArr, posterior, N) 44 | ## create new lambda ## 45 | particles[time][N]["hidden_state"]["lambda"][sampled_cid] = Dict() 46 | for word = 1:V 47 | particles[time][N]["hidden_state"]["lambda"][sampled_cid][word] = hyperparameters["eta"] + wordArr[word] 48 | end 49 | #gradient_soft_lambda_u( sampled_cid, wordArr, posterior, time, N, true) 50 | #gradient_v(sampled_cid, time, N, true) 51 | end 52 | 53 | 54 | 55 | function update_existingcluster_statistics(sampled_cid, data, time, wordArr, posterior, N, lambda_statistics) 56 | for word=1:V 57 | 
particles[time][N]["hidden_state"]["lambda"][sampled_cid][word] = lambda_statistics[word] 58 | end 59 | #gradient_soft_lambda_u(sampled_cid, wordArr, posterior, time, N, false) 60 | #gradient_v(sampled_cid, time, N, false) 61 | end 62 | 63 | 64 | function update_all_not_chosen_ks(sampled_cid, support, time, N, max_root_support) 65 | eta = hyperparameters["eta"]; a = hyperparameters["a"] 66 | prev_soft_lambda = particles[time-1][N]["hidden_state"]["soft_lambda"] 67 | prev_soft_u = particles[time-1][N]["hidden_state"]["soft_u"] 68 | prev_soft_v = particles[time-1][N]["hidden_state"]["soft_v"] 69 | 70 | for cid in support 71 | if cid != sampled_cid && cid < max_root_support 72 | 73 | is_new_cid = (has(particles[time][N]["hidden_state"]["soft_lambda"], cid) == false) 74 | 75 | if is_new_cid == true 76 | particles[time][N]["hidden_state"]["soft_lambda"][cid] = Dict() 77 | prev_soft_u[cid] = 0 78 | prev_soft_v[cid] = 0 79 | end 80 | 81 | for word=1:V 82 | if is_new_cid == true 83 | particles[time][N]["hidden_state"]["soft_lambda"][cid][word] = LRATE*(eta) 84 | else 85 | particles[time][N]["hidden_state"]["soft_lambda"][cid][word] = prev_soft_lambda[cid][word] + LRATE*(-prev_soft_lambda[cid][word] + eta) 86 | end 87 | end 88 | 89 | particles[time][N]["hidden_state"]["soft_u"][cid]= prev_soft_u[cid] + LRATE*(-prev_soft_u[cid] + 1) 90 | particles[time][N]["hidden_state"]["soft_v"][cid]= prev_soft_v[cid] + LRATE*(-prev_soft_v[cid] + a) 91 | 92 | end 93 | end 94 | end 95 | 96 | 97 | 98 | end 99 | 100 | 101 | -------------------------------------------------------------------------------- /DPMM_Variational/picloud_runner.py: -------------------------------------------------------------------------------- 1 | 2 | import sys 3 | import pickle 4 | 5 | import cloud 6 | cloud.setkey(7513, api_secretkey='ca43a3535fa17e28b687f0f1691c67db261392ae') 7 | cloud_environment = 'Julia' 8 | 9 | """ 10 | number_of_clusters = int(sys.argv[1]) 11 | if_zero_shortlearning = sys.argv[2] # Should 
be "yes" or "no" 12 | experiment_name = sys.argv[3]""" 13 | 14 | # Usage: python picloud_runner.py 150 50 50 15 | 16 | 17 | TRIALS = int(sys.argv[1]) 18 | NUM_PARTICLES = int(sys.argv[2]) 19 | DELTA = int(sys.argv[3]) 20 | 21 | def run_on_instance(trial_id): 22 | global number_of_clusters 23 | global if_zero_shortlearning 24 | global experiment_name 25 | import subprocess 26 | import os 27 | os.environ['DISPLAY'] = ":1" 28 | print "Starting" 29 | ls_output = subprocess.Popen(["/home/picloud/julia/julia", "variational_runner.jl", str(NUM_PARTICLES), str(DELTA), str(trial_id)], \ 30 | cwd = "/home/picloud/DPMixtureModel/DPMM_Variational/", \ 31 | stdout=subprocess.PIPE, stderr=subprocess.PIPE) 32 | 33 | out, err = ls_output.communicate() 34 | return out 35 | 36 | #result = run_on_instance([1]) 37 | 38 | jids = cloud.map(run_on_instance, range(TRIALS), _env=cloud_environment, _type='c2', _cores=1) 39 | print jids 40 | result = cloud.result(jids) 41 | pickle.dump(result, open("150_20_50_result_variational_july_20_2013_particles_delta_path.pkl","wb")) 42 | print "RESULT:", result 43 | -------------------------------------------------------------------------------- /DPMM_Variational/plotter.py: -------------------------------------------------------------------------------- 1 | #tejask@mit.edu 2 | 3 | from pylab import imread,imshow,figure,show,subplot 4 | from numpy import reshape,uint8,flipud 5 | from scipy.cluster.vq import kmeans,vq 6 | import numpy 7 | import scipy.misc 8 | import Image 9 | import copy 10 | import glob 11 | import pdb 12 | import pylab 13 | import pickle 14 | from matplotlib import pyplot 15 | from matplotlib.backends.backend_pdf import PdfPages 16 | import math 17 | 18 | 19 | def saveAsPDF(fname,plot): 20 | pp = PdfPages(fname) 21 | pp.savefig(plot) 22 | pp.close() 23 | 24 | 25 | def genericPlot(X,Y,xlab,ylab,fname): 26 | f = pylab.figure() 27 | ax=f.add_subplot(111,title='') 28 | pyplot.plot( X,Y,'-',color='blue', linewidth=2) 29 | 
pyplot.xlabel(xlab,fontsize=30) 30 | pyplot.ylabel(ylab,fontsize=30) 31 | pylab.savefig(fname+'.png') 32 | #ax.grid(True) 33 | saveAsPDF(fname+'.pdf',f) 34 | 35 | 36 | 37 | 38 | fname = '150_50_50_result_variational_july_20_2013_particles_delta_path' 39 | data = pickle.load(open(fname+".pkl","rb")) 40 | 41 | f = pylab.figure() 42 | ax=f.add_subplot(111,title='') 43 | X=[] 44 | Y=[] 45 | CNT=0 46 | nolookArr =[] 47 | lookArr = [] 48 | for i in range(len(data)): 49 | if len(data[i]) > 0 and i != 47: 50 | nolook = float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[0]) 51 | look = float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[1]) 52 | nolookArr.append(nolook) 53 | lookArr.append(look) 54 | X.append(i) 55 | Y.append(look-nolook) 56 | if look > nolook: 57 | CNT+=1 58 | 59 | print len(data), CNT 60 | print 'Average (Look):', sum(lookArr)/len(data) 61 | print 'Average (Nolook):', sum(nolookArr)/len(data) 62 | 63 | ax.bar(X,Y,0.4,color='black') 64 | 65 | pylab.xlabel('Run Number',fontsize=30) 66 | pylab.ylabel('VSCORE Difference',fontsize=30) 67 | pylab.savefig(fname+'.png') 68 | #pylab.ylim([-0.35, 0.35]) 69 | #ax.grid(True) 70 | saveAsPDF(fname+'.pdf',f) 71 | 72 | 73 | -------------------------------------------------------------------------------- /DPMM_Variational_full_support/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Variational_full_support/.DS_Store -------------------------------------------------------------------------------- /DPMM_Variational_full_support/RESULTS/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Variational_full_support/RESULTS/.DS_Store 
-------------------------------------------------------------------------------- /DPMM_Variational_full_support/RESULTS/aug12_2013_test1/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Variational_full_support/RESULTS/aug12_2013_test1/.DS_Store -------------------------------------------------------------------------------- /DPMM_Variational_full_support/branching_lookahead.jl: -------------------------------------------------------------------------------- 1 | #tejasdkulkarni@gmail.com | tejask@mit.edu 2 | 3 | type node 4 | support 5 | weight 6 | depth 7 | time 8 | prev_c_aggregate 9 | prev_lambda_kw 10 | end 11 | 12 | using Debug 13 | using NumericExtensions 14 | @debug begin 15 | 16 | function pickNewChildren(current, z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr, PATH_QUEUE, PCNT) 17 | # Now choose 'p' children and add to queue 18 | time = current.time 19 | DEPTH = current.depth 20 | 21 | if ENUMERATION == 1 22 | normalizing_constant = sum(z_posterior_array_probability) 23 | z_posterior_array_probability /= normalizing_constant 24 | 25 | for ind=1:length(z_posterior_array_cid) 26 | child_support = unique(myappend(current.support, z_posterior_array_cid[ind])) 27 | child_c_aggregate = myappend(current.prev_c_aggregate, z_posterior_array_cid[ind]) 28 | weight = z_posterior_array_probability[ind] 29 | child = node(unique(child_support), current.weight+weight, DEPTH+1, time+1, child_c_aggregate) 30 | enqueue!(PATH_QUEUE, child, PCNT) 31 | PCNT+=1 32 | end 33 | else 34 | #print("ADDING: ") 35 | for p=1:INTEGRAL_PATHS 36 | #println(z_posterior_array_probability, " ", PCNT) 37 | weight, sampled_cid = sample_cid(z_posterior_array_probability, z_posterior_array_cid) 38 | if sampled_cid == max(current.support) 39 | child_support = myappend(current.support, sampled_cid+1) 40 | else 41 | 
#indx=findin(current.support, max(current.support))[1] 42 | #delete!(current.support, indx) 43 | child_support = deepcopy(current.support) 44 | end 45 | # Adding cluster for each data point until current time for child 46 | child_c_aggregate = myappend(current.prev_c_aggregate, sampled_cid) 47 | #print(" (curr:",current.weight, " weight:",weight,") ") 48 | child = node(unique(child_support), current.weight+weight, DEPTH+1, time+1, child_c_aggregate, deepcopy(lambda_kw_arr[sampled_cid])) 49 | enqueue!(PATH_QUEUE, child, PCNT) 50 | PCNT+=1 51 | end 52 | #println() 53 | end 54 | return PATH_QUEUE, PCNT 55 | end 56 | 57 | 58 | function generateCandidateChildren(current_support, time, prev_c_aggregate, N, prev_lambda_kw) 59 | z_posterior_array_probability = [] 60 | z_posterior_array_cid = [] 61 | lambda_kw_arr = [] 62 | for j in current_support 63 | 64 | current_c_aggregate = myappend(prev_c_aggregate, j) 65 | zj_probability, lambda_kw = get_posterior_zj(j, current_c_aggregate, time, N, current_support, 1,prev_lambda_kw) 66 | lambda_kw_arr = myappend(lambda_kw_arr, lambda_kw) 67 | 68 | """println("-=-=-[ ", j ," ]=-=-=-") 69 | println(prev_lambda_kw) 70 | println(lambda_kw) 71 | println("-=-=-=-=-=-")""" 72 | 73 | z_posterior_array_probability = myappend(z_posterior_array_probability, zj_probability) 74 | z_posterior_array_cid = myappend(z_posterior_array_cid, j) 75 | end 76 | #println("L [time:",time,"]", " ", z_posterior_array_probability) 77 | return z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr 78 | end 79 | 80 | 81 | 82 | function get_weight_lookahead(prev_weight, prev_support, prev_c_aggregate, time, prev_cid, N, prev_lambda_kw) 83 | 84 | if LOOKAHEAD_DELTA == 0 || LOOKAHEAD_DELTA == 1 85 | return prev_weight 86 | end 87 | 88 | PATH_QUEUE = PriorityQueue() 89 | PCNT = 1 90 | 91 | #time is already t+1 92 | if prev_cid == max(prev_support) 93 | t_1_support = unique(myappend(prev_support, prev_cid + 1)) 94 | else 95 | t_1_support = 
deepcopy(prev_support) 96 | end 97 | 98 | """println("====================[LAMBDA FROM TOP LEVEL]====================") 99 | println(prev_lambda_kw) 100 | println("====================[[get_weight_lookahead time:", time ," prev_cid: ", prev_cid ,"]]====================")""" 101 | 102 | z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr = generateCandidateChildren(t_1_support, time, prev_c_aggregate, N, prev_lambda_kw) 103 | current = node(t_1_support, prev_weight, 1, time, prev_c_aggregate, prev_lambda_kw) 104 | 105 | PATH_QUEUE, PCNT = pickNewChildren(current, z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr, PATH_QUEUE, PCNT) 106 | 107 | 108 | #Now we propagate t+2 onwards ... 109 | while true 110 | current = dequeue!(PATH_QUEUE) 111 | if current.depth == LOOKAHEAD_DELTA 112 | wARR = [] 113 | #terminate and return with weight 114 | #weight = exp(current.weight) 115 | wARR = myappend(wARR, current.weight) 116 | while length(PATH_QUEUE) > 0 117 | elm = dequeue!(PATH_QUEUE) 118 | if elm.depth != LOOKAHEAD_DELTA 119 | #return log(weight) 120 | break 121 | end 122 | #weight += exp(elm.weight) 123 | wARR = myappend(wARR, elm.weight) 124 | end 125 | #return log(weight) 126 | #println("ESCAPE: ", wARR) 127 | return logsumexp(wARR) 128 | end 129 | z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr = generateCandidateChildren(current.support, current.time, current.prev_c_aggregate, N, current.prev_lambda_kw) 130 | PATH_QUEUE, PCNT = pickNewChildren(current, z_posterior_array_probability, z_posterior_array_cid, lambda_kw_arr, PATH_QUEUE, PCNT) 131 | #println("*********") 132 | end 133 | end 134 | 135 | 136 | end 137 | -------------------------------------------------------------------------------- /DPMM_Variational_full_support/dataset.jl: -------------------------------------------------------------------------------- 1 | 2 | function dataset1() 3 | """theta = { 4 | [1,0,0], 5 | [0,1,0], 6 | [0,0,1]} 7 | """ 8 | 
"""theta={ 9 | [1/4,2/4,1/4], 10 | [1/3,1/3,1/3], 11 | [2/5,2/5,1/5]} #3 states 12 | 13 | pi = [4/10,3/10,3/10] 14 | V = 3 15 | NUM_TOPICS = 3""" 16 | 17 | V = 20 18 | theta = Dict() 19 | for i=1:V 20 | theta[i] = rand(Dirichlet([2,2,2])) 21 | end 22 | 23 | pi = rand(Dirichlet(zeros(V)+2)) 24 | 25 | return theta, pi, V 26 | end 27 | 28 | -------------------------------------------------------------------------------- /DPMM_Variational_full_support/gradient.jl: -------------------------------------------------------------------------------- 1 | 2 | using Debug 3 | 4 | @debug begin 5 | 6 | function gradient_v(sampled_cid, time, N, is_new_cid) 7 | alpha = hyperparameters["a"] 8 | v_sufficient_stats = 0 9 | 10 | if is_new_cid == true 11 | #creation of new cluster 12 | particles[time][N]["hidden_state"]["soft_v"][sampled_cid] = LRATE*(alpha) 13 | else 14 | prev_soft_v = particles[time-1][N]["hidden_state"]["soft_v"] 15 | particles[time][N]["hidden_state"]["soft_v"][sampled_cid] = prev_soft_v[sampled_cid] + LRATE*(-prev_soft_v[sampled_cid] + alpha + NUM_DOCS*v_sufficient_stats) 16 | end 17 | end 18 | 19 | 20 | function gradient_soft_lambda_u(sampled_cid, wordArr, posterior, time, N, is_new_cid) 21 | eta = hyperparameters["eta"] 22 | posterior = 1 23 | 24 | if is_new_cid == true 25 | particles[time][N]["hidden_state"]["soft_lambda"][sampled_cid] = Dict() 26 | for word=1:V 27 | particles[time][N]["hidden_state"]["soft_lambda"][sampled_cid][word] = LRATE*(eta + NUM_DOCS*wordArr[word]) 28 | end 29 | particles[time][N]["hidden_state"]["soft_u"][sampled_cid] = LRATE*(1 + NUM_DOCS) 30 | else 31 | prev_soft_lambda_kw = particles[time-1][N]["hidden_state"]["soft_lambda"] 32 | prev_soft_u = particles[time-1][N]["hidden_state"]["soft_u"] 33 | particles[time][N]["hidden_state"]["soft_lambda"][sampled_cid] = Dict() 34 | for word=1:V 35 | particles[time][N]["hidden_state"]["soft_lambda"][sampled_cid][word] = prev_soft_lambda_kw[sampled_cid][word] + 
LRATE*(-prev_soft_lambda_kw[sampled_cid][word] + eta + NUM_DOCS*(posterior*wordArr[word])) 36 | end 37 | particles[time][N]["hidden_state"]["soft_u"][sampled_cid] = prev_soft_u[sampled_cid] + LRATE*(-prev_soft_u[sampled_cid] + 1 + NUM_DOCS*posterior) 38 | end 39 | end 40 | 41 | 42 | 43 | function update_newcluster_statistics(sampled_cid, data, time, wordArr, posterior, N) 44 | ## create new lambda ## 45 | 46 | particles[time][N]["hidden_state"]["lambda"][sampled_cid] = Dict() 47 | for word = 1:V 48 | particles[time][N]["hidden_state"]["lambda"][sampled_cid][word] = hyperparameters["eta"] + wordArr[word] 49 | end 50 | #gradient_soft_lambda_u( sampled_cid, wordArr, posterior, time, N, true) 51 | #gradient_v(sampled_cid, time, N, true) 52 | end 53 | 54 | 55 | 56 | function update_existingcluster_statistics(sampled_cid, data, time, wordArr, posterior, N, lambda_statistics) 57 | for word=1:V 58 | particles[time][N]["hidden_state"]["lambda"][sampled_cid][word] = lambda_statistics[word] 59 | end 60 | #gradient_soft_lambda_u(sampled_cid, wordArr, posterior, time, N, false) 61 | #gradient_v(sampled_cid, time, N, false) 62 | end 63 | 64 | 65 | function update_all_not_chosen_ks(sampled_cid, support, time, N, max_root_support) 66 | eta = hyperparameters["eta"]; a = hyperparameters["a"] 67 | prev_soft_lambda = particles[time-1][N]["hidden_state"]["soft_lambda"] 68 | prev_soft_u = particles[time-1][N]["hidden_state"]["soft_u"] 69 | prev_soft_v = particles[time-1][N]["hidden_state"]["soft_v"] 70 | 71 | for cid in support 72 | if cid != sampled_cid && cid < max_root_support 73 | 74 | is_new_cid = (has(particles[time][N]["hidden_state"]["soft_lambda"], cid) == false) 75 | 76 | if is_new_cid == true 77 | particles[time][N]["hidden_state"]["soft_lambda"][cid] = Dict() 78 | prev_soft_u[cid] = 0 79 | prev_soft_v[cid] = 0 80 | end 81 | 82 | for word=1:V 83 | if is_new_cid == true 84 | particles[time][N]["hidden_state"]["soft_lambda"][cid][word] = LRATE*(eta) 85 | else 86 | 
particles[time][N]["hidden_state"]["soft_lambda"][cid][word] = prev_soft_lambda[cid][word] + LRATE*(-prev_soft_lambda[cid][word] + eta) 87 | end 88 | end 89 | 90 | particles[time][N]["hidden_state"]["soft_u"][cid]= prev_soft_u[cid] + LRATE*(-prev_soft_u[cid] + 1) 91 | particles[time][N]["hidden_state"]["soft_v"][cid]= prev_soft_v[cid] + LRATE*(-prev_soft_v[cid] + a) 92 | 93 | end 94 | end 95 | end 96 | 97 | 98 | 99 | end 100 | 101 | 102 | -------------------------------------------------------------------------------- /DPMM_Variational_full_support/maxfilter.jl: -------------------------------------------------------------------------------- 1 | 2 | using Debug 3 | @debug begin 4 | 5 | #Tejas D K : tejask@mit.edu 6 | function maxFilter(particles_t, particles_t_minus_1, maxfilter_probability_array, maxfilter_cid_array, maxfilter_particle_struct, NUM_PARTICLES) 7 | #Algorithm proposed by Sam Gershman 8 | perm = sortperm(maxfilter_probability_array, Sort.Reverse) 9 | maxfilter_cid_array = maxfilter_cid_array[perm] 10 | maxfilter_particle_struct = maxfilter_particle_struct[perm] 11 | 12 | #println(maxfilter_cid_array) 13 | for i=1:NUM_PARTICLES 14 | state=Dict() 15 | sampled_cid = maxfilter_cid_array[i] 16 | state["c"] = sampled_cid 17 | state["c_aggregate"] = myappend(particles_t_minus_1[maxfilter_particle_struct[i]]["hidden_state"]["c_aggregate"], sampled_cid) 18 | particles_t[i]["hidden_state"]=state 19 | end 20 | end 21 | 22 | 23 | function stratifiedMaxFiltering(time, particles_t, particles_t_minus_1, maxfilter_probability_array, maxfilter_cid_array, maxfilter_particle_struct, NUM_PARTICLES, log_maxfilter_probability_array, support_array_putative) 24 | 25 | 26 | perm = sortperm(maxfilter_probability_array, Sort.Reverse) 27 | hash = Dict() 28 | record_indx = [] 29 | for i = 1:length(perm) 30 | if haskey(hash, maxfilter_cid_array[perm[i]]) == false 31 | hash[maxfilter_cid_array[perm[i]]] = perm[i] 32 | record_indx = myappend(record_indx, perm[i]) 33 | end 34 | end 
35 | 36 | len_rind = length(record_indx) 37 | pind=1 38 | 39 | for i = 1:NUM_PARTICLES 40 | if pind > len_rind 41 | pind=1 42 | end 43 | sampled_indx = record_indx[pind] 44 | pind+=1 45 | 46 | particles_t[i]["hidden_state"]["c"] = maxfilter_cid_array[sampled_indx] 47 | particles_t[i]["hidden_state"]["sampled_indx"] = sampled_indx 48 | particles_t[i]["hidden_state"]["c_aggregate"] = myappend(particles_t_minus_1[maxfilter_particle_struct[sampled_indx]]["hidden_state"]["c_aggregate"], particles_t[i]["hidden_state"]["c"]) 49 | particles_t[i]["weight"] = log_maxfilter_probability_array[sampled_indx] 50 | particles_t[i]["max_support"] = support_array_putative[sampled_indx] ####################################################### 51 | 52 | particles_t[i]["hidden_state"]["lambda"] = deepcopy(particles_t_minus_1[maxfilter_particle_struct[sampled_indx]]["hidden_state"]["lambda"]) 53 | particles_t[i]["hidden_state"]["soft_lambda"] = deepcopy(particles_t_minus_1[maxfilter_particle_struct[sampled_indx]]["hidden_state"]["soft_lambda"]) 54 | particles_t[i]["hidden_state"]["soft_u"] = deepcopy(particles_t_minus_1[maxfilter_particle_struct[sampled_indx]]["hidden_state"]["soft_u"]) 55 | particles_t[i]["hidden_state"]["soft_v"] = deepcopy(particles_t_minus_1[maxfilter_particle_struct[sampled_indx]]["hidden_state"]["soft_v"]) 56 | end 57 | 58 | """ 59 | perm = sortperm(maxfilter_probability_array, Sort.Reverse) 60 | maxfilter_cid_array = maxfilter_cid_array[perm] 61 | maxfilter_particle_struct = maxfilter_particle_struct[perm] 62 | log_maxfilter_probability_array = log_maxfilter_probability_array[perm] 63 | support_array_putative = support_array_putative[perm] 64 | 65 | particles_t[1]["hidden_state"]["c"] = maxfilter_cid_array[1] 66 | particles_t[1]["hidden_state"]["sampled_indx"] = perm[1] ####################################################### 67 | particles_t[1]["hidden_state"]["c_aggregate"] = 
myappend(particles_t_minus_1[maxfilter_particle_struct[1]]["hidden_state"]["c_aggregate"], particles_t[1]["hidden_state"]["c"]) 68 | 69 | particles_t[1]["weight"] = log_maxfilter_probability_array[1] ####################################################### 70 | particles_t[1]["max_support"] = support_array_putative[1] ####################################################### 71 | 72 | particle_cnt = 2 73 | unique_indices = [] 74 | perm_index = [] 75 | 76 | for i=2:length(maxfilter_cid_array) 77 | last = maxfilter_cid_array[i-1] 78 | cur = maxfilter_cid_array[i] 79 | if cur != last 80 | if particle_cnt > NUM_PARTICLES 81 | break 82 | else 83 | unique_indices = myappend(unique_indices, i) 84 | perm_index = myappend(perm_index, perm[i]) 85 | particles_t[particle_cnt]["hidden_state"]["c"] = maxfilter_cid_array[i] 86 | particles_t[particle_cnt]["hidden_state"]["sampled_indx"] = perm[i] ####################################################### 87 | particles_t[particle_cnt]["hidden_state"]["c_aggregate"] = myappend(particles_t_minus_1[maxfilter_particle_struct[i]]["hidden_state"]["c_aggregate"], particles_t[particle_cnt]["hidden_state"]["c"]) 88 | 89 | particles_t[particle_cnt]["weight"] = log_maxfilter_probability_array[i] ####################################################### 90 | particles_t[particle_cnt]["max_support"] = support_array_putative[i] ####################################################### 91 | particle_cnt+=1 92 | end 93 | end 94 | i+=1 95 | end 96 | 97 | if NUM_PARTICLES >= particle_cnt 98 | len_unique_indices = length(unique_indices) 99 | for p=particle_cnt:NUM_PARTICLES 100 | state=Dict() 101 | #indx = unique_indices[p%len_unique_indices + 1] 102 | #indx = unique_indices[randi(len_unique_indices)] 103 | indx = unique_indices[1] 104 | perm_indx = perm_index[1] 105 | 106 | particles_t[p]["hidden_state"]["c"] = maxfilter_cid_array[indx] 107 | particles_t[p]["hidden_state"]["sampled_indx"] = perm_indx ####################################################### 
108 | particles_t[p]["hidden_state"]["c_aggregate"] = myappend(particles_t_minus_1[maxfilter_particle_struct[indx]]["hidden_state"]["c_aggregate"], particles_t[p]["hidden_state"]["c"]) 109 | 110 | particles_t[p]["weight"] = log_maxfilter_probability_array[indx] ####################################################### 111 | particles_t[p]["max_support"] = support_array_putative[indx] ####################################################### 112 | end 113 | end 114 | 115 | """ 116 | end 117 | 118 | 119 | end -------------------------------------------------------------------------------- /DPMM_Variational_full_support/media/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/DPMM_Variational_full_support/media/.DS_Store -------------------------------------------------------------------------------- /DPMM_Variational_full_support/picloud_runner.py: -------------------------------------------------------------------------------- 1 | 2 | import sys 3 | import pickle 4 | import datetime 5 | 6 | import cloud 7 | cloud.setkey(7513, api_secretkey='ca43a3535fa17e28b687f0f1691c67db261392ae') 8 | cloud_environment = 'Julia' 9 | 10 | """ 11 | number_of_clusters = int(sys.argv[1]) 12 | if_zero_shortlearning = sys.argv[2] # Should be "yes" or "no" 13 | experiment_name = sys.argv[3]""" 14 | 15 | # Usage: python picloud_runner.py 100 1 5 & 16 | 17 | 18 | TRIALS = int(sys.argv[1]) 19 | NUM_PARTICLES = int(sys.argv[2]) 20 | DELTA = int(sys.argv[3]) 21 | 22 | 23 | def run_on_instance(trial_id): 24 | global number_of_clusters 25 | global if_zero_shortlearning 26 | global experiment_name 27 | import subprocess 28 | import os 29 | os.environ['DISPLAY'] = ":1" 30 | print "Starting" 31 | ls_output = subprocess.Popen(["/home/picloud/julia/julia", "variational_runner.jl", str(NUM_PARTICLES), str(DELTA), str(trial_id)], \ 32 | cwd = 
"/home/picloud/DPMixtureModel/DPMM_Variational_full_support/", \ 33 | stdout=subprocess.PIPE, stderr=subprocess.PIPE) 34 | 35 | out, err = ls_output.communicate() 36 | return out 37 | 38 | #result = run_on_instance([1]) 39 | 40 | jids = cloud.map(run_on_instance, range(TRIALS), _env=cloud_environment, _type='c2', _cores=1) 41 | print jids 42 | result = cloud.result(jids) 43 | pickle.dump(result, open(str(datetime.datetime.now())+"_TRIALS_"+str(TRIALS)+ "_PARTICLES_"+ str(NUM_PARTICLES)+ "_DELTA_" + str(DELTA) +".pkl","wb")) 44 | print "RESULT:", result 45 | -------------------------------------------------------------------------------- /DPMM_Variational_full_support/plotter.py: -------------------------------------------------------------------------------- 1 | #tejask@mit.edu 2 | 3 | from pylab import imread,imshow,figure,show,subplot 4 | from numpy import reshape,uint8,flipud 5 | from scipy.cluster.vq import kmeans,vq 6 | import numpy 7 | import scipy.misc 8 | import Image 9 | import copy 10 | import glob 11 | import pdb 12 | import pylab 13 | import pickle 14 | from matplotlib import pyplot 15 | from matplotlib.backends.backend_pdf import PdfPages 16 | import math 17 | 18 | 19 | def saveAsPDF(fname,plot): 20 | pp = PdfPages(fname) 21 | pp.savefig(plot) 22 | pp.close() 23 | 24 | 25 | def genericPlot(X,Y,xlab,ylab,fname): 26 | f = pylab.figure() 27 | ax=f.add_subplot(111,title='') 28 | pyplot.plot( X,Y,'-',color='blue', linewidth=2) 29 | pyplot.xlabel(xlab,fontsize=30) 30 | pyplot.ylabel(ylab,fontsize=30) 31 | pylab.savefig(fname+'.png') 32 | #ax.grid(True) 33 | saveAsPDF(fname+'.pdf',f) 34 | 35 | 36 | 37 | 38 | fname = 'RESULTS/aug11_13/TRIALS_100_PARTICLES_1_DELTA_15' 39 | data = pickle.load(open(fname+".pkl","rb")) 40 | 41 | f = pylab.figure() 42 | ax=f.add_subplot(111,title='') 43 | X=[] 44 | Y=[] 45 | CNT=0 46 | nolookArr =[] 47 | lookArr = [] 48 | for i in range(len(data)): 49 | print i, 50 | if len(data[i]) > 0: 51 | nolook = 
float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[0]) 52 | look = float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[1]) 53 | nolookArr.append(nolook) 54 | lookArr.append(look) 55 | X.append(i) 56 | Y.append(look-nolook) 57 | if look > nolook: 58 | CNT+=1 59 | print 'nolook:', nolook, ' look:', look 60 | 61 | print len(data), CNT 62 | print 'Average (Look):', sum(lookArr)/len(data) 63 | print 'Average (Nolook):', sum(nolookArr)/len(data) 64 | 65 | ax.bar(X,Y,0.4,color='black') 66 | 67 | pylab.xlabel('Run Number',fontsize=30) 68 | pylab.ylabel('VSCORE Difference',fontsize=30) 69 | pylab.savefig(fname+'.png') 70 | #pylab.ylim([-0.35, 0.35]) 71 | #ax.grid(True) 72 | saveAsPDF(fname+'.pdf',f) 73 | 74 | 75 | -------------------------------------------------------------------------------- /IHMM_MaxFilter/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/IHMM_MaxFilter/.DS_Store -------------------------------------------------------------------------------- /IHMM_MaxFilter/ComputeInferenceError.jl: -------------------------------------------------------------------------------- 1 | 2 | using Debug 3 | using PyCall 4 | #require("HemmingDistance.py") 5 | @pyimport HemmingDistance as hm 6 | # @pyimport munkres as mk 7 | # matrix = {{5, 9, 1}, 8 | # {10, 3, 2}, 9 | # {8, 7, 4}} 10 | # t = hm.teest(matrix) 11 | 12 | @debug begin 13 | 14 | 15 | function encodeAlphabet(seq) 16 | alphabet = unique(seq) 17 | code_dict = Dict() 18 | index_to_alphabet_dict = Dict() 19 | for i = 1:length(alphabet) 20 | code_dict[alphabet[i]] = i 21 | index_to_alphabet_dict[i] = alphabet[i] 22 | end 23 | return {"code_dict" => code_dict, "index_to_alphabet_dict" => index_to_alphabet_dict} 24 | end 25 | 26 | function buildProfitMatrix(seq_true, seq_inferred) 27 | unique_inferred = unique(seq_inferred) 28 | unique_true 
= unique(seq_true) 29 | profit_matrix = zeros(length(unique_inferred), length(unique_true)) 30 | tru_dict = encodeAlphabet(seq_true)["code_dict"] 31 | inf_dict = encodeAlphabet(seq_inferred)["code_dict"] 32 | 33 | for inf in unique_inferred 34 | for tru in unique_true 35 | replaced_indices = findin(seq_inferred, inf) 36 | for k in replaced_indices 37 | if seq_true[k] == tru 38 | profit_matrix[inf_dict[inf], tru_dict[tru]] += 1 39 | end 40 | end 41 | end 42 | end 43 | 44 | return profit_matrix 45 | 46 | end 47 | 48 | 49 | function computeError(seq_inferred, seq_true) 50 | matrix = buildProfitMatrix(seq_true, seq_inferred) 51 | indices = hm.teest(matrix) 52 | tru_dict = encodeAlphabet(seq_true)["index_to_alphabet_dict"] 53 | inf_dict = encodeAlphabet(seq_inferred)["index_to_alphabet_dict"] 54 | modified_indices = [] 55 | # @bp 56 | for pair in indices 57 | encoded_index = pair[1] + 1 58 | target_encoded_index = pair[2] + 1 59 | alphabet = inf_dict[encoded_index] 60 | target_alphabet = tru_dict[target_encoded_index] 61 | for i = 1:length(seq_true) 62 | if seq_inferred[i] == alphabet && contains(modified_indices, i) == false 63 | seq_inferred[i] = target_alphabet 64 | modified_indices = vcat(modified_indices, i) 65 | end 66 | end 67 | end 68 | 69 | #println(seq_inferred) 70 | count_error = 0 71 | for t = 1:length(seq_true) 72 | if seq_true[t] != seq_inferred[t] 73 | count_error += 1 74 | end 75 | end 76 | return count_error 77 | end 78 | 79 | 80 | 81 | # seq_inferred = [1,1,1,3,3,3] 82 | # seq_true = [1,1,3,3,4,4] 83 | 84 | # matrix = buildProfitMatrix(seq_true, seq_inferred) 85 | 86 | 87 | # indices = hm.teest(matrix) 88 | # println(indices) 89 | # inferred = computeError(indices, seq_inferred, seq_true) 90 | # println(inferred) 91 | 92 | 93 | end 94 | 95 | 96 | -------------------------------------------------------------------------------- /IHMM_MaxFilter/GenerateData.jl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 
| using Distributions 5 | using Debug 6 | using PyCall 7 | #require("BasicHerding.jl") 8 | # @pyimport numpy.random as nr 9 | # println(nr.rand(3,4)) 10 | # @pyimport scipy.optimize as so 11 | # so.newton(x -> cos(x) - x, 1) 12 | # @pyimport matplotlib.pylab as plt 13 | 14 | # plt.plot(x, y) 15 | # plt.savefig("foo.png", bbox_inches=0) 16 | #plt.show() 17 | @debug begin 18 | 19 | ##################PARAMETERS################### 20 | 21 | 22 | LENGTH_SEQ = 50 23 | 24 | transition_matrix = [[0.8, 0.1, 0.05, 0.05] [0.05, 0.8, 0.1, 0.05] [0.05, 0.05, 0.8, 0.1] [0.1, 0.05, 0.05, 0.8]] 25 | emission_matrix = [[0.8, 0.1, 0.05, 0.05] [0.05, 0.8, 0.1, 0.05] [0.05, 0.05, 0.8, 0.1] [0.1, 0.05, 0.05, 0.8]] 26 | initial_matrix = [0.25, 0.25, 0.25, 0.25] 27 | 28 | 29 | 30 | 31 | ##########GENERATING DATA FUNCTIONS 32 | 33 | function transition(currentState) 34 | sample_arr = rand(Multinomial(1, (transition_matrix)[:, currentState])) 35 | nxtState = findin(sample_arr, 1)[1] 36 | return nxtState 37 | end 38 | 39 | 40 | function emission(currentState) 41 | sample_arr = rand(Multinomial(1, (emission_matrix)[:, currentState])) 42 | currentObs = findin(sample_arr, 1)[1] 43 | return currentObs 44 | end 45 | 46 | function initialize() 47 | sample_arr = rand(Multinomial(1, initial_matrix)) 48 | nxtState = findin(sample_arr, 1)[1] 49 | return nxtState 50 | end 51 | #############GENERATE DATA######################### 52 | 53 | function main_generate(seed) 54 | 55 | srand(seed) 56 | hidden_state_seq = zeros(LENGTH_SEQ) 57 | observation_seq = zeros(LENGTH_SEQ) 58 | 59 | hidden_state_seq[1] = initialize() 60 | observation_seq[1] = emission(hidden_state_seq[1]) 61 | for t = 2:LENGTH_SEQ 62 | hidden_state_seq[t] = transition(hidden_state_seq[t-1]) 63 | observation_seq[t] = emission(hidden_state_seq[t]) 64 | end 65 | 66 | 67 | println(hidden_state_seq) 68 | println(observation_seq) 69 | return {"hid" => hidden_state_seq, "obs" => observation_seq} 70 | end 71 | 72 | 73 | 74 | 75 | end #debug 76 | 
77 | 78 | -------------------------------------------------------------------------------- /IHMM_MaxFilter/HemmingDistance.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | from munkres import Munkres, print_matrix 6 | 7 | 8 | def teest(): 9 | matrix = [[5, 9, 1], 10 | [10, 3, 2], 11 | [8, 7, 4]] 12 | m = Munkres() 13 | indexes = m.compute(matrix) 14 | print_matrix(matrix, msg='Lowest cost through this matrix:') 15 | total = 0 16 | for row, column in indexes: 17 | value = matrix[row][column] 18 | total += value 19 | return total 20 | -------------------------------------------------------------------------------- /IHMM_MaxFilter/HemmingDistance.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/IHMM_MaxFilter/HemmingDistance.pyc -------------------------------------------------------------------------------- /IHMM_MaxFilter/RunnerMaxFilterIhmm.jl: -------------------------------------------------------------------------------- 1 | using Distributions 2 | using Debug 3 | using PyCall 4 | 5 | require("maxFilterihmm.jl") 6 | @debug begin 7 | NUM_SEEDS = 100 8 | maxFilter_error_vect = zeros(NUM_SEEDS) 9 | smc_error_vect = zeros(NUM_SEEDS) 10 | 11 | for seed = 1:NUM_SEEDS 12 | 13 | errors_dict = main(seed) 14 | println(errors_dict) 15 | maxFilter_error_vect[seed] = errors_dict["max_filter_error"] 16 | smc_error_vect[seed] = errors_dict["SMC_error"] 17 | println("seed ", seed) 18 | println("smc error ", errors_dict["SMC_error"] ) 19 | println("maxfilter error ", errors_dict["max_filter_error"]) 20 | println() 21 | end 22 | 23 | mean_diff = -sum(maxFilter_error_vect - smc_error_vect)/NUM_SEEDS 24 | println(mean_diff) 25 | end 26 | 27 | -------------------------------------------------------------------------------- /IHMM_MaxFilter/alice_dataprep.jl: 
-------------------------------------------------------------------------------- 1 | # #Alice in Wonderland - CH1 2 | 3 | # function get_AW_dataset(seed) 4 | # fp = open("alice_in_wonderland.txt") 5 | # data = readall(fp) 6 | # data = lowercase(data) 7 | # data=split(data,"") 8 | 9 | # LENGTH_SEQ = 500 10 | # hidden_state_seq = zeros(LENGTH_SEQ) 11 | # observation_seq = zeros(LENGTH_SEQ) 12 | 13 | # tindx=1 14 | # test_dict = Dict() 15 | # for i=1:length(data) 16 | # if data[i] != "!" && data[i] != "'" && data[i] != "—" && data[i] != "?" && 17 | # data[i] != "-" && data[i] != "." && data[i] != " " && data[i] != "\n" && 18 | # data[i] != "," && data[i] != ")" && data[i] != "(" && data[i] != ":" && data[i] != ";" 19 | 20 | # if tindx <= LENGTH_SEQ 21 | # observation_seq[tindx] = data[i][1] - 97 + 1 22 | # hidden_state_seq[tindx] = -1 23 | # tindx += 1 24 | # test_dict[data[i]] = 1 25 | # else 26 | # break 27 | # end 28 | # end 29 | # end 30 | 31 | # # println(test_dict) 32 | # # println(length(test_dict)) 33 | 34 | 35 | # # NUM_OBS = length(test_dict) 36 | # NUM_OBS = 26 37 | # return {"hid" => hidden_state_seq, "obs" => observation_seq}, NUM_OBS 38 | 39 | # end 40 | 41 | 42 | # get_AW_dataset(0) 43 | 44 | #Alice in Wonderland - CH1 45 | 46 | function get_AW_dataset(seed, _start, _end) 47 | fp = open("alice_in_wonderland.txt") 48 | data = readall(fp) 49 | data = lowercase(data) 50 | data=split(data,"") 51 | 52 | LENGTH_SEQ = _end - _start#5000 53 | hidden_state_seq = zeros(LENGTH_SEQ) 54 | observation_seq = zeros(LENGTH_SEQ) 55 | 56 | tindx=1 57 | test_dict = Dict() 58 | <<<<<<< HEAD 59 | for i=_start:_end#length(data) 60 | ======= 61 | for i=1:LENGTH_SEQ#length(data) 62 | >>>>>>> 129634c0c2fd332d6c98b1fa729b28950bc33b33 63 | if data[i] != "!" && data[i] != "'" && data[i] != "—" && data[i] != "?" && 64 | data[i] != "-" && data[i] != "." 
&& data[i] != " " && data[i] != "\n" && 65 | data[i] != "," && data[i] != ")" && data[i] != "(" && data[i] != ":" && data[i] != ";" 66 | 67 | if tindx <= LENGTH_SEQ 68 | observation_seq[tindx] = data[i][1] - 97 + 1 69 | hidden_state_seq[tindx] = -1 70 | tindx += 1 71 | test_dict[data[i]] = 1 72 | else 73 | break 74 | end 75 | end 76 | end 77 | 78 | observation_seq = observation_seq[1:tindx-1] 79 | hidden_state_seq = hidden_state_seq[1:tindx - 1] 80 | # println(test_dict) 81 | # println(length(test_dict)) 82 | println(length(observation_seq)) 83 | println(sort(unique(observation_seq))) 84 | println(length(unique(observation_seq))) 85 | NUM_OBS = 26 86 | return {"hid" => hidden_state_seq, "obs" => observation_seq}, NUM_OBS 87 | 88 | end 89 | 90 | 91 | <<<<<<< HEAD 92 | get_AW_dataset(0, 1, 2000) 93 | ======= 94 | get_AW_dataset(0) 95 | >>>>>>> 129634c0c2fd332d6c98b1fa729b28950bc33b33 96 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ###Dirichlet Process Mixture Model - Gaussian Observations 2 | ============== 3 | 4 | This repo contains gibbs, SMC and Variational implementation of DPMM. Julia was partly used as a learning exercise but more importantly to explore speed up due to its LLVM-JIT compilation. 
5 | 6 | #### SMC Sampler 7 | To run: 8 | - Julia DPMM_SMC/runner.jl 9 | 10 | #### Max Filtering 11 | To run: 12 | - Julia DPMM_MaxFilter/runner.jl 13 | 14 | #### Variational Particle Filtering for lookahead 15 | To run: 16 | - Julia DPMM_Variational/variational_runner.jl 17 | -------------------------------------------------------------------------------- /nonlinearGaussianSSM/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/nonlinearGaussianSSM/.DS_Store -------------------------------------------------------------------------------- /nonlinearGaussianSSM/README.md: -------------------------------------------------------------------------------- 1 | ParticleFilterLookahead 2 | ======================= 3 | 4 | ParticleFilterLookahead 5 | -------------------------------------------------------------------------------- /nonlinearGaussianSSM/picloud_runner_nonlinearModel.py: -------------------------------------------------------------------------------- 1 | 2 | import sys 3 | import pickle 4 | 5 | import cloud 6 | cloud.setkey(7513, api_secretkey='ca43a3535fa17e28b687f0f1691c67db261392ae') 7 | cloud_environment = 'Julia' 8 | 9 | """ 10 | number_of_clusters = int(sys.argv[1]) 11 | if_zero_shortlearning = sys.argv[2] # Should be "yes" or "no" 12 | experiment_name = sys.argv[3]""" 13 | 14 | # Usage: python picloud_runner.py 100 50 10 2 15 | 16 | 17 | TRIALS = int(sys.argv[1]) 18 | NUM_PARTICLES = int(sys.argv[2]) 19 | #DELTA = int(sys.argv[3]) 20 | #INTEGRAL_PATHS = int(sys.argv[4]) 21 | 22 | 23 | def run_on_instance(trial_id): 24 | global number_of_clusters 25 | global if_zero_shortlearning 26 | global experiment_name 27 | import subprocess 28 | import os 29 | os.environ['DISPLAY'] = ":1" 30 | print "Starting" 31 | ls_output = subprocess.Popen(["/home/picloud/julia/julia", "lookaheadPartilce.jl", str(NUM_PARTICLES)], \ 32 | cwd 
= "/home/picloud/ParticleFilterLookahead/", \ 33 | stdout=subprocess.PIPE, stderr=subprocess.PIPE) 34 | 35 | out, err = ls_output.communicate() 36 | return out 37 | 38 | #result = run_on_instance([1]) 39 | 40 | jids = cloud.map(run_on_instance, range(TRIALS), _env=cloud_environment, _type='c2', _cores=1) 41 | print jids 42 | result = cloud.result(jids) 43 | pickle.dump(result, open("result_"+str(NUM_PARTICLES)+"particles_"+"path.pkl","wb")) 44 | print "RESULT:", result 45 | 46 | -------------------------------------------------------------------------------- /nonlinearGaussianSSM/plotter.py: -------------------------------------------------------------------------------- 1 | #tejask@mit.edu 2 | 3 | from pylab import imread,imshow,figure,show,subplot 4 | from numpy import reshape,uint8,flipud 5 | from scipy.cluster.vq import kmeans,vq 6 | import numpy 7 | import scipy.misc 8 | #import Image 9 | import copy 10 | import glob 11 | import pdb 12 | import pylab 13 | import pickle 14 | from matplotlib import pyplot 15 | from matplotlib.backends.backend_pdf import PdfPages 16 | import math 17 | 18 | 19 | def saveAsPDF(fname,plot): 20 | pp = PdfPages(fname) 21 | pp.savefig(plot) 22 | pp.close() 23 | 24 | 25 | def genericPlot(X,Y,xlab,ylab,fname): 26 | f = pylab.figure() 27 | ax=f.add_subplot(111,title='') 28 | pyplot.plot( X,Y,'-',color='blue', linewidth=2) 29 | pyplot.xlabel(xlab,fontsize=30) 30 | pyplot.ylabel(ylab,fontsize=30) 31 | pylab.savefig(fname+'.png') 32 | #ax.grid(True) 33 | saveAsPDF(fname+'.pdf',f) 34 | 35 | 36 | 37 | 38 | fname = 'result_100particles_path' 39 | data = pickle.load(open(fname+".pkl","rb")) 40 | 41 | f = pylab.figure() 42 | ax=f.add_subplot(111,title='') 43 | X=[] 44 | Y=[] 45 | CNT=0 46 | nolookArr =[] 47 | lookArr = [] 48 | for i in range(len(data)): 49 | if len(data[i]) > 0: 50 | nolook = float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[0]) 51 | look = 
float(data[i].split('\n')[0].replace("[","").replace("]","").split(",")[1]) 52 | nolookArr.append(nolook) 53 | lookArr.append(look) 54 | X.append(i) 55 | Y.append(look-nolook) 56 | if look > nolook: 57 | CNT+=1 58 | 59 | print len(data), CNT 60 | print 'Average (Look):', sum(lookArr)/len(data) 61 | print 'Average (Nolook):', sum(nolookArr)/len(data) 62 | 63 | ax.bar(X,Y,0.4,color='black') 64 | 65 | pylab.xlabel('Run Number',fontsize=30) 66 | pylab.ylabel('Error Difference',fontsize=30) 67 | pylab.savefig(fname+'.png') 68 | #ax.grid(True) 69 | saveAsPDF(fname+'.pdf',f) 70 | 71 | 72 | -------------------------------------------------------------------------------- /nonlinearGaussianSSM/pmapLoading.jl: -------------------------------------------------------------------------------- 1 | using Distributions 2 | function sampleMultinomialVect(NUM_PARTICLES_BATCHES) 3 | NUM_PARTICLES = NUM_PARTICLES_BATCHES["s"] 4 | normalizeWeightVect = NUM_PARTICLES_BATCHES["weightVect"] 5 | indices = zeros(NUM_PARTICLES) 6 | for i = 1:NUM_PARTICLES 7 | sample_arr = rand(Multinomial(1,normalizeWeightVect)) 8 | indices[i] = findin(sample_arr, 1)[1] 9 | end 10 | return indices 11 | end 12 | -------------------------------------------------------------------------------- /nonlinearGaussianSSM/test.jl: -------------------------------------------------------------------------------- 1 | 2 | # a = zeros(10) 3 | # @async for i=1:10 4 | # a[i] = i 5 | 6 | # end 7 | # println(a) 8 | 9 | function recursiveLookahead(currentState, currentProb, currentDepth, numBranches, depth, obsSeq, currentTime) 10 | a = 1 11 | if a ==1 12 | println("print") 13 | end 14 | end 15 | 16 | -------------------------------------------------------------------------------- /random_sum.jl: -------------------------------------------------------------------------------- 1 | function random_sum(args) 2 | return args[2]*2 3 | end -------------------------------------------------------------------------------- /test.jl: 
-------------------------------------------------------------------------------- 1 | require("random_sum.jl") 2 | arg=Dict() 3 | arg["ta"]=10 4 | arg["va"]=1 5 | nsteps = [arg, arg] 6 | 7 | tic() 8 | out = pmap(random_sum, nsteps) 9 | 10 | #for i in nsteps 11 | # random_sum(i) 12 | #end 13 | toc() 14 | println(out[]) 15 | 16 | -------------------------------------------------------------------------------- /thirdparty/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mrkulk/SequentialInference/c4c84acb8ad839680f78b2398522d5af0f58b173/thirdparty/.DS_Store -------------------------------------------------------------------------------- /thirdparty/vdpgm/README.txt: -------------------------------------------------------------------------------- 1 | Variational Dirichlet Process Gaussian Mixture Models 2 | written by Kenichi Kurihara 3 | distributed under the modified BSD license. 4 | 5 | 6 | 7 | ALGORITHMS (see Refrences, the end of this document) 8 | 9 | 1. accelerated variational Dirichlet process Gaussian mixture model 10 | 2. collapsed variational stick-breaking Dirichlet process Gaussian mixture model 11 | 3. variational Gaussian mixture model with a collapsed Dirichlet prior. 12 | 4. variational Dirichlet process Gaussian mixture model by Blei and Jordan. 13 | 14 | 15 | USAGE 16 | 17 | >> result = vdpgm(X, opts); 18 | 19 | The first argument is data. Each data point is a column vector of X. 20 | 21 | The second argument opts is the option of this program which 22 | determines an algorithm and hyperparameters. You can set opts as you 23 | want, or basic option generators are also available. 
24 | 25 | >> opts = mkopts_avdp; % for the algorithm 1 26 | >> opts = mkopts_csb(10); % for the algorithm 2 with T=10 27 | >> opts = mkopts_cdp(10); % for the algorithm 3 with K=10 28 | >> opts = mkopts_bj(10); % for the algorithm 4 with T=10 29 | 30 | Although opts accepts many options, some options are exclusive. 31 | 32 | The output result is a structure containing parameters for posteriors. 33 | Maybe, the most useful result is result.q_of_z which is the posterior 34 | probability of assignments. q_of_z is a N by K (or T) matrix 35 | s.t. sum_c q_of_z(i,c) = 1 for any c. q_of_z is available only when 36 | opts.get_q_of_z is set to 1. 37 | 38 | Other useful stats: 39 | - The expected value of the covariance of component c, 40 | >> result.hp_posterior.B{c} / results.hp_posterior.eta(c) 41 | 42 | - The expected value of the centroid of component c, 43 | >> result.hp_posterior.m(:,c) 44 | 45 | One may want to know the number of discovered clusters. 46 | If opts.algorithms is 'vdp', it is 47 | >> results.K - 1 48 | 49 | Otherwise, results.K is initialized by opts, and does not change. In 50 | these cases, the number of clusters is K s.t. 51 | result.hp_posterior.Nc(K+1) is close enough to zero. 52 | 53 | 54 | 55 | REFERENCES 56 | 57 | * Kenichi Kurihara, Max Welling and Yee Whye Teh, 58 | Collapsed Variational Dirichlet Process Mixture Models, 59 | the Twentieth International Joint Conference on Artificial Intelligence (IJCAI 2007). 60 | 61 | * Kenichi Kurihara, Max Welling and Nikos Vlassis, 62 | Accelerated Variational Dirichlet Mixture Models, 63 | Advances in Neural Information Processing Systems 19 (NIPS 2006). 64 | 65 | * David M. Blei and Michael I. Jordan, 66 | Variational Inference for Dirichlet Process Mixtures, 67 | Bayesian Analysis, Vol.1, No.1, 2005. 
68 | -------------------------------------------------------------------------------- /thirdparty/vdpgm/gamma_multivariate_ln.m: -------------------------------------------------------------------------------- 1 | function val = gamma_multivariate_ln(x,p); 2 | % function val = gamma_multivariate_ln(x,p); 3 | % 4 | % x: array(1,K) 5 | % p: scalor 6 | % 7 | % x must be more than (p-1)/2 8 | % x should be more than p/2 9 | % 10 | % Gamma_p(x) = pi^(p(p-1)/4) prod_(j=1)^p Gamma(x+(1-j)/2) 11 | % log Gamma_p(x) = p(p-1)/4 log pi + sum_(j=1)^p log Gamma(x+(1-j)/2) 12 | 13 | K = length(x); 14 | gammaln_val = gammaln(repmat(x,p,1)+0.5*(1-repmat([1:p]',1,K))); % p by K 15 | val = p*(p-1)*0.25 * log(pi) + sum(gammaln_val,1); 16 | 17 | 18 | % Local Variables: *** 19 | % mode: matlab *** 20 | % End: *** 21 | -------------------------------------------------------------------------------- /thirdparty/vdpgm/generate_data.m: -------------------------------------------------------------------------------- 1 | mixing_wights = [1/3,1/3,1/3]; 2 | 3 | %DATASET 1 4 | mu=[0,0; 5 | 1,1; 6 | 2,2;]; 7 | std=[0.3,0.3; 8 | 0.3,0.3; 9 | 0.3,0.3;]; 10 | 11 | 12 | 13 | NUM_POINTS = 200; 14 | X=zeros(2,NUM_POINTS); 15 | 16 | for i=1:NUM_POINTS 17 | cid = mnrnd(1,mixing_wights); 18 | cid = find(cid==1); 19 | X(1,i) = mu(cid,1) + std(cid,1)*randn(); 20 | X(2,i) = mu(cid,2) + std(cid,2)*randn(); 21 | end 22 | 23 | 24 | opts = mkopts_avdp; 25 | opts.get_q_of_z = 1; 26 | results = vdpgm(X, opts); 27 | 28 | results.q_of_z 29 | 30 | col = []; 31 | 32 | tmp={'r','c','g','y', 'b'}; 33 | 34 | for i=1:length(results.q_of_z) 35 | cid = mnrnd(1,results.q_of_z(i,:)); 36 | cid = find(cid==1); 37 | col(i,:) = tmp{cid}; 38 | end 39 | 40 | scatter(X(1,:), X(2,:),36,col,'.'); 41 | 42 | -------------------------------------------------------------------------------- /thirdparty/vdpgm/log_no_w.m: -------------------------------------------------------------------------------- 1 | function val = log_no_w( x ); 2 | % 
function val = log_no_w( x ); 3 | % 4 | % val = log(x) 5 | % 6 | % log_no_w does not warn anything. 7 | 8 | % to know the msgid 9 | % log(0) 10 | % [msg, id] = lastwarn 11 | 12 | warning('off', 'MATLAB:log:logOfZero'); 13 | val = log(x); 14 | warning('on', 'MATLAB:log:logOfZero'); 15 | 16 | 17 | % Local Variables: *** 18 | % mode: matlab *** 19 | % End: *** 20 | -------------------------------------------------------------------------------- /thirdparty/vdpgm/log_sum_exp.m: -------------------------------------------------------------------------------- 1 | function y = log_sum_exp(x,dim); 2 | % function y = log_sum_exp(x,dim); 3 | % 4 | % y = log( sum( exp(x), dim ) ) 5 | % y = log( sum( exp(x).*y, dim ) ) 6 | % 7 | % x can be -inf but cannot be +inf. 8 | 9 | % if has_inf(x) 10 | % warning(['x contains inf; x=' num2str(x)]) 11 | % end 12 | % if has_nan(x) 13 | % x 14 | % error('x has NaN(s).') 15 | % end 16 | 17 | x_size = size(x); 18 | if dim > 2 && dim == length(x_size) 19 | y_size = x_size(1:end-1); 20 | else 21 | y_size = x_size; 22 | y_size(dim) = 1; 23 | end 24 | 25 | x_max = reshape(max(x, [], dim), y_size); 26 | x_max(find(x_max==-inf)) = 0; 27 | dims = ones(1, ndims(x)); 28 | dims(dim) = size(x, dim); 29 | x = x - repmat(x_max, dims); 30 | y = x_max + log(sum(exp(x), dim)); 31 | 32 | % Local Variables: *** 33 | % mode: matlab *** 34 | % End: *** 35 | -------------------------------------------------------------------------------- /thirdparty/vdpgm/logmvtpdf.m: -------------------------------------------------------------------------------- 1 | function logpdf = logmvtpdf(t,mu,f,Sigma); 2 | % function logpdf = logmvtpdf(t,mu,f,Sigma); 3 | % log pdf of multivariate t-student dist. 
4 | % t : D by N 5 | 6 | [d,n] = size(t); 7 | 8 | c = gammaln((d+f)*0.5) - (d*0.5)*log(f*pi) - gammaln(f*0.5) - 0.5*detln(Sigma); 9 | diff = t - repmat(mu,1,n); % d by n 10 | logpdf = c - (f+d)*0.5 * log(1 + sum(diff.*(inv(f*Sigma)*diff),1)); 11 | 12 | % Local Variables: *** 13 | % mode: matlab *** 14 | % End: *** 15 | -------------------------------------------------------------------------------- /thirdparty/vdpgm/mkopts_avdp.m: -------------------------------------------------------------------------------- 1 | function opts = mkopts_avdp(); 2 | 3 | opts.algorithm = 'vdp'; 4 | opts.use_kd_tree = 1; 5 | opts.initial_K = 1; 6 | opts.do_greedy = 1; 7 | opts.do_split = 0; 8 | opts.do_merge = 0; 9 | opts.do_sort = 1; 10 | opts.initial_depth = 4; 11 | opts.max_target_ratio = 0.1; 12 | opts.recursive_expanding_depth = 2; 13 | opts.recursive_expanding_threshold = 0.1; 14 | opts.recursive_expanding_frequency = 3; 15 | 16 | % Local Variables: *** 17 | % mode: matlab *** 18 | % End: *** 19 | -------------------------------------------------------------------------------- /thirdparty/vdpgm/mkopts_bj.m: -------------------------------------------------------------------------------- 1 | function opts = mkopts_bj(T); 2 | 3 | opts.algorithm = 'bj'; 4 | opts.use_kd_tree = 0; 5 | opts.sis = 20; 6 | opts.initial_K = T; 7 | opts.do_greedy = 0; 8 | opts.do_split = 0; 9 | opts.do_merge = 0; 10 | opts.do_sort = 0; 11 | 12 | % Local Variables: *** 13 | % mode: matlab *** 14 | % End: *** 15 | -------------------------------------------------------------------------------- /thirdparty/vdpgm/mkopts_bjrnd.m: -------------------------------------------------------------------------------- 1 | function opts = mkopts_bjrnd(T); 2 | 3 | opts.algorithm = 'bj'; 4 | opts.use_kd_tree = 0; 5 | opts.sis = 0; 6 | opts.initial_K = T; 7 | opts.do_greedy = 0; 8 | opts.do_split = 0; 9 | opts.do_merge = 0; 10 | opts.do_sort = 0; 11 | 12 | % Local Variables: *** 13 | % mode: matlab *** 14 | % End: *** 15 | 
-------------------------------------------------------------------------------- /thirdparty/vdpgm/mkopts_cdp.m: -------------------------------------------------------------------------------- 1 | function opts = mkopts_cdp(K); 2 | 3 | opts.algorithm = 'cdp'; 4 | opts.use_kd_tree = 0; 5 | opts.initial_K = K; 6 | opts.do_greedy = 0; 7 | opts.do_split = 0; 8 | opts.do_merge = 0; 9 | opts.do_sort = 0; 10 | 11 | % Local Variables: *** 12 | % mode: matlab *** 13 | % End: *** 14 | -------------------------------------------------------------------------------- /thirdparty/vdpgm/mkopts_csb.m: -------------------------------------------------------------------------------- 1 | function opts = mkopts_csb(T); 2 | 3 | opts.algorithm = 'csb'; 4 | opts.use_kd_tree = 0; 5 | opts.initial_K = T; 6 | opts.do_greedy = 0; 7 | opts.do_split = 0; 8 | opts.do_merge = 0; 9 | opts.do_sort = 0; 10 | 11 | % Local Variables: *** 12 | % mode: matlab *** 13 | % End: *** 14 | -------------------------------------------------------------------------------- /thirdparty/vdpgm/mkopts_vb.m: -------------------------------------------------------------------------------- 1 | function opts = mkopts_vb(K); 2 | 3 | opts.algorithm = 'non_dp'; 4 | opts.use_kd_tree = 0; 5 | opts.initial_K = K; 6 | opts.do_greedy = 0; 7 | opts.do_split = 0; 8 | opts.do_merge = 0; 9 | 10 | % Local Variables: *** 11 | % mode: matlab *** 12 | % End: *** 13 | -------------------------------------------------------------------------------- /thirdparty/vdpgm/mkopts_vdp.m: -------------------------------------------------------------------------------- 1 | function opts = mkopts_vdp(); 2 | 3 | opts.algorithm = 'vdp'; 4 | opts.use_kd_tree = 0; 5 | opts.initial_K = 1; 6 | opts.do_greedy = 1; 7 | opts.do_split = 0; 8 | opts.do_merge = 0; 9 | 10 | % Local Variables: *** 11 | % mode: matlab *** 12 | % End: *** 13 | -------------------------------------------------------------------------------- 
/thirdparty/vdpgm/variational_lookahead_proposal.m: -------------------------------------------------------------------------------- 1 | function cluster_ids = variational_lookahead_proposal( ) 2 | 3 | end 4 | 5 | --------------------------------------------------------------------------------