├── README
├── week1
│   ├── laboratory_wk_1.pdf
│   ├── lect_1.pdf
│   ├── lect_2.pdf
│   ├── long_jump_data.txt
│   ├── tutorial_wk_1.pdf
│   ├── wk_1.pdf
│   └── wk_1_matlab.m
├── week2
│   ├── cross_val.m
│   ├── cv_demo.m
│   ├── laboratory_wk_2.pdf
│   ├── long_jump_cv.m
│   ├── wk_2_1.pdf
│   ├── wk_2_lect_1.pdf
│   └── wk_2_lect_2.pdf
├── week3
│   ├── brdemo.m
│   ├── cross_val_wk3.m
│   ├── gauss.m
│   ├── kernel_func.m
│   ├── laboratory_wk_3.pdf
│   ├── max_like_demo.m
│   ├── regdemo.m
│   ├── wk3_lab_1_sol.m
│   ├── wk_3.pdf
│   ├── wk_3_lect_1.pdf
│   └── wk_3_lect_2.pdf
├── week4
│   ├── 20news_w100.mat
│   ├── Course_Work_2006.pdf
│   ├── laboratory_wk_4.pdf
│   ├── laplace_demo.m
│   ├── logistic_classification_demo.m
│   ├── naive_bayes_binary.m
│   ├── rip_dat_te.txt
│   ├── rip_dat_tr.txt
│   ├── wk_4.pdf
│   ├── wk_4_lect_1.pdf
│   └── wk_4_lect_2.pdf
├── week5
│   ├── cout.m
│   ├── cross_val_wk5.m
│   ├── digits_3_8.mat
│   ├── knn_multi_class.m
│   ├── laboratory_wk_5.pdf
│   ├── lect_1.pdf
│   ├── lect_2.pdf
│   ├── monqp0.m
│   ├── svm_demo.m
│   ├── svm_demo_kernels.m
│   └── wk_5.pdf
├── week6
│   ├── Gauss_Mix_Data.mat
│   ├── Lab_6_EM_Data.mat
│   ├── gauss_density_est.m
│   ├── gauss_mix_em_demo.m
│   ├── lect_1.pdf
│   ├── mix_gauss_density.m
│   ├── multi_var_gauss_sampler.m
│   ├── wk_6.pdf
│   └── wk_6_laboratory.pdf
├── week7
│   ├── faces_demo.m
│   ├── lect_1.pdf
│   ├── olivettifaces.mat
│   ├── power_pca.m
│   ├── wk_7.pdf
│   └── wk_7_laboratory.pdf
└── week8
    ├── kern_km.pdf
    ├── kernel_func.m
    ├── kernel_kmeans.m
    ├── kmeans.m
    ├── lect_1.pdf
    ├── olivettifaces.mat
    ├── olivettifaces.mat.crdownload
    ├── water_lillies.jpg
    ├── wee_dog.jpg
    ├── wk8_demo_1.m
    ├── wk8_demo_2.m
    ├── wk8_demo_dat.mat
    ├── wk_8.pdf
    └── wk_8_laboratory.pdf
/README:
--------------------------------------------------------------------------------
1 | This is a machine learning module I found here:
2 | 
3 | http://www.dcs.gla.ac.uk/~girolami/Machine_Learning_Module_2006/week_2/Lectures/wk_2_lect_2.pdf
4 | 
5 | None of this material is mine; it was all created by Professor M. A. Girolami.
6 | These are hands down the best machine-learning tutorials I have found on the web, and I was afraid
7 | the university link would be taken down, so now it's on GitHub.
8 | 
9 | I hope you enjoy this as much as I did.
10 | 
11 | 
12 | 
13 | 
--------------------------------------------------------------------------------
/week1/laboratory_wk_1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week1/laboratory_wk_1.pdf
--------------------------------------------------------------------------------
/week1/lect_1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week1/lect_1.pdf
--------------------------------------------------------------------------------
/week1/lect_2.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week1/lect_2.pdf
--------------------------------------------------------------------------------
/week1/long_jump_data.txt:
--------------------------------------------------------------------------------
1 | -4.0000 249.7500
2 | 0 282.8750
3 | 4.0000 289.0000
4 | 8.0000 294.5000
5 | 12.0000 299.2500
6 | 20.0000 281.5000
7 | 24.0000 293.1250
8 | 28.0000 304.7500
9 | 32.0000 300.7500
10 | 36.0000 317.3125
11 | 48.0000 308.0000
12 | 52.0000 298.0000
13 | 56.0000 308.2500
14 | 60.0000 319.7500
15 | 64.0000 317.7500
16 | 68.0000 350.5000
17 | 72.0000 324.5000
18 | 76.0000 328.5000
19 | 80.0000 336.2500
20 | 84.0000 336.2500
21 | 88.0000 343.2500
22 | 92.0000 342.5000
23 | 96.0000 334.7237
24 | 100.0000 336.5741
25 | 104.0000 338.1883
--------------------------------------------------------------------------------
/week1/tutorial_wk_1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week1/tutorial_wk_1.pdf
--------------------------------------------------------------------------------
/week1/wk_1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week1/wk_1.pdf
--------------------------------------------------------------------------------
/week1/wk_1_matlab.m:
--------------------------------------------------------------------------------
1 | %Machine Learning Module
2 | %Week One Laboratory Exercise
3 | 
4 | x=load('long_jump_data.txt');
5 | 
6 | x_train = x(1:20,1);
7 | t_train = x(1:20,2);
8 | x_test = x(21:end,1);
9 | t_test = x(21:end,2);
10 | 
11 | Polynomial_Order = 4;
12 | X_train = [];
13 | X_test = [];
14 | 
15 | for i = 0:Polynomial_Order
16 |     X_train = [X_train x_train.^i];
17 |     X_test = [X_test x_test.^i];
18 | 
19 |     w_hat = inv(X_train'*X_train)*X_train'*t_train;
20 | 
21 |     t_train_hat = X_train*w_hat;
22 |     t_test_hat = X_test*w_hat;
23 |     mse_train(i+1) = mean((t_train - t_train_hat).^2);
24 |     mse_test(i+1) = mean((t_test - t_test_hat).^2);
25 | end
26 | 
27 | subplot(2,2,1);
28 | plot(0:Polynomial_Order,mse_train,'dg-');
29 | title('Train Error');
30 | 
31 | subplot(2,2,2);
32 | plot(0:Polynomial_Order,mse_test,'dr-');
33 | title('Test Error');
34 | 
35 | [min_test_val,min_test_index] = min(mse_test);
36 | [min_train_val,min_train_index] = min(mse_train);
37 | 
38 | t_train_hat_min = X_train(:,1:min_train_index)*...
39 |     inv(X_train(:,1:min_train_index)'*X_train(:,1:min_train_index))*...
40 |     X_train(:,1:min_train_index)'*t_train;
41 | 
42 | t_test_hat_min = X_train(:,1:min_test_index)*...
43 |     inv(X_train(:,1:min_test_index)'*X_train(:,1:min_test_index))*...
44 |     X_train(:,1:min_test_index)'*t_train;
45 | 
46 | subplot(2,2,3)
47 | plot(x_train, t_train,'og');
48 | hold on;
49 | plot(x_train,t_train_hat_min);
50 | title('Minimum Train Error Model');
51 | 
52 | subplot(2,2,4)
53 | plot(x_train, t_train,'og');
54 | hold on;
55 | plot(x_train,t_test_hat_min);
56 | title('Minimum Test Error Model');
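% Editorial sketch (not part of the original lab code): the fits above form
% inv(X'*X)*X'*t explicitly, which can be numerically fragile as the
% polynomial order grows. MATLAB's backslash operator solves the same
% least-squares problem more stably; a minimal sketch, assuming
% long_jump_data.txt is on the path:
x = load('long_jump_data.txt');
X = [x(:,1).^0 x(:,1).^1 x(:,1).^2];   % quadratic design matrix
t = x(:,2);
w_hat = X \ t;                         % least-squares weights, no explicit inverse
fprintf('Training MSE = %f\n', mean((t - X*w_hat).^2));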
--------------------------------------------------------------------------------
/week2/cross_val.m:
--------------------------------------------------------------------------------
1 | function [cv_err, cv_std] = cross_val(x,f)
2 | N = size(x,1);
3 | CV = [];
4 | 
5 | for n=1:N
6 |     X = x;
7 |     t = f;
8 |     X(n,:) = [];
9 |     t(n) = [];
10 |     Xt = x(n,:);
11 |     tt = f(n);
12 |     w_hat = inv(X'*X)*X'*t;
13 |     f_t = Xt*w_hat;
14 |     CV = [CV; (f_t - tt).^2];
15 | end
16 | 
17 | cv_err = mean(CV);
18 | cv_std = sqrt(((N-1)/N)*sum((CV - cv_err).^2));
19 | 
--------------------------------------------------------------------------------
/week2/cv_demo.m:
--------------------------------------------------------------------------------
1 | clear
2 | Range = 10;
3 | Nos_Samps = 50;
4 | Nd = 100;
5 | Max_Model_Order = 7;
6 | noise_var = 150;
7 | 
8 | T=[];
9 | Tt=[];
10 | Tcv=[];
11 | Tcvs=[];
12 | 
13 | x = Range*rand(Nos_Samps,1)-Range/2;
14 | 
15 | f = 5*x.^3 - x.^2 + x;
16 | f_n = 5*x.^3 - x.^2 + x + noise_var*randn(size(x));
17 | 
18 | xt = (-Range/2:0.01:Range/2)';
19 | tt = 5*xt.^3 - xt.^2 + xt;
20 | 
21 | [i,j]=sort(x);
22 | 
23 | X=x.^0;
24 | Xt=xt.^0;
25 | 
26 | for k=1:Max_Model_Order
27 |     X=[X x.^k];
28 |     Xt=[Xt xt.^k];
29 | 
30 |     w_hat = inv(X'*X)*X'*f_n;
31 |     f_hat = X*w_hat;
32 |     f_test = Xt*w_hat;
33 | 
34 |     [cve, cvs] = cross_val(X, f_n);
35 |     T = [T; mean((f_n - f_hat).^2)];
36 |     Tt = [Tt; mean((tt - f_test).^2)];
37 |     Tcv = [Tcv; cve];
38 |     Tcvs=[Tcvs;cvs];
39 | 
40 |     plot(i,f(j),'-');
41 |     hold on
42 |     plot(i,f_n(j),'.g')
43 |     plot(i,f_hat(j),'-r')
44 |     hold off
45 |     pause(1)
46 | end
47 | 
48 | figure
49 | plot(1:Max_Model_Order,T,'dr--');
50 | hold;
51 | plot(1:Max_Model_Order,Tt,'og-');
52 | plot(1:Max_Model_Order,Tcv,'ok-');
53 | %errorbar(1:Max_Model_Order,Tcv,Tcvs,'sk-');
54 | 
55 | 
--------------------------------------------------------------------------------
/week2/laboratory_wk_2.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week2/laboratory_wk_2.pdf
--------------------------------------------------------------------------------
/week2/long_jump_cv.m:
--------------------------------------------------------------------------------
1 | clear
2 | Max_Model_Order = 3;
3 | N_years = 20;
4 | T=[];
5 | Tt=[];
6 | Tcv=[];
7 | Tcvs=[];
8 | 
9 | D=load('long_jump_data.txt');
10 | x=D(1:N_years,1);
11 | t=D(1:N_years,2);
12 | 
13 | xt=D(N_years+1:end,1);
14 | tt=D(N_years+1:end,2);
15 | 
16 | X=x.^0;
17 | Xt=xt.^0;
18 | 
19 | for k=1:Max_Model_Order
20 |     X=[X x.^k];
21 |     Xt=[Xt xt.^k];
22 |     f_hat = X*inv(X'*X)*X'*t;
23 |     ft_hat = Xt*inv(X'*X)*X'*t;
24 |     [cve, cvs] = cross_val(X, t);
25 |     T = [T; mean((t - f_hat).^2)];
26 |     Tt = [Tt; mean((tt - ft_hat).^2)];
27 |     Tcv = [Tcv; cve];
28 |     Tcvs = [Tcvs; cvs];
29 | end
30 | subplot(121)
31 | plot(1:Max_Model_Order,T,'rd-');
32 | hold on;
33 | plot(1:Max_Model_Order,Tcv,'gd-');
34 | %errorbar(1:Max_Model_Order,Tcv,Tcvs,'sk-');
35 | subplot(122)
36 | plot(1:Max_Model_Order,Tt,'ro-');
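% Editorial sketch (not from the module): cross_val.m above implements
% leave-one-out cross-validation for any linear-in-the-parameters model --
% each row of the design matrix is held out in turn and the model is refit
% on the remaining N-1 rows. Minimal usage, assuming both files are on the path:
D = load('long_jump_data.txt');
X = [D(:,1).^0 D(:,1).^1 D(:,1).^2];   % quadratic design matrix
[cv_err, cv_std] = cross_val(X, D(:,2));
fprintf('LOO-CV error = %f (std %f)\n', cv_err, cv_std);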
--------------------------------------------------------------------------------
/week2/wk_2_1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week2/wk_2_1.pdf
--------------------------------------------------------------------------------
/week2/wk_2_lect_1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week2/wk_2_lect_1.pdf
--------------------------------------------------------------------------------
/week2/wk_2_lect_2.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week2/wk_2_lect_2.pdf
--------------------------------------------------------------------------------
/week3/brdemo.m:
--------------------------------------------------------------------------------
1 | N=25;
2 | xx=rand(N,1)-0.5;
3 | X=[xx.^2 xx];
4 | w0=-0.5;
5 | w1=0.5;
6 | sigma=0.05;
7 | alpha = 1;
8 | 
9 | f=X*[w1; w0];
10 | t=f+sigma*randn(N,1);
11 | that = X*inv(X'*X + eye(2)*sigma/alpha)*X'*t;
12 | 
13 | [x,y]=meshgrid(-1:0.05:1,-1:0.05:1);
14 | [n,n]=size(x);
15 | W=[reshape(x,n*n,1) reshape(y,n*n,1)];
16 | mu = inv(X'*X + eye(2)*sigma/alpha)*X'*t;
17 | C = sigma*inv(X'*X + eye(2)*sigma/alpha);
18 | prior=reshape(gauss([0 0],eye(2)*alpha,W),[n n]);
19 | likelihood=reshape(gauss(t',eye(N)*sigma,W*X'),[n n]);
20 | posterior=reshape(gauss(mu',C, W),[n n]);
21 | 
22 | figure
23 | subplot(2,2,1)
24 | contour(x,y,prior)
25 | hold
26 | plot(w1,w0,'o')
27 | subplot(2,2,2)
28 | contour(x,y,likelihood)
29 | hold
30 | plot(w1,w0,'o')
31 | subplot(2,2,3)
32 | contour(x,y,posterior);
33 | hold
34 | plot(w1,w0,'o')
35 | subplot(2,2,4)
36 | hold
37 | [X_sort,ind_sort]=sort(xx(:,1));
38 | plot(xx(ind_sort,1),f(ind_sort));
39 | plot(xx(ind_sort,1),t(ind_sort),'.');
40 | plot(xx(ind_sort,1),that(ind_sort),'r');
41 | 
--------------------------------------------------------------------------------
/week3/cross_val_wk3.m:
--------------------------------------------------------------------------------
1 | function [cv_err, cv_std] = cross_val_wk3(x,f,a,b,c)
2 | N = size(x,1);
3 | CV = [];
4 | 
5 | for n=1:N
6 |     X = x;
7 |     t = f;
8 |     X(n,:) = [];
9 |     t(n) = [];
10 |     Xt = x(n,:);
11 |     tt = f(n);
12 |     K=kernel_func(X,X,'gauss',c,c)';
13 |     Kt=kernel_func(X,Xt,'gauss',c,c)';
14 |     f_t = Kt*inv(K'*K + (a/b)*eye(N-1))*K'*t;
15 |     CV = [CV; (f_t - tt).^2];
16 | end
17 | 
18 | cv_err = mean(CV);
19 | cv_std = sqrt(((N-1)/N)*sum((CV-cv_err).^2)); %sqrt added so this returns a std, matching week2's cross_val.m
20 | 
--------------------------------------------------------------------------------
/week3/gauss.m:
--------------------------------------------------------------------------------
1 | function y = gauss(mu, covar, x)
2 | %GAUSS Evaluate a Gaussian distribution.
3 | %
4 | % Description
5 | %
6 | % Y = GAUSS(MU, COVAR, X) evaluates a multi-variate Gaussian density
7 | % in D-dimensions at a set of points given by the rows of the matrix X.
8 | % The Gaussian density has mean vector MU and covariance matrix COVAR.
9 | %
10 | % See also
11 | % GSAMP, DEMGAUSS
12 | %
13 | 
14 | % Copyright (c) Ian T Nabney (1996-2001)
15 | 
16 | [n, d] = size(x);
17 | 
18 | [j, k] = size(covar);
19 | 
20 | % Check that the covariance matrix is the correct dimension
21 | if ((j ~= d) | (k ~=d))
22 |     error('Dimension of the covariance matrix and data should match');
23 | end
24 | 
25 | invcov = inv(covar);
26 | mu = reshape(mu, 1, d); % Ensure that mu is a row vector
27 | 
28 | x = x - ones(n, 1)*mu;
29 | fact = sum(((x*invcov).*x), 2);
30 | 
31 | y = exp(-0.5*fact);
32 | 
33 | y = y./sqrt((2*pi)^d*det(covar));
34 | 
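% Editorial sketch (not from the module): gauss.m evaluates the density
% row-wise, so a contour plot of a 2-D Gaussian only needs the grid points
% stacked as rows. A minimal usage sketch:
[gx, gy] = meshgrid(-3:0.1:3, -3:0.1:3);
G = [gx(:) gy(:)];                      % grid points as rows of a matrix
p = gauss([0 0], [1 0.5; 0.5 1], G);    % density of a correlated 2-D Gaussian
contour(gx, gy, reshape(p, size(gx)));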
--------------------------------------------------------------------------------
/week3/kernel_func.m:
--------------------------------------------------------------------------------
1 | function K = kernel_func(X1,X2,kernel_,T,p)
2 | 
3 | [N1 d] = size(X1);
4 | [N2 d] = size(X2);
5 | 
6 | switch kernel_
7 | 
8 |     case 'gauss',
9 |         K = exp(-distSqrd(X1,X2,T));
10 | 
11 |     case 'poly',
12 |         K = (1+X1*T*X2').^p;
13 | end
14 | 
15 | function D2=distSqrd(X,Y,T)
16 | nx = size(X,1);
17 | ny = size(Y,1);
18 | 
19 | D2 = sum((X.^2)*T,2)*ones(1,ny) + ones(nx,1)*sum((Y.^2)*T,2)' - 2*(X*T*Y');
--------------------------------------------------------------------------------
/week3/laboratory_wk_3.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week3/laboratory_wk_3.pdf
--------------------------------------------------------------------------------
/week3/max_like_demo.m:
--------------------------------------------------------------------------------
1 | clear
2 | Range = 10;
3 | Max_Model_Order = 10;
4 | noise_var = 100;
5 | 
6 | L=[];
7 | x = [-Range/2:0.2:Range/2]';
8 | N=size(x,1);
9 | f = 5*x.^3 - x.^2 + x;
10 | f_n = f + noise_var*randn(size(x));
11 | 
12 | [i,j]=sort(x);
13 | X=x.^0;
14 | 
15 | for k=1:Max_Model_Order
16 |     X=[X x.^k];
17 |     w_hat = inv(X'*X)*X'*f_n;
18 |     f_hat = X*w_hat;
19 |     sigma_hat = mean((f_n - f_hat).^2);
20 |     sigma = sigma_hat*diag(X*inv(X'*X)*X');
21 | 
22 |     L = [L; -N*log(sqrt(sigma_hat)) - 0.5*N*(1 + log(2*pi))];
23 |     figure
24 |     plot(i,f(j),'b');
25 |     hold on
26 |     plot(i,f_n(j),'.k','MarkerSize',15)
27 |     errorbar(i,f_hat(j),sigma(j),'-r.')
28 |     hold off
29 |     pause(1)
30 | end
31 | 
32 | figure
33 | plot(1:Max_Model_Order,L,'dr--');
34 | title('Maximum Likelihood ');
35 | 
36 | 
37 | 
38 | 
39 | 
--------------------------------------------------------------------------------
/week3/regdemo.m:
--------------------------------------------------------------------------------
1 | clear
2 | N=25;
3 | xx=rand(N,1)-0.5;
4 | xt=[-1:0.01:1]';
5 | Nt=size(xt,1);
6 | 
7 | L=[];
8 | Lt=[];
9 | 
10 | X=[xx.^2 xx];
11 | Xt=[xt.^2 xt];
12 | 
13 | w0=-0.5;
14 | w1=0.5;
15 | sigma=0.05;
16 | 
17 | f=X*[w1; w0];
18 | ft=Xt*[w1;w0];
19 | 
20 | t=f+sigma*randn(N,1);
21 | 
22 | for alpha = 10:-0.1:0.1
23 |     w_hat = inv(X'*X + eye(2)*sigma/alpha)*X'*t;
24 | 
25 |     likelihood=gauss(t',eye(N)*sigma,w_hat'*X');
26 |     test_likelihood=gauss(ft',eye(Nt)*sigma,w_hat'*Xt');
27 |     L=[L;likelihood];
28 |     Lt=[Lt;test_likelihood];
29 | end
30 | figure
31 | subplot(211)
32 | plot(10:-0.1:0.1,L)
33 | subplot(212)
34 | plot(10:-0.1:0.1,Lt,'r')
35 | 
36 | 
37 | 
--------------------------------------------------------------------------------
/week3/wk3_lab_1_sol.m:
--------------------------------------------------------------------------------
1 | clear
2 | Max_Model_Order = 1;
3 | Range = 20;
4 | N=30;
5 | sigma = 0.25;
6 | xt = [-Range/2:0.1:Range/2]';
7 | xt(find(xt == 0))=[];
8 | ft = sin(xt)./xt;
9 | x = Range.*rand(N,1) - Range/2;
10 | f = sin(x)./x;
11 | [i,j] = sort(x);
12 | e = sigma*randn(N,1);
13 | t = f + e;
14 | 
15 | alpha = 100;
16 | k=0.15;
17 | X=kernel_func(x,x,'gauss',k,k)';
18 | Xt=kernel_func(x,xt,'gauss',k,k)';
19 | pos_cov = sigma*inv(X'*X + (sigma/alpha)*eye(N));
20 | mu = pos_cov*X'*t./sigma;
21 | pred_mean = Xt*mu;
22 | pred_cov = diag(Xt*pos_cov*Xt');
23 | 
24 | %plot(i,f(j),'-');
25 | hold on
26 | plot(i,t(j),'k.');
27 | plot(xt,ft)
28 | plot(xt,pred_mean,'r')
29 | plot(xt,pred_mean+sqrt(pred_cov),'r');
30 | plot(xt,pred_mean-sqrt(pred_cov),'r');
31 | 
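% Editorial sketch (not from the module): with a scalar T, distSqrd in
% kernel_func.m computes T-scaled squared Euclidean distances, so the
% 'gauss' branch returns K(i,j) = exp(-T*||x_i - x_j||^2). Building a small
% Gram matrix of the kind used in wk3_lab_1_sol.m:
x = linspace(-1, 1, 5)';
K = kernel_func(x, x, 'gauss', 0.15, 0.15);   % 5x5 RBF Gram matrix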
--------------------------------------------------------------------------------
/week3/wk_3.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week3/wk_3.pdf
--------------------------------------------------------------------------------
/week3/wk_3_lect_1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week3/wk_3_lect_1.pdf
--------------------------------------------------------------------------------
/week3/wk_3_lect_2.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week3/wk_3_lect_2.pdf
--------------------------------------------------------------------------------
/week4/20news_w100.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week4/20news_w100.mat
--------------------------------------------------------------------------------
/week4/Course_Work_2006.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week4/Course_Work_2006.pdf
--------------------------------------------------------------------------------
/week4/laboratory_wk_4.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week4/laboratory_wk_4.pdf
--------------------------------------------------------------------------------
/week4/laplace_demo.m:
--------------------------------------------------------------------------------
1 | clear
2 | %A small constant used to avoid log of zero problems
3 | SMALL_NOS = 1e-200;
4 | 
5 | %Define Sample size and data dimensionality
6 | N=30;
7 | D=2;
8 | 
9 | %Limits and grid size for contour plotting
10 | Range=8;
11 | Step=0.1;
12 | 
13 | %Each of the two classes has 2 features drawn from a 2-D Gaussian
14 | %with means mu1 & mu2 and isotropic covariances
15 | mu1=[ones(N,1) 5*ones(N,1)];
16 | mu2=[-5*ones(N,1) 1*ones(N,1)];
17 | class1_std = 1;
18 | class2_std = 1.1;
19 | 
20 | %generate class features and target labels
21 | X = [class1_std*randn(N,2)+mu1;2*class2_std*randn(N,2)+mu2];
22 | t = [ones(N,1);zeros(N,1)];
23 | 
24 | %Variance of prior
25 | alpha=100;
26 | 
27 | %Define contour grid of weighting coefficients w1 & w2 for
28 | %logistic regression model
29 | [w1,w2]=meshgrid(-Range:Step:Range,-Range:Step:Range);
30 | [n,n]=size(w1);
31 | W=[reshape(w1,n*n,1) reshape(w2,n*n,1)];
32 | 
33 | %Compute the log-prior, the log-likelihood and the log-joint
34 | %at each point on the grid defined above
35 | f=W*X';
36 | Log_Prior = log(gauss(zeros(1,D),eye(D).*alpha,W));
37 | Log_Like = W*X'*t - sum(log(1+exp(f)),2);
38 | Log_Joint = Log_Like + Log_Prior;
39 | 
40 | %display contours of log-prior, likelihood and joint
41 | figure
42 | subplot(131)
43 | contour(w1,w2,reshape(-Log_Prior,[n,n]),30);
44 | title('Log-Prior');
45 | subplot(132)
46 | contour(w1,w2,reshape(-Log_Like,[n,n]),30);
47 | title('Log-Likelihood');
48 | subplot(133)
49 | contour(w1,w2,reshape(-Log_Joint,[n,n]),30);
50 | title('Log-Unnormalised Posterior')
51 | hold
52 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
53 | 
54 | %Identify the parameters w1 & w2 which maximise the posterior (joint)
55 | [i,j]=max(Log_Joint);
56 | plot(W(j,1),W(j,2),'.','MarkerSize',40);
57 | 
58 | %Newton routine to find MAP values of w1 & w2
59 | %Fix number of steps to 10 and initial estimate to w1=0, w2=0
60 | N_Steps = 10;
61 | w = [0;0];
62 | 
63 | for m=1:N_Steps
64 |     %store updated parameter values and plot evolution of estimates
65 |     ww(m,:) = w;
66 |     plot(ww(:,1),ww(:,2),'k.-');
67 |     drawnow
68 |     pause(0.1)
69 | 
70 |     %Newton Step
71 |     P = 1./(1 + exp(-X*w));
72 |     A = diag(P.*(1-P));
73 |     H = inv(X'*A*X + eye(D)./alpha);
74 |     w = H*X'*(A*X*w + t - P);
75 | 
76 |     %Compute new likelihood and unnormalised posterior values
77 |     f=X*w;
78 |     lpr = log(gauss(zeros(1,D),eye(D).*alpha,w'));
79 |     llk = f'*t - sum(log(1+exp(f)));
80 |     ljt = llk + lpr;
81 |     fprintf('Log-Likelihood = %f, Joint-Likelihood = %f\n',llk,ljt)
82 | end
83 | 
84 | fprintf('Maximum of Joint Likelihood = (%f, %f)\n',W(j,1),W(j,2));
85 | fprintf('Estimate of Maximum of Joint Likelihood = (%f, %f)\n',w(1),w(2));
86 | 
87 | %plot the data points from both classes and show the contour of P(C=1|x)
88 | figure
89 | Posterior = 1./(1+exp(-W*w));
90 | contour(w1,w2,reshape(Posterior,[n,n]),30);
91 | hold on
92 | plot(X(find(t==1),1),X(find(t==1),2),'r.');
93 | plot(X(find(t==0),1),X(find(t==0),2),'bo');
94 | 
95 | %plot the data points and the values of P(C=1|x)=0.5 i.e. the separating
96 | %plane which distinguishes both classes
97 | figure
98 | contour(w1,w2,reshape(Posterior,[n,n]),[0.5 0.5]);
99 | hold on
100 | plot(X(find(t==1),1),X(find(t==1),2),'r.');
101 | plot(X(find(t==0),1),X(find(t==0),2),'bo');
102 | 
103 | %Compute the Laplace Approximation
104 | 
105 | %Numerically compute normalising constant for posterior by
106 | %drawing 10000 samples from the prior
107 | Nsamps = 10000;
108 | Wsamp = sqrt(alpha)*randn(Nsamps,D); %std is the sqrt of the prior variance alpha
109 | f=Wsamp*X';
110 | lk = exp(Wsamp*X'*t - sum(log(1+exp(f)),2));
111 | %simple Monte Carlo average of the likelihood under the prior
112 | Z = mean(lk);
113 | 
114 | %Show contour plots of the Posterior and the Laplace approximation to the
115 | %posterior
116 | Log_Laplace_Posterior = log(gauss(w',H,W)+SMALL_NOS); %to prevent zero-log
117 | figure
118 | subplot(121)
119 | contour(w1,w2,reshape(-Log_Joint + log(Z),[n,n]),30);
120 | hold;
121 | plot(W(j,1),W(j,2),'.','MarkerSize',40);
122 | title('Log Posterior')
123 | subplot(122)
124 | contour(w1,w2,reshape(-Log_Laplace_Posterior,[n,n]),30);
125 | hold
126 | plot(W(j,1),W(j,2),'.','MarkerSize',40);
127 | title('Laplace Approximation to Posterior')
128 | 
129 | 
130 | 
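% Editorial sketch (not from the module): the loop above is the iteratively
% reweighted least squares (IRLS) form of Newton's method, and the Laplace
% approximation it supports replaces the posterior p(w|t) with the Gaussian
% N(w_MAP, H), where H is the inverse Hessian of the negative log-joint at
% the MAP estimate. One update, pulled out as a hypothetical helper that
% would live in its own file newton_step.m:
function [w, H] = newton_step(w, X, t, alpha)
% X is N-by-D, t is N-by-1 in {0,1}, alpha is the prior variance.
P = 1./(1 + exp(-X*w));                     % current class-1 probabilities
A = diag(P.*(1-P));                         % IRLS weights
H = inv(X'*A*X + eye(size(X,2))./alpha);    % Laplace posterior covariance
w = H*X'*(A*X*w + t - P);                   % Newton/IRLS step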
--------------------------------------------------------------------------------
/week4/logistic_classification_demo.m:
--------------------------------------------------------------------------------
1 | clear
2 | %A small constant used to avoid log of zero problems
3 | SMALL_NOS = 1e-200;
4 | 
5 | %Two hyperparameters of model
6 | Polynomial_Order = 3;
7 | alpha=100;
8 | 
9 | %Load and prepare train & test data
10 | X=load('rip_dat_tr.txt');
11 | Xt=load('rip_dat_te.txt');
12 | t=X(:,3);
13 | X(:,3)=[];
14 | tt=Xt(:,3);
15 | Xt(:,3)=[];
16 | 
17 | %Plot two classes in train set
18 | subplot(221)
19 | plot(X(find(t==1),1),X(find(t==1),2),'r.');
20 | hold
21 | plot(X(find(t==0),1),X(find(t==0),2),'o');
22 | title('Scatter Plot of Data from Classes');
23 | fprintf('Two overlapping Non-Gaussian Class Distributions\n')
24 | fprintf('Hit Enter to Continue....');
25 | pause;
26 | 
27 | %Limits and grid size for contour plotting
28 | Range=1.3;
29 | Step=0.1;
30 | %Define contour grid
31 | [w1,w2]=meshgrid(-Range:Step:Range,-Range:Step:Range);
32 | [n,n]=size(w1);
33 | W=[reshape(w1,n*n,1) reshape(w2,n*n,1)];
34 | 
35 | %Create Polynomial Basis
36 | XX = []; XXt = []; WW = [];
37 | for i = 0:Polynomial_Order
38 |     XX = [XX X.^i];
39 |     XXt = [XXt Xt.^i];
40 |     WW = [WW W.^i];
41 | end
42 | [N,D] = size(XX);
43 | Nt = size(XXt,1);
44 | 
45 | %Newton routine to find MAP values of w
46 | %Fix number of steps to 10 and initial estimate to w=0
47 | N_Steps = 10;
48 | w = zeros(D,1);
49 | 
50 | for m=1:N_Steps
51 |     %Newton Step
52 |     P = 1./(1 + exp(-XX*w));
53 |     A = diag(P.*(1-P));
54 |     H = inv(XX'*A*XX + eye(D)./alpha);
55 |     w = H*XX'*(A*XX*w + t - P);
56 | 
57 |     %Compute new likelihood and unnormalised posterior values
58 |     f=XX*w; % train
59 |     ft=XXt*w; %test
60 |     lpr = log(gauss(zeros(1,D),eye(D).*alpha,w'));
61 |     llk = f'*t - sum(log(1+exp(f))); %training likelihood
62 |     ljt = llk + lpr;
63 |     fprintf('Log-Likelihood = %f, Joint-Likelihood = %f\n',llk,ljt)
64 | end
65 | 
66 | %Compute Overall performance
67 | %Train
68 | Train_Like = llk;
69 | Test_Like = ft'*tt - sum(log(1+exp(ft)));
70 | Train_Error = 100 - 100*sum( (1./(1+exp(-XX*w)) > 0.5) == t)/N; %percentage of misclassifications
71 | Test_Error = 100 - 100*sum( (1./(1+exp(-XXt*w)) > 0.5) == tt)/Nt;
72 | fprintf('\n\nClassifier Performance Statistics using MAP Value\n');
73 | fprintf('Training Likelihood = %f, Training 0-1 Error = %f\n',Train_Like,Train_Error);
74 | fprintf('Test Likelihood = %f, Test 0-1 Error = %f\n',Test_Like,Test_Error);
75 | 
76 | %plot the data points from both classes and show the contour of P(C=1|x)
77 | subplot(222)
78 | Posterior = 1./(1+exp(-WW*w));
79 | contour(w1,w2,reshape(Posterior,[n,n]));
80 | hold on
81 | plot(X(find(t==1),1),X(find(t==1),2),'r.');
82 | plot(X(find(t==0),1),X(find(t==0),2),'bo');
83 | title('Contour of Posterior P(C=1|x)');
84 | 
85 | %plot the data points and the values of P(C=1|x)=0.5 i.e. the separating
86 | %plane which distinguishes both classes
87 | subplot(223)
88 | contour(w1,w2,reshape(Posterior,[n,n]),[0.5 0.5]);
89 | hold on
90 | plot(X(find(t==1),1),X(find(t==1),2),'r.');
91 | plot(X(find(t==0),1),X(find(t==0),2),'bo');
92 | title('Decision Boundary P(C=1|x) = 0.5')
93 | 
94 | subplot(224)
95 | bar(w./sqrt(diag(H)))
96 | title('Hessian Normalised Basis Weighting Coefficients')
97 | 
98 | 
99 | 
100 | 
101 | 
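% Editorial sketch (not from the module): note that XX = [XX X.^i] raises
% each input coordinate separately, so the order-3 basis here is
% [1 1 x1 x2 x1^2 x2^2 x1^3 x2^3], with no cross terms such as x1*x2.
% Classifying new points with the fitted MAP weights w would look like:
Xnew = [0.1 0.3; -0.5 0.2];            % hypothetical new 2-D inputs
B = [];
for i = 0:3                            % same Polynomial_Order as the demo
    B = [B Xnew.^i];                   % elementwise powers, no cross terms
end
p_new = 1./(1 + exp(-B*w));            % P(C=1|x) for each new point
labels = p_new > 0.5;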
--------------------------------------------------------------------------------
/week4/naive_bayes_binary.m:
--------------------------------------------------------------------------------
1 | %Demo of Naive Bayes Classifier for binary data
2 | %using the 4 class subset of 20newsgroups with 100 terms
3 | clear
4 | load 20news_w100;
5 | X = documents';
6 | t = full(newsgroups');
7 | 
8 | Nclasses = full(max(t));
9 | train_test_frac = 0.60;
10 | [N,D] = size(X);
11 | 
12 | I = randperm(N);
13 | Ntrain = floor(N*train_test_frac);
14 | Ntest = N - floor(N*train_test_frac);
15 | 
16 | fprintf('Random Partition of %d Train Samples, %d Test Samples\n', Ntrain, Ntest);
17 | 
18 | Xtrain = X(I(1:Ntrain),:);
19 | ttrain = t(I(1:Ntrain));
20 | 
21 | Xtest = X(I(Ntrain+1:end),:);
22 | ttest = t(I(Ntrain+1:end));
23 | 
24 | %Naive Bayes Classifier for general Nclasses
25 | for c=1:Nclasses
26 |     Prior(c) = mean((ttrain==c));
27 |     p(c,:) = (sum(Xtrain(find(ttrain==c),:)) + 1)./(length(find(ttrain==c)) + 2);
28 |     Log_Like = sum((Xtest.*log(repmat(p(c,:),Ntest,1))),2) +...
29 |         sum((1-Xtest).*log((1-repmat(p(c,:),Ntest,1))),2);
30 |     Log_Posterior(:,c) = Log_Like + log(Prior(c)).*ones(Ntest,1);
31 | end
32 | 
33 | Posterior = exp(Log_Posterior);
34 | Posterior = Posterior./repmat(sum(Posterior,2),1,Nclasses);
35 | 
36 | [i,j]=sort(ttest);
37 | for k=1:Nclasses
38 |     subplot(Nclasses,1,k)
39 |     plot(Posterior(j,k));
40 |     title(sprintf('Predictive Posterior %s for News Groups',groupnames{k}));
41 |     fprintf('Class %d = %s\n', k, groupnames{k});
42 | end
43 | 
44 | [max_post, t_pred] = max(Posterior,[],2);
45 | fprintf('Percentage Predictions Correct %f\n',100*mean(t_pred == ttest));
46 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
47 | % Some fun stuff
48 | % List the top five most probable words in each class
49 | MaxI = 5;
50 | for k=1:Nclasses
51 |     [most_probable_words,index_mpw]=sort( -p(k,:) );
52 |     fprintf('Most probable words in Class %s\n',groupnames{k});
53 |     wordlist(index_mpw(1:MaxI))'
54 |     fprintf('Hit enter to continue...\n\n');
55 |     pause
56 | end
57 | 
58 | 
59 | 
60 | 
61 | 
62 | 
63 | 
64 | 
65 | 
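% Editorial note (not from the module): the estimate (count + 1)/(n + 2)
% in the class loop is Laplace add-one smoothing -- the posterior mean of a
% Bernoulli parameter under a uniform Beta(1,1) prior -- which keeps both
% log(p) and log(1-p) finite for words unseen in a class. Scoring a single
% binary document vector against the learned model would look like:
x = Xtest(1,:);
for c = 1:Nclasses
    log_post(c) = sum(x.*log(p(c,:)) + (1-x).*log(1-p(c,:))) + log(Prior(c));
end
[dummy, predicted_class] = max(log_post);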
--------------------------------------------------------------------------------
/week4/rip_dat_te.txt:
--------------------------------------------------------------------------------
1 | -0.970990139 0.429424950 0 2 | -0.631997027 0.251952852 0 3 | -0.773605760 0.690750778 0 4 | -0.606211523 0.175677956 0 5 | -0.539409005 0.376744239 0 6 | -0.960325850 0.110040710 0 7 | -1.041375608 0.328508085 0 8 | -0.822600536 0.175874200 0 9 | -0.943714771 -0.180633309 0 10 | -0.968763299 0.296070217 0 11 | -0.853637980 0.644010559 0 12 | -0.771994930 0.476344773 0 13 | -0.718952712 0.090457675 0 14 | -0.539520701 0.447837856 0 15 | -0.540093447 0.551067215 0 16 | -0.792923186 0.531235891 0 17 | -0.861472850 0.287352652 0 18 | -0.470131571 0.544251260 0 19 | -0.770683778 0.482733051 0 20 | -0.803031230 0.228632039 0 21 | -0.962520756 0.367759881 0 22 | -0.681960494 0.495354977 0 23 | -0.433007837 0.213645636 0 24 | -0.336831640 0.293614869 0 25 | -0.696425307 0.315194495 0 26 | -0.355766886 0.269794553 0 27 | -0.547898136 0.277054714 0 28 | -0.799663889 0.292931173 0 29 | -0.780012402 0.038437662 0 30 | -0.853938355 0.198423604 0 31 | -0.896295454 0.286916469 0 32 | -0.824028270 0.295231859 0 33 | -0.901075546 0.321018371 0 34 | -0.556718720 0.358145252 0 35 | -0.871004652 0.258992681 0 36 | -0.800820459 0.363123198 0 37 | -0.699003238 0.417050087 0 38 | -0.759409251 0.366156047 0 39 | -0.775268090 0.306716684 0 40 | -0.893576947 -0.096908084 0 41 | -0.284857192 0.307321395 0 42 | -0.665571750 0.365820514 0 43 | -0.741374392 0.298498149 0 44 | -0.767733049 0.245811163 0 45 | -0.779306345 0.319092986 0 46 | -0.892190952 0.201459901 0 47 | -0.122811626 0.516497113 0 48 | -0.731730651 0.055992550 0 49 | -1.011976425 0.344692082 0 50 | -0.573762197 0.059676643 0 51 | -0.641425285 0.333730563 0 52 | -0.985902178 0.162020997 0 53 | -0.661140507 0.136840396 0 54 | -0.749218489 0.185148533 0 55 | -0.540329548 0.387396621 0 56 | -0.592092859 0.447510299 0 57 | -0.860077357 0.218917745 0 58 | -0.867516891 -0.137491677 0 59 | -0.590055695 0.466004783 0 60 | -0.775966325 0.403399745 0 61 | -0.849687489 0.315466589 0 62 | -0.746283040 0.256242513 0 63 | -0.700854929 0.518361424 0 64 | -0.923680439 0.449453255 0 65 | -0.912092992 0.407980138 0 66 | -0.650765709 0.412200546 0 67 | -0.980330108 0.299281948 0 68 | -0.744408938 0.203087089 0 69 | -0.604170665 0.326156917 0 70 | -0.735903002 0.655288145 0 71 | -0.643607616 0.513819006 0 72 | -0.963376987 0.249000843 0 73 |
-0.426980732 0.282178155 0 74 | -0.654762824 0.562181098 0 75 | -0.843491783 0.345421521 0 76 | -0.553968009 0.538960351 0 77 | -0.716946447 0.122102049 0 78 | -0.775328790 0.498892271 0 79 | -0.640289822 0.435762487 0 80 | -0.516878864 0.182337108 0 81 | -0.952125366 0.298280511 0 82 | -0.723017513 0.256182935 0 83 | -0.658805240 0.269147489 0 84 | -0.464552773 0.218324319 0 85 | -0.564517221 0.196511498 0 86 | -0.814096964 0.228304066 0 87 | -0.396184143 0.511765539 0 88 | -0.996637001 0.209223029 0 89 | -0.815950989 0.235966820 0 90 | -0.526626592 0.418687316 0 91 | -0.667763995 0.428833798 0 92 | -0.658898181 0.031828081 0 93 | -0.923935948 0.530254142 0 94 | -0.909973792 0.451785093 0 95 | -0.410551229 0.252159645 0 96 | -0.462064440 0.230673805 0 97 | -0.366146922 -0.036140226 0 98 | -0.595861370 0.400288539 0 99 | -0.704392096 0.238984335 0 100 | -0.841225771 0.577095745 0 101 | -0.969828933 0.155360193 0 102 | -0.557037265 0.314190393 0 103 | -0.671104208 0.361767035 0 104 | -0.503286446 0.566417412 0 105 | -0.950325858 0.078493347 0 106 | -0.675813120 0.319308250 0 107 | -0.831561973 0.143581661 0 108 | -0.435074090 0.492855894 0 109 | -0.793021028 0.118140919 0 110 | -0.848627588 0.082762982 0 111 | -0.820269797 0.395714263 0 112 | -0.422092727 0.477760711 0 113 | -0.408676218 0.374918252 0 114 | -0.546953839 0.473748255 0 115 | -0.735444130 0.266138774 0 116 | -0.582205470 0.271991191 0 117 | -0.338346632 0.242426860 0 118 | -0.535045557 0.118043648 0 119 | -0.493743519 0.717856305 0 120 | -0.760932705 0.416245530 0 121 | -0.515677444 0.184242721 0 122 | -0.673504588 0.296239478 0 123 | -0.459705697 0.186931282 0 124 | -0.694881314 0.381840980 0 125 | -0.387447545 0.080890693 0 126 | -0.596036129 0.184974829 0 127 | -0.664372536 0.423940859 0 128 | -0.883742635 0.614943083 0 129 | -0.509344933 0.290033636 0 130 | -0.925124882 0.604748154 0 131 | -0.841007867 0.290327096 0 132 | -0.894120137 0.157169952 0 133 | -0.646573229 0.609447746 0 134 | -1.017873059 0.148721295 0 135 | -0.582528753 0.184940557 0 136 | -0.897329196 0.532091737 0 137 | -0.465016860 0.285520226 0 138 | -0.726508681 0.181867205 0 139 | -0.514352969 0.156961029 0 140 | -0.739246011 0.408845252 0 141 | -0.537049319 0.307417180 0 142 | -0.923407832 0.492249753 0 143 | -0.663217181 0.241275721 0 144 | -0.871900824 0.191786697 0 145 | -0.574764695 0.216699985 0 146 | -0.778723382 0.417127421 0 147 | -0.717491428 0.169911784 0 148 | -0.293985190 0.341692708 0 149 | -0.732183039 0.611673182 0 150 | -0.672451661 0.290330390 0 151 | -0.392906014 0.314507904 0 152 | -0.821496561 0.383502471 0 153 | -0.441649840 0.131552989 0 154 | -0.734149425 0.138366727 0 155 | -0.353467324 0.403725989 0 156 | -0.756729286 0.140926608 0 157 | -0.985271855 0.307051129 0 158 | -0.734362749 0.131915653 0 159 | -0.843814454 0.508797861 0 160 | -0.871470989 0.409534472 0 161 | -0.643774042 0.386072579 0 162 | -0.617659001 0.067340392 0 163 | -0.282068649 0.693923139 0 164 | -0.402555368 0.204385656 0 165 | -0.458583969 0.420739380 0 166 | -0.846296983 0.277152491 0 167 | -1.048542317 0.338822747 0 168 | -0.799795307 0.309430762 0 169 | -0.852040552 0.307281614 0 170 | -0.616474678 0.252952510 0 171 | -0.691690351 0.272750414 0 172 | -0.809142202 0.441901584 0 173 | -0.837139722 0.269171931 0 174 | -0.743520251 0.247417602 0 175 | -0.660650230 -0.028489077 0 176 | -0.594815839 0.109164679 0 177 | -0.597128033 -0.037465241 0 178 | -0.921420258 -0.069844290 0 179 | -0.877566913 0.304297059 0 180 | -0.765371773 0.596974416 0 181 | -0.699840550 
0.167126769 0 182 | -0.523434825 -0.064742897 0 183 | -0.656387744 0.012460495 0 184 | -1.036967640 0.141450813 0 185 | -0.715165192 0.217239838 0 186 | -0.747858131 0.569994813 0 187 | -0.625684541 0.320122450 0 188 | -0.756699924 0.174518616 0 189 | -0.679690670 0.438410861 0 190 | -0.612004202 -0.134269826 0 191 | -0.647906789 0.239638558 0 192 | -0.691066413 0.255635309 0 193 | -0.675112764 0.550169559 0 194 | -0.851072790 0.474955936 0 195 | -0.837051482 0.408050507 0 196 | -0.961405831 0.588207922 0 197 | -0.642774716 0.163487304 0 198 | -0.892075711 0.064132978 0 199 | -0.927798777 0.072240031 0 200 | -0.751800726 0.409258566 0 201 | -0.805341030 0.064157327 0 202 | -0.692838235 0.171715163 0 203 | -0.703943931 0.476730183 0 204 | -0.694804098 0.268655402 0 205 | -0.567758798 0.207116645 0 206 | -0.822380000 0.268404036 0 207 | -0.565082539 0.327015498 0 208 | -0.724181702 0.625763803 0 209 | -0.916357511 0.236124996 0 210 | -0.430182548 0.268033748 0 211 | -0.632645741 0.522382761 0 212 | -0.850972862 0.345168936 0 213 | -0.609691020 0.501872186 0 214 | -0.705661024 0.220694983 0 215 | -0.693161871 0.100244402 0 216 | -0.633922642 0.390701059 0 217 | -0.710406768 0.015180240 0 218 | -1.055052036 0.517833140 0 219 | -0.621276063 0.167382599 0 220 | -0.613423246 0.266134950 0 221 | -0.989565379 0.166693580 0 222 | -0.923580375 0.412606504 0 223 | -0.889581095 0.426760653 0 224 | -0.930040388 0.240533824 0 225 | -0.691421356 0.006339557 0 226 | -1.031412255 0.482277646 0 227 | -0.701394895 0.462356010 0 228 | -0.627721178 0.243813111 0 229 | -0.829380326 0.487867261 0 230 | -0.612200851 0.121715064 0 231 | -0.528139634 0.449962538 0 232 | -0.616674472 0.058254182 0 233 | -0.649202842 0.263909873 0 234 | -0.655384302 0.225793561 0 235 | -0.750085240 0.119545244 0 236 | -0.471920626 0.278830975 0 237 | -0.219905912 0.315052974 0 238 | -0.871701260 0.240570129 0 239 | -0.730197977 0.295504781 0 240 | -0.620676222 0.046383576 0 241 | -0.657830687 0.265899761 0 242 | -0.475352116 0.279850946 0 243 | -0.734794644 0.365235616 0 244 | -0.772673638 0.355477724 0 245 | -0.620710470 0.770796635 0 246 | -0.529626406 0.091067609 0 247 | -0.730846476 0.642803364 0 248 | -0.938694493 0.324275071 0 249 | -0.723706354 -0.017999841 0 250 | -0.979569099 -0.003034376 0 251 | 0.448754392 0.015050386 0 252 | -0.077907282 0.245842052 0 253 | 0.316786631 0.252917817 0 254 | 0.229597046 0.067681573 0 255 | 0.197949376 0.310003887 0 256 | 0.048404642 -0.037865268 0 257 | 0.270601003 0.260199166 0 258 | 0.516192043 0.258256258 0 259 | 0.154718993 0.040306842 0 260 | -0.005611276 0.223658499 0 261 | 0.365076313 -0.001956641 0 262 | 0.086615547 0.138482814 0 263 | 0.198645891 0.047611642 0 264 | 0.131870660 0.402255360 0 265 | 0.585894768 0.433203159 0 266 | -0.023498655 0.379919943 0 267 | 0.394174061 0.533936878 0 268 | 0.595983773 0.680516952 0 269 | 0.388419733 0.321931614 0 270 | 0.270452263 0.360309566 0 271 | 0.336909893 0.176262915 0 272 | 0.481432232 0.326027716 0 273 | 0.246865240 0.532700400 0 274 | -0.020439631 0.132155124 0 275 | 0.389941424 0.309223343 0 276 | 0.048115168 0.104763308 0 277 | 0.284816331 -0.048775617 0 278 | 0.529166911 0.285314795 0 279 | 0.349208427 0.063167392 0 280 | 0.323888259 0.192358455 0 281 | 0.321213977 0.101190083 0 282 | 0.303365953 0.286689359 0 283 | -0.075979803 0.312196126 0 284 | 0.317894059 0.110578558 0 285 | 0.136145272 0.223509762 0 286 | 0.086777443 0.397316175 0 287 | 0.330555298 -0.018831347 0 288 | 0.202260475 0.212061643 0 289 | 0.276704436 0.541792424 0 
290 | 0.244814590 -0.033434890 0 291 | 0.429043775 0.183967494 0 292 | 0.340412789 0.237474210 0 293 | 0.382064022 0.123295299 0 294 | 0.381833239 0.085809636 0 295 | 0.424417864 0.321954582 0 296 | 0.206306313 0.348957865 0 297 | 0.091614953 0.309132098 0 298 | 0.627597689 0.472188745 0 299 | 0.270244718 0.361936451 0 300 | 0.127928396 0.368238186 0 301 | 0.399192895 0.120050819 0 302 | 0.450618123 0.452328633 0 303 | 0.254900382 0.410220018 0 304 | 0.259523390 0.124427489 0 305 | 0.417004689 0.300805900 0 306 | 0.346581338 0.283479475 0 307 | 0.748854615 0.246812787 0 308 | 0.428530072 0.636260298 0 309 | 0.127369504 0.321732050 0 310 | 0.528722462 0.227075837 0 311 | 0.618168220 0.327309276 0 312 | 0.286029472 0.215643450 0 313 | 0.142578461 0.112955825 0 314 | 0.282764909 0.091628143 0 315 | 0.788220007 0.464545152 0 316 | 0.119165220 0.239567886 0 317 | 0.244772936 0.014906673 0 318 | 0.160442893 0.455259044 0 319 | 0.454067300 0.332582882 0 320 | -0.057868287 0.498675578 0 321 | -0.111365306 0.079756044 0 322 | 0.198824819 0.476017542 0 323 | 0.595468169 0.162120124 0 324 | 0.085627364 0.315262031 0 325 | 0.465261497 0.123331422 0 326 | 0.359673625 0.364504393 0 327 | 0.111822093 0.296370162 0 328 | 0.509269078 0.464037322 0 329 | 0.470888018 0.285556829 0 330 | 0.393262912 0.093782124 0 331 | 0.311897634 0.286626364 0 332 | 0.151594554 0.268411495 0 333 | 0.084423498 0.319282396 0 334 | 0.208641564 0.230226362 0 335 | 0.361230606 0.506867239 0 336 | 0.425667999 0.239049251 0 337 | 0.399549324 0.136827304 0 338 | 0.279615939 0.310402719 0 339 | 0.109049911 0.630255432 0 340 | 0.102929855 0.446152743 0 341 | 0.551085316 0.313983603 0 342 | 0.579201159 0.179353765 0 343 | 0.356514867 0.178396614 0 344 | 0.259861364 0.096917764 0 345 | 0.545480531 0.272730569 0 346 | 0.398789597 0.149343536 0 347 | 0.383441254 0.243298247 0 348 | 0.405415302 0.351024129 0 349 | 0.249091946 0.423059272 0 350 | 0.293535767 0.133960638 0 351 | 0.149869213 0.305675082 0 352 | 0.224986842 0.464864831 0 353 | 0.240826479 0.233973445 0 354 | 0.122917552 0.406179372 0 355 | 0.301231733 0.178773911 0 356 | 0.257698819 0.537312141 0 357 | 0.446288764 0.206483371 0 358 | 0.511214849 0.156330717 0 359 | 0.474675267 0.454212426 0 360 | 0.373402327 0.107531816 0 361 | 0.453575217 0.013564367 0 362 | 0.363708989 0.324209899 0 363 | 0.323172397 0.308234424 0 364 | 0.263568182 0.097321560 0 365 | 0.375989273 0.511128488 0 366 | 0.483416817 -0.027606822 0 367 | 0.412708967 0.353260156 0 368 | 0.294590710 0.338631607 0 369 | 0.148425126 0.313998286 0 370 | 0.476236614 0.009138517 0 371 | 0.051021769 0.518229423 0 372 | 0.488029582 0.492206314 0 373 | 0.193703118 0.356127440 0 374 | 0.390385684 0.402548715 0 375 | 0.166515062 0.077486533 0 376 | 0.378346001 0.205554127 0 377 | 0.059890677 0.615481812 0 378 | -0.077252668 0.325973024 0 379 | 0.519325984 0.352901733 0 380 | 0.271955420 0.031010063 0 381 | 0.027254987 0.289394991 0 382 | 0.437437673 -0.027210937 0 383 | 0.028370640 0.166304765 0 384 | 0.433657082 0.604909277 0 385 | 0.280505393 0.022916023 0 386 | 0.300735977 0.188023897 0 387 | 0.182031568 0.292354741 0 388 | 0.316158641 0.423973591 0 389 | 0.530601146 0.287109075 0 390 | 0.210237556 0.384357431 0 391 | 0.399444521 0.496882692 0 392 | 0.272113433 0.437262474 0 393 | 0.418146305 0.145521656 0 394 | 0.504825239 0.154106314 0 395 | 0.166974207 0.180641380 0 396 | 0.106527356 0.500370591 0 397 | 0.607348514 0.184680121 0 398 | 0.517847638 0.396858357 0 399 | 0.231553652 0.403086636 0 400 | 0.255029497 
0.430592319 0 401 | 0.287511011 0.219412906 0 402 | 0.200852107 0.272097495 0 403 | 0.226547849 0.244596483 0 404 | 0.011878373 0.352803074 0 405 | 0.380569910 0.434089493 0 406 | 0.519215428 0.072764703 0 407 | 0.623854880 0.338983888 0 408 | 0.183173455 0.255322403 0 409 | 0.226420389 0.075341621 0 410 | 0.455356509 0.367957232 0 411 | 0.332301375 -0.011058516 0 412 | 0.376306021 0.188460770 0 413 | 0.428169526 0.054583036 0 414 | 0.145829529 0.368253163 0 415 | 0.493757540 0.376063674 0 416 | 0.529391969 0.074698658 0 417 | 0.409826160 0.280322788 0 418 | 0.612354746 0.120926664 0 419 | 0.221568084 0.273458368 0 420 | 0.427545649 0.106200846 0 421 | 0.533325611 0.591671136 0 422 | 0.462109537 0.357955560 0 423 | 0.182362120 0.298520960 0 424 | 0.310107790 0.301510248 0 425 | 0.159799550 0.257640193 0 426 | 0.254288145 0.374308080 0 427 | 0.316374077 0.029411804 0 428 | 0.285942260 0.338773678 0 429 | 0.552541865 -0.016858031 0 430 | -0.004090460 0.399012387 0 431 | 0.060484031 0.277592649 0 432 | 0.545097739 0.218461339 0 433 | 0.268284924 0.267903340 0 434 | 0.159022649 0.531382417 0 435 | 0.492658208 0.486286052 0 436 | -0.128240252 0.533333926 0 437 | 0.447760080 0.284865402 0 438 | 0.239374886 0.462386877 0 439 | 0.138634894 0.395550274 0 440 | 0.417284343 0.200022118 0 441 | 0.178303979 0.306720386 0 442 | 0.221552636 0.396534895 0 443 | -0.009120409 0.724738825 0 444 | 0.292748806 0.414432640 0 445 | 0.300563713 0.214325496 0 446 | 0.242506812 0.232690286 0 447 | 0.234494302 0.247006083 0 448 | 0.352550448 0.351581175 0 449 | 0.185994378 0.269914887 0 450 | 0.409680307 0.212370722 0 451 | 0.163919950 0.026130185 0 452 | 0.169756191 0.104358886 0 453 | 0.354398935 0.227524046 0 454 | 0.388870060 0.042378087 0 455 | 0.344788486 0.246053805 0 456 | 0.193145216 0.271352787 0 457 | 0.430800164 0.263193765 0 458 | 0.232808591 0.445516712 0 459 | 0.326059317 0.563886858 0 460 | 0.330837091 0.256040145 0 461 | 0.323691216 0.356872920 0 462 | 0.367737090 -0.088857683 0 463 | 0.530750561 0.327389964 0 464 | 0.089596372 0.338423910 0 465 | 0.432192982 0.394261493 0 466 | 0.186694048 0.438187113 0 467 | 0.458275145 0.324647633 0 468 | 0.480078071 0.374810492 0 469 | 0.582758378 0.390433695 0 470 | 0.437808065 0.389265557 0 471 | 0.208830936 0.010096493 0 472 | 0.377797466 0.474572076 0 473 | 0.183803076 -0.090083970 0 474 | 0.155682547 0.537563127 0 475 | 0.071926861 0.572783083 0 476 | 0.364435618 -0.123841713 0 477 | 0.408213991 0.254483065 0 478 | 0.466073956 0.398618252 0 479 | 0.614281743 0.283302172 0 480 | -0.047151673 0.214579449 0 481 | 0.326917150 0.468066389 0 482 | 0.458840582 0.443470083 0 483 | 0.109537926 0.189505910 0 484 | 0.161895892 0.123705078 0 485 | 0.450055408 0.501518844 0 486 | 0.368869484 0.557190529 0 487 | 0.334209119 0.413960488 0 488 | -0.031121068 0.228014456 0 489 | 0.176753850 0.430199990 0 490 | 0.552527788 0.224902508 0 491 | 0.304266409 0.220287210 0 492 | 0.210462653 0.415336683 0 493 | 0.063953710 0.045543235 0 494 | -0.063149684 0.351389125 0 495 | 0.073535710 0.252143534 0 496 | 0.665453703 0.203720086 0 497 | 0.539642761 0.279986737 0 498 | 0.250981585 0.069569958 0 499 | 0.392679888 0.090261998 0 500 | 0.431409216 0.288456378 0 501 | -0.516451834 0.501256111 1 502 | -0.116775286 0.483404773 1 503 | -0.327960793 0.546240228 1 504 | -0.394572192 0.755243715 1 505 | -0.110201988 0.553402230 1 506 | -0.160538577 0.579525838 1 507 | -0.124742465 0.323661757 1 508 | -0.109742769 0.696514698 1 509 | -0.687328305 0.807033124 1 510 | -0.358374262 0.807265743 1 
511 | -0.335836520 0.392482381 1 512 | -0.321604223 0.591913273 1 513 | -0.091546228 0.562483354 1 514 | -0.660890881 0.611049023 1 515 | -0.561938441 0.907495412 1 516 | -0.244433911 0.451367292 1 517 | -0.392885460 0.550604753 1 518 | -0.429608736 0.644152661 1 519 | -0.090462865 0.522251590 1 520 | -0.436484641 0.520039359 1 521 | -0.519966218 0.940830736 1 522 | -0.418391404 1.011277424 1 523 | -0.405807798 0.738999068 1 524 | -0.085688384 0.847932361 1 525 | -0.210347223 0.416696729 1 526 | -0.531896660 0.452618557 1 527 | -0.294588066 0.846012850 1 528 | -0.092753982 0.693082777 1 529 | -0.314549926 0.797236706 1 530 | -0.262918395 0.787474678 1 531 | -0.389819133 0.579880509 1 532 | -0.162163174 0.315021403 1 533 | -0.418250429 0.684349895 1 534 | -0.356533257 0.896022491 1 535 | -0.461800168 0.782142975 1 536 | -0.149067005 0.837864969 1 537 | -0.376621128 0.553207248 1 538 | -0.235807559 0.642937572 1 539 | -0.433816383 0.568682995 1 540 | 0.003602461 0.804352974 1 541 | -0.286855152 0.710632583 1 542 | -0.424066790 0.994872459 1 543 | -0.270030002 0.833427152 1 544 | -0.239212386 0.378268423 1 545 | -0.255304685 0.822105360 1 546 | -0.196569409 0.703182679 1 547 | -0.125203354 0.844725933 1 548 | -0.338351441 0.680964321 1 549 | -0.383184405 0.839383812 1 550 | -0.398513962 0.750284450 1 551 | 0.027844709 0.537770177 1 552 | -0.295483256 0.846722230 1 553 | -0.552989277 0.794817114 1 554 | -0.004901838 0.608282407 1 555 | -0.029384352 0.614072912 1 556 | -0.444694587 0.779042878 1 557 | -0.338928122 0.789725990 1 558 | 0.122195503 0.784475027 1 559 | -0.186584991 0.560614872 1 560 | -0.295015658 0.840559001 1 561 | -0.102630670 0.675938267 1 562 | -0.430785693 0.645617846 1 563 | -0.099297566 0.894434898 1 564 | -0.009264193 1.012595196 1 565 | -0.560973647 0.807423104 1 566 | -0.536294204 0.529432752 1 567 | -0.563297476 0.646381268 1 568 | -0.292902091 0.620924549 1 569 | -0.107464304 0.615869773 1 570 | -0.261216307 0.699646352 1 571 | -0.105100716 0.868085863 1 572 | -0.362473095 0.683245848 1 573 | -0.548222187 0.726739882 1 574 | -0.522717054 0.636324411 1 575 | -0.406753361 0.858975870 1 576 | -0.272149948 1.009788333 1 577 | -0.058505372 0.722037722 1 578 | -0.286284031 0.564831018 1 579 | -0.145641743 0.527786275 1 580 | -0.254951568 0.909735133 1 581 | -0.200910922 0.911648155 1 582 | -0.397769966 0.398117280 1 583 | -0.547436085 0.779495789 1 584 | -0.231129177 0.491139768 1 585 | -0.473894736 0.682466158 1 586 | -0.231075189 0.453157246 1 587 | -0.268776826 0.676814477 1 588 | -0.180889587 0.880462410 1 589 | -0.326237906 0.599734095 1 590 | -0.252657163 0.575832499 1 591 | -0.294967226 0.707617098 1 592 | -0.441714737 0.649258390 1 593 | -0.434336942 0.859634714 1 594 | -0.080950672 0.608362742 1 595 | -0.256056671 0.465280126 1 596 | -0.767972482 0.818894418 1 597 | -0.250929687 0.807765177 1 598 | -0.233531508 0.536107452 1 599 | -0.166252171 0.578022234 1 600 | -0.399389870 0.961981117 1 601 | -0.383257048 0.918196737 1 602 | -0.246208261 0.728269018 1 603 | -0.112873567 0.825689335 1 604 | -0.096666032 0.707306804 1 605 | -0.457949369 0.704015342 1 606 | -0.255003562 0.504258034 1 607 | -0.073434667 0.722783609 1 608 | -0.409375468 0.526062925 1 609 | -0.363348126 0.881713044 1 610 | -0.257217769 0.607597755 1 611 | -0.349331300 0.703112332 1 612 | -0.151880213 0.492886000 1 613 | -0.404171363 0.737139545 1 614 | -0.462320910 0.423673110 1 615 | -0.546143281 0.835222198 1 616 | -0.229962943 0.611218821 1 617 | -0.246561278 0.550748181 1 618 | -0.392635644 
0.396901704 1 619 | -0.175983074 0.659236133 1 620 | -0.160444346 0.856989440 1 621 | -0.341235994 0.536421185 1 622 | -0.333233675 0.558945553 1 623 | -0.274226030 0.677337101 1 624 | -0.394217634 1.084965709 1 625 | -0.177110920 1.174990894 1 626 | -0.403972304 0.705580257 1 627 | -0.387046408 0.654499407 1 628 | -0.044038573 0.753839485 1 629 | -0.278389636 0.349432166 1 630 | -0.272249470 0.234622985 1 631 | -0.191592271 0.380898603 1 632 | -0.590368203 0.698331693 1 633 | -0.374188840 0.819242381 1 634 | -0.351703587 0.730361507 1 635 | -0.281959049 0.469288157 1 636 | -0.751945036 0.885219702 1 637 | -0.306929899 0.574182522 1 638 | -0.762727447 0.890352701 1 639 | -0.564448380 0.729602705 1 640 | 0.040323664 0.779572618 1 641 | -0.462188702 0.998868915 1 642 | -0.447915766 0.843500207 1 643 | -0.217001799 0.796623800 1 644 | -0.112509220 0.611900551 1 645 | -0.131149777 0.948975611 1 646 | -0.403054671 0.786868546 1 647 | 0.008848708 0.652933806 1 648 | 0.090647590 0.654317764 1 649 | -0.358620932 0.936462477 1 650 | -0.441265488 0.326283245 1 651 | -0.479842420 0.788087594 1 652 | -0.588843824 0.648214630 1 653 | -0.562606783 0.754763105 1 654 | -0.514270007 0.324312047 1 655 | -0.392905106 0.821041597 1 656 | -0.075132059 0.685702990 1 657 | -0.196830870 0.714112820 1 658 | -0.301481674 0.552313534 1 659 | -0.181585205 0.659988770 1 660 | -0.114373131 0.736877415 1 661 | -0.331936585 0.440209520 1 662 | -0.266807581 0.545085006 1 663 | -0.475109818 0.947483833 1 664 | -0.557037972 0.778719573 1 665 | -0.193240214 0.574512048 1 666 | -0.029348731 0.829601881 1 667 | -0.383376526 0.624385592 1 668 | -0.035071125 0.812800625 1 669 | -0.060506093 0.772166835 1 670 | -0.160710931 0.530042141 1 671 | -0.210362275 0.567446850 1 672 | -0.283272444 0.798839816 1 673 | -0.520613526 0.837372559 1 674 | -0.263870495 0.687937002 1 675 | -0.060226406 0.688228649 1 676 | -0.429473669 0.654717940 1 677 | -0.325250467 0.791105596 1 678 | 0.094837102 0.750572909 1 679 | -0.326848641 0.823553280 1 680 | -0.537630937 0.827068887 1 681 | -0.589458171 0.897096209 1 682 | -0.255109811 0.737443245 1 683 | -0.350722503 0.739648314 1 684 | -0.111745167 0.705987527 1 685 | -0.213435551 0.466547665 1 686 | -0.272518877 0.683481004 1 687 | -0.440414101 0.974317798 1 688 | -0.303362790 0.576264653 1 689 | -0.221200040 0.987888085 1 690 | -0.286914561 0.619578181 1 691 | 0.096845361 0.511673423 1 692 | -0.363110834 0.661562448 1 693 | -0.211246704 0.813171823 1 694 | -0.222052903 0.686080299 1 695 | -0.321828330 0.624357510 1 696 | -0.473737950 0.506318972 1 697 | -0.212793549 0.774693470 1 698 | 0.008463870 0.614591369 1 699 | -0.205693420 0.644919563 1 700 | -0.378486601 0.778361218 1 701 | -0.229442899 0.594732866 1 702 | -0.162703081 0.930991126 1 703 | -0.321296905 0.828610911 1 704 | -0.400332594 0.688297191 1 705 | -0.312050685 0.618494750 1 706 | -0.039349153 0.959790721 1 707 | -0.273914659 0.599403497 1 708 | -0.348565665 0.612606769 1 709 | -0.413758325 0.696448995 1 710 | -0.098831839 0.854519409 1 711 | -0.287690535 0.883301183 1 712 | -0.383124103 0.672367628 1 713 | -0.561271474 1.067278573 1 714 | -0.166431846 0.897151624 1 715 | -0.635114720 0.688087392 1 716 | -0.332175204 0.501477407 1 717 | -0.474805835 0.711218005 1 718 | -0.116004389 0.708363990 1 719 | -0.477937453 0.702949001 1 720 | -0.126810442 0.971409951 1 721 | -0.156822576 0.457687275 1 722 | -0.293523863 0.856486819 1 723 | -0.129615545 0.891819146 1 724 | -0.108242313 0.644814421 1 725 | -0.501979824 0.370050434 1 726 | 
-0.138108021 0.612928438 1 727 | -0.179322731 0.366517387 1 728 | -0.458093963 0.571370985 1 729 | -0.028565637 0.486501211 1 730 | -0.426175577 0.461765467 1 731 | -0.310680953 0.544905689 1 732 | -0.180247439 0.876336671 1 733 | -0.217870537 0.390856979 1 734 | -0.315992257 0.736172703 1 735 | 0.236276902 0.714179743 1 736 | -0.185456072 0.702294953 1 737 | -0.203065705 0.317910002 1 738 | -0.296142711 0.648026589 1 739 | -0.448939545 0.650603998 1 740 | 0.077064746 0.797884087 1 741 | 0.034024500 0.788213418 1 742 | -0.439519067 0.946446539 1 743 | -0.471452461 0.708540945 1 744 | -0.263821096 0.565778110 1 745 | -0.676333519 1.064998541 1 746 | -0.394630195 0.732544473 1 747 | -0.334698783 0.638313660 1 748 | 0.043828297 0.782970773 1 749 | 0.073254562 0.639405607 1 750 | -0.358305948 0.638878595 1 751 | 0.289824646 0.645297701 1 752 | 0.479141353 0.769272264 1 753 | 0.180670084 0.518893193 1 754 | 0.199825830 0.747216818 1 755 | 0.735249202 0.833027044 1 756 | 0.249991814 0.350660256 1 757 | 0.413137889 0.854044549 1 758 | 0.518581462 0.386362750 1 759 | 0.465359263 0.854392557 1 760 | 0.348309276 0.680024754 1 761 | 0.174782318 0.544423218 1 762 | 0.549911988 0.472172493 1 763 | 0.203934276 0.410263392 1 764 | 0.338644108 1.028370469 1 765 | 0.161322119 0.950855699 1 766 | 0.350961307 0.686427652 1 767 | 0.090257414 0.846995122 1 768 | 0.764373743 0.615571296 1 769 | 0.414756998 0.893306725 1 770 | 0.679361421 0.659759084 1 771 | 0.640285978 0.804268545 1 772 | 0.630876040 0.710028594 1 773 | 0.366370214 0.772543364 1 774 | 0.314611449 0.755070836 1 775 | 0.745924055 0.706345767 1 776 | 0.489768059 0.684198041 1 777 | 0.075247977 0.621422345 1 778 | 0.499573139 0.679632119 1 779 | 0.350405143 0.443980792 1 780 | 0.636928363 0.603842916 1 781 | 0.224908918 0.840917922 1 782 | -0.032261912 0.655726651 1 783 | 0.627052189 0.808688697 1 784 | 0.263348975 0.455434849 1 785 | 0.520257017 0.762965338 1 786 | 0.151882522 0.966544141 1 787 | 0.098482589 0.517323437 1 788 | 0.201212077 0.549826846 1 789 | 0.371298202 0.761389940 1 790 | 0.497766489 0.769076360 1 791 | 0.409493154 0.305118700 1 792 | 0.340849813 0.766677739 1 793 | 0.391675543 0.489773920 1 794 | 0.516131854 0.412661585 1 795 | 0.522760611 0.520845425 1 796 | 0.446358722 0.869775036 1 797 | 0.224400728 0.559199836 1 798 | 0.583149627 0.871728559 1 799 | 0.420184227 0.768544337 1 800 | 0.340883764 0.582414682 1 801 | 0.407626346 1.016274588 1 802 | 0.226804848 0.997357208 1 803 | 0.461550030 0.728402685 1 804 | 0.275762111 0.773039119 1 805 | 0.304760108 0.405069957 1 806 | 0.636786149 0.521153930 1 807 | 0.544820787 0.902598154 1 808 | 0.816098957 0.643244361 1 809 | 0.454637082 0.627059827 1 810 | 0.416886517 0.498139441 1 811 | 0.585814059 0.472857968 1 812 | 0.158972903 0.877325952 1 813 | 0.218197123 0.791103192 1 814 | 0.436713777 0.582375556 1 815 | 0.465359340 0.619108530 1 816 | 0.346901746 0.776639489 1 817 | 0.599207277 0.605698565 1 818 | 0.463002935 0.972725613 1 819 | 0.694263789 0.550710864 1 820 | 1.000277812 0.669240364 1 821 | 0.503660224 0.451743317 1 822 | 0.609419010 0.560098000 1 823 | 0.352923549 0.639530833 1 824 | 0.313797682 0.428469344 1 825 | 0.275593847 0.624510853 1 826 | 0.310310776 0.757815199 1 827 | 0.200769573 1.068014129 1 828 | 0.393611386 0.489922085 1 829 | 0.293284180 0.564537846 1 830 | 0.150904334 0.874953285 1 831 | 0.359648477 0.984800311 1 832 | 0.425437016 0.605205704 1 833 | 0.550057275 0.953322346 1 834 | 0.369377777 0.717383758 1 835 | 0.483823544 0.776401643 1 836 | 0.665201554 
0.609337149 1 837 | 0.367662676 0.432857589 1 838 | 0.603654120 0.439204275 1 839 | 0.361992913 0.607744455 1 840 | 0.365320313 0.193465958 1 841 | 0.565587013 0.766374185 1 842 | 0.459978544 0.421990201 1 843 | 0.389662454 0.697573566 1 844 | 0.662029374 0.545080251 1 845 | 0.193287037 0.660104813 1 846 | 0.770581129 0.678276952 1 847 | 0.517729293 0.709447233 1 848 | 0.666759179 0.738395921 1 849 | 0.507357601 0.504291821 1 850 | 0.074897782 0.726624656 1 851 | 0.267419803 0.669125800 1 852 | 0.570998498 0.905961669 1 853 | 0.234076185 0.680851488 1 854 | 0.204728441 0.915150466 1 855 | 0.463600872 0.831022543 1 856 | 0.551695270 0.877530083 1 857 | 0.375064997 0.706265086 1 858 | 0.548113044 0.683542273 1 859 | 0.436411367 0.523946916 1 860 | 0.171669265 0.706402907 1 861 | 0.228628170 0.696358973 1 862 | 0.258176000 0.750019031 1 863 | 0.427636052 0.726640752 1 864 | 0.551129128 1.041844415 1 865 | 0.382357212 0.485587245 1 866 | 0.627187520 0.857796470 1 867 | 0.759430378 0.897903714 1 868 | 0.385966401 0.649098802 1 869 | 0.216206061 0.886147391 1 870 | 0.107421934 0.525437056 1 871 | 0.466619974 0.649300564 1 872 | 0.483552867 0.519368234 1 873 | 0.188288155 0.704849311 1 874 | 0.123111648 0.618943465 1 875 | 0.149201404 0.674098357 1 876 | 0.541125439 0.641048950 1 877 | 0.707584972 1.048980926 1 878 | 0.250259605 0.738434506 1 879 | 0.388929309 0.980538827 1 880 | 0.163559795 0.768820434 1 881 | 0.290938989 0.858416660 1 882 | 0.671326658 0.887569891 1 883 | 0.419646183 0.833301601 1 884 | 0.297576300 0.815635781 1 885 | 0.488205349 0.928912516 1 886 | 0.274956333 0.622947292 1 887 | 0.364636103 0.552039161 1 888 | 0.020765563 0.400801476 1 889 | 0.503582267 0.462402974 1 890 | 0.129743512 0.478205376 1 891 | 0.205737679 0.652800375 1 892 | 0.491663362 0.919029482 1 893 | 0.541928820 0.592238748 1 894 | 0.352448258 0.438954474 1 895 | 0.340546986 0.610581184 1 896 | 0.087362845 0.722352081 1 897 | 0.544510425 0.310570940 1 898 | 0.426834451 0.697519317 1 899 | 0.505026501 0.203961507 1 900 | 0.393952243 0.701709243 1 901 | 0.341212359 0.487823226 1 902 | 0.443882109 0.515215865 1 903 | 0.216623801 0.641423278 1 904 | 0.325421774 0.565006133 1 905 | 0.339954219 0.500219969 1 906 | 0.757953402 0.646113630 1 907 | 0.166511560 0.675639720 1 908 | 0.394924171 0.795156547 1 909 | 0.581373272 0.769434777 1 910 | 0.469451043 0.686613394 1 911 | 0.180074959 0.917903510 1 912 | 0.314960733 0.919406796 1 913 | 0.781475499 1.074871466 1 914 | 0.261043992 0.883671133 1 915 | 0.149151175 0.475484999 1 916 | 0.236371870 0.975832107 1 917 | 0.646323770 0.522312176 1 918 | 0.518347874 0.876936157 1 919 | 0.089471338 0.658664051 1 920 | 0.498070451 0.902620720 1 921 | 0.248059552 0.746906831 1 922 | 0.550195316 0.737298487 1 923 | 0.280602842 0.603132684 1 924 | 0.431834416 0.533887741 1 925 | 0.267799611 0.603699345 1 926 | 0.507750995 0.826989974 1 927 | -0.064478127 0.834070122 1 928 | 0.342112413 0.661643764 1 929 | 0.332313982 0.509083774 1 930 | 0.665012582 0.878512787 1 931 | 0.382910589 0.749228951 1 932 | 0.361027556 0.645111929 1 933 | 0.571981147 0.794214002 1 934 | 0.536918322 0.898472992 1 935 | 0.331872670 0.570367930 1 936 | 0.044037168 0.476641964 1 937 | 0.410716663 0.798924771 1 938 | 0.455083777 0.551831167 1 939 | 0.474594596 0.889946347 1 940 | 0.413672127 0.867650039 1 941 | 0.682171442 0.972182362 1 942 | 0.425353451 0.535316350 1 943 | 0.262277420 0.637457666 1 944 | 0.007860344 0.806598462 1 945 | 0.380999590 0.653580787 1 946 | 0.538437280 0.907997360 1 947 | 0.180415465 
0.914334885 1 948 | 0.237060285 0.752505492 1 949 | 0.829663295 0.697894513 1 950 | 0.307664951 1.074702414 1 951 | 0.239849381 0.753987444 1 952 | 0.275375404 0.806554305 1 953 | 0.416984789 0.452953422 1 954 | 0.476493007 0.858473259 1 955 | 0.564497576 0.915314697 1 956 | 0.198295169 0.534934547 1 957 | 0.294198911 0.374100529 1 958 | 0.684760671 0.892746414 1 959 | 0.168075136 0.794230658 1 960 | 0.502763522 0.712129784 1 961 | 0.129722603 0.697110450 1 962 | 0.285983065 0.796121883 1 963 | 0.097239329 0.681159777 1 964 | 0.210574775 0.792652629 1 965 | 0.593896992 0.530407106 1 966 | 0.358836790 0.671400853 1 967 | 0.197591638 0.710584968 1 968 | 0.540587182 0.774780451 1 969 | 0.175106338 0.609394118 1 970 | 0.448304389 0.663333083 1 971 | 0.289880687 0.204721503 1 972 | 0.300130047 0.934825869 1 973 | 0.152511070 0.851596486 1 974 | 0.495317475 0.631046756 1 975 | 0.072423805 0.678667079 1 976 | 0.500846416 0.689706961 1 977 | 0.159104712 0.628206422 1 978 | 0.710308164 0.777809751 1 979 | 0.750642087 0.828037270 1 980 | 0.559868855 0.783081248 1 981 | 0.400801648 0.786167018 1 982 | 0.356480531 0.911823818 1 983 | 0.844132265 0.561509712 1 984 | 0.426337951 0.777438407 1 985 | 0.461052514 0.615763585 1 986 | 0.205997206 0.785369909 1 987 | 0.118613656 0.832647177 1 988 | 0.444428480 0.747145725 1 989 | 0.278467451 0.755943870 1 990 | 0.329683958 0.704522943 1 991 | 0.338924385 0.739418880 1 992 | 0.427674817 0.962589298 1 993 | 0.324169980 0.808410845 1 994 | 0.526486063 0.856427139 1 995 | 0.664857776 0.773954077 1 996 | 0.327675416 0.608013752 1 997 | 0.247589562 0.279270348 1 998 | 0.418514564 1.044157214 1 999 | 0.232314519 0.819642835 1 1000 | 0.762040971 0.573218465 1 1001 | -------------------------------------------------------------------------------- /week4/rip_dat_tr.txt: -------------------------------------------------------------------------------- 1 | 0.05100797 0.16086164 0 2 | -0.74807425 0.08904024 0 3 | -0.77293371 0.26317168 0 4 | 0.21837360 0.12706142 0 5 | 0.37268336 0.49656200 0 6 | -0.62931544 0.63202159 0 7 | -0.43307167 0.14479166 0 8 | -0.84151970 -0.19131316 0 9 | 0.47525648 0.22483671 0 10 | 0.32082976 0.32721288 0 11 | 0.32061253 0.33407547 0 12 | -0.89077472 0.41168783 0 13 | 0.17850119 0.44691359 0 14 | 0.31558002 0.38853383 0 15 | 0.55777224 0.47272748 0 16 | 0.03191877 0.01222964 0 17 | 0.25090585 0.30716705 0 18 | 0.23571547 0.22493837 0 19 | -0.07236203 0.33376524 0 20 | 0.50440241 0.08054579 0 21 | -0.63223351 0.44552458 0 22 | -0.76784656 0.23614689 0 23 | -0.70017557 0.21038848 0 24 | -0.64713491 0.15921366 0 25 | -0.76739248 0.09259038 0 26 | -0.51788734 0.03288107 0 27 | 0.17516644 0.34534871 0 28 | -0.68031190 0.47612156 0 29 | 0.01595199 0.32167526 0 30 | -0.71481078 0.51421443 0 31 | 0.07837946 0.32284981 0 32 | -0.80872251 0.47036593 0 33 | -0.84211234 0.09294232 0 34 | -0.98591577 0.48309267 0 35 | 0.29104081 0.34275967 0 36 | 0.24321541 0.51488295 0 37 | -0.60104419 0.05060116 0 38 | -1.24652451 0.45923165 0 39 | -0.82769016 0.36187460 0 40 | -0.62117301 -0.10912158 0 41 | -0.70584105 0.65907662 0 42 | 0.06718867 0.60574850 0 43 | 0.30505147 0.47417973 0 44 | 0.60788138 0.39361588 0 45 | -0.78937483 0.17591675 0 46 | -0.53123209 0.42652809 0 47 | 0.25202071 0.17029707 0 48 | -0.57880357 0.26553665 0 49 | -0.83176749 0.54447377 0 50 | -0.69859164 0.38566851 0 51 | -0.73642607 0.11857527 0 52 | -0.93496195 0.11370707 0 53 | 0.43959309 0.41430638 0 54 | -0.54690854 0.24956276 0 55 | -0.08405550 0.36521058 0 56 | 0.32211458 0.69087105 0 57 
| 0.10764739 0.57946932 0 58 | -0.71864030 0.25645757 0 59 | -0.87877752 0.45064757 0 60 | -0.69846046 0.95053870 0 61 | 0.39757434 0.11810207 0 62 | -0.50451354 0.57196376 0 63 | 0.25023622 0.39783889 0 64 | 0.61709156 0.10185808 0 65 | 0.31832860 0.08790562 0 66 | -0.57453363 0.18624195 0 67 | 0.09761865 0.55176786 0 68 | 0.48449339 0.35372973 0 69 | 0.52400684 0.46616851 0 70 | -0.78138463 -0.07534713 0 71 | -0.49704591 0.59948077 0 72 | -0.96984525 0.46624927 0 73 | 0.43541407 0.12192386 0 74 | -0.67942462 0.30753942 0 75 | -0.62529036 0.07099046 0 76 | -0.02318116 0.40442601 0 77 | 0.23200141 0.71066846 0 78 | 0.09384354 0.46674396 0 79 | 0.14234301 0.17898711 0 80 | -0.61686357 0.25507763 0 81 | 0.23636288 0.51543839 0 82 | 0.38914177 0.40429568 0 83 | -0.95178678 -0.03772239 0 84 | 0.24087822 0.71948890 0 85 | 0.12446266 0.45178849 0 86 | -0.60566430 0.26906478 0 87 | -0.71397188 0.30871780 0 88 | 0.31008428 0.34675335 0 89 | 0.18018786 0.46204643 0 90 | -0.42663885 0.64723225 0 91 | 0.06143230 0.32491150 0 92 | 0.07736952 0.32183287 0 93 | 0.42814970 0.13445957 0 94 | -0.80250753 0.66878999 0 95 | 0.40142623 0.42516398 0 96 | 0.37084776 0.26407123 0 97 | -0.80774748 0.41485899 0 98 | 0.50163585 0.23934856 0 99 | 0.58238323 0.22842741 0 100 | -0.59136100 0.30230321 0 101 | -0.87037236 0.26941446 0 102 | -0.72086765 0.19676678 0 103 | 0.27778443 0.21792253 0 104 | 0.33240813 0.27349865 0 105 | -0.14092068 0.39247351 0 106 | -0.59759518 0.14790267 0 107 | -0.85581534 0.14513961 0 108 | -0.88912232 0.26896001 0 109 | 0.21345680 0.43611756 0 110 | -0.53467949 0.57901229 0 111 | 0.31686848 0.39705856 0 112 | -0.68121733 0.04209840 0 113 | -0.97586127 0.45964811 0 114 | 0.41457183 0.27141230 0 115 | 0.32751292 0.36780137 0 116 | -0.93209192 0.09362034 0 117 | 0.58395341 0.47147282 0 118 | -0.44437309 0.23010142 0 119 | 0.29109441 0.19365556 0 120 | -0.51080722 0.41496003 0 121 | -0.96597511 0.17931052 0 122 | 0.18741315 0.29747132 0 123 | 0.17965417 0.45175449 0 124 | -0.72689602 0.35728387 0 125 | -0.54339877 0.41012013 0 126 | -0.59823393 0.98701425 1 127 | -0.20194736 0.62101680 1 128 | 0.47146103 0.48221146 1 129 | -0.09821987 0.58755577 1 130 | -0.35657658 0.63709705 1 131 | 0.63881392 0.42112135 1 132 | 0.62980614 0.28146085 1 133 | -0.46223286 0.61661031 1 134 | -0.07331555 0.55821736 1 135 | -0.55405533 0.51253129 1 136 | -0.43761773 0.87811781 1 137 | -0.22237814 0.88850773 1 138 | 0.09346162 0.67310494 1 139 | 0.53174745 0.54372650 1 140 | 0.40207539 0.51638462 1 141 | 0.47555171 0.65056336 1 142 | -0.23383266 0.63642580 1 143 | -0.31579316 0.75031340 1 144 | -0.47351720 0.63854125 1 145 | 0.59239464 0.89256953 1 146 | -0.22605324 0.79789454 1 147 | -0.43995011 0.52099256 1 148 | -0.54645044 0.74577198 1 149 | 0.46404306 0.51065152 1 150 | -0.15194296 0.81218439 1 151 | 0.48536395 0.82018093 1 152 | 0.34725649 0.70813773 1 153 | 0.43897015 0.62817158 1 154 | -0.21415914 0.64363951 1 155 | 0.57380231 0.63713466 1 156 | 0.38717361 0.58578395 1 157 | 0.32038322 0.53529127 1 158 | -0.20781491 0.65132467 1 159 | -0.18651283 0.81754816 1 160 | 0.24752692 0.39081936 1 161 | 0.66049881 0.89919213 1 162 | -0.28658801 0.73375946 1 163 | -0.32588080 0.39865509 1 164 | -0.25204565 0.67358326 1 165 | 0.37259022 0.49785904 1 166 | -0.29096564 1.04372060 1 167 | -0.30469807 0.86858292 1 168 | -0.21389978 1.09317811 1 169 | -0.36830015 0.75639546 1 170 | -0.46928218 0.88775091 1 171 | 0.39350146 0.77975197 1 172 | -0.45639966 0.80523454 1 173 | 0.51128242 0.76606136 1 174 | 0.22550468 
0.46451215 1 175 | 0.01462984 0.40190926 1 176 | -0.19172785 0.80943313 1 177 | 0.38323479 0.75601744 1 178 | 0.49791612 0.61334375 1 179 | 0.35335230 0.77324337 1 180 | -0.34722575 0.70177856 1 181 | 0.58380468 0.76357539 1 182 | -0.13727764 0.71246351 1 183 | 0.38827268 0.44977123 1 184 | -0.53172709 0.61934293 1 185 | -0.11684624 0.87851210 1 186 | 0.54335864 0.41174865 1 187 | -0.45399302 0.66512988 1 188 | -0.21913200 0.83484947 1 189 | 0.30485742 0.98028760 1 190 | 0.65676798 0.75766017 1 191 | 0.61420447 0.75039019 1 192 | -0.45809964 0.77968606 1 193 | -0.21617465 0.88626305 1 194 | -0.26016108 0.81008591 1 195 | 0.31884531 0.84517725 1 196 | -0.23727415 0.80178784 1 197 | 0.58310323 0.77709806 1 198 | 0.02841337 0.75792620 1 199 | -0.41840136 0.68041440 1 200 | 0.67412880 0.60245461 1 201 | -0.25278281 0.70526103 1 202 | 0.51609843 0.62092390 1 203 | 0.20392294 0.91641482 1 204 | -0.17207124 1.00884096 1 205 | 0.27274507 0.29346977 1 206 | 0.07634798 0.56222204 1 207 | -0.36653499 0.64831007 1 208 | 0.44290673 0.80087721 1 209 | -0.19976385 0.54295162 1 210 | -0.54075738 0.65293033 1 211 | -0.07060266 1.00296912 1 212 | 0.50715054 0.35045758 1 213 | -0.06048611 0.62982713 1 214 | 0.21532928 0.60260249 1 215 | 0.46809108 0.87182416 1 216 | -0.29888511 0.73669866 1 217 | 0.86129620 0.47289330 1 218 | 0.70120877 0.74572893 1 219 | -0.11342797 0.60067099 1 220 | 0.31234354 0.90756345 1 221 | -0.12172541 0.84112851 1 222 | 0.36867857 0.37052586 1 223 | 0.57311489 0.40949740 1 224 | -0.25841225 0.67192335 1 225 | 0.30937186 0.50823318 1 226 | 0.43319338 0.77016967 1 227 | -0.30448035 0.57820106 1 228 | 0.44276338 0.58023403 1 229 | -0.19442057 0.89876808 1 230 | -0.06105237 0.74184946 1 231 | 0.07619347 0.35386246 1 232 | 0.85826993 0.95819523 1 233 | 0.37039200 0.72342401 1 234 | 0.51481515 0.76203996 1 235 | 0.43127521 0.54259166 1 236 | 0.42286091 0.65242185 1 237 | 0.29815001 0.93453682 1 238 | 0.37128253 0.70089181 1 239 | -0.51528729 0.76473490 1 240 | 0.38525783 0.65528189 1 241 | -0.34825368 0.50529981 1 242 | 0.68510504 0.78067440 1 243 | -0.36528923 0.45703265 1 244 | -0.40903577 0.74230433 1 245 | 0.43574387 0.44689789 1 246 | 0.26887846 0.44559230 1 247 | -0.49254862 1.01443372 1 248 | 0.07615960 0.63795180 1 249 | 0.49226224 0.46876241 1 250 | -0.40249641 0.71301084 1 251 | -------------------------------------------------------------------------------- /week4/wk_4.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week4/wk_4.pdf -------------------------------------------------------------------------------- /week4/wk_4_lect_1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week4/wk_4_lect_1.pdf -------------------------------------------------------------------------------- /week4/wk_4_lect_2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week4/wk_4_lect_2.pdf -------------------------------------------------------------------------------- /week5/cout.m: -------------------------------------------------------------------------------- 1 | function [J,lam] = cout(H,x,y,C,ind) 2 | 3 | 4 | [n,m] = size(H); 5 | X = zeros(n,1); 6 | posok 
= find(ind > 0); 7 | posA = find(ind==0); % list of saturated constraints 8 | posB = find(ind==-1); % list of saturated constraints 9 | % keyboard 10 | X(posok) = x; 11 | X(posB) = C; 12 | 13 | J = 0.5 *X'*H*X - sum(X); 14 | % normally 0 15 | % keyboard 16 | 17 | lam = y'*X; 18 | 19 | 20 | -------------------------------------------------------------------------------- /week5/cross_val_wk5.m: -------------------------------------------------------------------------------- 1 | function [cv_err, cv_std] = cross_val_wk5(x,f,k) 2 | N = size(x,1); 3 | CV = []; 4 | 5 | for n=1:N 6 | X = x; 7 | t = f; 8 | X(n,:) = []; 9 | t(n) = []; 10 | Xt = x(n,:); 11 | tt = f(n); 12 | [e,tp] = knn_multi_class(X,t,Xt,tt,k); 13 | CV = [CV; e]; 14 | end 15 | 16 | cv_err = mean(CV); 17 | cv_std = ((N-1)/N)*sum((CV-cv_err).^2); 18 | -------------------------------------------------------------------------------- /week5/digits_3_8.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week5/digits_3_8.mat -------------------------------------------------------------------------------- /week5/knn_multi_class.m: -------------------------------------------------------------------------------- 1 | function [error,tpred] = knn_multi_class(X,t,Xtest,ttest,k) 2 | 3 | Ntest = size(Xtest,1); 4 | N = size(X,1); 5 | tpred = zeros(Ntest,1); 6 | K = max(t); 7 | 8 | for n=1:Ntest 9 | Dist = sum( (( repmat(Xtest(n,:),N,1) - X ).^2),2 ); 10 | [sorted_list,sorted_index] = sort(Dist); 11 | [max_k, index_max_k] = max(histc(t(sorted_index(1:k)),1:K)); 12 | tpred(n) = index_max_k; 13 | end 14 | 15 | error = 100*sum(tpred ~= ttest)/Ntest; -------------------------------------------------------------------------------- /week5/laboratory_wk_5.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week5/laboratory_wk_5.pdf -------------------------------------------------------------------------------- /week5/lect_1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week5/lect_1.pdf -------------------------------------------------------------------------------- /week5/lect_2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week5/lect_2.pdf -------------------------------------------------------------------------------- /week5/monqp0.m: -------------------------------------------------------------------------------- 1 | function [alpha, lambda, pos] = monqp0(H,b,c,C,l,verbose,X,ps) 2 | 3 | % min 1/2 u' H u - b' u (here u is alpha) 4 | % u 5 | % subject to the constraint c' u = a i.e. y'*alpha = 0 6 | % 7 | % and 0 <= u <= C 8 | 9 | 10 | %-------------------------------------------------------------------------- 11 | % verifications 12 | %-------------------------------------------------------------------------- 13 | [n,d] = size(H); 14 | [nl,nc] = size(b); 15 | [nlc,ncc] = size(c); 16 | if d ~= n 17 | error('H must be a square matrix n by n'); 18 | end 19 | if nl ~= n 20 | error('H and b must have the same number of rows'); 21 | end 22 | 23 | if nlc ~= n 24 | error('H and c must have the
same number of rows'); 25 | end 26 | if ncc ~= 1 27 | error('c must be a column vector'); 28 | end 29 | if nc ~= 1 30 | error('b must be a column vector'); 31 | end 32 | 33 | if nargin < 5 % default value for the regularisation parameter 34 | l = 0; 35 | end; 36 | if nargin < 4 % default value for the upper bound 37 | C = 100; 38 | end; 39 | 40 | if nargin < 6 % default value for the display parameter 41 | verbose = 0; 42 | end; 43 | 44 | 45 | fid = 1; %default value, current MATLAB window 46 | %-------------------------------------------------------------------------- 47 | 48 | 49 | nsup = n; 50 | I = nsup*eye(nsup); 51 | 52 | M = [(H+l*I) c ; c' 0 ]; 53 | Minit = M; 54 | B = [b;0]; 55 | Binit = B; 56 | cinit = c; 57 | 58 | ind = 1;indout=[];indd = (1:n);pos = indd; 59 | 60 | stop = 0; 61 | alpha = []; 62 | Msup = []; 63 | indsuptot = []; 64 | 65 | %-------------------------------------------------------------------------- 66 | % I N I T I A L I S A T I O N 67 | %-------------------------------------------------------------------------- 68 | 69 | %k = 2; % c in (-1,1) and Y in (1,2) 70 | %[A]=knnd(X,X,(c+3)/2,k); 71 | %malclasses = find(A~=((c+3)/2)); 72 | %bienclasses = find(A==((c+3)/2)); % keyboard 73 | 74 | bienclasses = (2:n); 75 | malclasses = 1; 76 | 77 | % keyboard 78 | 79 | if isempty(malclasses) 80 | disp('there are no misclassified points, which is suspicious'); % what should be done here ??? 81 | end; 82 | 83 | alpha = zeros(n,1); 84 | alpha(malclasses) = C/2; 85 | 86 | nsup = length(malclasses); 87 | 88 | % disp('the constraint : nos active'); disp([c'*alpha nsup]); 89 | alpha(bienclasses) = []; 90 | M(bienclasses,:) = []; 91 | M(:,bienclasses) = []; 92 | B(bienclasses) = []; % keyboard 93 | pos = malclasses; 94 | indd = 0*indd; 95 | indd(malclasses) = malclasses; 96 | 97 | %-------------------------------------------------------------------------- 98 | % M A I N L O O P 99 | %-------------------------------------------------------------------------- 100 | Jold = 10000000000000000000; sol = 0; 101 | if verbose ~= 0 102 | disp(' Cost Delta Cost #support #up saturate'); 103 | nbverbose = 0; 104 | end 105 | 106 | while stop == 0 107 | 108 | I = nsup*eye(nsup); 109 | [nn,mm] = size(Msup); 110 | Un = ones(mm,1); 111 | lambdaA = sol(length(sol)) ; 112 | % keyboard; 113 | [J,yx] = cout(H,alpha,cinit,C,indd); 114 | if verbose ~= 0 115 | nbverbose = nbverbose+1; 116 | if nbverbose == 20 117 | disp(' Cost Delta Cost #support #up saturate'); 118 | nbverbose = 0; 119 | end 120 | if Jold == 0 121 | fprintf(fid,'| %11.2f | %8.4f | %6.0f | %6.0f |\n',[J (Jold-J) nsup length(indsuptot)]); 122 | else 123 | fprintf(fid,'| %11.2f | %8.4f | %6.0f | %6.0f |\n',[J (Jold-J)/abs(Jold) nsup length(indsuptot)]); 124 | end 125 | end 126 | Jold = J; 127 | 128 | if isempty(Msup) 129 | sol = M \ B; 130 | else 131 | sol = M \ (B-[C*Msup*Un ; C*sum(cinit(indsuptot))]); 132 | end 133 | alphaNew = sol(1:length(sol)-1); % the last variable must 134 | % not be removed 135 | 136 | if min(alphaNew)<0 | max(alphaNew)>C 137 | indinf = find( alphaNew < 0); 138 | indsup = find( alphaNew > C); 139 | 140 | d = alphaNew - alpha; % descent direction 141 | 142 | [tI indI] = min(-alpha(indinf)./d(indinf)); % descent step 143 | [tS indS] = min((C-alpha(indsup))./d(indsup)); % descent step 144 | if isempty(tI) , tI = tS + 1; end; 145 | if isempty(tS) , tS = tI + 1; end; 146 | t = min(tI,tS); 147 | % keyboard; 148 | alpha = alpha + t * d; 149 | 150 | pos = find(indd > 0); % indices of the active variables 151 | % disp('descending - main loop'); %
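% (note: t above is the largest step along the descent direction d before some component of alpha hits a bound at 0 or C; the blocking variable is then removed from the active set in the branch below)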
152 | if t == tI 153 | ind = indinf(indI); 154 | if ~isempty(Msup), Msup(ind,:) = []; end; 155 | % disp(' removing a small one '); 156 | % disp([indd(pos(ind)) length(alphaNew)]); 157 | indd(pos(ind)) = 0; 158 | else % keyboard 159 | ind = indsup(indS); 160 | indsuptot = [indsuptot ; pos(ind)]; 161 | Msup = Minit(pos,indsuptot); % saturated upper-bound constraints 162 | Msup(ind,:) = []; 163 | % disp(' removing a large one '); 164 | % disp([indd(pos(ind)) length(alphaNew)]); 165 | indd(pos(ind)) = -1; 166 | 167 | end 168 | M(:,ind) = []; 169 | M(ind,:) = []; 170 | alpha(ind) = []; 171 | B(ind) = []; 172 | 173 | nsup = nsup - 1; 174 | pos(ind) = []; 175 | 176 | else 177 | soltot = zeros(n,1); 178 | posok = find(indd > 0); 179 | posA = find(indd==0); % list of saturated constraints 180 | posB = find(indd==-1); % list of saturated constraints 181 | % keyboard 182 | soltot(posok) = alphaNew; 183 | soltot(posB) = C; 184 | % Lagrange multipliers 185 | lamb = (H*soltot - b) + c*sol(nsup+1); % 186 | % ; % disp(lamb'); 187 | if min(lamb(posA)) < -sqrt(eps) | min(-lamb(posB)) < -sqrt(eps) % a constraint is added. 188 | 189 | if isempty(posB) | min(lamb(posA)) < min(-lamb(posB)) 190 | [minlam lampos] = min(lamb(posA)); 191 | lampos = posA(lampos); 192 | % disp('going up - a small one'); 193 | camonte = 'petit'; 194 | else 195 | [minlam lampos] = min(-lamb(posB)); 196 | lampos = posB(lampos); 197 | % disp('going up - a large one'); % keyboard 198 | camonte = 'grose'; 199 | aaa = find(indsuptot == lampos); 200 | indsuptot(aaa) = []; 201 | Msup(:,aaa) = []; 202 | end 203 | %disp('indsuptot') 204 | %disp(indsuptot') 205 | % disp([lampos length(alphaNew)]); % 206 | 207 | inserpos = max(find(posok < lampos)); 208 | if isempty(inserpos) inserpos=0; end; 209 | indd(lampos) = lampos; % 210 | 211 | M = [M(:,1:inserpos) , Minit([posok n+1],lampos) , M(:,inserpos+1:nsup+1)]; 212 | B = [B(1:inserpos);Binit(lampos);B(inserpos+1:nsup+1)]; 213 | if camonte == 'petit' 214 | alpha = [alphaNew(1:inserpos);0;alphaNew(inserpos+1:nsup)]; 215 | else 216 | alpha = [alphaNew(1:inserpos);C;alphaNew(inserpos+1:nsup)]; 217 | end 218 | pos = [posok(1:inserpos) lampos posok(inserpos+1:nsup)]; 219 | M = [M(1:inserpos,:) ; Minit(lampos,[pos n+1]) ; M(inserpos+1:nsup+1,:)]; 220 | 221 | if isempty(indsuptot) 222 | Msup = []; 223 | elseif isempty(Msup) 224 | Msup = Minit(lampos,indsuptot) ; 225 | else 226 | Msup = [Msup(1:inserpos,:) ; Minit(lampos,indsuptot) ; Msup(inserpos+1:nsup,:)]; 227 | end; 228 | 229 | nsup = nsup + 1; 230 | else 231 | stop = 1; % the minimum has been reached...
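% (note: reaching this branch means no Lagrange multiplier violates its sign condition, i.e. the KKT optimality conditions are satisfied, so the algorithm terminates)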
232 | end 233 | end 234 | %-------------------------------------------------------------------------- 235 | % E N D M A I N L O O P 236 | %-------------------------------------------------------------------------- 237 | 238 | end 239 | 240 | 241 | lambda = sol(length(alphaNew)+1); 242 | alpha = alphaNew(1:nsup); % disp(indd); 243 | pos = pos(1:nsup); 244 | 245 | if ~isempty(indsuptot) 246 | alpha = zeros(n,1); 247 | posok = find(indd > 0); 248 | posA = find(indd==0); % list of saturated constraints 249 | posB = find(indd==-1); % list of saturated constraints 250 | % keyboard 251 | alpha(posok) = alphaNew; 252 | alpha(posB) = C; 253 | pos = sort([posok posB]); 254 | alpha(posA) = []; 255 | 256 | end 257 | -------------------------------------------------------------------------------- /week5/svm_demo.m: -------------------------------------------------------------------------------- 1 | clear 2 | Step=0.5; 3 | N = 50; 4 | C=1000; 5 | X = [randn(N/2,2);randn(N/2,2)+[ones(N/2,1).*6 zeros(N/2,1)]]; 6 | t=[ones(N/2,1);-ones(N/2,1)]; 7 | [alpha,w_0,alpha_index]=monqp0(diag(t)*X*X'*diag(t),ones(N,1),t,C,1e-6); 8 | 9 | %Define contour grid 10 | mn = min(X); 11 | mx = max(X); 12 | [x1,x2]=meshgrid(floor(mn(1)):Step:ceil(mx(1)),floor(mn(2)):Step:ceil(mx(2))); 13 | [n11,n12]=size(x1); 14 | [n21,n22]=size(x2); 15 | XG=[reshape(x1,n11*n12,1) reshape(x2,n21*n22,1)]; 16 | 17 | f = (t(alpha_index).*alpha)'*X(alpha_index,:)*XG' + w_0; 18 | 19 | plot(X(alpha_index,1),X(alpha_index,2),'go'); 20 | hold 21 | plot(X(1:N/2,1),X(1:N/2,2),'.') 22 | plot(X(N/2+1:N,1),X(N/2+1:N,2),'r.') 23 | contour(x1,x2,reshape(f,[n11,n12]),[0 0]); 24 | hold off -------------------------------------------------------------------------------- /week5/svm_demo_kernels.m: -------------------------------------------------------------------------------- 1 | clear 2 | Step=0.5; 3 | polypower = 3; 4 | width = 1; 5 | 6 | 7 | X=load('rip_dat_tr.txt'); 8 | Xt=load('rip_dat_te.txt'); 9 | t=2.*X(:,3) - 1; 10 | X(:,3)=[]; 11 | tt=2.*Xt(:,3) - 1; 12 | Xt(:,3)=[]; 13 | N = size(X,1); 14 | Nt = size(Xt,1); 15 | Err = []; 16 | 17 | for C = 1:20 18 | K = kernel_func(X,X,'poly',width,polypower); 19 | [alpha,w_0,alpha_index]=monqp0(diag(t)*K*diag(t),ones(N,1),t,C,1e-6); 20 | Kt = kernel_func(X(alpha_index,:),Xt,'poly',width,polypower); 21 | ft = sign((t(alpha_index).*alpha)'*Kt + w_0); 22 | err=100 - 100*sum(ft' == tt)/Nt; 23 | Err=[Err;err]; 24 | fprintf('Soft Margin Parameter = %f; Test Error = %f\n',C, err); 25 | end 26 | figure 27 | subplot(121) 28 | plot(Err); 29 | [ErrMin,Cmin]=min(Err); 30 | [alpha,w_0,alpha_index]=monqp0(diag(t)*K*diag(t),ones(N,1),t,Cmin,1e-6); 31 | 32 | %Define contour grid 33 | mn = min(X); 34 | mx = max(X); 35 | [x1,x2]=meshgrid(floor(mn(1)):Step:ceil(mx(1)),floor(mn(2)):Step:ceil(mx(2))); 36 | [n11,n12]=size(x1); 37 | [n21,n22]=size(x2); 38 | XG=[reshape(x1,n11*n12,1) reshape(x2,n21*n22,1)]; 39 | KG = kernel_func(X(alpha_index,:),XG,'poly',width,polypower); 40 | f = (t(alpha_index).*alpha)'*KG + w_0; 41 | subplot(122) 42 | plot(X(alpha_index,1),X(alpha_index,2),'go'); 43 | hold 44 | plot(X(1:N/2,1),X(1:N/2,2),'.') 45 | plot(X(N/2+1:N,1),X(N/2+1:N,2),'r.') 46 | contour(x1,x2,reshape(f,[n11,n12]),[0 0]); 47 | hold off -------------------------------------------------------------------------------- /week5/wk_5.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week5/wk_5.pdf
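A note on svm_demo_kernels.m above: the same pipeline runs unchanged with the RBF ('gauss') kernel that kernel_func.m already provides - only the kernel construction calls differ. A minimal sketch, assuming the Ripley data and the variables X, t, Xt, tt, N, Nt are set up exactly as at the top of svm_demo_kernels.m; the width and C values are illustrative guesses, not tuned choices:

width = 0.5;                                           %hypothetical RBF width - select by cross-validation in practice
C = 10;                                                %hypothetical soft-margin parameter
K = kernel_func(X,X,'gauss',width,1);                  %Gram matrix on the training data
[alpha,w_0,alpha_index] = monqp0(diag(t)*K*diag(t),ones(N,1),t,C,1e-6);
Kt = kernel_func(X(alpha_index,:),Xt,'gauss',width,1); %kernel between support vectors and test points
ft = sign((t(alpha_index).*alpha)'*Kt + w_0);          %predicted labels in {-1,+1}
err = 100 - 100*sum(ft' == tt)/Nt;                     %percentage test error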
-------------------------------------------------------------------------------- /week6/Gauss_Mix_Data.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week6/Gauss_Mix_Data.mat -------------------------------------------------------------------------------- /week6/Lab_6_EM_Data.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week6/Lab_6_EM_Data.mat -------------------------------------------------------------------------------- /week6/gauss_density_est.m: -------------------------------------------------------------------------------- 1 | %This is a demo of the maximum likelihood estimates of 2 | %the parameters of a 2-D Gaussian from a finite sample of data 3 | %the assumption that the parametric form of the density is Gaussian 4 | %is of course well justified in this illustrative example 5 | 6 | function L = gauss_density_est(N) 7 | %Set true mean and covariance for Gaussian 8 | m = [1 3]'; 9 | C = [1.5 0.6;0.6 0.4]; 10 | L=[]; 11 | 12 | %Create a grid of points on which to compute the density 13 | [x1,x2]=meshgrid(-3:0.1:6,-3:0.1:6); 14 | [n,n]=size(x1); 15 | XX=[reshape(x1,n*n,1) reshape(x2,n*n,1)]; 16 | 17 | %Generate a sample of N points drawn from the true Gaussian density 18 | for i=1:N 19 | X(i,:) = multi_var_gauss_sampler(m,C)'; 20 | end 21 | 22 | %Make maximum likelihood estimates of the parameters 23 | m_hat = mean(X); 24 | C_hat = cov(X); 25 | %compute the probability density of the data sample under the 2-D Gaussian 26 | %with the ML parameter estimates 27 | p=gauss(m_hat',C_hat,X); 28 | Log_Like=mean(log(p)); 29 | 30 | 31 | %Nice picture of the data sample and the isocontours of probability density 32 | subplot(121) 33 | plot(X(:,1),X(:,2),'.'); 34 | hold on; 35 | pt=gauss(m_hat',C_hat,XX); 36 | contour(x1,x2,reshape(pt,[n,n])); 37 | subplot(122) 38 | contour(x1,x2,reshape(pt,[n,n])); 39 | hold on; 40 | %compute true density 41 | pt=gauss(m',C,XX); 42 | contour(x1,x2,reshape(pt,[n,n])); 43 | hold off; 44 | 45 | %Report the true and estimated parameter values 46 | fprintf('True Mean [%f %f]\n',m(1),m(2)); 47 | fprintf('Est Mean [%f %f]\n\n',m_hat(1),m_hat(2)); 48 | fprintf('True Covariance [%f %f;%f %f]\n',C(1,1),C(1,2),C(2,1),C(2,2)); 49 | fprintf('Est Covariance [%f %f;%f %f]\n\n',C_hat(1,1),C_hat(1,2),C_hat(2,1),C_hat(2,2)); 50 | fprintf('Log Likelihood Score = %f\n',Log_Like); 51 | -------------------------------------------------------------------------------- /week6/gauss_mix_em_demo.m: -------------------------------------------------------------------------------- 1 | function [Ptrain,Ptest] = gauss_mix_em_demo(X,Xtest,M,Max_Its) 2 | 3 | [N,D]=size(X); 4 | 5 | %Randomly initialise the Posteriors in the E-step 6 | P=rand(N,M); 7 | P=P./repmat(sum(P')',1,M); 8 | 9 | %Store the likelihood of the data under the mixture model in this matrix 10 | %for plotting during the EM run 11 | LL = []; 12 | 13 | %This is the main EM algorithm 14 | 15 | for n=1:Max_Its 16 | for m=1:M 17 | %M_step 18 | MU_hat(m,:)= sum(X.*repmat(P(:,m),1,D))./sum(P(:,m)); 19 | XT = (X - repmat(MU_hat(m,:),N,1)).*repmat(P(:,m),1,D); 20 | Cm = (XT'*(X - repmat(MU_hat(m,:),N,1)))./sum(P(:,m)); 21 | C_hat(m,:) = reshape(Cm,D^2,1)'; 22 | Pr(m) = mean(P(:,m)); 23 | %E_step 24 | P(:,m) =
gauss(MU_hat(m,:),reshape(C_hat(m,:),D,D),X).*Pr(m); 25 | end 26 | 27 | %compute the likelihood 28 | Ptrain = mean(log(sum(P'))); 29 | LL=[LL;Ptrain]; 30 | 31 | %Normalise the Posterior 32 | P=P./repmat(sum(P')',1,M); 33 | 34 | %Nice Demo Graphics 35 | if D==2 36 | [x1,x2]=meshgrid(-6:0.1:6,-6:0.1:6); 37 | [n,n]=size(x1); 38 | XX=[reshape(x1,n*n,1) reshape(x2,n*n,1)]; 39 | subplot(121) 40 | 41 | %compute the log likelihood 42 | pt=0; 43 | for m=1:M 44 | pt=pt + Pr(m)*gauss(MU_hat(m,:)',reshape(C_hat(m,:),D,D),XX); 45 | end 46 | Ptest = mean(log(pt)); 47 | %plot the isocontours of likelihood 48 | contour(x1,x2,reshape(pt,[n,n])); 49 | hold on; 50 | plot(X(:,1),X(:,2),'.'); drawnow 51 | hold off; 52 | pause(0.1) 53 | subplot(122) 54 | plot(LL);drawnow 55 | title('Data Log Likelihood') 56 | end 57 | %Compute Test Data Likelihood 58 | pt=0; 59 | for m=1:M 60 | pt=pt + Pr(m)*gauss(MU_hat(m,:)',reshape(C_hat(m,:),D,D),Xtest); 61 | end 62 | Ptest = mean(log(pt)); 63 | end -------------------------------------------------------------------------------- /week6/lect_1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week6/lect_1.pdf -------------------------------------------------------------------------------- /week6/mix_gauss_density.m: -------------------------------------------------------------------------------- 1 | function [L,X] = mix_gauss_density(N) 2 | L=[]; 3 | %The data is drawn with equal probability from two Gaussian 4 | %distributions with specific means and covariances as defined below 5 | N2 = floor(N/2); 6 | m1 = [0.5 2]'; 7 | m2 = [3 4]'; 8 | C1 = eye(2); 9 | C2 = C1; 10 | 11 | %Create a grid of points on which to compute the density 12 | [x1,x2]=meshgrid(-3:0.1:6,-3:0.1:6); 13 | [n,n]=size(x1); 14 | XX=[reshape(x1,n*n,1) reshape(x2,n*n,1)]; 15 | 16 | %Generate a data sample with equal numbers of points drawn from both 17 | %Gaussians 18 | for i=1:N2 19 | X(i,:) = multi_var_gauss_sampler(m1,C1)'; 20 | X(i+N2,:) = multi_var_gauss_sampler(m2,C2)'; 21 | end 22 | 23 | %Estimate the mean and covariance of a single Gaussian - so you are 24 | %assuming that a single parametric form of density is sufficient to 25 | %faithfully represent the underlying data generating mechanism 26 | m_hat = mean(X); 27 | C_hat = cov(X); 28 | p=gauss(m_hat',C_hat,X); 29 | Gauss_Log_Like=mean(log(p)); 30 | 31 | 32 | %Estimate the mean and covariance of a mixture of two Gaussians - we are 33 | %assuming that we know which points were drawn from which Gaussian 34 | m1_hat = mean(X(1:N2,:)); 35 | m2_hat = mean(X(N2+1:end,:)); 36 | C1_hat = cov(X(1:N2,:)); 37 | C2_hat = cov(X(N2+1:end,:)); 38 | p=0.5*gauss(m1_hat',C1_hat,X) + 0.5*gauss(m2_hat',C2_hat,X); 39 | Mix_Gauss_Log_Like=mean(log(p)); 40 | 41 | %plot the data and density isocontours assuming a single Gaussian 42 | subplot(121) 43 | plot(X(:,1),X(:,2),'.'); 44 | hold on; 45 | pt=gauss(m_hat',C_hat,XX); 46 | contour(x1,x2,reshape(pt,[n,n])); 47 | hold off; 48 | 49 | %plot the data and density isocontours assuming two Gaussians generate the 50 | %data 51 | subplot(122) 52 | plot(X(:,1),X(:,2),'.'); 53 | hold on; 54 | pt=0.5*gauss(m1_hat',C1_hat,XX) + 0.5*gauss(m2_hat',C2_hat,XX); 55 | contour(x1,x2,reshape(pt,[n,n])); 56 | hold off; 57 | %Comparison of average log-likelihood scores on TEST data from the grid under the two 58 | %assumptions - the true semi-parametric form, as it is the true density, should yield a
higher average 59 | %log-likelihood 60 | fprintf('Log Likelihood Score for Gaussian = %f\n',Gauss_Log_Like); 61 | fprintf('Log Likelihood Score for Gaussian Mixture = %f\n',Mix_Gauss_Log_Like); 62 | 63 | %Mesh plots 64 | % figure 65 | % subplot(121) 66 | % pt=gauss(m_hat',C_hat,XX); 67 | % mesh(x1,x2,reshape(pt,[n,n])); 68 | % subplot(122) 69 | % pt=0.5*gauss(m1_hat',C1_hat,XX) + 0.5*gauss(m2_hat',C2_hat,XX); 70 | % mesh(x1,x2,reshape(pt,[n,n])); 71 | 72 | -------------------------------------------------------------------------------- /week6/multi_var_gauss_sampler.m: -------------------------------------------------------------------------------- 1 | function x = multi_var_gauss_sampler(m,C) 2 | D = size(C,1); 3 | x = m + chol(C)'*randn(D,1); -------------------------------------------------------------------------------- /week6/wk_6.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week6/wk_6.pdf -------------------------------------------------------------------------------- /week6/wk_6_laboratory.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week6/wk_6_laboratory.pdf -------------------------------------------------------------------------------- /week7/faces_demo.m: -------------------------------------------------------------------------------- 1 | %This demo uses the function svd which is closely related to PCA in that it 2 | %provides a decomposition of a matrix 3 | %X = USV' and so X'X = VSU'USV' = VS^2V' where V is the matrix whose 4 | %columns are the eigenvectors of X'X i.e. a scaled version of the principal 5 | %directions and the matrix U has columns which are the principal components 6 | %or the projections of X onto V i.e. U = XVinv(S) 7 | 8 | clear 9 | load('olivettifaces.mat'); 10 | Selected_Face = 125;%312;%255; 11 | 12 | X=faces'; 13 | [N_faces,D]=size(X); 14 | mean_face = mean(X); 15 | X = X - repmat(mean_face,N_faces,1); 16 | fprintf('Performing PCA....
stay tuned\n'); 17 | [U,S,V]=svd(X); 18 | subplot(131) 19 | imagesc(reshape(X(Selected_Face,:)+mean_face,sqrt(D),sqrt(D))); 20 | title('Original Image'); 21 | 22 | recon_err=[]; 23 | 24 | for i=1:N_faces 25 | X_Reconst=U(Selected_Face,1:i)*S(1:i,1:i)*V(:,1:i)' + mean_face; 26 | subplot(132) 27 | imagesc(reshape(X_Reconst',sqrt(D),sqrt(D))); 28 | title('Reconstructed Image');drawnow; 29 | recon_err = [recon_err;sqrt(mean((X_Reconst - (X(Selected_Face,:) + mean_face) ).^2,2))]; 30 | colormap gray 31 | subplot(133) 32 | plot(1:i,recon_err,'LineWidth',3); 33 | title('Reconstruction Error'); 34 | pause(0.1) 35 | fprintf('%d:Reconstruction Error = %f\n',i,recon_err(i)) 36 | end 37 | 38 | 39 | -------------------------------------------------------------------------------- /week7/lect_1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week7/lect_1.pdf -------------------------------------------------------------------------------- /week7/olivettifaces.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week7/olivettifaces.mat -------------------------------------------------------------------------------- /week7/power_pca.m: -------------------------------------------------------------------------------- 1 | function [B,D]=power_pca(C) 2 | %A little routine to compute PCA given a covariance matrix C 3 | N = size(C,1); 4 | 5 | threshold = 1e-3; 6 | Max_Its = 1000; 7 | 8 | %loop round all dimensions of the covariance matrix 9 | for n=1:N 10 | 11 | %initialise the principal eigenvector and set norm to unity 12 | x = randn(N,1); 13 | y = x./sqrt(x'*x); 14 | 15 | %monitor convergence 16 | err = 1e20; 17 | its = 1; 18 | 19 | %main loop to compute single eigenvector 20 | while (err > threshold) && (its < Max_Its) 21 | x = C*y; 22 | y_new = x./sqrt(x'*x); 23 | 24 | err = sum((y_new - y).^2); 25 | y = y_new; 26 | 27 | %set eigenvalue 28 | D(n) = sqrt(x'*x); 29 | 30 | %increment counter 31 | its = its + 1; 32 | end 33 | 34 | %set the column vectors to be the found eigenvectors 35 | B(:,n) = y_new; 36 | 37 | %deflate the covariance matrix 38 | C = C - D(n)*y_new*y_new'; 39 | end 40 | D=diag(D); -------------------------------------------------------------------------------- /week7/wk_7.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week7/wk_7.pdf -------------------------------------------------------------------------------- /week7/wk_7_laboratory.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week7/wk_7_laboratory.pdf -------------------------------------------------------------------------------- /week8/kern_km.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week8/kern_km.pdf -------------------------------------------------------------------------------- /week8/kernel_func.m: -------------------------------------------------------------------------------- 1 | function K =
kernel_func(X1,X2,kernel_,T,p) 2 | 3 | [N1 d] = size(X1); 4 | [N2 d] = size(X2); 5 | 6 | switch kernel_ 7 | 8 | case 'gauss', 9 | K = exp(-distSqrd(X1,X2,T)); 10 | 11 | case 'poly', 12 | K = (1+X1*T*X2').^p; 13 | end 14 | 15 | function D2=distSqrd(X,Y,T) 16 | nx = size(X,1); 17 | ny = size(Y,1); 18 | 19 | D2 = sum((X.^2)*T,2)*ones(1,ny) + ones(nx,1)*sum((Y.^2)*T,2)' - 2*(X*T*Y'); -------------------------------------------------------------------------------- /week8/kernel_kmeans.m: -------------------------------------------------------------------------------- 1 | function [z,e] = kernel_kmeans(X,K,Max_Its,kwidth) 2 | %This is a simple implementation of Kernel K-means clustering - an 3 | %interesting paper which proposed kernel based Kmeans clustering is [1] 4 | %Girolami, M, Mercer Kernel-Based Clustering in Feature Space, 5 | %IEEE Trans Neural Networks, 13(3),780 - 784, 2002. 6 | 7 | 8 | %Create the kernel matrix. 9 | [N,D]=size(X); 10 | C = kernel_func(X,X,'gauss',kwidth,1); 11 | 12 | %initialise the indicator matrix to a random segmentation of the data 13 | Z = zeros(N,K); 14 | for n = 1:N 15 | Z(n,rand_int(K)) = 1; 16 | end 17 | 18 | %main loop 19 | for its = 1:Max_Its 20 | %compute the similarity of each data point to each cluster mean in 21 | %feature space - note we do not need to compute, store or update a mean 22 | %vector as we are using the kernel-trick - cool eh? 23 | for k=1:K 24 | Nk = sum(Z(:,k)); 25 | Y(:,k) = diag(C) - 2*C*Z(:,k)./Nk + Z(:,k)'*C*Z(:,k)./(Nk^2); 26 | end 27 | 28 | %Now we find the cluster assignment for each point based on the minimum 29 | %distance of the point from the mean centres in feature space using the 30 | %Y matrix of dissimilarities 31 | [i,j]=min(Y,[],2); 32 | 33 | %this simply updates the indicator matrix Z reflecting the new 34 | %allocations of data points to clusters 35 | Z = zeros(N,K); 36 | for n=1:N 37 | Z(n,j(n)) = 1; 38 | end 39 | 40 | %compute the overall error 41 | e = sum(sum(Z.*Y))./N; 42 | fprintf('%d Error = %f\n', its, e); 43 | end 44 | 45 | %return the clusters that each data point has been allocated to 46 | for n=1:N 47 | z(n) = find(Z(n,:)); 48 | end 49 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 50 | %this is a little utility function which returns a random integer between 1 51 | %& Max_Int. 52 | function u = rand_int(Max_Int) 53 | u=ceil(Max_Int*rand); -------------------------------------------------------------------------------- /week8/kmeans.m: -------------------------------------------------------------------------------- 1 | function [M,j,e] = kmeans(X,K,Max_Its) 2 | 3 | %This is a simple and naive implementation of the standard K-Means 4 | %clustering algorithm for the Machine Learning II course. 5 | 6 | %The data matrix X (N x D) is passed as argument 7 | %The number of clusters K is passed as argument 8 | %The maximum nos of iterations Max_Its is passed as argument 9 | 10 | %The function returns the matrix M (K x D) - corresponding to the K mean 11 | %vector values 12 | 13 | %The function returns the clusters which each point has been allocated to 14 | %1.. K in the vector j.
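%A hypothetical usage sketch (illustrative only - the data here is made up):
% X = randn(100,2);              %100 two-dimensional points
% [M,j,e] = kmeans(X,3,20);      %K = 3 clusters, at most 20 iterations
% plot(X(j==1,1),X(j==1,2),'.'); %points allocated to the first cluster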
15 | 16 | [N,D]=size(X); %N - nos of data points, D dimension of data 17 | I=randperm(N); %a random permutation of integers 1:N - required 18 | %to set initial mean values 19 | 20 | M=X(I(1:K),:); %M is the initial K x D matrix of mean values - 21 | %simply setting to values of K randomly selected data 22 | %points 23 | Mo = M; 24 | 25 | for n=1:Max_Its 26 | %Create distance matrix which is N x K indicating distance that each data 27 | %point is from the current mean values (of which there are K) 28 | for k=1:K 29 | Dist(:,k) = sum((X - repmat(M(k,:),N,1)).^2,2); 30 | end 31 | 32 | %Now we simply find which of the K means each data point is nearest to - 33 | %so we find the minimum of the K distances for each data point. This 34 | %operation can be easily achieved in one line of Matlab using the min function. 35 | [i,j]=min(Dist,[],2); 36 | 37 | %Now that we have the new allocations of points to clusters based on 38 | %the minimum distances obtained from the previous operation we can 39 | %revise our estimates of the position of each mean vector by simply 40 | %taking the mean value of all points which have been allocated to each 41 | %cluster using the following simple routine. 42 | 43 | for k=1:K 44 | if size(find(j==k))>0 45 | M(k,:) = mean(X(find(j==k),:)); 46 | end 47 | end 48 | 49 | %we create an N x K dimensional indicator matrix - each row will have a 50 | %1 in the column corresponding to the cluster that the data point (row) 51 | %has been allocated to - this is really only required to compute the 52 | %overall error associated with the current partitioning. 53 | 54 | Z = zeros(N,K); 55 | for m=1:N 56 | Z(m,j(m)) = 1; 57 | end 58 | 59 | %This simply prints the current value of the error criterion which 60 | %K-means is trying to minimise. 61 | e = sum(sum(Z.*Dist)./N); 62 | fprintf('%d Error = %f\n', n, e); 63 | Mo = M; 64 | end -------------------------------------------------------------------------------- /week8/lect_1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week8/lect_1.pdf -------------------------------------------------------------------------------- /week8/olivettifaces.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week8/olivettifaces.mat -------------------------------------------------------------------------------- /week8/olivettifaces.mat.crdownload: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week8/olivettifaces.mat.crdownload -------------------------------------------------------------------------------- /week8/water_lillies.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week8/water_lillies.jpg -------------------------------------------------------------------------------- /week8/wee_dog.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week8/wee_dog.jpg
-------------------------------------------------------------------------------- /week8/wk8_demo_1.m: -------------------------------------------------------------------------------- 1 | %week_8_demo_1 2 | %image segmentation. This is a rather nice demo which shows how clustering 3 | %methods can be employed in segmenting images into self similar regions 4 | 5 | 6 | %segmenting a gray scale image of a face - achieved by clustering each of 7 | %the pixel values based on gray level 8 | load('olivettifaces.mat'); 9 | [H,j]=kmeans(faces(:,1),5,10); 10 | 11 | figure 12 | colormap gray 13 | subplot(121) 14 | imagesc(reshape(faces(:,1),64,64));drawnow 15 | subplot(122) 16 | imagesc(reshape(j,64,64));drawnow 17 | 18 | 19 | %segmenting a jpg image of water lillies - the image is represented such that 20 | %each pixel is represented as a three dimensional vector in RGB space so we 21 | %perform pixel clustering based on color values - this is quite a nice 22 | %image which demonstrates that the leaves and flowers of water lillies can 23 | %be separated from each other and segmented from the background - this is 24 | %due to the uniform colors across each of the leaves and flowers. 25 | clear 26 | X = imread('water_lillies.jpg','jpg'); 27 | A = [double(reshape(X(:,:,1),600*800,1))... 28 | double(reshape(X(:,:,2),600*800,1))... 29 | double(reshape(X(:,:,3),600*800,1))]; 30 | [H,j,e]=kmeans(A,3,10); 31 | figure 32 | subplot(121) 33 | imagesc(X);drawnow 34 | subplot(122) 35 | imagesc(reshape(j,600,800));drawnow 36 | 37 | 38 | %this is another very nice example as the dog, water, grass & road can be 39 | %segmented. However this also shows the variability in the solutions 40 | %obtained - for a single run you may or may not get a good segmentation 41 | %into each of the regions of interest. So in the following loop K-means is 42 | %run multiple times, storing the segmentation which yields the smallest error 43 | %- which should correspond to the best segmentation. 44 | clear 45 | X = imread('wee_dog.jpg','jpg'); 46 | X=(X(15:end-15,:,:)); %crop image 47 | A = [double(reshape(X(:,:,1),71*100,1))... 48 | double(reshape(X(:,:,2),71*100,1))... 49 | double(reshape(X(:,:,3),71*100,1))]; 50 | [H,j,e]=kmeans(A,4,20); 51 | figure 52 | subplot(121) 53 | imagesc(X);drawnow 54 | subplot(122) 55 | imagesc(reshape(j,71,100));drawnow 56 | 57 | %here we run the K-means algorithm on the images of the wee dog one hundred 58 | %times. We retain only the segmentation yielding the smallest value of 59 | %error and also look at the distribution of the error achieved - quite 60 | %interesting. 61 | 62 | A = A - repmat(mean(A),size(A,1),1); 63 | A = A./repmat(std(A),size(A,1),1); 64 | E=[]; 65 | emin =1e100; 66 | for i=1:100 67 | [H,j,e]=kmeans(A,4,20); 68 | if e < emin 69 | emin = e; j_min = j; 70 | end 71 | E=[E;e]; 72 | end 73 | figure 74 | subplot(121) 75 | hist(E) 76 | subplot(122) 77 | imagesc(reshape(j_min,71,100));drawnow 78 | 79 | 80 | 81 | 82 | -------------------------------------------------------------------------------- /week8/wk8_demo_2.m: -------------------------------------------------------------------------------- 1 | %week 8 demo 2 2 | %This script demonstrates a situation where standard K-means will fail when 3 | %the two distinct clusters of data share the same mean value - this is 4 | %achieved by making one cluster of data such that points are uniformly 5 | %distributed within an annulus which is centered at the point (0,0).
The 6 | %second cluster corresponds to data which has an isotropic Gaussian 7 | %distribution centered at (0,0) and whose variance is sufficiently small 8 | %that points in this cluster are distinct from those within the annulus. 9 | 10 | clear 11 | load wk8_demo_dat; 12 | %Run standard K-means clustering assuming K = 2 - the true value 13 | [H,j,e]=kmeans(X,2,30); 14 | subplot(121) 15 | plot(X(find(j==1),1),X(find(j==1),2),'.'); 16 | hold 17 | plot(X(find(j==2),1),X(find(j==2),2),'ro'); 18 | title('K-Means Clustering'); 19 | 20 | %Run Kernel K-means assuming K = 2 AND the parameter of the kernel (the width 21 | %for an RBF) is also passed - clearly this has to be selected in some 22 | %reasonable way - cross validation is a practical way to achieve this. 23 | [j,e] = kernel_kmeans(X,2,30,1); 24 | subplot(122) 25 | plot(X(find(j==1),1),X(find(j==1),2),'.'); 26 | hold 27 | plot(X(find(j==2),1),X(find(j==2),2),'ro'); 28 | title('Kernel K-Means Clustering'); 29 | 30 | -------------------------------------------------------------------------------- /week8/wk8_demo_dat.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week8/wk8_demo_dat.mat -------------------------------------------------------------------------------- /week8/wk_8.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week8/wk_8.pdf -------------------------------------------------------------------------------- /week8/wk_8_laboratory.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephmisiti/machine-learning-module/eda7351145e8625a88ddb23c945895fdfa33c6bd/week8/wk_8_laboratory.pdf --------------------------------------------------------------------------------
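A closing note on wk8_demo_2.m: kernel_kmeans.m starts from a random segmentation, so, just as wk8_demo_1.m restarts plain K-means on the wee dog image and keeps the lowest-error run, it can pay to restart kernel K-means several times and retain the best segmentation. A minimal sketch under the same data and settings as wk8_demo_2.m (the number of restarts is an arbitrary illustrative choice):

load wk8_demo_dat;
e_best = inf;
for r = 1:10                          %ten restarts - an illustrative choice
    [j,e] = kernel_kmeans(X,2,30,1);  %K = 2, 30 iterations, kernel width 1 as in wk8_demo_2.m
    if e < e_best
        e_best = e;                   %remember the lowest error seen so far
        j_best = j;                   %and the corresponding segmentation
    end
end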