├── .gitignore
├── MMU
│   └── data.mat
├── README
├── gather.py
├── matlab
│   ├── ImToPolar.m
│   ├── draw_ellipse.m
│   ├── fit_circle.m
│   ├── generate_code.m
│   ├── grad_dir.m
│   ├── hamming_distance.m
│   ├── localize_iris.m
│   ├── recognize.m
│   ├── segment_iris.m
│   ├── test-andy
│   │   ├── ImToPolar.m
│   │   ├── compute_features.m
│   │   ├── data.mat
│   │   ├── draw_ellipse.m
│   │   ├── features_left.mat
│   │   ├── features_right.mat
│   │   ├── fit_circle.m
│   │   ├── get_test.m
│   │   ├── iris_data_left.mat
│   │   ├── iris_data_right.mat
│   │   ├── iris_test_left.mat
│   │   ├── iris_test_right.mat
│   │   ├── learn_iris.m
│   │   ├── localize_iris.m
│   │   ├── match_distance.m
│   │   ├── match_iris.m
│   │   ├── matlab.mat
│   │   ├── segment_iris.m
│   │   ├── test.m
│   │   └── train.m
│   └── theta_cost.m
├── proposal.pdf
├── py
│   ├── benchmark.py
│   ├── data.py
│   ├── features.py
│   ├── iris.py
│   ├── pupil.py
│   ├── svm.py
│   ├── test.py
│   └── train.py
├── requirements.txt
└── subm
    ├── Makefile
    ├── bibspacing.sty
    ├── img
    │   ├── fail-0.png
    │   ├── fail-1.png
    │   ├── fail-2.png
    │   ├── fail-3.png
    │   ├── fail-4.png
    │   ├── fail-5.png
    │   ├── fail-6.png
    │   ├── fail-60.png
    │   ├── fail-63.png
    │   ├── fail-7.png
    │   ├── fail-8.png
    │   ├── fail-9.png
    │   ├── fail-right-1.png
    │   └── fail-right-2.png
    ├── paper.bib
    ├── paper.pdf
    └── paper.tex

/.gitignore:
--------------------------------------------------------------------------------
1 | *.swp
2 |
--------------------------------------------------------------------------------
/MMU/data.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/MMU/data.mat
--------------------------------------------------------------------------------
/README:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/README
--------------------------------------------------------------------------------
/gather.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import numpy as np
4 | from scipy.misc import imread
5 | from scipy.io import savemat
6 |
7 | mat = {'left' : {'x':[], 'y':[]}, 'right': {'x':[],'y':[]}}
8 | def gather(folder, name):
9 |     # segmented images are named 1.bmp, 2.bmp, ... by the MATLAB pipeline
10 |     left = [(folder+"/left/"+x) for x in os.listdir(folder+"/left/") if re.match(r'[0-9]+\.bmp', x)]
11 |     right = [(folder+"/right/"+x) for x in os.listdir(folder+"/right/") if re.match(r'[0-9]+\.bmp', x)]
12 |     print os.listdir(folder+"/left")
13 |     for image in left:
14 |         print image
15 |         mat['left']['x'].append(imread(image, flatten=True))
16 |         mat['left']['y'].append(int(image.split('/')[1]))
17 |     for image in right:
18 |         print image, int(image.split('/')[1])
19 |         mat['right']['x'].append(imread(image, flatten=True))
20 |         mat['right']['y'].append(int(image.split('/')[1]))
21 |
22 |
23 | if __name__ == "__main__":
24 |     for f in os.listdir("MMU"):
25 |         if os.path.isdir("MMU/"+f):
26 |             gather("MMU/"+f, f)
27 |     mat['left']['x'] = np.array(mat['left']['x'])
28 |     mat['right']['x'] = np.array(mat['right']['x'])
29 |     savemat('MMU/data.mat', mat)
30 |
--------------------------------------------------------------------------------
/matlab/ImToPolar.m:
--------------------------------------------------------------------------------
1 | function imP = ImToPolar (imR, rMin, rMax, M, N)
2 | % IMTOPOLAR converts rectangular image to polar form. The output image is
3 | % an MxN image with M points along the r axis and N points along the theta
4 | % axis. The origin of the image is assumed to be at the center of the given
5 | % image. The image is assumed to be grayscale.
6 | % Bilinear interpolation is used to interpolate between points not exactly
7 | % in the image.
8 | %
9 | % rMin and rMax should be between 0 and 1 and rMin < rMax. r = 0 is the
10 | % center of the image and r = 1 is half the width or height of the image.
11 | %
12 | % V0.1 7 Dec 2007 (Created), Prakash Manandhar pmanandhar@umassd.edu
13 |
14 | [Mr Nr] = size(imR); % size of rectangular image
15 | Om = (Mr+1)/2; % co-ordinates of the center of the image
16 | On = (Nr+1)/2;
17 | sx = (Mr-1)/2; % scale factors
18 | sy = (Nr-1)/2;
19 |
20 | imP = zeros(M, N);
21 |
22 | delR = (rMax - rMin)/(M-1);
23 | delT = 2*pi/N;
24 |
25 | % loop in radius and theta, interpolating the source pixel for each (r, t)
26 | for ri = 1:M
27 | for ti = 1:N
28 | r = rMin + (ri - 1)*delR;
29 | t = (ti - 1)*delT;
30 | x = r*cos(t);
31 | y = r*sin(t);
32 | xR = x*sx + Om;
33 | yR = y*sy + On;
34 | imP (ri, ti) = interpolate (imR, xR, yR);
35 | end
36 | end
37 |
38 | function v = interpolate (imR, xR, yR)
39 | xf = floor(xR);
40 | xc = ceil(xR);
41 | yf = floor(yR);
42 | yc = ceil(yR);
43 | if xf == xc & yc == yf
44 | v = imR (xc, yc);
45 | elseif xf == xc
46 | v = imR (xf, yf) + (yR - yf)*(imR (xf, yc) - imR (xf, yf));
47 | elseif yf == yc
48 | v = imR (xf, yf) + (xR - xf)*(imR (xc, yf) - imR (xf, yf));
49 | else
50 | A = [ xf yf xf*yf 1
51 | xf yc xf*yc 1
52 | xc yf xc*yf 1
53 | xc yc xc*yc 1 ];
54 | r = [ imR(xf, yf)
55 | imR(xf, yc)
56 | imR(xc, yf)
57 | imR(xc, yc) ];
58 | a = A\double(r);
59 | w = [xR yR xR*yR 1];
60 | v = w*a;
61 | end
62 |
--------------------------------------------------------------------------------
/matlab/draw_ellipse.m:
--------------------------------------------------------------------------------
1 | function draw_ellipse(x,y,a,b,c)
2 | %DRAW_ELLIPSE Draws an ellipse
3 | % x,y - center of ellipse
4 | % a - horizontal semi-axis of ellipse
5 | % b - vertical semi-axis of ellipse
6 | % c - color of ellipse
7 | th = 0:pi/50:2*pi;
8 | xunit = a * cos(th) + x;
9 | yunit = b * sin(th) + y;
10 | hold on;
11 | plot(xunit, yunit, c);
12 | hold off;
13 | end
14 |
--------------------------------------------------------------------------------
/matlab/fit_circle.m:
--------------------------------------------------------------------------------
1 | function fit_circle()
2 | %FIT_CIRCLE Finds the pupil in a test image and fits a circle to the
3 | % outer iris boundary (early, circle-only prototype of localize_iris)
4 | close all;
5 | % I = imread('MMU2/470204.bmp');
6 | % I = imread('MMU2/340202.bmp');
7 | I = imread('MMU2/270202.bmp');
8 | % I = imread('MMU2/470201.bmp');
9 | % I = imread('MMU2/540204.bmp');
10 | gray = rgb2gray(I);
11 | BW1 = ~im2bw(gray, 0.1);
12 | BW2 = ~bwareaopen(~bwareaopen(BW1,400),400);
13 | [centers,radii,metric] = imfindcircles(BW2,[10 300]);
14 | draw_circle(centers(1,1),centers(1,2),radii(1,1));
15 | pupil_radius = radii(1,1);
16 | pupil_center = [centers(1,1) centers(1,2)];
17 |
18 |
19 | BW3 = edge(gray,'canny',.1,5);
20 | BW4 = bwareaopen(BW3,10);
21 | figure,imshow(BW4);
22 | figure,imshow(gray);
23 | %[centers,radii,metric] = imfindcircles(BW4,[10 1000]);
24 |
25 | % shoot a ray outwards from the pupil every 5 degrees and record where it first hits an edge
26 | ray_buffer = pupil_radius+10;
27 | ray_pts = zeros(72,2);
28 | for i=1:72
29 | ray_dir = [cos(degtorad(5*i)) sin(degtorad(5*i))];
30 | ray_dir = ray_dir/norm(ray_dir);
31 | hit_edge = 0;
32 | ray_pts(i,:) = pupil_center+ray_dir.*ray_buffer;
33 | while hit_edge == 0 && floor(ray_pts(i,2)) < size(BW4,1) && floor(ray_pts(i,1)) < size(BW4,2)
34 | ray_pts(i,:) = ray_pts(i,:)+ray_dir;
35 | hit_edge = BW4(floor(ray_pts(i,2)),floor(ray_pts(i,1))) || BW4(floor(ray_pts(i,2))+sign(ray_dir(2)),floor(ray_pts(i,1))) || BW4(floor(ray_pts(i,2)),floor(ray_pts(i,1))+sign(ray_dir(1)));
36 | end
37 | draw_circle(ray_pts(i,1),ray_pts(i,2),1);
38 | end
39 | draw_circle(pupil_center(1),pupil_center(2),1);
40 | % iris_radius = fit_best_circle([pupil_center(1) pupil_center(2)],ray_pts);
41 | % rad = 45;
42 | % draw_circle(pupil_center(1),pupil_center(2),rad);
43 | % reward = circle_fit([pupil_center(1) pupil_center(2)], rad, ray_pts);
44 | % reward
45 | iris_radius = fminbnd(@(rad) circle_fit([pupil_center(1) pupil_center(2)], rad, ray_pts), 20, 100)
46 | draw_circle(pupil_center(1),pupil_center(2),iris_radius);
47 | draw_circle(pupil_center(1),pupil_center(2),pupil_radius);
48 |
49 |
50 |
51 | % ray_coord = [floor(pupil_center(1)+pupil_radius+10),floor(pupil_center(2))] %10 pixel buffer
52 | % hit_edge = 0;
53 | % while hit_edge == 0
54 | % hit_edge = BW4(ray_coord(1),ray_coord(2)) || BW4(ray_coord(1)+1,ray_coord(2)) || BW4(ray_coord(1),ray_coord(2)+1);
55 | % ray_coord = [ray_coord(1)+1,ray_coord(2)+1];
56 | % end
57 | % ray = distance(ray_coord,pupil_center);
58 | % figure,imshow(gray);
59 | % drawCircle(pupil_center(1),pupil_center(2),ray);
60 |
61 | end
62 |
63 | function draw_circle(x,y,r)
64 | th = 0:pi/50:2*pi;
65 | xunit = r * cos(th) + x;
66 | yunit = r * sin(th) + y;
67 | hold on;
68 | h = plot(xunit, yunit);
69 | hold off;
70 | end
71 |
72 | % exponential reward
73 | % function value = circle_fit(center, radius, projected_pts)
74 | % value = 0;
75 | % circle_pts = zeros(72,2);
76 | % for i=1:72
77 | % ray_dir = [cos(degtorad(5*i)) sin(degtorad(5*i))];
78 | % ray_dir = ray_dir/norm(ray_dir);
79 | % circle_pts(i,:) = center+ray_dir.*radius;
80 | % distance = pdist(cat(1,circle_pts(i,:),projected_pts(i,:)));
81 | % value = value+nthroot(distance,100);
82 | % end
83 | % end
84 |
85 | % median reward
86 | function value = circle_fit(center, radius, projected_pts)
87 | distances = zeros(72,1);
88 | circle_pts = zeros(72,2);
89 | for i=1:72
90 | ray_dir = [cos(degtorad(5*i)) sin(degtorad(5*i))];
91 | ray_dir = ray_dir/norm(ray_dir);
92 | circle_pts(i,:) = center+ray_dir.*radius;
93 | distance = pdist(cat(1,circle_pts(i,:),projected_pts(i,:)));
94 | distances(i,1) = round(distance);
95 | end
96 | value = median(distances);
97 | end
--------------------------------------------------------------------------------
/matlab/generate_code.m:
--------------------------------------------------------------------------------
1 | % Given an IRIS in rectangular coordinates, calculates the average direction
2 | % of the gradient for blocks, maps that to two bits, and strings the bits
3 | % into one IRISCODE.
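%
% A hypothetical usage sketch (the image path and stored_code below are
% placeholders for illustration, not files shipped with this repo):
%
%   iris = localize_iris('MMU/1/left/sample1.bmp'); % 31x250 unwrapped strip
%   code = generate_code(iris);                     % 1x1500 bit vector
%   d = hamming_distance(code, stored_code);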
4 | function [iriscode] = generate_code(iris)
5 |
6 | [r, c] = size(iris);
7 |
8 | w = 5;
9 | h = 2;
10 |
11 | n_col = floor(c/w);
12 | n_row = floor(r/h); % any leftover rows are folded into the last block below
13 |
14 | iriscode = zeros(1, n_row*n_col*2);
15 |
16 | [gmag, gdir] = imgradient(iris);
17 |
18 | for i = 1:n_row
19 | for j = 1:n_col
20 | if i == n_row
21 | cur_patch = gdir(1+(i-1)*h:r, 1+(j-1)*w:j*w); % last block absorbs leftover rows
22 | else
23 | cur_patch = gdir(1+(i-1)*h:i*h, 1+(j-1)*w:j*w);
24 | end
25 |
26 | mean_dir = mean(mean(cur_patch));
27 | [b1, b2] = grad_dir(mean_dir);
28 |
29 | iriscode(1, (i-1)*n_col*2 + 2*j - 1) = b1;
30 | iriscode(1, (i-1)*n_col*2 + 2*j) = b2;
31 | end
32 | end
33 |
--------------------------------------------------------------------------------
/matlab/grad_dir.m:
--------------------------------------------------------------------------------
1 | function [b1, b2] = grad_dir(dir)
2 |
3 | if dir >= 0 && dir < 90
4 | b1 = 1;
5 | b2 = 1;
6 | elseif dir >= 90 && dir < 180
7 | b1 = 0;
8 | b2 = 1;
9 | elseif dir < 0 && dir >= -90
10 | b1 = 1;
11 | b2 = 0;
12 | elseif dir < -90 && dir >= -180
13 | b1 = 0;
14 | b2 = 0;
15 | else
16 | disp(dir); % fall-through for dir == 180 (or NaN); treated like the first quadrant
17 | b1 = 1;
18 | b2 = 1;
19 | end
20 |
--------------------------------------------------------------------------------
/matlab/hamming_distance.m:
--------------------------------------------------------------------------------
1 | % Calculates the hamming DISTANCE between two binary strings CODE1 and CODE2.
2 | function [distance] = hamming_distance(code1, code2)
3 |
4 | [~, n] = size(code1);
5 | [~, n2] = size(code2);
6 | if n ~= n2
7 | disp('Cannot compute Hamming Distance; codes of different lengths.');
8 | distance = -1;
9 | return;
10 | end
11 |
12 | distance = sum(xor(code1, code2));
13 |
--------------------------------------------------------------------------------
/matlab/localize_iris.m:
--------------------------------------------------------------------------------
1 | function iris = localize_iris(filename)
2 | %LOCALIZE_IRIS Finds the pupil and iris in the given image and unwraps
3 | % the iris into a rectangle
4 | % ARGUMENTS
5 | % filename - location of image file
6 | % RETURNS the segmented iris as a 31x250 rectangular image
7 | % (eye_information additionally collects the pupil center (1:2), pupil
8 | % radius (3), and iris ellipse radii [a b] (4:5), but is not returned)
9 |
10 | %% CHANGE IMAGE HERE FOR TESTING
11 |
12 | I = imread(filename);
13 | % I = imread('MMU2/470204.bmp');
14 | % I = imread('MMU2/340202.bmp');
15 | % I = imread('MMU2/270202.bmp');
16 | % I = imread('MMU2/470201.bmp');
17 | % I = imread('MMU2/540204.bmp');
18 |
19 | %% FIND PUPIL
20 | gray = rgb2gray(I);
21 | BW1 = ~im2bw(gray, 0.15);
22 | % figure,imshow(BW1);
23 | BW2 = ~bwareaopen(~bwareaopen(BW1,400),400);
24 | % figure,imshow(BW2);
25 | [centers,radii,metric] = imfindcircles(BW2,[10 40]);
26 | pupil_radius = radii(1,1);
27 | pupil_center = [centers(1,1) centers(1,2)];
28 |
29 | %% FIND IRIS
30 | % Find edges
31 | BW3 = edge(gray,'canny',.1,2);
32 | BW4 = bwareaopen(BW3,10);
33 | % figure,imshow(gray);
34 |
35 | % Shoot a ray outwards from the pupil at every degree and record where it first hits an edge
36 | ray_buffer = pupil_radius+10;
37 | ray_pts = zeros(360,2);
38 | for i=1:360
39 | if (i>45 && i<=135) || (i>180 && i<=360) % Only keep near-horizontal rays (1-45 and 136-180 degrees), away from the eyelids
40 | continue;
41 | end
42 | ray_dir = [cos(degtorad(i)) sin(degtorad(i))];
43 | ray_dir = ray_dir/norm(ray_dir);
44 | hit_edge = 0;
45 | ray_pts(i,:) = pupil_center+ray_dir.*ray_buffer;
46 | while hit_edge == 0 && floor(ray_pts(i,2)) < size(BW4,1) && floor(ray_pts(i,1)) < size(BW4,2)
47 | ray_pts(i,:) = ray_pts(i,:)+ray_dir;
48 | hit_edge = BW4(floor(ray_pts(i,2)),floor(ray_pts(i,1))) || BW4(floor(ray_pts(i,2))+sign(ray_dir(2)),floor(ray_pts(i,1))) || BW4(floor(ray_pts(i,2)),floor(ray_pts(i,1))+sign(ray_dir(1)));
49 | end
50 | draw_ellipse(ray_pts(i,1),ray_pts(i,2),1,1,'b');
51 | end
52 | draw_ellipse(pupil_center(1),pupil_center(2),1,1,'b');
53 | draw_ellipse(pupil_center(1),pupil_center(2),pupil_radius,pupil_radius,'b');
54 |
55 | %% FIT ELLIPSE
56 | iris_radii = fminsearch(@(radii) ellipse_fit([pupil_center(1) pupil_center(2)], radii, ray_pts), [50 50]);
57 | draw_ellipse(pupil_center(1),pupil_center(2),iris_radii(1),iris_radii(2),'b');
58 | eye_information = horzcat(pupil_center,pupil_radius,iris_radii);
59 |
60 | %% TRANSFORM CIRCLE
61 | iris_ellipse = gray((pupil_center(2)-ceil(iris_radii(2))):(pupil_center(2)+ceil(iris_radii(2))),(pupil_center(1)-ceil(iris_radii(1))):(pupil_center(1)+ceil(iris_radii(1))));
62 | iris_circle = imresize(iris_ellipse, [size(iris_ellipse,1) size(iris_ellipse,1)]);
63 |
64 | %% RECTANGULARIZE
65 | iris_circle = double(iris_circle)/255.0;
66 | iris_rectangle = ImToPolar(iris_circle,pupil_radius/iris_radii(2),1,40,250);
67 | iris = iris_rectangle(5:35,:);
68 | % iris = iris_rectangle;
69 |
70 | % figure,imshow(iris);
71 |
72 | %% Filter noise
73 | % iris_line = iris(17,:);
74 | % iris_shade = median(iris_line);
75 | % filtered_rectangle = xor(im2bw(iris, iris_shade-0.25*iris_shade),im2bw(iris, iris_shade+0.25*(1-iris_shade)));
76 | % filtered_rectangle = ~bwareaopen(~filtered_rectangle, 100);
77 | % figure,imshow(filtered_rectangle);
78 | % iris = min(iris, filtered_rectangle);
79 | % figure,imshow(iris)
80 | end
81 |
82 |
83 |
84 | % sum reward function
85 | function value = ellipse_fit(center, radii, projected_pts)
86 | distances = zeros(360,1);
87 | ellipse_pts = zeros(360,2);
88 | % Shoot a ray outwards from the center at every degree, placing a test point
89 | % on the candidate ellipse with radii [a b] = radii. At each angle, compute
90 | % the distance between that point and the point where the corresponding
91 | % image ray stopped at an edge -- essentially comparing an ellipse of known
92 | % parameters to the edge points found by the rays.
93 | % Minimized with fminsearch, this yields the radii of the ellipse with the
94 | % smallest summed squared distance to those points.
95 | % (Plain SSD over all rays, without the length filter below, performs worse.)
96 | for i=1:360
97 | if (i>45 && i<=135) || (i>180 && i<=360)
98 | continue;
99 | end
100 | ray_dir = [cos(degtorad(i)) sin(degtorad(i))];
101 | ray_dir = ray_dir/norm(ray_dir);
102 | ellipse_pts(i,:) = [center(1)+ray_dir(1)*radii(1) center(2)+ray_dir(2)*radii(2)];
103 | ray_distance = pdist(cat(1,center,projected_pts(i,:)));
104 | if ray_distance > 40.0 && ray_distance < 70.0
105 | distance = pdist(cat(1,ellipse_pts(i,:),projected_pts(i,:)));
106 | distances = cat(1,distances,distance.^2);
107 | end
108 | end
109 | % distances = sort(distances);
110 | % value = sum(distances(23:67));
111 | value = sum(distances);
112 | end
--------------------------------------------------------------------------------
/matlab/recognize.m:
--------------------------------------------------------------------------------
1 | % Generates IRISCODE and attempts to find a MATCH for IRIS
2 | function [match, iriscode] = recognize(iris)
3 |
4 | [iriscode] = generate_code(iris);
5 |
6 | iris_path = 'TODO';
7 | load(iris_path); % Loads iriscodes, threshhold
8 |
9 | % Find the iriscode with minimum Hamming distance that is under threshhold.
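% (iriscodes is assumed to be an n-by-k matrix with one stored code per row,
% k matching the length of the generated iriscode; threshhold is the largest
% Hamming distance still accepted as a match. Both come from the .mat file
% loaded above.)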
10 | [n, ~] = size(iriscodes);
11 | match = 0;
12 | min_dist = inf;
13 | for i = 1:n
14 | cur_code = iriscodes(i, :);
15 | dist = hamming_distance(iriscode, cur_code);
16 | if dist < threshhold && dist < min_dist
17 | match = i;
18 | min_dist = dist;
19 | end
20 | end
21 |
22 | if match == 0
23 | fprintf('No match found.\n');
24 | else
25 | fprintf('Match found: subject %d.\n', match);
26 | end
--------------------------------------------------------------------------------
/matlab/segment_iris.m:
--------------------------------------------------------------------------------
1 | function segment_iris()
2 | %SEGMENT_IRIS Batch job: runs localize_iris over a dataset and writes each
3 | % unwrapped iris back to disk next to its source image
4 | % close all;
5 | % cd MMU
6 | % for i = 35:46
7 | % i
8 | % cd(int2str(i));
9 | % cd left
10 | % files = dir('*.bmp');
11 | % for j=1:size(files,1)
12 | % cd ../../..
13 | % iris = localize_iris(strcat('MMU/',int2str(i),'/left/',files(j).name));
14 | % cd MMU
15 | % cd(int2str(i));
16 | % cd left
17 | % imwrite(iris,strcat(int2str(j),'.bmp'));
18 | % end
19 | % cd ../right
20 | % files = dir('*.bmp');
21 | % for j=1:size(files,1)
22 | % cd ../../..
23 | % iris = localize_iris(strcat('MMU/',int2str(i),'/right/',files(j).name));
24 | % cd MMU
25 | % cd(int2str(i));
26 | % cd right
27 | % imwrite(iris,strcat(int2str(j),'.bmp'));
28 | % end
29 | % cd ../../
30 | % end
31 | % cd ..
32 |
33 | close all;
34 | cd MMU2
35 | files = dir('*.bmp');
36 | for j=1:size(files,1)
37 | cd ..
38 | iris = localize_iris(strcat('MMU2/',files(j).name));
39 | cd MMU2
40 | imwrite(iris,strcat(files(j).name,'r','.bmp'));
41 | end
42 | cd ..
43 |
44 |
45 | % localize_iris('MMU/2/right/bryanr1.bmp');
46 | % localize_iris('MMU/2/right/bryanr2.bmp');
47 | % localize_iris('MMU/2/right/bryanr3.bmp');
48 | % localize_iris('MMU/2/right/bryanr4.bmp');
49 | % localize_iris('MMU/2/right/bryanr5.bmp');
50 |
51 | end
52 |
--------------------------------------------------------------------------------
/matlab/test-andy/ImToPolar.m:
--------------------------------------------------------------------------------
1 | function imP = ImToPolar (imR, rMin, rMax, M, N)
2 | % IMTOPOLAR converts rectangular image to polar form. The output image is
3 | % an MxN image with M points along the r axis and N points along the theta
4 | % axis. The origin of the image is assumed to be at the center of the given
5 | % image. The image is assumed to be grayscale.
6 | % Bilinear interpolation is used to interpolate between points not exactly
7 | % in the image.
8 | %
9 | % rMin and rMax should be between 0 and 1 and rMin < rMax. r = 0 is the
10 | % center of the image and r = 1 is half the width or height of the image.
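% Example (matching how this repo calls it): with iris_circle a square
% grayscale crop centered on the pupil,
%   imP = ImToPolar(iris_circle, pupil_radius/iris_radius, 1, 40, 250)
% unwraps the annulus between the pupil edge and the outer iris edge into a
% 40-row by 250-column rectangle.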
11 | %
12 | % V0.1 7 Dec 2007 (Created), Prakash Manandhar pmanandhar@umassd.edu
13 |
14 | [Mr Nr] = size(imR); % size of rectangular image
15 | Om = (Mr+1)/2; % co-ordinates of the center of the image
16 | On = (Nr+1)/2;
17 | sx = (Mr-1)/2; % scale factors
18 | sy = (Nr-1)/2;
19 |
20 | imP = zeros(M, N);
21 |
22 | delR = (rMax - rMin)/(M-1);
23 | delT = 2*pi/N;
24 |
25 | % loop in radius and theta, interpolating the source pixel for each (r, t)
26 | for ri = 1:M
27 | for ti = 1:N
28 | r = rMin + (ri - 1)*delR;
29 | t = (ti - 1)*delT;
30 | x = r*cos(t);
31 | y = r*sin(t);
32 | xR = x*sx + Om;
33 | yR = y*sy + On;
34 | imP (ri, ti) = interpolate (imR, xR, yR);
35 | end
36 | end
37 |
38 | function v = interpolate (imR, xR, yR)
39 | xf = floor(xR);
40 | xc = ceil(xR);
41 | yf = floor(yR);
42 | yc = ceil(yR);
43 | if xf == xc & yc == yf
44 | v = imR (xc, yc);
45 | elseif xf == xc
46 | v = imR (xf, yf) + (yR - yf)*(imR (xf, yc) - imR (xf, yf));
47 | elseif yf == yc
48 | v = imR (xf, yf) + (xR - xf)*(imR (xc, yf) - imR (xf, yf));
49 | else
50 | A = [ xf yf xf*yf 1
51 | xf yc xf*yc 1
52 | xc yf xc*yf 1
53 | xc yc xc*yc 1 ];
54 | r = [ imR(xf, yf)
55 | imR(xf, yc)
56 | imR(xc, yf)
57 | imR(xc, yc) ];
58 | a = A\double(r);
59 | w = [xR yR xR*yR 1];
60 | v = w*a;
61 | end
62 |
--------------------------------------------------------------------------------
/matlab/test-andy/compute_features.m:
--------------------------------------------------------------------------------
1 | function feature_value = compute_features(iris)
2 | %COMPUTE_FEATURES Extracts a feature vector from an unwrapped iris image
3 | % (three methods were tried; METHOD 1 is the one currently enabled)
4 |
5 | %% METHOD 1 - Mean pixel intensity of every sliding 5x5 block (via im2col)
6 | B = im2col(iris,[5,5]);
7 | feature_value = [];
8 | for i=1:size(B,2)
9 | feature_value = cat(2,feature_value,mean(B(:,i)));
10 | end
11 |
12 | %% METHOD 2 - Gradient Mag/Dir Histograms
13 | % [Gmag,Gdir] = imgradient(iris);
14 | % feature_value = zeros(1,10);
15 | % for row=1:size(iris,1)
16 | % for column=1:size(iris,2)
17 | % index = round((Gdir(row,column)+180)/40)+1;
18 | % feature_value(index) = feature_value(index) + Gmag(row,column);
19 | % end
20 | % end
21 | % feature_value = feature_value./norm(feature_value);
22 |
23 | %% METHOD 3 - Raw pixel intensities
24 | % feature_value = imresize(iris, [1 31*250]);
25 |
26 | end
--------------------------------------------------------------------------------
/matlab/test-andy/data.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/matlab/test-andy/data.mat
--------------------------------------------------------------------------------
/matlab/test-andy/draw_ellipse.m:
--------------------------------------------------------------------------------
1 | function draw_ellipse(x,y,a,b,c)
2 | %DRAW_ELLIPSE Draws an ellipse
3 | % x,y - center of ellipse
4 | % a - horizontal semi-axis of ellipse
5 | % b - vertical semi-axis of ellipse
6 | % c - color of ellipse
7 | th = 0:pi/50:2*pi;
8 | xunit = a * cos(th) + x;
9 | yunit = b * sin(th) + y;
10 | hold on;
11 | plot(xunit, yunit, c);
12 | hold off;
13 | end
14 |
--------------------------------------------------------------------------------
/matlab/test-andy/features_left.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/matlab/test-andy/features_left.mat
--------------------------------------------------------------------------------
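For reference, METHOD 1 of compute_features.m has a direct NumPy analogue. A
minimal sketch, assuming an unwrapped 31x250 iris array like the one
localize_iris produces (the name block_mean_features is illustrative, not part
of the py/ package):

import numpy as np

def block_mean_features(iris, size=5):
    # Mean intensity of every sliding size-by-size window, traversed
    # column-major to mirror MATLAB's im2col(iris, [size size]).
    h, w = iris.shape
    feats = []
    for j in range(w - size + 1):      # columns first, like im2col
        for i in range(h - size + 1):
            feats.append(iris[i:i + size, j:j + size].mean())
    return np.array(feats)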
/matlab/test-andy/features_right.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/matlab/test-andy/features_right.mat
--------------------------------------------------------------------------------
/matlab/test-andy/fit_circle.m:
--------------------------------------------------------------------------------
1 | function fit_circle()
2 | %FIT_CIRCLE Finds the pupil in a test image and fits a circle to the
3 | % outer iris boundary (early, circle-only prototype of localize_iris)
4 | close all;
5 | % I = imread('MMU2/470204.bmp');
6 | % I = imread('MMU2/340202.bmp');
7 | I = imread('MMU2/270202.bmp');
8 | % I = imread('MMU2/470201.bmp');
9 | % I = imread('MMU2/540204.bmp');
10 | gray = rgb2gray(I);
11 | BW1 = ~im2bw(gray, 0.1);
12 | BW2 = ~bwareaopen(~bwareaopen(BW1,400),400);
13 | [centers,radii,metric] = imfindcircles(BW2,[10 300]);
14 | draw_circle(centers(1,1),centers(1,2),radii(1,1));
15 | pupil_radius = radii(1,1);
16 | pupil_center = [centers(1,1) centers(1,2)];
17 |
18 |
19 | BW3 = edge(gray,'canny',.1,5);
20 | BW4 = bwareaopen(BW3,10);
21 | figure,imshow(BW4);
22 | figure,imshow(gray);
23 | %[centers,radii,metric] = imfindcircles(BW4,[10 1000]);
24 |
25 | % shoot a ray outwards from the pupil every 5 degrees and record where it first hits an edge
26 | ray_buffer = pupil_radius+10;
27 | ray_pts = zeros(72,2);
28 | for i=1:72
29 | ray_dir = [cos(degtorad(5*i)) sin(degtorad(5*i))];
30 | ray_dir = ray_dir/norm(ray_dir);
31 | hit_edge = 0;
32 | ray_pts(i,:) = pupil_center+ray_dir.*ray_buffer;
33 | while hit_edge == 0 && floor(ray_pts(i,2)) < size(BW4,1) && floor(ray_pts(i,1)) < size(BW4,2)
34 | ray_pts(i,:) = ray_pts(i,:)+ray_dir;
35 | hit_edge = BW4(floor(ray_pts(i,2)),floor(ray_pts(i,1))) || BW4(floor(ray_pts(i,2))+sign(ray_dir(2)),floor(ray_pts(i,1))) || BW4(floor(ray_pts(i,2)),floor(ray_pts(i,1))+sign(ray_dir(1)));
36 | end
37 | draw_circle(ray_pts(i,1),ray_pts(i,2),1);
38 | end
39 | draw_circle(pupil_center(1),pupil_center(2),1);
40 | % iris_radius = fit_best_circle([pupil_center(1) pupil_center(2)],ray_pts);
41 | % rad = 45;
42 | % draw_circle(pupil_center(1),pupil_center(2),rad);
43 | % reward = circle_fit([pupil_center(1) pupil_center(2)], rad, ray_pts);
44 | % reward
45 | iris_radius = fminbnd(@(rad) circle_fit([pupil_center(1) pupil_center(2)], rad, ray_pts), 20, 100)
46 | draw_circle(pupil_center(1),pupil_center(2),iris_radius);
47 | draw_circle(pupil_center(1),pupil_center(2),pupil_radius);
48 |
49 |
50 |
51 | % ray_coord = [floor(pupil_center(1)+pupil_radius+10),floor(pupil_center(2))] %10 pixel buffer
52 | % hit_edge = 0;
53 | % while hit_edge == 0
54 | % hit_edge = BW4(ray_coord(1),ray_coord(2)) || BW4(ray_coord(1)+1,ray_coord(2)) || BW4(ray_coord(1),ray_coord(2)+1);
55 | % ray_coord = [ray_coord(1)+1,ray_coord(2)+1];
56 | % end
57 | % ray = distance(ray_coord,pupil_center);
58 | % figure,imshow(gray);
59 | % drawCircle(pupil_center(1),pupil_center(2),ray);
60 |
61 | end
62 |
63 | function draw_circle(x,y,r)
64 | th = 0:pi/50:2*pi;
65 | xunit = r * cos(th) + x;
66 | yunit = r * sin(th) + y;
67 | hold on;
68 | h = plot(xunit, yunit);
69 | hold off;
70 | end
71 |
72 | % exponential reward
73 | % function value = circle_fit(center, radius, projected_pts)
74 | % value = 0;
75 | % circle_pts = zeros(72,2);
76 | % for i=1:72
77 | % ray_dir = [cos(degtorad(5*i)) sin(degtorad(5*i))];
78 | % ray_dir = ray_dir/norm(ray_dir);
79 | % circle_pts(i,:) = center+ray_dir.*radius;
80 | % distance = pdist(cat(1,circle_pts(i,:),projected_pts(i,:)));
81 | % value = value+nthroot(distance,100);
82 | % end
83 | % end
84 |
85 | % median reward
86 | function value = circle_fit(center, radius, projected_pts)
87 | distances = zeros(72,1);
88 | circle_pts = zeros(72,2);
89 | for i=1:72
90 | ray_dir = [cos(degtorad(5*i)) sin(degtorad(5*i))];
91 | ray_dir = ray_dir/norm(ray_dir);
92 | circle_pts(i,:) = center+ray_dir.*radius;
93 | distance = pdist(cat(1,circle_pts(i,:),projected_pts(i,:)));
94 | distances(i,1) = round(distance);
95 | end
96 | value = median(distances);
97 | end
--------------------------------------------------------------------------------
/matlab/test-andy/get_test.m:
--------------------------------------------------------------------------------
1 | function get_test()
2 | %GET_TEST Segments the held-out test images (samples 4 and 5) for every
3 | % subject and saves them to iris_test_left/right.mat
4 | close all;
5 | clear all;
6 | for i = 1:46
7 | if i == 4 || i == 35
8 | continue;
9 | end
10 | i
11 | files = dir(strcat('MMU/',int2str(i),'/left/*.bmp'));
12 | for j = 4:5
13 | iris_test_left{i+46*(j-4)} = segment_iris(strcat('MMU/',int2str(i),'/left/',files(j).name));
14 | end
15 |
16 | files = dir(strcat('MMU/',int2str(i),'/right/*.bmp'));
17 | for j = 4:5
18 | iris_test_right{i+46*(j-4)} = segment_iris(strcat('MMU/',int2str(i),'/right/',files(j).name));
19 | end
20 | end
21 | save('iris_test_left','iris_test_left');
22 | save('iris_test_right','iris_test_right');
23 | end
24 |
--------------------------------------------------------------------------------
/matlab/test-andy/iris_data_left.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/matlab/test-andy/iris_data_left.mat
--------------------------------------------------------------------------------
/matlab/test-andy/iris_data_right.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/matlab/test-andy/iris_data_right.mat
--------------------------------------------------------------------------------
/matlab/test-andy/iris_test_left.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/matlab/test-andy/iris_test_left.mat
--------------------------------------------------------------------------------
/matlab/test-andy/iris_test_right.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/matlab/test-andy/iris_test_right.mat
--------------------------------------------------------------------------------
/matlab/test-andy/learn_iris.m:
--------------------------------------------------------------------------------
1 | %% RUN THIS SCRIPT TO TRAIN IRIS
2 | % Computes mean over a specific number of training images
3 |
4 | %% Left Eye
5 | close all;
6 | clear all;
7 | num_training = 3;
8 | for i = 1:46
9 | if i == 4 || i == 35
10 | iris_data_left{i} = [];
11 | continue;
12 | end
13 | files = dir(strcat('MMU/',int2str(i),'/left/*.bmp'));
14 | for j = 1:num_training
15 | strcat('MMU/',int2str(i),'/left/',files(j).name)
16 | iris{j} = segment_iris(strcat('MMU/',int2str(i),'/left/',files(j).name));
17 | end
18 | zero_counts = ones(31,250).*num_training; % per-pixel count of unmasked samples
19 | for j = 1:num_training
20 | tmp_iris = iris{j};
21 | zero_counts(tmp_iris == 0) = zero_counts(tmp_iris == 0)-1;
22 | end
23 | sum_iris = zeros(31,250);
24 | for j=1:num_training
25 | sum_iris = iris{j} + sum_iris;
26 | end
27 | sum_iris = sum_iris./zero_counts; % average only over unmasked (non-zero) pixels
28 | sum_iris(isnan(sum_iris)) = 0;
29 | iris_data_left{i} = sum_iris;
30 | end
31 | save('iris_data_left','iris_data_left');
32 |
33 |
34 |
35 | %% Right Eye
36 | close all;
37 | clear all;
38 | num_training = 3;
39 | for i = 1:46
40 | if i == 4 || i == 35
41 | iris_data_right{i} = [];
42 | continue;
43 | end
44 | files = dir(strcat('MMU/',int2str(i),'/right/*.bmp'));
45 | for j = 1:num_training
46 | strcat('MMU/',int2str(i),'/right/',files(j).name)
47 | iris{j} = segment_iris(strcat('MMU/',int2str(i),'/right/',files(j).name));
48 | end
49 | zero_counts = ones(31,250).*num_training; % per-pixel count of unmasked samples
50 | for j = 1:num_training
51 | tmp_iris = iris{j};
52 | zero_counts(tmp_iris == 0) = zero_counts(tmp_iris == 0)-1;
53 | end
54 | sum_iris = zeros(31,250);
55 | for j=1:num_training
56 | sum_iris = iris{j} + sum_iris;
57 | end
58 | sum_iris = sum_iris./zero_counts;
59 | sum_iris(isnan(sum_iris)) = 0;
60 | iris_data_right{i} = sum_iris;
61 | end
62 | save('iris_data_right','iris_data_right');
--------------------------------------------------------------------------------
/matlab/test-andy/localize_iris.m:
--------------------------------------------------------------------------------
1 | function iris = localize_iris(filename)
2 | %LOCALIZE_IRIS Finds the pupil and iris in the given image and unwraps
3 | % the iris into a rectangle
4 | % ARGUMENTS
5 | % filename - location of image file
6 | % RETURNS the segmented iris as a 31x250 rectangular image, or [] if
7 | % no pupil was found or the polar unwrap failed
8 | % (eye_information collects pupil center/radius and iris radii [a b])
9 |
10 | %% CHANGE IMAGE HERE FOR TESTING
11 |
12 | I = imread(filename);
13 | % I = imread('MMU2/470204.bmp');
14 | % I = imread('MMU2/340202.bmp');
15 | % I = imread('MMU2/270202.bmp');
16 | % I = imread('MMU2/470201.bmp');
17 | % % I = imread('MMU2/540204.bmp');
18 | % I = imread('MMU/3/left/540204.bmp');
19 |
20 | %% FIND PUPIL
21 | gray = rgb2gray(I);
22 | BW1 = ~im2bw(gray, 0.15);
23 | % figure,imshow(BW1);
24 | BW2 = ~bwareaopen(~bwareaopen(BW1,400),400);
25 | % figure,imshow(BW2);
26 | [centers,radii,metric] = imfindcircles(BW2,[10 40]);
27 | if isempty(centers)
28 | iris = [];
29 | return
30 | end
31 | pupil_radius = radii(1,1);
32 | pupil_center = [centers(1,1) centers(1,2)];
33 |
34 |
35 | %% FIND IRIS
36 | % Find edges
37 | BW3 = edge(gray,'canny',.1,2);
38 | BW4 = bwareaopen(BW3,10);
39 | figure,imshow(gray);
40 |
41 | % Shoot a ray outwards from the pupil at every degree and record where it first hits an edge
42 | ray_buffer = pupil_radius+10;
43 | ray_pts = zeros(360,2);
44 | for i=1:360
45 | if (i>45 && i<=135) || (i>180 && i<=360) % Only keep near-horizontal rays (1-45 and 136-180 degrees), away from the eyelids
46 | continue;
47 | end
48 | ray_dir = [cos(degtorad(i)) sin(degtorad(i))];
49 | ray_dir = ray_dir/norm(ray_dir);
50 | hit_edge = 0;
51 | ray_pts(i,:) = pupil_center+ray_dir.*ray_buffer;
52 | while hit_edge == 0 && floor(ray_pts(i,2)) < size(BW4,1)-1 && floor(ray_pts(i,1)) < size(BW4,2)-1
53 | ray_pts(i,:) = ray_pts(i,:)+ray_dir;
54 | hit_edge = BW4(floor(ray_pts(i,2)),floor(ray_pts(i,1))) || BW4(floor(ray_pts(i,2))+sign(ray_dir(2)),floor(ray_pts(i,1))) || BW4(floor(ray_pts(i,2)),floor(ray_pts(i,1))+sign(ray_dir(1)));
55 | end
56 | draw_ellipse(ray_pts(i,1),ray_pts(i,2),1,1,'b');
57 | end
58 | draw_ellipse(pupil_center(1),pupil_center(2),1,1,'b');
59 | draw_ellipse(pupil_center(1),pupil_center(2),pupil_radius,pupil_radius,'b');
60 |
61 | %% FIT ELLIPSE
62 | iris_radii = fminsearch(@(radii) ellipse_fit([pupil_center(1) pupil_center(2)], radii, ray_pts), [50 50]);
63 | draw_ellipse(pupil_center(1),pupil_center(2),iris_radii(1),iris_radii(2),'b');
64 | eye_information = horzcat(pupil_center,pupil_radius,iris_radii);
65 |
66 | %% TRANSFORM CIRCLE
67 | iris_ellipse = gray((pupil_center(2)-ceil(iris_radii(2))):(pupil_center(2)+ceil(iris_radii(2))),(pupil_center(1)-ceil(iris_radii(1))):(pupil_center(1)+ceil(iris_radii(1))));
68 | iris_circle = imresize(iris_ellipse, [size(iris_ellipse,1) size(iris_ellipse,1)]);
69 |
70 | %% RECTANGULARIZE
71 | iris_circle = double(iris_circle)/255.0;
72 | try
73 | iris_rectangle = ImToPolar(iris_circle,pupil_radius/iris_radii(2),1,40,250);
74 | catch
75 | iris = [];
76 | return
77 | end
78 | iris = iris_rectangle(5:35,:);
79 | % iris = iris_rectangle;
80 |
81 | % figure,imshow(iris);
82 |
83 | %% Filter noise
84 | % iris_line = iris(17,:);
85 | % iris_shade = median(iris_line);
86 | % filtered_rectangle = xor(im2bw(iris, iris_shade-0.25*iris_shade),im2bw(iris, iris_shade+0.25*(1-iris_shade)));
87 | % filtered_rectangle = ~bwareaopen(~filtered_rectangle, 100);
88 | % figure,imshow(filtered_rectangle);
89 | % iris = min(iris, filtered_rectangle);
90 | % figure,imshow(iris)
91 | end
92 |
93 |
94 |
95 | % sum reward function
96 | function value = ellipse_fit(center, radii, projected_pts)
97 | distances = zeros(360,1);
98 | ellipse_pts = zeros(360,2);
99 | % Shoot a ray outwards from the center at every degree, placing a test point
100 | % on the candidate ellipse with radii [a b] = radii. At each angle, compute
101 | % the distance between that point and the point where the corresponding
102 | % image ray stopped at an edge -- essentially comparing an ellipse of known
103 | % parameters to the edge points found by the rays.
104 | % Minimized with fminsearch, this yields the radii of the ellipse with the
105 | % smallest summed squared distance to those points.
106 | % (Plain SSD over all rays, without the length filter below, performs worse.)
107 | for i=1:360
108 | if (i>45 && i<=135) || (i>180 && i<=360)
109 | continue;
110 | end
111 | ray_dir = [cos(degtorad(i)) sin(degtorad(i))];
112 | ray_dir = ray_dir/norm(ray_dir);
113 | ellipse_pts(i,:) = [center(1)+ray_dir(1)*radii(1) center(2)+ray_dir(2)*radii(2)];
114 | ray_distance = pdist(cat(1,center,projected_pts(i,:)));
115 | if ray_distance > 40.0 && ray_distance < 70.0
116 | distance = pdist(cat(1,ellipse_pts(i,:),projected_pts(i,:)));
117 | distances = cat(1,distances,distance.^2);
118 | end
119 | end
120 | % distances = sort(distances);
121 | % value = sum(distances(23:67));
122 | value = sum(distances);
123 | end
--------------------------------------------------------------------------------
/matlab/test-andy/match_distance.m:
--------------------------------------------------------------------------------
1 | function distance = match_distance(features_iris1, features_iris2)
2 | %MATCH_DISTANCE L1 distance (sum of absolute differences) between two
3 | % iris feature vectors
4 |
5 | %% Set all noisy areas (eyelashes/skin) to 0
6 | % iris1(iris2==0) = 0;
7 | % iris2(iris1==0) = 0;
8 |
9 | %% Compute distance between feature vectors (sum differences)
10 | distance = sum(abs(features_iris1 - features_iris2));
11 |
12 | end
--------------------------------------------------------------------------------
/matlab/test-andy/match_iris.m:
--------------------------------------------------------------------------------
1 | function [is_left_eye,curr_id] = match_iris(iris)
2 | %MATCH_IRIS Returns the subject id (and which eye) whose stored features
3 | % are closest to the given unwrapped iris
4 | test_iris = compute_features(iris);
5 | load('features_left');
6 | load('features_right');
7 | is_left_eye = 0;
8 | curr_id = -1;
9 | curr_comparison_value = inf;
10 | for i=1:46
11 | if i == 4 || i == 35
12 | continue;
13 | end
14 | comparison_value = match_distance(test_iris, features_left{i});
15 | if comparison_value < curr_comparison_value
16 | curr_comparison_value = comparison_value;
17 | curr_id = i;
18 | is_left_eye = 1;
19 | end
20 | comparison_value = match_distance(test_iris, features_right{i});
21 | if comparison_value < curr_comparison_value
22 | curr_comparison_value = comparison_value;
23 | curr_id = i;
24 | is_left_eye = 0;
25 | end
26 | end
27 | if curr_id == -1
28 | fprintf('Iris rejected.')
29 | end
30 | end
--------------------------------------------------------------------------------
/matlab/test-andy/matlab.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/matlab/test-andy/matlab.mat
--------------------------------------------------------------------------------
/matlab/test-andy/segment_iris.m:
--------------------------------------------------------------------------------
1 | function iris = segment_iris(filename)
2 | %SEGMENT_IRIS Localizes and unwraps the iris in the given image, then
3 | % masks noisy regions (eyelids/lashes) whose shade is far from the median
4 | close all
5 | % iris1 = localize_iris('MMU/2/right/bryanr1.bmp');
6 | % iris2 = localize_iris('MMU/2/right/bryanr2.bmp');
7 | % iris3 = localize_iris('MMU/2/right/bryanr3.bmp');
8 | % iris4 = localize_iris('MMU/2/right/bryanr4.bmp');
9 | % iris5 = localize_iris('MMU/2/right/bryanr5.bmp');
10 | % value = pdist2(iris1,iris2,'mahalanobis')
11 |
12 | iris = localize_iris(filename);
13 | figure,imshow(iris);
14 | iris_line = iris(15,:); % sample a row through the middle of the iris strip
15 | iris_shade = median(iris_line);
16 | filtered_rectangle = xor(im2bw(iris, iris_shade-0.2*iris_shade),im2bw(iris, iris_shade+0.2*(1-iris_shade))); % keep pixels within a band around the median shade
17 | filtered_rectangle = ~bwareaopen(~filtered_rectangle, 100);
18 | % figure,imshow(filtered_rectangle);
19 | iris = min(iris, filtered_rectangle);
20 | % figure,imshow(iris);
21 |
22 | %% MMU
23 | % close all;
24 | % cd MMU
25 | % for i = 35:46
26 | % i
27 | % cd(int2str(i));
28 | % cd left
29 | % files = dir('*.bmp');
30 | % for j=1:size(files,1)
31 | % cd ../../..
32 | % iris = localize_iris(strcat('MMU/',int2str(i),'/left/',files(j).name));
33 | % cd MMU
34 | % cd(int2str(i));
35 | % cd left
36 | % imwrite(iris,strcat(int2str(j),'.bmp'));
37 | % end
38 | % cd ../right
39 | % files = dir('*.bmp');
40 | % for j=1:size(files,1)
41 | % cd ../../..
42 | % iris = localize_iris(strcat('MMU/',int2str(i),'/right/',files(j).name));
43 | % cd MMU
44 | % cd(int2str(i));
45 | % cd right
46 | % imwrite(iris,strcat(int2str(j),'.bmp'));
47 | % end
48 | % cd ../../
49 | % end
50 | % cd ..
51 |
52 | %% MMU2
53 | % close all
54 | % cd MMU2
55 | % for i=6:100
56 | % for j=1:2
57 | % for k=1:5
58 | % sprintf('MMU2/%0.2d%0.2d%0.2d.bmp',i,j,k)
59 | % if (str2num(sprintf('%0.2d%0.2d%0.2d',i,j,k)) > 990205)
60 | % cd ..
61 | % iris = localize_iris(sprintf('MMU2/%0.2d%0.2d%0.2d.bmp',i,j,k));
62 | % cd MMU2
63 | % if isempty(iris)
64 | % continue
65 | % end
66 | % imwrite(iris,sprintf('%0.2d%0.2d%0.2dr.bmp',i,j,k));
67 | % end
68 | % end
69 | % end
70 | % end
71 | % cd ..
72 |
73 |
74 | % localize_iris('MMU/2/right/bryanr1.bmp');
75 | % localize_iris('MMU/2/right/bryanr2.bmp');
76 | % localize_iris('MMU/2/right/bryanr3.bmp');
77 | % localize_iris('MMU/2/right/bryanr4.bmp');
78 | % localize_iris('MMU/2/right/bryanr5.bmp');
79 |
80 | end
81 |
82 |
--------------------------------------------------------------------------------
/matlab/test-andy/test.m:
--------------------------------------------------------------------------------
1 | close all;
2 | clear all;
3 | num_correct = 0;
4 | total_samples = 0;
5 | load('iris_test_left');
6 | load('iris_test_right');
7 | for i = 1:46
8 | if i == 4 || i == 35
9 | continue;
10 | end
11 | files = dir(strcat('MMU/',int2str(i),'/left/*.bmp'));
12 | for j = 4:5
13 | total_samples = total_samples + 1;
14 | [is_left id] = match_iris(iris_test_left{i+46*(j-4)});
15 | if is_left == 1
16 | fprintf('This is the left iris of subject %d.\n', id);
17 | else
18 | fprintf('This is the right iris of subject %d.\n', id);
19 | end
20 | if is_left == 1 && id == i
21 | num_correct = num_correct + 1;
22 | end
23 | end
24 | files = dir(strcat('MMU/',int2str(i),'/right/*.bmp'));
25 | for j = 4:5
26 | total_samples = total_samples + 1;
27 | [is_left id] = match_iris(iris_test_right{i+46*(j-4)});
28 | if is_left == 1
29 | fprintf('This is the left iris of subject %d.\n', id);
30 | else
31 | fprintf('This is the right iris of subject %d.\n', id);
32 | end
33 | if is_left == 0 && id == i
34 | num_correct = num_correct + 1;
35 | end
36 | end
37 | end
38 | fprintf('Accuracy: %f\n',(num_correct/total_samples));
--------------------------------------------------------------------------------
/matlab/test-andy/train.m:
--------------------------------------------------------------------------------
1 | close all;
2 | clear all;
3 | load('iris_data_left');
4 | load('iris_data_right');
5 | for i = 1:46
6 | if i == 4 || i == 35
7 | continue;
8 | end
9 | features_left{i} = compute_features(iris_data_left{i});
10 | features_right{i} = compute_features(iris_data_right{i});
11 | end
12 | save('features_left','features_left');
13 | save('features_right','features_right');
--------------------------------------------------------------------------------
/matlab/theta_cost.m:
--------------------------------------------------------------------------------
1 | function [cost] = theta_cost(dist, theta)
2 |
3 | if dist <= theta
4 | cost = 1;
5 | else
6 | cost = -1;
7 | end
8 |
--------------------------------------------------------------------------------
/proposal.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/proposal.pdf
--------------------------------------------------------------------------------
/py/benchmark.py:
--------------------------------------------------------------------------------
1 | from features import *
2 | from svm import *
3 | from randomforest import *
4 | from nb import *
5 | from scipy.io import loadmat
6 |
7 | def benchmark(model, train, test):
8 |     if not model._trained:
9 |         print "Training"
10 |         model.train(train)
11 |     print "Training error: ",
12 |     train_results = model.benchmark(train)
13 |     print train_results[0]
14 |     print "Testing error: ",
15 |     test_results = model.benchmark(test)
16 |     print test_results[0]
17 |
18 | if __name__ == "__main__":
19 |     data = loadmat('../MMU/data.mat')
20 |     features = set(['daisy'])
21 |     collection = DataExtractor(FeatureExtractor(features)).extract(data['left'])
22 |     test, train = collection.split_index(np.arange(0, len(collection), 3))
23 |     # an earlier run also tried a larger forest:
24 |     # models = { "Random Forest" : RandomForest(n_estimators=500, criterion='entropy'), "Naive Bayes" : NaiveBayes()}
25 |     models = { "SVM" : SVM(), "Random Forest" : RandomForest(n_estimators=100), "Naive Bayes" : NaiveBayes()}
26 |     print "Left"
27 |     for name, model in models.items():
28 |         print "Model:", name
29 |         benchmark(model, train, test)
30 |
31 |     collection = DataExtractor(FeatureExtractor(features)).extract(data['right'])
32 |     test, train = collection.split_index(np.arange(0, len(collection), 3))
33 |     # an earlier run also tried a larger forest:
34 |     # models = { "Random Forest" : RandomForest(n_estimators=500, criterion='entropy'), "Naive Bayes" : NaiveBayes()}
35 |     models = { "SVM" : SVM(), "Random Forest" : RandomForest(n_estimators=100), "Naive Bayes" : NaiveBayes()}
36 |     print "Right"
37 |     for name, model in models.items():
38 |         print "Model:", name
39 |         benchmark(model, train, test)
40 |
--------------------------------------------------------------------------------
/py/data.py:
--------------------------------------------------------------------------------
1 | from skimage.data import imread
2 | from skimage.util import img_as_ubyte
3 | import glob
4 |
5 | class Dataset:
6 |     def __init__(self, dir, suffix='.bmp'):
7 |         self.dir = dir
8 |         self.suffix = suffix
9 |         self.images = []
10 |         for name in glob.glob("%s/*%s" % (dir, suffix)):
11 |             self.images.append(name)
12 |
13 |     def read(self, name, flatten=True):
14 |         return img_as_ubyte(imread(name, flatten))
15 |
--------------------------------------------------------------------------------
/py/features.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from scipy.io import loadmat
4 | from skimage.feature import hog, local_binary_pattern, daisy
5 | from progressbar import ProgressBar
6 |
7 | class FeatureExtractor():
8 |     def __init__(self, features):
9 |         self.features = features
10 |         self.centers = np.loadtxt('../centers.txt') if 'textons' in features else None  # only needed for the 'textons' feature
11 |
12 |     def extract(self, image):
13 |         features = np.array([])
14 |         vec = []
15 |         if 'raw' in self.features:
16 |             vec = image.flatten()
17 |             features = np.append(features, vec)
18 |             vec = []
19 |         if 'textons' in self.features:
20 |             import gen_histogram as tx
21 |             vec = np.array(tx.histogram(image, self.centers))
22 |             features = np.append(features, vec)
23 |             vec = []
24 |         if 'hog' in self.features:
25 |             vec = hog(image, cells_per_block=(3, 3))
26 |             vec = np.append(vec, hog(image, cells_per_block=(4, 4)))
27 |             vec = np.append(vec, hog(image, cells_per_block=(1, 1)))
28 |             vec = np.append(vec, hog(image, cells_per_block=(2, 2)))
29 |             features = np.append(features, vec)
30 |             vec = []
31 |         if 'lbp' in self.features:
32 |             vec = local_binary_pattern(image, 24, 3).flatten()
33 |             features = np.append(features, vec)
34 |             vec = []
35 |         if 'daisy' in self.features:
36 |             vec = daisy(image).flatten()
37 |             features = np.append(features, vec)
38 |
39 |         return features
40 |
41 | class Collection:
42 |     def __init__(self, x, y):
43 |         idx = np.argsort(y)
44 |         self.x = x[idx]
45 |         self.y = y[idx]
46 |
47 |     def partition(self, index):
48 |         return Collection(self.x[0:index], self.y[0:index]), Collection(self.x[index:], self.y[index:])
49 |
50 |     def split_index(self, range):
51 |         right_idx = np.setdiff1d(np.arange(0, len(self.x)), range)
52 |         return Collection(self.x[range], self.y[range]), Collection(self.x[right_idx], self.y[right_idx])
53 |
54 |     def __len__(self):
55 |         return len(self.x)
56 |
57 | class DataExtractor():
58 |     def __init__(self, fe):
59 |         self.fe = fe
60 |     def extract(self, data):
61 |         x = None
62 |         y = None
63 |         a = None
64 |         print "Extracting features..."
65 |         count = 0
66 |         pbar = ProgressBar(maxval=len(data['x'][0,0])).start()
67 |         for img, label in zip(data['x'][0,0], data['y'][0,0]):
68 |             if x is None:
69 |                 x = self.fe.extract(img)
70 |                 a = [img]
71 |             else:
72 |                 x = np.vstack([x, self.fe.extract(img)])
73 |                 a = np.vstack([a, [img]])
74 |             if y is None:
75 |                 y = label
76 |             else:
77 |                 y = np.hstack([y, label])
78 |             count += 1
79 |             pbar.update(count)
80 |             #print "%d of %d" % (count, len(data['x'][0,0]))
81 |         pbar.finish()
82 |         return Collection(x, y)
83 |
84 | if __name__ == "__main__":
85 |     data = loadmat('../MMU/data.mat')
86 |     fe = FeatureExtractor({})
87 |     de = DataExtractor(fe)
88 |     mat = de.extract(data['left'])
89 |     from svm import *
90 |     s = SVM('sample.model')
91 |     results = s.benchmark(mat)
92 |
--------------------------------------------------------------------------------
/py/iris.py:
--------------------------------------------------------------------------------
1 | import skimage.filter as filter
2 | from skimage.io import imshow
3 | from data import Dataset
4 | from skimage.draw import circle_perimeter, ellipse_perimeter
5 | from pupil import find_pupil
6 |
7 | from math import cos, sin, pi
8 | import numpy as np
9 |
10 | class Ray:
11 |     def __init__(self, img, start, direction, n_size=1):
12 |         self.img = img
13 |         self.start = start
14 |         self.direction = direction
15 |         self.n_size = n_size
16 |
17 |     def fire(self):
18 |         point = self.start
19 |         x1, y1 = self.img.shape
20 |         while point[0] > self.n_size and point[0] < x1 - self.n_size \
21 |                 and point[1] > self.n_size and point[1] < y1 - self.n_size:
22 |             point = point[0] + self.direction[0], point[1] + self.direction[1]
23 |             if self.check_neighbourhood(point, self.n_size):
24 |                 return point
25 |         return None
26 |
27 |     def check_neighbourhood(self, point, size=1):
28 |         x, y = map(int, point)
29 |         for i in range(x - size, x + size + 1):
30 |             for j in range(y - size, y + size + 1):
31 |                 if (self.img[i, j] > 0):
32 |                     return True
33 |         return False
34 |
35 | class Ellipse:
36 |     def __init__(self):
37 |         self._center = None
38 |         self._axes = None
39 |         self._orientation = None
40 |         self._coeff = None
41 |
42 |     def fit_with_center(self, center, points):
43 |         if center is not None:
44 |             self._center = center
45 |         mat = reduce(lambda x, y: np.vstack([x, np.array([y[0]**2, y[0]*y[1], y[1]**2, y[0], y[1], 1])]), points, np.zeros((1, 6)))[1:,:]
46 |         # So, I shamelessly stole some ellipse-fitting code from a blog post as a temporary hack. We can do better, since we already know the ellipse centers; we probably also want something other than exact least squares, since we have a lot of outliers.
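        # What follows is the standard "direct least-squares" conic fit: each
        # point (x, y) contributes a row [x^2, xy, y^2, x, y, 1] of the design
        # matrix mat, and we seek coefficients a making mat*a as close to zero
        # as possible subject to the ellipse-only constraint
        # 4*a0*a2 - a1^2 = 1, which the matrix C below encodes as a^T C a.
        # That constraint turns the fit into an eigenproblem on
        # inv(mat^T mat) * C; the code keeps the eigenvector with the
        # largest-magnitude eigenvalue as the conic coefficients.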
47 |         # solving the least squares problem
48 |         A = np.dot(mat.T, mat)
49 |         C = np.zeros((6, 6))
50 |         C[0,2] = C[2,0] = 2; C[1,1] = -1
51 |         e, v = np.linalg.eig(np.dot(np.linalg.inv(A), C))
52 |         idx = np.argmax(np.abs(e))
53 |         a = v[:, idx]
54 |         self._coeff = a
55 |         return self
56 |
57 |     @property
58 |     def center(self):
59 |         if self._center is None:
60 |             b,c,d,f,g,a = self._coeff[1]/2, self._coeff[2], self._coeff[3]/2, self._coeff[4]/2, self._coeff[5], self._coeff[0]
61 |             num = b*b-a*c
62 |             x0=(c*d-b*f)/num
63 |             y0=(a*f-b*d)/num
64 |             self._center = np.array([x0, y0])
65 |         return self._center
66 |
67 |     @property
68 |     def axes(self):
69 |         if self._axes is None:
70 |             b,c,d,f,g,a = self._coeff[1]/2, self._coeff[2], self._coeff[3]/2, self._coeff[4]/2, self._coeff[5], self._coeff[0]
71 |             up = 2*(a*f*f+c*d*d+g*b*b-2*b*d*f-a*c*g)
72 |             down1=(b*b-a*c)*( (c-a)*np.sqrt(1+4*b*b/((a-c)*(a-c)))-(c+a))
73 |             down2=(b*b-a*c)*( (a-c)*np.sqrt(1+4*b*b/((a-c)*(a-c)))-(c+a))
74 |             res1=np.sqrt(up/down1)
75 |             res2=np.sqrt(up/down2)
76 |             self._axes = np.array([res1, res2])
77 |         return self._axes
78 |
79 |     @property
80 |     def orientation(self):
81 |         if self._orientation is None:
82 |             b,c,d,f,g,a = self._coeff[1]/2, self._coeff[2], self._coeff[3]/2, self._coeff[4]/2, self._coeff[5], self._coeff[0]
83 |             self._orientation = 0.5*np.arctan(2*b/(a-c))
84 |         return self._orientation
85 |
86 | def chunks(l, n):
87 |     for i in range(0, len(l), n):
88 |         yield l[i:i+n]
89 |
90 | def get_segments(num_rays, step=0.1):
91 |     return list(chunks(np.arange(0, 2*pi, step), num_rays))
92 |
93 | def find_iris(image, pupil, **kwargs):
94 |     buffer = 20
95 |     # run canny
96 |     image = filter.canny(image, sigma=1, low_threshold=10, high_threshold=50)
97 |     cx, cy, radius = pupil
98 |
99 |     segments = get_segments(17, step=0.05)
100 |     # get ray directions
101 |     directions = zip(map(cos, segments[0]), map(sin, segments[0]))
102 |     shape = image.shape
103 |     points = []
104 |     for d in directions:
105 |         start = (cx + (radius + buffer) * d[0], cy + (radius + buffer)*d[1])
106 |         ray = Ray(image, start, d)
107 |         point = ray.fire()
108 |         if point is not None:
109 |             points.append(point)
110 |
111 |     for p in points:  # draw the ray hits into the module-level rgb preview image
112 |         x, y = circle_perimeter(int(p[0]), int(p[1]), 3)
113 |         rgb[x,y] = (220, 40, 40)
114 |
115 |     e = Ellipse().fit_with_center(None, points)
116 |     return image, points, e
117 |
118 | rgb = None
119 | def detect(d, i,**kwargs):
120 |     image = d.read(d.images[i])
121 |     global rgb
122 |     rgb = d.read(d.images[i], flatten=False)
123 |     pupil = find_pupil(image)[0]
124 |     img, points, ellipse = find_iris(image, pupil, **kwargs)
125 |     x, y = circle_perimeter(pupil[0], pupil[1], pupil[2])
126 |     rgb[x,y] = (220, 40, 40)
127 |     ex, ey = ellipse.center
128 |     major, minor = ellipse.axes
129 |     orientation = ellipse.orientation
130 |     x, y = ellipse_perimeter(int(ex), int(ey), int(major), int(minor), orientation)
131 |     rgb[x,y] = (220, 40, 40)
132 |     imshow(rgb)
133 |
134 | if __name__ == "__main__":
135 |     d = Dataset('../data')
136 |     detect(d, 0)
137 |
--------------------------------------------------------------------------------
/py/pupil.py:
--------------------------------------------------------------------------------
1 | import skimage
2 | from skimage import io
3 | io.use_plugin('gtk')
4 | import skimage.filter as filter
5 | from skimage.io import imshow
6 | from skimage.transform import hough_circle
7 | from skimage.feature import peak_local_max
8 | from skimage.draw import circle_perimeter
9 | from skimage.morphology import disk
10 | from data import Dataset
11 | import numpy as np
12 |
13 | def find_pupil(image, threshold=25):
14 |     image = image < threshold
15 |     #image = filter.canny(image, sigma=2, low_threshold=10, high_threshold=50)
16 |     return find_circles(image, np.arange(15, 40, 2))[0:1]
17 |
18 | def find_circles(image, input_radii):
19 |     result = hough_circle(image, input_radii)
20 |     centers = []
21 |     accums = []
22 |     radii = []
23 |     for radius, h in zip(input_radii, result):
24 |         # For each radius, extract two circles
25 |         peaks = peak_local_max(h, num_peaks=2)
26 |         centers.extend(peaks)
27 |         accums.extend(h[peaks[:, 0], peaks[:, 1]])
28 |         radii.extend([radius, radius])
29 |     circles = []
30 |     for idx in np.argsort(accums)[::-1][:5]:
31 |         center_x, center_y = centers[idx]
32 |         radius = radii[idx]
33 |         circles.append((center_x, center_y, radius))
34 |     return circles
35 |
--------------------------------------------------------------------------------
/py/svm.py:
--------------------------------------------------------------------------------
1 | from train import Classifier
2 | #import liblinearutil as svm
3 | from sklearn import svm
4 | import numpy as np
5 |
6 | #class SVM(Classifier):
7 |     #def __init__(self, model=None):
8 |         #if model:
9 |             #self.model = svm.load_model(model)
10 |             #self._trained = True
11 |         #else:
12 |             #self.model = None
13 |             #self._trained = False
14 |
15 |     #def train(self, data):
16 |         #self.model = svm.train(data.y.tolist(), data.x.tolist(), '-s 4')
17 |         #self._trained = True
18 |
19 |     #def predict(self, vector):
20 |         #return svm.predict([-1], [vector.tolist()], self.model)[0][0]
21 |
22 |     #def benchmark(self, data):
23 |         #predictions, accuracy, weights = svm.predict(data.y.tolist(), data.x.tolist(), self.model)
24 |         #return (1 - accuracy/100, np.array(predictions)==data.y, predictions)
25 | class SVM(Classifier):
26 |     def __init__(self, model=None):
27 |         self.model = None
28 |         self._trained = False
29 |
30 |     def train(self, data):
31 |         self.model = svm.LinearSVC()
32 |         self.model.fit(data.x, data.y)
33 |         self._trained = True
34 |
35 |     def predict(self, vector):
36 |         return self.model.predict(vector)
37 |
--------------------------------------------------------------------------------
/py/test.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 |
4 | from skimage import data, filter, color
5 | from skimage.transform import hough_circle
6 | from skimage.feature import peak_local_max
7 | from skimage.draw import circle_perimeter
8 | from skimage.util import img_as_ubyte
9 | from data import Dataset
10 |
11 |
12 | # Load picture and detect edges
13 | d = Dataset('data')
14 | image = d.read(d.images[0])
15 | edges = filter.canny(image, sigma=2, low_threshold=10, high_threshold=50)
16 |
17 | fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(6, 6))
18 |
19 | # Detect two radii
20 | hough_radii = np.arange(15, 30, 2)
21 | hough_res = hough_circle(edges, hough_radii)
22 |
23 | centers = []
24 | accums = []
25 | radii = []
26 |
27 | for radius, h in zip(hough_radii, hough_res):
28 |     # For each radius, extract two circles
29 |     peaks = peak_local_max(h, num_peaks=2)
30 |     centers.extend(peaks)
31 |     accums.extend(h[peaks[:, 0], peaks[:, 1]])
32 |     radii.extend([radius, radius])
33 |
34 | # Draw the most prominent circle
35 | image = color.gray2rgb(image)
36 | for idx in np.argsort(accums)[::-1][:1]:
37 |     center_x, center_y = centers[idx]
38 |     radius = radii[idx]
39 |     cx, cy = circle_perimeter(center_y, center_x, radius)
40 |     image[cy, cx] = (220, 20, 20)
/py/train.py:
--------------------------------------------------------------------------------
class Classifier:
    def __init__(self, params):
        self.params = params

    def train(self, data):
        raise NotImplementedError

    def predict(self, vector):
        raise NotImplementedError

    def benchmark(self, mat):
        # Run the classifier over a labeled set and report
        # (error rate, per-example correctness, raw predictions).
        predictions = []
        correct = []
        for i in range(len(mat.x)):
            prediction = self.predict(mat.x[i])
            correct.append(prediction == mat.y[i])
            predictions.append(prediction)
        error_rate = 1 - float(sum(correct)) / len(correct)
        return error_rate, correct, predictions
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
Cython==0.19.2
GnuPGInterface==0.3.2
Jinja2==2.6
Mako==0.5.0
MarkupSafe==0.15
PAM==0.4.2
PIL==1.1.7
Pygments==1.4
Pyste==0.9.10
Sphinx==1.1.3
argparse==1.2.1
chardet==2.0.1
configobj==4.7.2
decorator==3.3.2
defer==1.0.2
dirspec==3.0.0
distribute==0.6.24
docutils==0.8.1
epydoc==3.0.1
ipython==0.12.1
lxml==2.3.2
matplotlib==1.1.1rc
naturalscrolling==0.7.0
numpy==1.7.1
pexpect==2.3
pycups==1.9.61
pycurl==7.19.0
pyinotify==0.9.2
pyparsing==1.5.2
pysmbc==1.0.13
python-apt==0.8.3ubuntu7.1
python-dateutil==1.5
python-debian==0.1.21ubuntu1
pytz==2011k
pyudev==0.13
pyxdg==0.19
rhythmbox-ubuntuone==4.2.0
scikit-image==0.9.3
scipy==0.9.0
sessioninstaller==0.0.0
simplegeneric==0.7
stevedore==0.12
system-service==0.1.6
traits==4.0.0
unattended-upgrades==0.1
unity-lens-video==0.3.5
unity-scope-video-remote==0.3.5
virtualenv==1.7.1.2
virtualenv-clone==0.2.4
virtualenvwrapper==4.1.1
wsgiref==0.1.2
wxPython==2.8.12.1
wxPython-common==2.8.12.1
xlwt==0.7.2
--------------------------------------------------------------------------------
/subm/Makefile:
--------------------------------------------------------------------------------
1 | RCSFLAGS= -l
2 | MAIN= paper
3 | TEXPARTS= *.tex
4 | BIBPARTS= *.bib
5 | LPR = pspr # or change to 'qpr -q ps3'
6 | POSTSCRIPT= dvips -f
7 | FIGPARTS= FIG/*.eps FIG/*.pdf FIG/*.fig FIG/*.jpg FIG/*.png
8 | STYPARTS= psfig.sty sig-alternate-sigmod06.cls
9 | MISC= makefile README dotest
10 | LATEX= pdflatex
11 | 
12 | $(MAIN).pdf: $(MAIN).tex $(TEXPARTS) $(BIBPARTS)
13 | 	$(LATEX) $(MAIN).tex
14 | 	bibtex $(MAIN)
15 | 	$(LATEX) $(MAIN).tex
16 | 	$(LATEX) $(MAIN).tex
17 | 	cp paper.pdf ~/Downloads/
18 | 
19 | try:
20 | 	$(LATEX) $(MAIN).tex
21 | 
22 | checkout:
23 | 	co $(RCSFLAGS) $(MAIN).tex $(TEXPARTS)
24 | 
25 | checkin:
26 | 	ci $(RCSFLAGS) -f $(MAIN).tex $(TEXPARTS)
27 | 
28 | unlock:
29 | 	rcs -u $(TEXPARTS)
30 | 
31 | clean:
32 | 	rm -f *.dvi $(MAIN).ps *.bbl *.aux *.log *.blg *.toc all.tar* uu \
33 | 	*~ *.bak
34 | 
35 | spotless: clean
36 | 	rm -f $(MAIN).ps $(MAIN).pdf
37 | 	rm -rf TST
38 | 
39 | 
40 | all.tar:
41 | 	tar cvfh all.tar $(MAIN).tex $(TEXPARTS) $(BIBPARTS) \
42 | 	$(FIGPARTS) $(STYPARTS) $(MISC)
43 | 
44 | dt: spotless
45 | 	dotest
46 | 
47 | uu: all.tar
48 | 	gzip all.tar
49 | 	uuencode all.tar.gz all.tar.gz > uu
50 | 
51 | bundle.all: $(TEXPARTS)
52 | 	bundle README
$(TEXPARTS) $(BIBPARTS) makefile > bundle.all 53 | -------------------------------------------------------------------------------- /subm/bibspacing.sty: -------------------------------------------------------------------------------- 1 | \newdimen\bibindent 2 | \setlength\bibindent{1.5em} 3 | \newdimen\bibspacing 4 | \setlength\bibspacing\z@ 5 | \renewenvironment{thebibliography}[1]{% 6 | \section*{\refname 7 | \@mkboth{\MakeUppercase\refname}{\MakeUppercase\refname}}% 8 | \list{\@biblabel{\@arabic\c@enumiv}}% 9 | {\settowidth\labelwidth{\@biblabel{#1}}% 10 | \leftmargin\labelwidth 11 | \advance\leftmargin\labelsep 12 | \itemsep\z@skip % should this be commented out? 13 | \parsep\z@skip % should this be commented out? 14 | \@openbib@code 15 | \usecounter{enumiv}% 16 | \let\p@enumiv\@empty 17 | \renewcommand\theenumiv{\@arabic\c@enumiv}}% 18 | \sloppy\clubpenalty4000\widowpenalty4000% 19 | \sfcode`\.\@m} 20 | {\def\@noitemerr 21 | {\@latex@warning{Empty `thebibliography' environment}}% 22 | \endlist} -------------------------------------------------------------------------------- /subm/img/fail-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-0.png -------------------------------------------------------------------------------- /subm/img/fail-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-1.png -------------------------------------------------------------------------------- /subm/img/fail-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-2.png -------------------------------------------------------------------------------- /subm/img/fail-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-3.png -------------------------------------------------------------------------------- /subm/img/fail-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-4.png -------------------------------------------------------------------------------- /subm/img/fail-5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-5.png -------------------------------------------------------------------------------- /subm/img/fail-6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-6.png -------------------------------------------------------------------------------- /subm/img/fail-60.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-60.png -------------------------------------------------------------------------------- 
/subm/img/fail-63.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-63.png -------------------------------------------------------------------------------- /subm/img/fail-7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-7.png -------------------------------------------------------------------------------- /subm/img/fail-8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-8.png -------------------------------------------------------------------------------- /subm/img/fail-9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-9.png -------------------------------------------------------------------------------- /subm/img/fail-right-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-right-1.png -------------------------------------------------------------------------------- /subm/img/fail-right-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/img/fail-right-2.png -------------------------------------------------------------------------------- /subm/paper.bib: -------------------------------------------------------------------------------- 1 | @article{wildes, 2 | author={Wildes, R.P.}, 3 | journal={Proceedings of the IEEE}, 4 | title={Iris recognition: an emerging biometric technology}, 5 | year={1997}, 6 | volume={85}, 7 | number={9}, 8 | pages={1348-1363}, 9 | keywords={biometrics (access control);computer vision;eye;object recognition;pattern recognition;biometric technology;iris recognition;machine vision;noninvasive biometric assessment;object recognition;pattern recognition;personal identification;Biometrics;Blood vessels;Face detection;Face recognition;Fingerprint recognition;Humans;Iris recognition;Machine vision;Shape;Waveguide discontinuities}, 10 | doi={10.1109/5.628669}, 11 | ISSN={0018-9219}, 12 | } 13 | @article{daugman, 14 | title={How iris recognition works}, 15 | author={Daugman, John}, 16 | journal={Circuits and Systems for Video Technology, IEEE Transactions on}, 17 | volume={14}, 18 | number={1}, 19 | pages={21--30}, 20 | year={2004}, 21 | publisher={IEEE} 22 | } 23 | @INPROCEEDINGS{ma, 24 | author={Li Ma and Yunhong Wang and Tieniu Tan}, 25 | booktitle={Pattern Recognition, 2002. Proceedings. 
16th International Conference on}, 26 | title={Iris recognition using circular symmetric filters}, 27 | year={2002}, 28 | volume={2}, 29 | pages={414-417 vol.2}, 30 | keywords={biometrics (access control);feature extraction;filtering theory;image classification;image enhancement;image matching;circular symmetric filters;classifier design;feature extraction;image preprocessing;iris recognition;local iris characteristics;nearest feature line;personal identification;Biomedical measurements;Biometrics;Feature extraction;Filters;Fingerprint recognition;Iris recognition;Laboratories;Lighting;Pattern recognition;Spectrogram}, 31 | doi={10.1109/ICPR.2002.1048327}, 32 | ISSN={1051-4651},} 33 | 34 | @ARTICLE{daugman2, 35 | author={Daugman, J.}, 36 | journal={Systems, Man, and Cybernetics, Part B: Cybernetics, IEEE Transactions on}, 37 | title={New Methods in Iris Recognition}, 38 | year={2007}, 39 | volume={37}, 40 | number={5}, 41 | pages={1167-1175}, 42 | keywords={Fourier analysis;biometrics (access control);geometry;image recognition;statistical analysis;Fourier-based methods;flexible embedded coordinate systems;iris recognition;iris trigonometry;off-axis gaze;orthographic perspective;projective geometry;receiver operating characteristic curves;score normalizations;statistical inference methods;Active contours;Character generation;Data analysis;Eyelashes;Geometry;Image databases;Iris recognition;Solid modeling;Spatial databases;Waveguide discontinuities;Active contours;Gabor wavelets;biometrics;gaze correction;iris recognition;score normalization;texture;Algorithms;Artificial Intelligence;Biometry;Cluster Analysis;Computer Simulation;Forecasting;Humans;Image Enhancement;Image Interpretation, Computer-Assisted;Iris;Models, Biological;Models, Statistical;Pattern Recognition, Automated;Reproducibility of Results;Sensitivity and Specificity;Subtraction Technique}, 43 | doi={10.1109/TSMCB.2007.903540}, 44 | ISSN={1083-4419},} 45 | 46 | @INPROCEEDINGS{malik, 47 | author={Malik, J. and Belongie, S. and Shi, J. and Leung, T.}, 48 | booktitle={Computer Vision, 1999. The Proceedings of the Seventh IEEE International Conference on}, 49 | title={Textons, contours and regions: cue integration in image segmentation}, 50 | year={1999}, 51 | volume={2}, 52 | pages={918-925 vol.2}, 53 | keywords={computational geometry;graph theory;image segmentation;image texture;Delaunay neighbors;Gaussian derivative filters;K-means approach;Voronoi diagrams;coherent brightness;contour orientation energy;cue combination;cue integration;discrete techniques;disjoint regions;gating operator;gray-level images;image partitioning algorithm;image segmentation;local histograms;natural images;normalized cuts;oriented linear filter outputs;pixel mapping;point set;putative elementary units;spectral graph theoretic framework;statistical test;texton channels;texton densities;texton frequencies;texture boundaries;texture perception;textured regions;untextured regions;Computer science;Ear;Humans;Image analysis;Image recognition;Image segmentation;Nonlinear filters;Peak to average power ratio;Pixel;Testing}, 54 | doi={10.1109/ICCV.1999.790346},} 55 | 56 | @article{daisy, 57 | author = "E. Tola and V. Lepetit and P. 
Fua",
58 | title = {{DAISY: An Efficient Dense Descriptor Applied to Wide Baseline Stereo}},
59 | journal = "IEEE Transactions on Pattern Analysis and Machine Intelligence",
60 | year = 2010,
61 | month = "May",
62 | pages = "815--830",
63 | volume = "32",
64 | number = "5"
65 | }
66 | 
67 | 
--------------------------------------------------------------------------------
/subm/paper.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sharadmv/iris-recognition/b4725c795f7b444d40c19d0a555818c187e5f80c/subm/paper.pdf
--------------------------------------------------------------------------------
/subm/paper.tex:
--------------------------------------------------------------------------------
\documentclass[12pt]{article}
\usepackage{fancyref}
\usepackage{hyperref}
\usepackage{bibspacing}
\usepackage{graphicx}
\usepackage{caption}
\usepackage{subcaption}
\title{Iris Recognition}
\author{
Richard Hwang
\and
Sharad Vikram
\and
Andy Zeng
}
\date{\today}

\begin{document}
\maketitle

\begin{abstract}
Iris recognition is a form of biometric identification performed with computer vision. The iris is an internal organ whose texture is randomly determined during embryonic gestation. It is flat, and its texture is relatively stable because the iris is protected from external harm. All of these properties make it a very viable form of biometric recognition. We use an ellipse-fitting technique for iris segmentation, rectify the irises to rectangular coordinates, and feed the irises into a feature extractor. Using features such as HOG and DAISY with a random forest classifier, we obtain 0\% error on the MMU dataset.

\end{abstract}

\newpage

\section{Introduction}
\label{sec:intro}
The iris is a structure in the eye that is responsible for controlling the size of the pupil; it is also what gives the eye its ``color.'' Its texture is unique to a given person and does not change over time \cite{wildes}. Therefore, it is possible to identify individuals based solely on images of their irises.

Iris recognition can be divided into several stages. Given an image of an eye, we first perform iris localization, described in~\fref{sec:local}. This gives us the coordinates and dimensions of the iris in the image. The next step is iris segmentation, described in~\fref{sec:segment}, which extracts the iris from the image and unwraps it into a rectangle. This rectangular image is used in the final stage, iris recognition, described in~\fref{sec:recog}.

\section{Related Work}
\label{sec:related}
State-of-the-art iris segmentation techniques currently use Fourier methods to account for eyelid occlusions, along with statistical methods to account for eyelashes \cite{daugman2}.
For matching, the Hamming distance between 2-D Gabor wavelet features is used to uniquely identify irises \cite{daugman, daugman2}. Other methods extract features using circular symmetric filters \cite{ma}.

\section{Dataset}
We chose to work with the Multimedia University (MMU) iris database, which contains a total of 450 images: 5 images per iris and 2 irises per subject. All images were taken using the LG IrisAccess 2200 at a range of 7-25 centimeters. We chose this particular dataset over the others we found online for the following reasons:
\begin{itemize}
\item[1.] It was free.
\item[2.] Due to privacy issues, most iris datasets require lengthy registration processes, official paperwork, and administrative contacts. We had no trouble acquiring this dataset within a few days.
\item[3.] Most datasets offer 3 or fewer images per iris. This dataset provides 5 images per iris, giving our machine learning algorithms more examples to train and test on.
\end{itemize}

The largest drawback of this dataset was the low resolution of its iris images. Our localization algorithms, run over the MMU dataset, return iris radii of approximately 30 pixels, whereas state-of-the-art equipment captures iris radii from 80 up to 130+ pixels. This undoubtedly had some effect on the results obtained from the particular feature extraction methods used in this project.

\section{Localization}
\label{sec:local}
We broke localization into two stages: pupil localization and iris localization.

The visual consistency of pupil color and contour allows for reliable methods of computing the center of the iris. Iris recognition papers explore a variety of pupil localization algorithms; some methods trade accuracy for speed, and vice versa. Since running time was not a concern for our application, we approached this problem with circular Hough transforms rather than convolution. Taking advantage of the pupil's low intensity, we can filter out everything but the darkest portions of the original grayscale image (pupil, eyelashes, eyebrows, moles), generating a binary image based on a low pixel intensity threshold. After binary conversion, the blob representing the pupil is typically much larger than the detected noise (eyelashes, eyebrows, moles, etc.), so the noise can be filtered out of the binary image by removing all blobs whose pixel area falls below some reasonable value. Then, applying a circular Hough transform across the binary image with a reasonable estimate of the pupil radius range, we obtain the circle with the largest accumulator array peak over the isolated binary pupil blob, giving us a very good estimate of the pupil radius and location.


Following a slight Gaussian blur, we applied Canny edge detection to the original grayscale image to pick up the edges between the iris and the sclera. We then projected rays outward from the center of the pupil in the directions 0 degrees to -45 degrees and -135 degrees to -180 degrees (because the edges of the iris are more likely to be located in those regions). The first edge each ray hits beyond the pupil radius was saved as a point of interest. The points of interest were collected, and the best-fit ellipse was computed mathematically. The parameters of a simple ellipse (ignoring orientation) follow from the equation $\frac{(x-x_1)^2}{a^2} + \frac{(y-y_1)^2}{b^2} = 1$. Since the center of the ellipse is fixed, we defined our objective as a function that takes the two parameters $a$ and $b$ (width and height, respectively) and returns the sum of squared distances between the points of interest and the projected ellipse computed from those values. This gives us a function whose value is smaller the more closely a particular ellipse fits the set of points of interest. We can then compute the $a$ and $b$ values at a local minimum of this function, giving us the best-fit ellipse across the set of points. Because the ratio of the pupil radius to the iris radius is typically 0.4 to 0.8, we penalize points of interest within this optimization based on how far they fall outside this range. Across the entirety of the MMU dataset, this method of iris localization worked very well.
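A sketch of this optimization (illustrative Python rather than the repo's MATLAB code; it assumes the points of interest arrive as an $N \times 2$ array of $(x, y)$ coordinates and, as a simplification, applies the 0.4--0.8 ratio penalty to the candidate axes rather than to individual points):

\begin{verbatim}
import numpy as np
from scipy.optimize import fmin

def fit_iris_ellipse(points, center, pupil_radius):
    # Express each point of interest in polar coordinates about the
    # (fixed) pupil center.
    dx = points[:, 0] - center[0]
    dy = points[:, 1] - center[1]
    theta = np.arctan2(dy, dx)
    rho = np.hypot(dx, dy)

    def cost(ab):
        a, b = ab
        # Radius of the candidate ellipse along each point's ray:
        # r(t) = a*b / sqrt((b*cos(t))^2 + (a*sin(t))^2)
        r_ellipse = a * b / np.sqrt((b * np.cos(theta)) ** 2 +
                                    (a * np.sin(theta)) ** 2)
        residual = np.sum((rho - r_ellipse) ** 2)
        # Soft penalty keeping the pupil/iris radius ratio in 0.4-0.8.
        ratio = pupil_radius / max(a, b)
        penalty = 1000.0 * (max(0.0, 0.4 - ratio) + max(0.0, ratio - 0.8))
        return residual + penalty

    # Nelder-Mead search, starting from a circle twice the pupil radius.
    return fmin(cost, [2.0 * pupil_radius, 2.0 * pupil_radius], disp=False)
\end{verbatim}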
\section{Segmentation}
\label{sec:segment}
Taking the computed ellipse of the iris, we transformed the image so that the ellipse becomes a circle (a quick hack to remove subtle camera distortions), then converted the iris region from Cartesian to polar coordinates, isolating the iris from the rest of the image. Sorting the pixel intensities within the iris image and discarding the extremes also filters out additional noise caused by skin and eyelashes. This gives us a clean rectangular image of the iris to work with for feature extraction and comparison.
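As a sketch of this unwrapping step (illustrative Python; our actual implementation follows \texttt{matlab/ImToPolar.m}, and nearest-neighbor sampling is used here for brevity):

\begin{verbatim}
import numpy as np

def unwrap_iris(image, center, r_pupil, r_iris, M=64, N=360):
    # Sample the annulus between the pupil and iris boundaries on an
    # M x N (radius x angle) grid, yielding a rectangular iris image.
    radii = np.linspace(r_pupil, r_iris, M)
    thetas = np.linspace(0, 2 * np.pi, N, endpoint=False)
    r, t = np.meshgrid(radii, thetas, indexing='ij')
    rows = np.clip(np.round(center[0] + r * np.sin(t)).astype(int),
                   0, image.shape[0] - 1)
    cols = np.clip(np.round(center[1] + r * np.cos(t)).astype(int),
                   0, image.shape[1] - 1)
    return image[rows, cols]
\end{verbatim}

Here center is the (row, column) pupil center, and the output rows run from the pupil boundary outward.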
\section{Preliminary results}
\label{sec:preliminary}

Using a basic set of features, we ran some preliminary tests on the dataset with a 1-NN classifier. The results are shown below.\\

\begin{tabular}{| c | c |}
\hline
Features & Error Rate\\
\hline
Raw pixels & 0.767 \\
\hline
5x5 Blocks of Average Pixel Intensities & 0.869 \\
\hline
HOG & 0.119 \\
\hline
\end{tabular}\\

No rejection policy was explored for these preliminary tests.
\section{Hamming distance}
\label{sec:hamming}

Another approach we used was inspired by John Daugman's influential iris recognition paper, ``How Iris Recognition Works.'' \cite{daugman} We created an {\tt IrisCode} for an iris by calculating the gradient of the iris image and, for each pixel, mapping the gradient direction into two bits representing which quadrant it lies in. We generated an {\tt IrisCode} for each subject using their first image, and this served as the subject's unique identifier.

To classify whether a given iris matched a known iris, we calculated the Hamming distance, the number of differing bits, between their IrisCodes. If the distance was below a threshold, we declared the irises a match. To determine an appropriate threshold, we trained with two iris images per subject. We created a utility function $f(i,j) = \{tp: 1, tn: 1, fp: -5, fn: -1\}$ and found the threshold that maximized this utility function applied to all pairs of irises.

Once we found a threshold, we tested on the remaining two iris images per subject. Using this method, we attained $precision=1.0$ and $recall=0.848$ for an $F_1=0.918$.

Importantly, we had no false positives, which would constitute a security breach. However, recall can definitely be improved. Upon inspecting the false negatives, we see that most of them had skin and eyelash artifacts from segmentation. We believe that removing these sources of noise would yield a large improvement in performance.
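The scheme is compact enough to sketch (illustrative Python; the repo's MATLAB counterparts appear to be \texttt{generate\_code.m} and \texttt{hamming\_distance.m}). Each gradient direction is quantized into one of four quadrants, i.e. two bits per pixel:

\begin{verbatim}
import numpy as np

def iris_code(iris):
    # Gradient direction at every pixel of the rectangular iris image.
    gy, gx = np.gradient(iris.astype(float))
    angle = np.mod(np.arctan2(gy, gx), 2 * np.pi)
    quadrant = (angle // (np.pi / 2)).astype(int)  # 0..3
    # Two bits per pixel: the high and low bit of the quadrant index.
    return np.dstack((quadrant >> 1, quadrant & 1)).ravel()

def hamming_distance(code_a, code_b):
    # Number of differing bits between two IrisCodes.
    return np.sum(code_a != code_b)
\end{verbatim}

Matching then reduces to comparing this count against the learned threshold.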
\section{Machine Learning-based Recognition}
\label{sec:recog}
We were able to successfully localize and segment all of the iris images in the MMU dataset. These rectified images were then used as the source of features for a machine learning classifier.

We experimented with several sets of features:
\begin{itemize}
\item[1.] Raw pixels (brightness values)

We took the pixels of the image and concatenated them into one long feature vector of length 7752.
\item[2.] Textons \cite{malik}

We used 25 5x5 texton cluster centers as a basis for generating histogram features.

\item[3.] PHOG

We computed several histograms of oriented gradients (window sizes 1, 2, 3, 4) and concatenated them into a feature vector.

\item[4.] DAISY \cite{daisy}

We computed DAISY features over the image and used them as the feature vector.

\end{itemize}

We split the dataset into 300 training images and 150 testing images. The results are shown below.

\begin{tabular}{| c | c | c |}
\hline
Classifier & Features & Error \\ \hline
SVM & Raw pixels & 0.08 \\ \hline
SVM & Raw pixels + textons & 0.08 \\ \hline
SVM & PHOG & 0.0267 \\ \hline
Random Forest (500 trees, entropy) & DAISY & 0.00 \\ \hline
\end{tabular}

\begin{figure}
\begin{tabular} { c c c }
\includegraphics[width=0.3\textwidth]{img/fail-0.png} &
\includegraphics[width=0.3\textwidth]{img/fail-1.png} &
\includegraphics[width=0.3\textwidth]{img/fail-2.png} \\
\includegraphics[width=0.3\textwidth]{img/fail-3.png} &
\includegraphics[width=0.3\textwidth]{img/fail-4.png} &
\includegraphics[width=0.3\textwidth]{img/fail-5.png} \\
\includegraphics[width=0.3\textwidth]{img/fail-6.png} &
\includegraphics[width=0.3\textwidth]{img/fail-7.png} &
\includegraphics[width=0.3\textwidth]{img/fail-8.png} \\
\includegraphics[width=0.3\textwidth]{img/fail-9.png} &
&
\\
\end{tabular}

\caption{Error cases for the raw pixel SVM}
\label{fig:fail}
\end{figure}


The failure cases for the SVM are presented in \fref{fig:fail}.

\section{Conclusion}
\label{sec:conclusion}

Our machine learning methods brought our error on the dataset down to 0\%. These results are comparable to current state-of-the-art techniques on the dataset and are very encouraging. The variety of features we experimented with shows that general computer vision techniques are effective in iris recognition, whereas most related work uses new, domain-specific techniques.

\section{Future Work}
\label{sec:future}

The dataset we worked with was convenient in that its images had good lighting and little noise. However, it was also small. To further benchmark our methods, we could test them on larger, noisier datasets and see whether the computer vision techniques we used hold up on tougher data. Other areas to explore include more advanced features, such as the Fourier features discussed earlier, and incorporating Hamming distance techniques into the machine learning pipeline.

\bibliographystyle{abbrv}
\bibliography{paper}

\end{document}
--------------------------------------------------------------------------------